support OTA payload.bin

- unpack: supported
- pack & others: not available
pull/94/head
cfig 3 years ago
parent cc2399df0b
commit 1dd865adf4
No known key found for this signature in database
GPG Key ID: B104C307F0FDABB7

@@ -65,8 +65,9 @@ Well done you did it! The last step is to star this repo :smile
| boot images | boot.img, vendor_boot.img | all | |
| recovery images | recovery.img, recovery-two-step.img | all | |
| vbmeta images | vbmeta.img, vbmeta_system.img etc. | all | |
| dtbo images | dtbo.img | linux & mac | |
| sparse images | system.img, vendor.img, product.img etc. | linux & mac | need **hacking mode**\* |
| OTA payload | payload.bin | linux & mac | |
Please note that the boot.img MUST follows AOSP verified boot flow, either [Boot image signature](https://source.android.com/security/verifiedboot/verified-boot#signature_format) in VBoot 1.0 or [AVB HASH footer](https://android.googlesource.com/platform/external/avb/+/master/README.md#The-VBMeta-struct) (a.k.a. AVB) in VBoot 2.0.
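For orientation, payload.bin opens with a fixed big-endian header, which is what the PayloadHeader class added in this commit parses: a 4-byte magic "CrAU", a u64 major version, a u64 manifest length, and, for Brillo payloads (major version 2), a u32 metadata-signature length. A minimal self-contained sketch of that check, using only DataInputStream and no repo helpers:

import java.io.DataInputStream
import java.io.FileInputStream

fun peekPayloadHeader(fileName: String) {
    DataInputStream(FileInputStream(fileName)).use { dis ->
        val magic = ByteArray(4).also { dis.readFully(it) }
        check(String(magic) == "CrAU") { "not a payload.bin" }
        val version = dis.readLong()     // 1 = ChromeOS, 2 = Brillo
        val manifestLen = dis.readLong()
        val metaSigLen = if (version == 2L) dis.readInt() else 0
        println("version=$version, manifestLen=$manifestLen, metaSigLen=$metaSigLen")
    }
}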

@@ -34,8 +34,8 @@ dependencies {
implementation("org.jetbrains.kotlin:kotlin-reflect")
implementation("cc.cfig:io:0.2")
implementation("org.slf4j:slf4j-simple:1.7.32")
implementation("org.slf4j:slf4j-api:1.7.32")
implementation("org.slf4j:slf4j-simple:1.7.36")
implementation("org.slf4j:slf4j-api:1.7.36")
implementation("com.fasterxml.jackson.core:jackson-annotations:2.13.1")
implementation("com.fasterxml.jackson.core:jackson-databind:2.13.1")
implementation("com.google.guava:guava:31.0.1-jre")
@@ -46,6 +46,7 @@ dependencies {
implementation("junit:junit:4.13.2")
implementation("org.bouncycastle:bcprov-jdk15on:1.69")
implementation("de.vandermeer:asciitable:0.3.2")
implementation("com.google.protobuf:protobuf-java:3.19.4")
implementation(project(":helper"))
testImplementation("org.jetbrains.kotlin:kotlin-test")

@@ -73,7 +73,7 @@ class Avb {
}
// image + padding
val imgPaddingNeeded = Helper.round_to_multiple(newImageSize, BLOCK_SIZE) - newImageSize
val imgPaddingNeeded = Helper.round_to_multiple(newImageSize.toInt(), BLOCK_SIZE) - newImageSize
// + vbmeta + padding
val vbmetaBlob = newAvbInfo.encode()

@@ -41,7 +41,7 @@ data class AuthBlob(
log.debug("calc hash: NONE")
byteArrayOf()
} else {
MessageDigest.getInstance(Helper.pyAlg2java(alg.hash_name)).apply {
MessageDigest.getInstance(CryptoHelper.Hasher.pyAlg2java(alg.hash_name)).apply {
update(header_data_blob)
update(aux_data_blob)
}.digest().apply {

@@ -17,6 +17,7 @@ package avb.desc
import avb.blob.Header
import cfig.helper.Helper
import cc.cfig.io.Struct
import cfig.helper.CryptoHelper.Hasher
import org.apache.commons.codec.binary.Hex
import org.slf4j.LoggerFactory
import java.io.File
@@ -88,7 +89,7 @@ class HashDescriptor(var flags: Int = 0,
val ret: Array<Any> = arrayOf(false, "file not found")
for (item in image_files) {
if (File(item).exists()) {
val hasher = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm))
val hasher = MessageDigest.getInstance(Hasher.pyAlg2java(hash_algorithm))
hasher.update(this.salt)
FileInputStream(item).use { fis ->
val data = ByteArray(this.image_size.toInt())
@@ -112,7 +113,7 @@ class HashDescriptor(var flags: Int = 0,
//salt
if (this.salt.isEmpty()) {
//If salt is not explicitly specified, choose a hash that's the same size as the hash size
val expectedDigestSize = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm)).digest().size
val expectedDigestSize = MessageDigest.getInstance(Hasher.pyAlg2java(hash_algorithm)).digest().size
FileInputStream(File("/dev/urandom")).use {
val randomSalt = ByteArray(expectedDigestSize)
it.read(randomSalt)
@@ -136,7 +137,7 @@ class HashDescriptor(var flags: Int = 0,
if (!use_persistent_digest) {
//hash digest
val newDigest = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm)).apply {
val newDigest = MessageDigest.getInstance(Hasher.pyAlg2java(hash_algorithm)).apply {
update(salt)
update(File(image_file).readBytes())
}.digest()

@@ -143,7 +143,7 @@ class HashTreeDescriptor(
private fun calcStreamHashSize(inStreamSize: Long, inBlockSize: Int): Long {
val blockCount = (inStreamSize + inBlockSize - 1) / inBlockSize
return Helper.round_to_multiple(blockCount * calcSingleHashSize(true), inBlockSize)
return Helper.round_to_multiple(blockCount * calcSingleHashSize(true), inBlockSize.toLong())
}
fun hashStream(
@@ -266,7 +266,7 @@ class HashTreeDescriptor(
break
}
//digest size in page of blockSize
val hashSize = Helper.round_to_multiple(blockCount * digestSize, blockSize)
val hashSize = Helper.round_to_multiple(blockCount * digestSize, blockSize.toLong())
tree.add(0, MerkleTree(levelDataSize, blockCount, hashSize))
levelDataSize = hashSize
levelNo++

@@ -99,7 +99,7 @@ open class BootHeaderV2(
}
private fun get_recovery_dtbo_offset(): Long {
return Helper.round_to_multiple(this.headerSize.toLong(), pageSize) +
return Helper.round_to_multiple(this.headerSize.toLong(), pageSize.toLong()) +
Helper.round_to_multiple(this.kernelLength, pageSize) +
Helper.round_to_multiple(this.ramdiskLength, pageSize) +
Helper.round_to_multiple(this.secondBootloaderLength, pageSize)

@@ -173,8 +173,8 @@ data class VendorBoot(
ret.ramdisk.size = header.vndRamdiskTotalSize
ret.ramdisk.loadAddr = header.ramdiskLoadAddr
ret.ramdisk.position = Helper.round_to_multiple(
VendorBootHeader.VENDOR_BOOT_IMAGE_HEADER_V3_SIZE.toLong(), header.pageSize
)
VendorBootHeader.VENDOR_BOOT_IMAGE_HEADER_V3_SIZE, header.pageSize
).toLong()
//dtb
ret.dtb.file = workDir + "dtb"
ret.dtb.size = header.dtbSize

@@ -0,0 +1,65 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import cfig.helper.ZipHelper.Companion.getEntryOffset
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.File
// tuple(name, offset, size) of a zip entry
class BrilloProp(
var name: String,
var offset: Long,
var size: Long
) {
constructor(zf: ZipFile, entryName: String) : this("", 0, 0) {
val entry = zf.getEntry(entryName)
name = File(entryName).name
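// data offset of the entry inside the zip archive (past the local file header), as computed by ZipHelper.getEntryOffset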
offset = entry.getEntryOffset()
size = entry.size
log.debug("extra size = " + entry.localFileDataExtra.size)
log.debug("file name len = " + entry.name.length)
}
companion object {
private val log = LoggerFactory.getLogger(BrilloProp::class.java)
}
override fun toString(): String {
return if (offset == 0L && size == 0L) {
name + " ".repeat(15)
} else {
"$name:$offset:$size"
}
}
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as BrilloProp
if (name != other.name) return false
if (offset != other.offset) return false
if (size != other.size) return false
return true
}
override fun hashCode(): Int {
var result = name.hashCode()
result = 31 * result + offset.hashCode()
result = 31 * result + size.hashCode()
return result
}
}

@@ -0,0 +1,168 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import cc.cfig.io.Struct
import cfig.helper.ZipHelper
import cfig.helper.ZipHelper.Companion.getEntryOffset
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.File
import java.lang.IllegalArgumentException
import java.util.*
open class BrilloPropString {
open val name: String = ""
open val required: MutableList<String> = mutableListOf()
open val optional: MutableList<String> = mutableListOf()
companion object {
private val log = LoggerFactory.getLogger(BrilloPropString::class.qualifiedName)
const val metaDataName = "META-INF/com/android/metadata"
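// render Properties as sorted "key=value\n" lines and concatenate them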
fun Properties.toBrilloString(): String {
val metaValueList = this.map { "${it.key}=${it.value}\n" }.sorted()
return metaValueList.reduce { acc, s -> "$acc$s" }
}
fun computeAllPropertyStrings(fileName: String, inPropertyStrings: List<BrilloPropString>): Properties {
return Properties().let { metadata ->
inPropertyStrings.forEach {
metadata[it.name] = it.preCompute(fileName)
}
metadata
}
}
fun finalizeAllPropertyFiles(
fileName: String,
inPropertyStrings: List<BrilloPropString>,
preComputed: Properties
): Properties {
val metadata = Properties()
val zf = ZipFile(fileName)
inPropertyStrings.forEach {
metadata[it.name] = it.postCompute(fileName, (preComputed[it.name] as String).length)
}
zf.close()
return metadata
}
fun rmMetaData(fileName: String) {
ZipFile(fileName).use { zf ->
val metadataEntry = zf.getEntry(metaDataName)
if (metadataEntry != null) {
log.info("$metaDataName exists, needs to be erased")
ZipHelper.zipDelete(File(fileName), metaDataName)
} else {
log.info("$metaDataName doesn't exist")
}
}
}
}
/*
pre-compute: build the property string with a mimicked "metadata" placeholder entry
*/
fun preCompute(fileName: String): String {
return this.fromZipFile(fileName, reserveSpace = true)
}
/*
finalize: return the string padded with spaces up to the reserved length
*/
fun postCompute(fileName: String, reservedLen: Int): String {
val result = fromZipFile(fileName, reserveSpace = false)
if (result.length > reservedLen) {
throw IllegalArgumentException("Insufficient reserved space: reserved=$reservedLen, actual=${result.length}")
}
return result + " ".repeat(reservedLen - result.length)
}
fun verify(fileName: String, expected: String) {
log.info("verifying $fileName:${this.name} ...")
val actual = fromZipFile(fileName, reserveSpace = false)
if (actual != expected.trim()) {
throw RuntimeException("Mismatching streaming metadata: [$actual] vs [$expected]")
} else {
log.info("Verified $fileName:${this.name} against [$expected]")
}
}
private fun fromZipFile(fileName: String, reserveSpace: Boolean = false): String {
ZipFile(fileName).use { zf ->
val token: MutableList<BrilloProp> = computePrivateProps(fileName)
this.required.forEach {
token.add(BrilloProp(zf, it))
}
this.optional.filter { zf.getEntry(it) != null }.forEach {
token.add(BrilloProp(zf, it))
}
if (reserveSpace) {
token.add(BrilloProp("metadata", 0L, 0L))
} else {
log.info("$metaDataName is " + BrilloProp(zf, metaDataName).toString())
token.add(BrilloProp(zf, metaDataName))
}
val ret = token.map { it.toString() }.reduce { acc, s -> "$acc,$s" }
log.info("fromZipFile($fileName) = [$ret]")
return ret
}
}
open fun computePrivateProps(fileName: String): MutableList<BrilloProp> {
return mutableListOf()
}
}
open class StreamingBrilloPropString : BrilloPropString() {
override val name: String = "ota-streaming-property-files"
override val required: MutableList<String> = mutableListOf("payload.bin", "payload_properties.txt")
//care_map is available only if dm-verity is enabled
//compatibility.zip is available only if target supports Treble
override val optional: MutableList<String> = mutableListOf("care_map.pb", "care_map.txt", "compatibility.zip")
}
class NonAbBrilloPropString : BrilloPropString() {
override val name: String = "ota-property-files"
}
/*
AbBrilloPropString will replace StreamingBrilloPropString after the Android P timeframe
*/
@OptIn(ExperimentalUnsignedTypes::class)
class AbBrilloPropString : StreamingBrilloPropString() {
override val name: String = "ota-property-files"
override fun computePrivateProps(fileName: String): MutableList<BrilloProp> {
ZipFile(fileName).use { zf ->
val pb = zf.getEntry("payload.bin")
val headerFormat = Struct("!IQQL")
val header = headerFormat.unpack(zf.getInputStream(pb))
val magic = header[0] as UInt
val manifestSize = header[2] as ULong
val metaSigSize = header[3] as UInt
if (0x43724155U != magic) {//'CrAU'
throw IllegalArgumentException("Invalid magic 0x" + magic.toString(16))
}
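// payload_metadata.bin spans header + manifest + metadata signature blob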
val metaTotal = headerFormat.calcSize().toULong() + manifestSize + metaSigSize
if (metaTotal >= pb.size.toUInt()) {
throw IllegalArgumentException("metadata total size >= payload size")
}
return mutableListOf(BrilloProp("payload_metadata.bin", pb.getEntryOffset(), metaTotal.toLong()))
}
}
}

@@ -0,0 +1,157 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.util.*
class Common {
companion object {
private val log = LoggerFactory.getLogger(Common::class.java)
private val PARTITIONS_WITH_CARE_MAP =
mutableListOf("system", "vendor", "product", "system_ext", "odm", "vendor_dlkm", "odm_dlkm")
val PARTITIONS_WITH_BUILD_PROP = PARTITIONS_WITH_CARE_MAP + "boot" //build a new list: .apply { add() } would also mutate PARTITIONS_WITH_CARE_MAP
private fun getEntryStream(zipFile: ZipFile, entryName: String): InputStream {
return zipFile.getInputStream(zipFile.getEntry(entryName))
}
fun Properties.getBuildProp(k: String): String? {
return (this.get("build.prop") as Properties).getProperty(k)
}
fun loadInfoDict(fileName: String): Properties {
val d = Properties()
ZipFile(fileName).use { zf ->
log.info("loading META/misc_info.txt ...")
//1: misc_info.txt
d.load(getEntryStream(zf, "META/misc_info.txt"))
if (null == d.getProperty("recovery_api_version")) {
throw IllegalArgumentException("Failed to find 'recovery_api_version'")
}
if (null == d.getProperty("fstab_version")) {
throw IllegalArgumentException("Failed to find 'fstab_version'")
}
if ("true" == d.getProperty("system_root_image")) {
throw IllegalArgumentException("BOARD_BUILD_SYSTEM_ROOT_IMAGE no longer supported")
}
val recoveryFstabPath = "BOOT/RAMDISK/system/etc/recovery.fstab"
val recoveryFstabPath2 = "VENDOR_BOOT/RAMDISK/system/etc/recovery.fstab"
val validFstab = if (zf.getEntry(recoveryFstabPath) != null) {
recoveryFstabPath
} else {
recoveryFstabPath2
}
//2: .fstab
d.put("fstab", loadRecoveryFstab(zf, validFstab, false))
//load all build.prop
PARTITIONS_WITH_BUILD_PROP.forEach { part ->
val subProps = Properties()
if (part == "boot") {
arrayOf("BOOT/RAMDISK/system/etc/ramdisk/build.prop",
"BOOT/RAMDISK/prop.default").forEach { bootBuildProp ->
zf.getEntry(bootBuildProp)?.let { entry ->
log.info("loading /$bootBuildProp ...")
subProps.load(zf.getInputStream(entry))
}
}
} else {
zf.getEntry("${part.uppercase(Locale.getDefault())}/build.prop")?.let { entry ->
log.info("loading /$part/build.prop ...")
subProps.load(zf.getInputStream(entry))
}
zf.getEntry("${part.uppercase(Locale.getDefault())}/etc/build.prop")?.let { entry ->
log.info("loading /$part/etc/build.prop ...")
subProps.load(zf.getInputStream(entry))
}
}
//3: .$part.build.prop
d.put("$part.build.prop", subProps)
}
//4: .build.prop == .system.build.prop
log.info("duplicating system.build.prop -> build.prop")
d.put("build.prop", d.get("system.build.prop"))
}
if (d.get("avb_enable") == "true") {
// 5: avb related
(d.get("build.prop") as Properties).let { buildprop ->
var fp: String?
fp = buildprop.get("ro.build.fingerprint") as String?
if (fp == null) {
fp = buildprop.get("ro.build.thumbprint") as String?
}
fp?.let {
log.warn("adding avb_salt from fingerprint ...")
d.put("avb_salt", it) //store the fingerprint itself; AOSP derives avb_salt from its sha256
}
}
}
return d
}
private fun loadRecoveryFstab(zf: ZipFile, fstabPath: String, bSystemRoot: Boolean = false): MutableMap<String, Any> {
class Partition(
var mount_point: String = "",
var fs_type: String = "",
var device: String = "",
var length: Long = 0,
var selinuxContext: String = "",
)
log.info("loading $fstabPath ...")
val ret: MutableMap<String, Any> = mutableMapOf()
val rs = getEntryStream(zf, fstabPath).readBytes().toString(StandardCharsets.UTF_8)
log.debug(rs)
rs.lines().forEach rs@{ line ->
val item = line.trim()
if (item.isEmpty() || item.startsWith("#")) {
log.debug("ignore empty/comment line")
return@rs
}
val pieces = item.split("\\s+".toRegex())
if (pieces.size != 5) {
throw IllegalArgumentException("malformed recovery.fstab line: [$item]")
}
if (pieces[4].contains("voldmanaged=")) {
log.info("Ignore entries that are managed by vold: [$item]")
return@rs
}
val lengthOption = pieces[4].split(",").filter { it.startsWith("length=") }
val length = when (lengthOption.size) {
0 -> 0
1 -> lengthOption[0].substring(7).toLong()
else -> throw IllegalArgumentException("multiple 'length=' in options")
}
val mountFlags = pieces[3]
val mountContextFlags = mountFlags.split(",").filter { it.startsWith("context=") }
val context = if (mountContextFlags.size == 1) mountContextFlags[0] else ""
ret.put(pieces[1], Partition(pieces[1], pieces[2], pieces[0], length, context))
}
if (bSystemRoot) {
if (ret.keys.contains("/system") || !ret.keys.contains("/")) {
throw IllegalArgumentException("not allowed")
}
val systemPartition = ret.get("/") as Partition
systemPartition.mount_point = "/"
log.info("adding /system for system_as_root devices")
ret.put("/system", systemPartition)
}
return ret
}
}
}

@@ -0,0 +1,138 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import cfig.helper.Helper
import cfig.helper.ZipHelper
import chromeos_update_engine.UpdateMetadata
import org.slf4j.LoggerFactory
import java.io.File
import java.io.FileInputStream
import java.util.*
import chromeos_update_engine.UpdateMetadata.InstallOperation.Type
import java.io.ByteArrayInputStream
class DeltaGenerator {
class ChunkProcessor(
val name: String,
val op: UpdateMetadata.InstallOperation,
val blockSize: Int,
val partFile: String,
) {
fun ProcessChunk() {
log.info("ChunkProcessor: $name")
FileInputStream(partFile).use { fis ->
val dst0 = op.getDstExtents(0)
fis.skip(dst0.startBlock * blockSize)
val data = ByteArray((dst0.numBlocks * blockSize).toInt())
if (data.size != fis.read(data)) {
throw RuntimeException("$name: read size != expected size")
}
val bestOp = GenerateBestFullOperation(data)
if (bestOp[0] as Boolean) {
log.info("bestType=" + bestOp[1] as Type + ", bestSize=" + bestOp[2] as Int)
} else {
throw IllegalStateException("GenerateBestFullOperation fail")
}
}
}
companion object {
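// Pick the cheapest full-operation encoding for this chunk: ZERO if the data is
// all zeroes, otherwise the smaller of raw REPLACE, REPLACE_XZ and REPLACE_BZ.
// Returns [ok, bestType, bestSize].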
fun GenerateBestFullOperation(inData: ByteArray): Array<Any> {
val ret: Array<Any> = Array(3) { 0 }
var bestType: Type = Type.REPLACE
var bestSize: Int = inData.size
//buffer MUST be valid
if (inData.isEmpty()) {
ret[0] = false
return ret
}
//zero
if (inData.all { it.toInt() == 0 }) {
bestType = Type.ZERO
log.info("raw=${inData.size}, ZERO")
ret[0] = true
ret[1] = bestType
return ret
}
//try xz
File.createTempFile("pre", "suf").let { tempFile ->
tempFile.deleteOnExit()
ZipHelper.xz(tempFile.absolutePath, ByteArrayInputStream(inData))
log.debug("raw=${inData.size}, xz=" + tempFile.length())
if (bestSize > tempFile.length()) {
bestType = Type.REPLACE_XZ
bestSize = tempFile.length().toInt()
}
}
//try bz
File.createTempFile("pre", "suf").let { tempFile ->
tempFile.deleteOnExit()
ZipHelper.bzip2(tempFile.absolutePath, ByteArrayInputStream(inData))
log.debug("raw=${inData.size}, bzip2=" + tempFile.length())
if (bestSize > tempFile.length()) {
bestType = Type.REPLACE_BZ
bestSize = tempFile.length().toInt()
}
}
ret[0] = true
ret[1] = bestType
ret[2] = bestSize
return ret
}
}
}
class FullPayloadGenerator {
fun GenerateOperations(partName: String, partFile: String) {
val config = Properties().apply {
put("full_chunk_size", (2 * 1024 * 1024).toInt())
put("block_size", (4 * 1024).toInt())
}
val fullChunkSize = config.get("full_chunk_size") as Int
val blockSize = config.get("block_size") as Int
if (fullChunkSize % blockSize != 0) {
throw IllegalArgumentException("BUG: illegal (chunk_size, block_size)=($fullChunkSize, $blockSize)")
}
val fileLen = File(partFile).length()
log.warn("fcs=$fullChunkSize, file size=$fileLen")
val partitionBlocks: Long = fileLen / blockSize
val chunkBlocks: Long = (fullChunkSize / blockSize).toLong() //typically 512
val numChunks = Helper.Companion.round_to_multiple(partitionBlocks, chunkBlocks) / chunkBlocks
log.warn("partitionBlocks=$partitionBlocks, numChunks=$numChunks")
for (i in 0 until numChunks) {
val startBlock = i * chunkBlocks
val numBlocks = minOf(chunkBlocks, partitionBlocks - i * chunkBlocks)
val dstExtent = UpdateMetadata.Extent.newBuilder()
.setStartBlock(startBlock)
.setNumBlocks(numBlocks)
.build()
val op = UpdateMetadata.InstallOperation.newBuilder()
.setType(Type.REPLACE)
.addDstExtents(dstExtent)
.build()
log.info("op<${i}> $op")
ChunkProcessor("$partName-operation-${i}/$numChunks", op, blockSize, partFile).ProcessChunk()
}
}
fun appendData() {
}
}
companion object {
val log = LoggerFactory.getLogger(DeltaGenerator::class.java.name)
}
}

@@ -0,0 +1,21 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
data class OtaOptions(
var wipe_user_data: Boolean = false,
var skip_postinstall: Boolean = false,
var include_secondary: Boolean = false,
var downgrade: Boolean = false
)

@@ -0,0 +1,299 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import cc.cfig.io.Struct
import cfig.helper.CryptoHelper.Hasher
import cfig.helper.Helper
import chromeos_update_engine.UpdateMetadata
import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.ObjectMapper
import com.google.protobuf.ByteString
import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor
import org.apache.commons.exec.PumpStreamHandler
import org.slf4j.LoggerFactory
import java.io.*
import java.nio.file.Files
import java.nio.file.Paths
import java.time.Instant
import java.time.ZoneId
import java.time.format.DateTimeFormatter
class Payload {
var fileName: String = ""
var header = PayloadHeader()
var manifest: UpdateMetadata.DeltaArchiveManifest = UpdateMetadata.DeltaArchiveManifest.newBuilder().build()
var metaSig: UpdateMetadata.Signatures = UpdateMetadata.Signatures.newBuilder().build()
var metaSize: Int = 0
var dataOffset: Long = 0L
var dataSig: UpdateMetadata.Signatures = UpdateMetadata.Signatures.newBuilder().build()
companion object {
private val log = LoggerFactory.getLogger(Payload::class.java)
val workDir = Helper.prop("payloadDir")
fun parse(inFileName: String): Payload {
val ret = Payload()
ret.fileName = inFileName
FileInputStream(inFileName).use { fis ->
ret.header = PayloadHeader(fis)
ret.metaSize = ret.header.headerSize + ret.header.manifestLen.toInt()
ret.dataOffset = (ret.metaSize + ret.header.metaSigLen).toLong()
//manifest
ret.manifest = ByteArray(ret.header.manifestLen.toInt()).let { buf ->
fis.read(buf)
UpdateMetadata.DeltaArchiveManifest.parseFrom(buf)
}
//meta sig
ret.metaSig = ByteArray(ret.header.metaSigLen).let { buf2 ->
fis.read(buf2)
UpdateMetadata.Signatures.parseFrom(buf2)
}
//data sig
if (ret.manifest.hasSignaturesOffset()) {
log.debug("payload sig offset = " + ret.manifest.signaturesOffset)
log.debug("payload sig size = " + ret.manifest.signaturesSize)
fis.skip(ret.manifest.signaturesOffset)
ret.dataSig = ByteArray(ret.manifest.signaturesSize.toInt()).let { buf ->
fis.read(buf)
UpdateMetadata.Signatures.parseFrom(buf)
}
} else {
log.warn("payload has no signatures")
}
} //end-of-fis
run {//CHECK_EQ(payload.size(), signatures_offset + manifest.signatures_size())
val calculatedSize = ret.header.headerSize + ret.header.manifestLen + ret.header.metaSigLen +
ret.manifest.signaturesOffset + ret.manifest.signaturesSize
if (File(inFileName).length() == calculatedSize) {
log.info("payload.bin size info check PASS")
} else {
throw IllegalStateException("calculated payload size doesn't match file size")
}
}
val calcMetadataHash =
Hasher.hash(inFileName, listOf(Pair(0L, ret.metaSize.toLong())), "sha-256")
log.info("calc meta hash: " + Helper.toHexString(calcMetadataHash))
val calcPayloadHash = Hasher.hash(
inFileName, listOf(
Pair(0L, ret.metaSize.toLong()),
Pair(ret.metaSize.toLong() + ret.header.metaSigLen, ret.manifest.signaturesOffset)
), "sha-256"
)
check(calcPayloadHash.size == 32)
log.info("calc payload hash: " + Helper.toHexString(calcPayloadHash))
val readPayloadSignature = UpdateMetadata.Signatures.parseFrom(
Helper.readFully(
inFileName,
ret.dataOffset + ret.manifest.signaturesOffset,
ret.manifest.signaturesSize.toInt()
)
)
log.info("Found sig count: " + readPayloadSignature.signaturesCount)
readPayloadSignature.signaturesList.forEach {
//pass
log.info(it.data.toString())
log.info("sig_data size = " + it.data.toByteArray().size)
log.info(Helper.toHexString(it.data.toByteArray()))
Files.write(Paths.get("sig_data"), it.data.toByteArray())
}
return ret
}
class PayloadVerifier {
fun getRawHashFromSignature(sig_data: ByteString, pubkey: String, sigHash: ByteArray) {
}
}
fun displaySignatureBlob(sigName: String, sig: UpdateMetadata.Signatures): String {
return StringBuilder().let { sb ->
sb.append(String.format("%s signatures: (%d entries)\n", sigName, sig.signaturesCount))
sig.signaturesList.forEach {
sb.append(String.format(" hex_data: (%d bytes)\n", it.data.size()))
sb.append(" Data: " + Helper.toHexString(it.data.toByteArray()) + "\n")
}
sb
}.toString()
}
}
fun printInfo() {
val mi = ManifestInfo(blockSize = this.manifest.blockSize,
minorVersion = this.manifest.minorVersion,
maxTimeStamp = this.manifest.maxTimestamp,
signatureOffset = this.manifest.signaturesOffset,
signatureSize = this.manifest.signaturesSize,
partialUpdate = this.manifest.hasPartialUpdate(),
partsToUpdate = this.manifest.partitionsList.map {
ManifestInfo.PartitionToUpdate(
it.partitionName, it.operationsCount,
if (it.hasRunPostinstall()) it.runPostinstall else null,
if (it.hasPostinstallPath()) it.postinstallPath else null
)
},
enableSnapshot = this.manifest.dynamicPartitionMetadata.hasSnapshotEnabled(),
dynamicGroups = this.manifest.dynamicPartitionMetadata.groupsList.map {
ManifestInfo.DynamicPartGroup(name = it.name, size = it.size, partName = it.partitionNamesList)
})
ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File("$workDir/header.json"), this.header)
log.info(" header info dumped to ${workDir}header.json")
ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File("$workDir/manifest.json"), mi)
log.info(" manifest info dumped to ${workDir}manifest.json")
val signatureFile = "${workDir}signatures.txt"
FileOutputStream(signatureFile, false).use { fos ->
fos.writer().use { fWriter ->
fWriter.write("<Metadata> signatures: offset=" + this.header.manifestLen + ", size=" + this.header.metaSigLen + "\n")
fWriter.write(Payload.displaySignatureBlob("<Metadata>", this.metaSig))
fWriter.write("<Payload> signatures: base= offset=" + manifest.signaturesOffset + ", size=" + this.header.metaSigLen + "\n")
fWriter.write((Payload.displaySignatureBlob("<Payload>", this.dataSig)))
}
}
log.info("signature info dumped to $signatureFile")
}
private fun decompress(inBytes: ByteArray, opType: UpdateMetadata.InstallOperation.Type): ByteArray {
val baosO = ByteArrayOutputStream()
val baosE = ByteArrayOutputStream()
val bais = ByteArrayInputStream(inBytes)
DefaultExecutor().let { exec ->
exec.streamHandler = PumpStreamHandler(baosO, baosE, bais)
val cmd = when (opType) {
UpdateMetadata.InstallOperation.Type.REPLACE_XZ -> CommandLine("xzcat")
UpdateMetadata.InstallOperation.Type.REPLACE_BZ -> CommandLine("bzcat")
UpdateMetadata.InstallOperation.Type.REPLACE -> return inBytes
else -> throw IllegalArgumentException(opType.toString())
}
cmd.addArgument("-")
exec.execute(cmd)
}
return baosO.toByteArray()
}
private fun unpackInternal(ras: RandomAccessFile, pu: UpdateMetadata.PartitionUpdate, logPrefix: String = "") {
log.info(String.format("[%s] extracting %13s.img (%d ops)", logPrefix, pu.partitionName, pu.operationsCount))
FileOutputStream("$workDir/${pu.partitionName}.img").use { outFile ->
val ops = pu.operationsList.toMutableList().apply {
sortBy { it.getDstExtents(0).startBlock }
}
ops.forEach { op ->
log.debug(pu.partitionName + ": " + (op.getDstExtents(0).startBlock * this.manifest.blockSize) + ", size=" + op.dataLength)
val piece = ByteArray(op.dataLength.toInt()).let {
ras.seek(this.dataOffset + op.dataOffset)
ras.read(it)
it
}
outFile.write(decompress(piece, op.type))
}
}
}
fun setUp() {
File(workDir).let {
if (it.exists()) {
log.info("Removing $workDir")
it.deleteRecursively()
}
log.info("Creating $workDir")
it.mkdirs()
}
}
fun unpack() {
RandomAccessFile(this.fileName, "r").use { ras ->
var currentNum = 1
val totalNum = this.manifest.partitionsCount
val parts = this.manifest.partitionsList.map { it.partitionName }
log.info("There are $totalNum partitions $parts")
log.info("dumping images to $workDir")
this.manifest.partitionsList.forEach { pu ->
unpackInternal(ras, pu, String.format("%2d/%d", currentNum, totalNum))
currentNum++
}
}
}
data class PayloadHeader(
var version: Long = 0,
var manifestLen: Long = 0,
var metaSigLen: Int = 0,
var headerSize: Int = 0
) {
private val magic = "CrAU"
private val FORMAT_STRING = ">4sqq" //magic, version, manifestLen
private val CHROMEOS_MAJOR_PAYLOAD_VERSION = 1L
private val BRILLO_MAJOR_PAYLOAD_VERSION = 2L
val typeOfVersion: String
get() = when (version) {
CHROMEOS_MAJOR_PAYLOAD_VERSION -> "chromeOs"
BRILLO_MAJOR_PAYLOAD_VERSION -> "brillo"
else -> throw IllegalArgumentException()
}
constructor(fis: InputStream) : this() {
val info = Struct(FORMAT_STRING).unpack(fis)
check((info[0] as String) == magic) { "${info[0]} is not payload magic" }
version = info[1] as Long
manifestLen = info[2] as Long
headerSize = Struct(FORMAT_STRING).calcSize()
if (version == BRILLO_MAJOR_PAYLOAD_VERSION) {
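// Brillo (major version 2) appends a u32 metadata-signature length to the fixed header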
headerSize += Int.SIZE_BYTES
metaSigLen = Struct(">i").unpack(fis)[0] as Int
}
}
}
@JsonInclude(JsonInclude.Include.NON_NULL)
data class ManifestInfo(
var blockSize: Int? = null,
var minorVersion: Int? = null,
var maxTimeStamp: Long = 0L,
var maxTimeReadable: String? = null,
var partialUpdate: Boolean? = null,
val signatureOffset: Long? = null,
val signatureSize: Long? = null,
var dynamicGroups: List<DynamicPartGroup> = listOf(),
var enableSnapshot: Boolean? = null,
var partsToUpdate: List<PartitionToUpdate> = listOf()
) {
init {
val ldt = Instant.ofEpochMilli(maxTimeStamp * 1000)
.atZone(ZoneId.systemDefault())
.toLocalDateTime()
maxTimeReadable = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(ldt) + " (${ZoneId.systemDefault()})"
}
@JsonInclude(JsonInclude.Include.NON_NULL)
data class PartitionToUpdate(
var name: String = "",
var ops: Int = 0,
var runPostInstall: Boolean? = null,
var postInstallPath: String? = null
)
data class DynamicPartGroup(
var name: String = "",
var size: Long = 0L,
var partName: List<String> = listOf()
)
}
}

@@ -0,0 +1,247 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import cc.cfig.io.Struct
import cfig.helper.Helper.Companion.check_call
import cfig.helper.ZipHelper.Companion.dumpEntry
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardOpenOption
import java.util.*
import kotlin.system.exitProcess
@OptIn(ExperimentalUnsignedTypes::class)
class PayloadGenerator {
private val log = LoggerFactory.getLogger(PayloadGenerator::class.java)
val workDir = "build/staging_ota"
val signedPayload = "$workDir/signed-payload.bin"
val propertiesFile = "$workDir/payload-properties.txt"
fun generate(maxTs: String, targetFile: String, payload: String) {
("brillo_update_payload generate" +
" --max_timestamp $maxTs" +
" --target_image $targetFile" +
" --payload $payload").check_call()
}
fun sign(inSigner: PayloadSigner, options: OtaOptions) {
// 1. Generate hashes of the payload and metadata files
("brillo_update_payload hash" +
" --unsigned_payload $workDir/payload.bin" +
" --signature_size " + inSigner.keySize +
" --metadata_hash_file $workDir/meta.hash" +
" --payload_hash_file $workDir/payload.hash").check_call()
// 2. Sign the hashes.
inSigner.sign("$workDir/meta.hash", "$workDir/signed-meta.hash")
inSigner.sign("$workDir/payload.hash", "$workDir/signed-payload.hash")
// 3. Insert the signatures back into the payload file.
("brillo_update_payload sign" +
" --unsigned_payload $workDir/payload.bin" +
" --payload $signedPayload" +
" --signature_size " + inSigner.keySize +
" --metadata_signature_file $workDir/signed-meta.hash" +
" --payload_signature_file $workDir/signed-payload.hash").check_call()
// 4. Dump the signed payload properties.
("brillo_update_payload properties" +
" --payload $signedPayload" +
" --properties_file $propertiesFile").check_call()
// 5.
if (options.wipe_user_data) {
FileOutputStream(propertiesFile, true).use {
it.write("POWERWASH=1\n".toByteArray())
}
}
options.include_secondary.let { includeSec ->
if (includeSec) {
FileOutputStream(propertiesFile, true).use {
it.write("SWITCH_SLOT_ON_REBOOT=0\n".toByteArray())
}
}
}
}
fun tryToDumpEntry(inputFile: ZipFile, entryItem: String, outFile: String) {
val entry = inputFile.getEntry(entryItem)
if (entry != null) {
inputFile.dumpEntry(entry.name, File(outFile))
} else {
log.info("$entryItem not found")
}
}
fun generateMine(maxTs: String, inTargetFile: String, payload: String, infoDict: Properties) {
val targetFile = ZipFile(inTargetFile)
val abPartitions =
String(targetFile.getInputStream(targetFile.getEntry("META/ab_partitions.txt")).readBytes())
.lines().filter { it.isNotBlank() }
log.info("Dumping ${abPartitions.size} images from target file ...")
abPartitions.forEach { part ->
val partEntryName = listOfNotNull(
targetFile.getEntry("IMAGES/$part.img"), targetFile.getEntry("RADIO/$part.img")
)
.let { parts ->
if (parts.size != 1) {
log.error("Found multiple images for partition $part")
exitProcess(1)
}
parts[0].name
}
//dump image
targetFile.dumpEntry(partEntryName, File("$workDir/$part.img"))
run {//unsparse image
Struct(">I").unpack(FileInputStream("$workDir/$part.img")).let { fileHeader ->
if (fileHeader[0] as UInt == 0x3aff26ed.toUInt()) {
log.debug("$part is sparse, convert to raw image")
"simg2img $workDir/$part.img $workDir/tmp.img".check_call()
File("$workDir/tmp.img").renameTo(File("$workDir/$part.img"))
}
}
}
run {//dump map file
val mapFile = targetFile.getEntry(partEntryName.replace(".img", ".map"))
if (mapFile != null) {
log.debug("$part.map found, dump it to $workDir/$part.map")
targetFile.dumpEntry(mapFile.name, File("$workDir/$part.map"))
} else {
log.debug("$part.map not found")
}
}
File("$workDir/$part.img").let { partFile ->
val partSize = partFile.length()
if (partSize % 4096 != 0L) {
log.info("Padding $workDir/$part.img ...")
Files.write(
Paths.get("$workDir/$part.img"),
ByteArray(4096 - (partSize % 4096).toInt()),
StandardOpenOption.APPEND
)
}
}
}
targetFile.dumpEntry("META/postinstall_config.txt", File("$workDir/postinstall_config.txt"))
targetFile.dumpEntry("META/dynamic_partitions_info.txt", File("$workDir/dynamic_partitions_info.txt"))
tryToDumpEntry(targetFile, "META/apex_info.pb", "$workDir/apex_info.pb")
targetFile.close()
data class DeltaGenParam(
var partitionNames: String = "",
var newImages: String = "",
var newMapFiles: String = "",
var newPostInstallConfig: String = "",
var dynamicPartitionInfo: String = "",
var apexInfoFile: String = "",
var partitionTimeStamps: String = "",
)
//partition timestamps
val pTs: MutableList<Pair<String, String>> = mutableListOf()
Common.PARTITIONS_WITH_BUILD_PROP.forEach {
val item: Pair<String, String?> = Pair(it,
when (it) {
"boot" -> {
log.info("boot:" + infoDict.get("$it.build.prop") as Properties)
(infoDict.get("$it.build.prop") as Properties).getProperty("ro.${it}image.build.date.utc")
}
else -> (infoDict.get("$it.build.prop") as Properties).getProperty("ro.${it}.build.date.utc")
})
if (item.second != null) {
pTs.add(item as Pair<String, String>)
}
}
val dp = DeltaGenParam().apply {
partitionNames = abPartitions.reduce { acc, s -> "$acc:$s" }
newImages = abPartitions.map { "$workDir/$it.img" }.reduce { acc, s -> "$acc:$s" }
newMapFiles = abPartitions
.map { if (File("$workDir/$it.map").exists()) "$workDir/$it.map" else "" }
.reduce { acc, s -> "$acc:$s" }
newPostInstallConfig = "$workDir/postinstall_config.txt"
dynamicPartitionInfo = "$workDir/dynamic_partitions_info.txt"
if (File("$workDir/apex_info.pb").exists()) {
apexInfoFile = "$workDir/apex_info.pb"
}
partitionTimeStamps = pTs.map { it.first + ":" + it.second }.reduce { acc, s -> "$s,$acc" }
}
("delta_generator" +
" --out_file=$payload" +
" --partition_names=${dp.partitionNames}" +
" --new_partitions=${dp.newImages}" +
" --new_mapfiles=${dp.newMapFiles}" +
" --major_version=2" +
" --max_timestamp=$maxTs" +
" --partition_timestamps=${dp.partitionTimeStamps}" +
" --new_postinstall_config_file=${dp.newPostInstallConfig}" +
" --dynamic_partition_info_file=${dp.dynamicPartitionInfo}" +
if (dp.apexInfoFile.isNotBlank()) " --apex_info_file=${dp.apexInfoFile}" else ""
).check_call()
}
fun signMine(inSigner: PayloadSigner, options: OtaOptions) {
//1: hash and meta of payload
("delta_generator" +
" --in_file=$workDir/payload.bin.mine" +
" --signature_size=${inSigner.keySize}" +
" --out_hash_file=$workDir/payload.hash.mine" +
" --out_metadata_hash_file=$workDir/meta.hash.mine").check_call()
//Helper.assertFileEquals("$workDir/meta.hash", "$workDir/meta.hash.mine")
//Helper.assertFileEquals("$workDir/payload.hash", "$workDir/payload.hash.mine")
//2: sign hash and meta
inSigner.sign("$workDir/meta.hash.mine", "$workDir/signed-meta.hash.mine")
inSigner.sign("$workDir/payload.hash.mine", "$workDir/signed-payload.hash.mine")
//Helper.assertFileEquals("$workDir/signed-meta.hash", "$workDir/signed-meta.hash.mine")
//Helper.assertFileEquals("$workDir/payload.hash", "$workDir/payload.hash.mine")
//3: hash, meta, payload.bin -> signed-payload.bin
("delta_generator" +
" --in_file=$workDir/payload.bin.mine" +
" --signature_size=" + inSigner.keySize +
" --payload_signature_file=$workDir/signed-payload.hash.mine" +
" --metadata_signature_file=$workDir/signed-meta.hash.mine" +
" --out_file=$signedPayload.mine").check_call()
//Helper.assertFileEquals(signedPayload, "$signedPayload.mine")
//4: payload-properties.txt
("delta_generator" +
" --in_file=$signedPayload.mine" +
" --properties_file=$propertiesFile.mine").check_call()
//Helper.assertFileEquals(propertiesFile, "$propertiesFile.mine")
// 5: payload-properties.txt appending
if (options.wipe_user_data) {
FileOutputStream(propertiesFile, true).use {
it.write("POWERWASH=1\n".toByteArray())
}
}
if (options.include_secondary) {
FileOutputStream(propertiesFile, true).use {
it.write("SWITCH_SLOT_ON_REBOOT=0\n".toByteArray())
}
}
}
}

@@ -0,0 +1,62 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cc.cfig.droid.ota
import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor
import org.slf4j.LoggerFactory
import java.io.File
class PayloadSigner {
private val log = LoggerFactory.getLogger(PayloadSigner::class.java)
var keySize = 0
private val workDir = "build/staging_ota"
val signingKey = "signing.key"
val privKey = "aosp/security/testkey.pk8"
val modulusFile = "$workDir/modulus.file"
init {
CommandLine.parse("openssl pkcs8 -in $privKey -inform DER -nocrypt -out $workDir/$signingKey").let { cmd ->
log.info(cmd.toString())
DefaultExecutor().execute(cmd)
}
CommandLine.parse("openssl rsa -inform PEM -in $workDir/$signingKey -modulus -noout -out $modulusFile").let { cmd ->
log.info(cmd.toString())
DefaultExecutor().execute(cmd)
}
val modulusString = File(modulusFile).readText()
log.info(modulusString)
val MODULUS_PREFIX = "Modulus="
if (!modulusString.startsWith(MODULUS_PREFIX)) {
throw IllegalArgumentException("Invalid modulus string")
}
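// two hex digits per byte: modulus hex length / 2 = key size in bytes (256 -> RSA-2048, 512 -> RSA-4096)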
keySize = modulusString.substring(MODULUS_PREFIX.length).length / 2
log.info("key size = $keySize bytes")
if (keySize !in listOf(256, 512)) {
throw IllegalArgumentException("Unsupported key size")
}
}
fun sign(inFile: String, outFile: String) {
CommandLine.parse("openssl pkeyutl -sign").let { cmd ->
cmd.addArguments("-inkey $workDir/$signingKey -pkeyopt digest:sha256")
cmd.addArguments("-in $inFile")
cmd.addArguments("-out $outFile")
log.info(cmd.toString())
DefaultExecutor().execute(cmd)
}
}
}

@@ -28,7 +28,7 @@ class PackableLauncher
fun main(args: Array<String>) {
val log = LoggerFactory.getLogger(PackableLauncher::class.java)
val packablePool = mutableMapOf<List<String>, KClass<IPackable>>()
listOf(DtboParser(), VBMetaParser(), BootImgParser(), SparseImgParser(), VendorBootParser()).forEach {
listOf(DtboParser(), VBMetaParser(), BootImgParser(), SparseImgParser(), VendorBootParser(), PayloadBinParser()).forEach {
@Suppress("UNCHECKED_CAST")
packablePool.put(it.capabilities(), it::class as KClass<IPackable>)
}

@@ -0,0 +1,52 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cfig.packable
import cc.cfig.droid.ota.Payload
import org.slf4j.LoggerFactory
class PayloadBinParser : IPackable {
override val loopNo: Int = 0
private val log = LoggerFactory.getLogger(PayloadBinParser::class.java)
override fun capabilities(): List<String> {
return listOf("^payload\\.bin$")
}
override fun unpack(fileName: String) {
cleanUp()
Payload.parse(fileName).let { pl ->
pl.setUp()
pl.printInfo()
pl.unpack()
}
}
override fun pack(fileName: String) {
}
override fun `@verify`(fileName: String) {
super.`@verify`(fileName)
}
override fun pull(fileName: String, deviceName: String) {
super.pull(fileName, deviceName)
}
fun clean(fileName: String) {
}
override fun flash(fileName: String, deviceName: String) {
}
}

@@ -9,3 +9,4 @@ kernelConfigFile = build/unzip_boot/kernel_configs.txt
kernelExtracter = aosp/build/tools/extract_kernel.py
mkbootimg = aosp/system/tools/mkbootimg/mkbootimg.py
dtboMaker = aosp/system/libufdt/utils/src/mkdtboimg.py
payloadDir = build/payload/

@@ -36,7 +36,7 @@ tasks {
group = GROUP_ANDROID
main = "cfig.packable.PackableLauncherKt"
classpath = files("bbootimg/build/libs/bbootimg.jar")
this.maxHeapSize = "512m"
this.maxHeapSize = "128m"
enableAssertions = true
args("unpack")
}

@@ -11,10 +11,7 @@ import org.apache.commons.exec.PumpStreamHandler
import org.bouncycastle.pkcs.PKCS10CertificationRequest
import org.bouncycastle.util.io.pem.PemReader
import org.slf4j.LoggerFactory
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.IOException
import java.io.InputStreamReader
import java.io.*
import java.math.BigInteger
import java.math.RoundingMode
import java.security.KeyFactory
@@ -36,6 +33,7 @@ class CryptoHelper {
PEM, //header + metadata + base64 der
DER, // der format
}
class KeyBox(val fmt: KeyFormat, val clazz: KClass<*>, val key: Any) {
companion object {
fun parse4(data: ByteArray): KeyBox {
@@ -231,6 +229,53 @@ class CryptoHelper {
fun sha256(inData: ByteArray): ByteArray {
return MessageDigest.getInstance("SHA-256").digest(inData)
}
//fun hash(file: String, algorithm: String): ByteArray {
// val md = MessageDigest.getInstance(algorithm)
// FileInputStream(file).use { fis ->
// val buffer = ByteArray(1024 * 1024)
// while (true) {
// val bytesRead = fis.read(buffer)
// if (bytesRead <= 0) break
// md.update(buffer, 0, bytesRead)
// }
// }
// return md.digest()
//}
fun hash(file: String, algorithm: String): ByteArray {
return hash(file, listOf(Pair(0, File(file).length())), algorithm)
}
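// hash the concatenation of several (offset, size) ranges of a file;
// Payload uses this to digest metadata and data while skipping the signature blobs.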
fun hash(file: String, coordinates: List<Pair<Long, Long>>, algorithm: String): ByteArray {
require(coordinates.isNotEmpty())
coordinates.forEach {
require(it.first >= 0 && it.second > 0)
}
return MessageDigest.getInstance(algorithm).let { md ->
coordinates.forEach { coordinate ->
FileInputStream(file).use { fis ->
fis.skip(coordinate.first)
val ibs = 1024 * 1024
val buffer = ByteArray(ibs)
var bytesRemaining = coordinate.second
while (bytesRemaining > 0) {
log.debug("Remain $bytesRemaining, reading ...")
val bytesRead = fis.read(buffer)
if (bytesRemaining > ibs) {
check(bytesRead == ibs)
md.update(buffer, 0, bytesRead)
} else {
check(bytesRead >= bytesRemaining)
md.update(buffer, 0, bytesRemaining.toInt())
}
bytesRemaining -= bytesRead
log.debug("Read $bytesRead, remain $bytesRemaining")
}
}
}
md
}.digest()
}
}
}

@@ -132,7 +132,7 @@ class Helper {
}
}
fun round_to_multiple(size: Long, page: Int): Long {
fun round_to_multiple(size: Long, page: Long): Long {
val remainder = size % page
return if (remainder == 0L) {
size
@@ -154,17 +154,6 @@ class Helper {
return 2.0.pow((num - 1).toBigInteger().bitLength().toDouble()).toLong()
}
fun pyAlg2java(alg: String): String {
return when (alg) {
"sha1" -> "sha-1"
"sha224" -> "sha-224"
"sha256" -> "sha-256"
"sha384" -> "sha-384"
"sha512" -> "sha-512"
else -> throw IllegalArgumentException("unknown algorithm: [$alg]")
}
}
fun dumpToFile(dumpFile: String, data: ByteArray) {
log.info("Dumping data to $dumpFile ...")
FileOutputStream(dumpFile, false).use { fos ->

@@ -22,6 +22,8 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream
import org.apache.commons.compress.archivers.zip.ZipFile
import org.apache.commons.compress.archivers.zip.ZipMethod
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream
import org.apache.commons.compress.compressors.gzip.GzipParameters
import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream
@@ -428,5 +430,31 @@ class ZipHelper {
}
log.info("compress(gz) done: $compressedFile")
}
}
fun isBzip2(compressedFile: String): Boolean {
return try {
FileInputStream(compressedFile).use { fis ->
BZip2CompressorInputStream(fis).use { }
}
true
} catch (e: IOException) {
false
}
}
fun bzip2(compressedFile: String, fis: InputStream) {
log.info("Compress(bzip2) ... ")
FileOutputStream(compressedFile).use { fos ->
BZip2CompressorOutputStream(fos).use { zos ->
val buffer = ByteArray(1024)
while (true) {
val bytesRead = fis.read(buffer)
if (bytesRead <= 0) break
zos.write(buffer, 0, bytesRead)
}
}
}
log.info("compress(bzip2) done: $compressedFile")
}
} // end-of-companion
}
