support OTA payload.bin
- unpack: supported
- pack & others: Not Available
parent cc2399df0b
commit 1dd865adf4
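The unpack path is exposed through the new PayloadBinParser packable. A minimal sketch of driving it directly, assuming the project classpath and a payload.bin in the current directory (the real entry point is the project's packable dispatch loop):

import cfig.packable.PayloadBinParser

fun main() {
    // parse payload.bin, dump header/manifest/signature info, then extract partition images
    PayloadBinParser().unpack("payload.bin")
}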
@@ -0,0 +1,65 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import cfig.helper.ZipHelper.Companion.getEntryOffset
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.File

// tuple(name, offset, size) of a zip entry
class BrilloProp(
    var name: String,
    var offset: Long,
    var size: Long
) {
    constructor(zf: ZipFile, entryName: String) : this("", 0, 0) {
        val entry = zf.getEntry(entryName)
        name = File(entryName).name
        offset = entry.getEntryOffset()
        size = entry.size
        log.debug("extra size = " + entry.localFileDataExtra.size)
        log.debug("file name len = " + entry.name.length)
    }

    companion object {
        private val log = LoggerFactory.getLogger(BrilloProp::class.java)
    }

    override fun toString(): String {
        return if (offset == 0L && size == 0L) {
            name + " ".repeat(15)
        } else {
            "$name:$offset:$size"
        }
    }

    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (javaClass != other?.javaClass) return false
        other as BrilloProp
        if (name != other.name) return false
        if (offset != other.offset) return false
        if (size != other.size) return false
        return true
    }

    override fun hashCode(): Int {
        var result = name.hashCode()
        result = 31 * result + offset.hashCode()
        result = 31 * result + size.hashCode()
        return result
    }
}
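BrilloProp renders each entry as name:offset:size, the token format used inside the ota-*-property-files metadata values. A minimal sketch, assuming a hypothetical OTA package ota.zip that contains payload.bin:

import cc.cfig.droid.ota.BrilloProp
import org.apache.commons.compress.archivers.zip.ZipFile

fun main() {
    ZipFile("ota.zip").use { zf ->             // hypothetical OTA package
        println(BrilloProp(zf, "payload.bin")) // prints e.g. "payload.bin:1094:123456"
    }
}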
@@ -0,0 +1,168 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import cc.cfig.io.Struct
import cfig.helper.ZipHelper
import cfig.helper.ZipHelper.Companion.getEntryOffset
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.File
import java.lang.IllegalArgumentException
import java.util.*

open class BrilloPropString {
    open val name: String = ""
    open val required: MutableList<String> = mutableListOf()
    open val optional: MutableList<String> = mutableListOf()

    companion object {
        private val log = LoggerFactory.getLogger(BrilloPropString::class.qualifiedName)
        const val metaDataName = "META-INF/com/android/metadata"

        fun Properties.toBrilloString(): String {
            val metaValueList = this.map { "${it.key}=${it.value}\n" }.sorted()
            return metaValueList.reduce { acc, s -> "$acc$s" }
        }

        fun computeAllPropertyStrings(fileName: String, inPropertyStrings: List<BrilloPropString>): Properties {
            return Properties().let { metadata ->
                inPropertyStrings.forEach {
                    metadata[it.name] = it.preCompute(fileName)
                }
                metadata
            }
        }

        fun finalizeAllPropertyFiles(
            fileName: String,
            inPropertyStrings: List<BrilloPropString>,
            preComputed: Properties
        ): Properties {
            val metadata = Properties()
            val zf = ZipFile(fileName)
            inPropertyStrings.forEach {
                metadata[it.name] = it.postCompute(fileName, (preComputed[it.name] as String).length)
            }
            zf.close()
            return metadata
        }

        fun rmMetaData(fileName: String) {
            ZipFile(fileName).use { zf ->
                val metadataEntry = zf.getEntry(metaDataName)
                if (metadataEntry != null) {
                    log.info("$metaDataName exists, needs to be erased")
                    ZipHelper.zipDelete(File(fileName), metaDataName)
                } else {
                    log.info("$metaDataName doesn't exist")
                }
            }
        }
    }

    /*
      pre-compute: runs with a mimicked "metadata" placeholder to reserve space
     */
    fun preCompute(fileName: String): String {
        return this.fromZipFile(fileName, reserveSpace = true)
    }

    /*
      finalize: returns the string right-padded with spaces to the reserved length
     */
    fun postCompute(fileName: String, reservedLen: Int): String {
        val result = fromZipFile(fileName, reserveSpace = false)
        if (result.length > reservedLen) {
            throw IllegalArgumentException("Insufficient reserved space: reserved=$reservedLen, actual=${result.length}")
        }
        return result + " ".repeat(reservedLen - result.length)
    }

    fun verify(fileName: String, expected: String) {
        log.info("verifying $fileName:${this.name} ...")
        val actual = fromZipFile(fileName, reserveSpace = false)
        if (actual != expected.trim()) {
            throw RuntimeException("Mismatching streaming metadata: [$actual] vs [$expected]")
        } else {
            log.info("Verified $fileName:${this.name} against [$expected]")
        }
    }

    private fun fromZipFile(fileName: String, reserveSpace: Boolean = false): String {
        ZipFile(fileName).use { zf ->
            val token: MutableList<BrilloProp> = computePrivateProps(fileName)
            this.required.forEach {
                token.add(BrilloProp(zf, it))
            }
            this.optional.filter { zf.getEntry(it) != null }.forEach {
                token.add(BrilloProp(zf, it))
            }
            if (reserveSpace) {
                token.add(BrilloProp("metadata", 0L, 0L))
            } else {
                log.info("$metaDataName is " + BrilloProp(zf, metaDataName).toString())
                token.add(BrilloProp(zf, metaDataName))
            }
            val ret = token.map { it.toString() }.reduce { acc, s -> "$acc,$s" }
            log.info("fromZipFile($fileName) = [$ret]")
            return ret
        }
    }

    open fun computePrivateProps(fileName: String): MutableList<BrilloProp> {
        return mutableListOf()
    }
}

open class StreamingBrilloPropString : BrilloPropString() {
    override val name: String = "ota-streaming-property-files"
    override val required: MutableList<String> = mutableListOf("payload.bin", "payload_properties.txt")

    //care_map is available only if dm-verity is enabled
    //compatibility.zip is available only if target supports Treble
    override val optional: MutableList<String> = mutableListOf("care_map.pb", "care_map.txt", "compatibility.zip")
}

class NonAbBrilloPropString : BrilloPropString() {
    override val name: String = "ota-property-files"
}

/*
  AbBrilloPropString will replace StreamingBrilloPropString after P-timeframe
 */
@OptIn(ExperimentalUnsignedTypes::class)
class AbBrilloPropString : StreamingBrilloPropString() {
    override val name: String = "ota-property-files"

    override fun computePrivateProps(fileName: String): MutableList<BrilloProp> {
        ZipFile(fileName).use { zf ->
            val pb = zf.getEntry("payload.bin")
            val headerFormat = Struct("!IQQL")
            val header = headerFormat.unpack(zf.getInputStream(pb))
            val magic = header[0] as UInt
            val manifestSize = header[2] as ULong
            val metaSigSize = header[3] as UInt
            if (0x43724155U != magic) { //'CrAU'
                throw IllegalArgumentException("Invalid magic 0x" + magic.toString(16))
            }
            val metaTotal = headerFormat.calcSize().toULong() + manifestSize + metaSigSize
            if (metaTotal >= pb.size.toUInt()) {
                throw IllegalArgumentException("metadata total size >= payload size")
            }
            return mutableListOf(BrilloProp("payload_metadata.bin", pb.getEntryOffset(), metaTotal.toLong()))
        }
    }
}
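The preCompute/postCompute pair implements a two-pass reservation: pass one emits a placeholder "metadata" token padded with 15 trailing spaces (see BrilloProp.toString), pass two recomputes the real offsets and right-pads the result to the reserved length, so zip entry offsets recorded earlier stay valid. A worked sketch with hypothetical offsets:

fun main() {
    val reserved = "payload.bin:1094:123456,metadata" + " ".repeat(15)  // pass 1: placeholder + padding
    val actual = "payload.bin:1094:123456,metadata:800:290"             // pass 2: real offset/size known
    check(actual.length <= reserved.length)                             // otherwise postCompute throws
    val finalValue = actual + " ".repeat(reserved.length - actual.length) // same length as pass 1
    println(finalValue)
}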
@@ -0,0 +1,157 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.util.*

class Common {
    companion object {
        private val log = LoggerFactory.getLogger(Common::class.java)
        private val PARTITIONS_WITH_CARE_MAP =
            mutableListOf("system", "vendor", "product", "system_ext", "odm", "vendor_dlkm", "odm_dlkm")
        // copy, so PARTITIONS_WITH_CARE_MAP itself is left untouched
        val PARTITIONS_WITH_BUILD_PROP = (PARTITIONS_WITH_CARE_MAP + "boot").toMutableList()

        private fun getEntryStream(zipFile: ZipFile, entryName: String): InputStream {
            return zipFile.getInputStream(zipFile.getEntry(entryName))
        }

        fun Properties.getBuildProp(k: String): String? {
            return (this.get("build.prop") as Properties).getProperty(k)
        }

        fun loadInfoDict(fileName: String): Properties {
            val d = Properties()
            ZipFile(fileName).use { zf ->
                log.info("loading META/misc_info.txt ...")
                //1: misc_info.txt
                d.load(getEntryStream(zf, "META/misc_info.txt"))
                if (null == d.getProperty("recovery_api_version")) {
                    throw IllegalArgumentException("Failed to find 'recovery_api_version'")
                }
                if (null == d.getProperty("fstab_version")) {
                    throw IllegalArgumentException("Failed to find 'fstab_version'")
                }
                if ("true" == d.getProperty("system_root_image")) {
                    throw IllegalArgumentException("BOARD_BUILD_SYSTEM_ROOT_IMAGE no longer supported")
                }
                val recoveryFstabPath = "BOOT/RAMDISK/system/etc/recovery.fstab"
                val recoveryFstabPath2 = "VENDOR_BOOT/RAMDISK/system/etc/recovery.fstab"
                val validFstab = if (zf.getEntry(recoveryFstabPath) != null) {
                    recoveryFstabPath
                } else {
                    recoveryFstabPath2
                }
                //2: .fstab
                d.put("fstab", loadRecoveryFstab(zf, validFstab, false))

                //load all build.prop
                PARTITIONS_WITH_BUILD_PROP.forEach { part ->
                    val subProps = Properties()
                    if (part == "boot") {
                        arrayOf(
                            "BOOT/RAMDISK/system/etc/ramdisk/build.prop",
                            "BOOT/RAMDISK/prop.default"
                        ).forEach { bootBuildProp ->
                            zf.getEntry(bootBuildProp)?.let { entry ->
                                log.info("loading /$bootBuildProp ...")
                                subProps.load(zf.getInputStream(entry))
                            }
                        }
                    } else {
                        zf.getEntry("${part.uppercase(Locale.getDefault())}/build.prop")?.let { entry ->
                            log.info("loading /$part/build.prop ...")
                            subProps.load(zf.getInputStream(entry))
                        }
                        zf.getEntry("${part.uppercase(Locale.getDefault())}/etc/build.prop")?.let { entry ->
                            log.info("loading /$part/etc/build.prop ...")
                            subProps.load(zf.getInputStream(entry))
                        }
                    }
                    //3: .$part.build.prop
                    d.put("$part.build.prop", subProps)
                }
                //4: .build.prop == .system.build.prop
                log.info("duplicating system.build.prop -> build.prop")
                d.put("build.prop", d.get("system.build.prop"))
            }
            if (d.get("avb_enable") == "true") {
                // 5: avb related
                (d.get("build.prop") as Properties).let { buildprop ->
                    var fp: String?
                    fp = buildprop.get("ro.build.fingerprint") as String?
                    if (fp == null) {
                        fp = buildprop.get("ro.build.thumbprint") as String?
                    }
                    fp?.let {
                        log.warn("adding avb_salt from fingerprint ...")
                        d.put("avb_salt", "fp")
                    }
                }
            }
            return d
        }

        // parses recovery.fstab into a map of mount point -> Partition record;
        // values are typed as Any because Partition is a function-local class
        private fun loadRecoveryFstab(zf: ZipFile, fstabPath: String, bSystemRoot: Boolean = false): MutableMap<String, Any> {
            class Partition(
                var mount_point: String = "",
                var fs_type: String = "",
                var device: String = "",
                var length: Long = 0,
                var selinuxContext: String = "",
            )
            log.info("loading $fstabPath ...")
            val ret: MutableMap<String, Any> = mutableMapOf()
            val rs = getEntryStream(zf, fstabPath).readBytes().toString(StandardCharsets.UTF_8)
            log.debug(rs)
            rs.lines().forEach rs@{ line ->
                val item = line.trim()
                if (item.isEmpty() || item.startsWith("#")) {
                    log.debug("ignore empty/comment line")
                    return@rs
                }
                val pieces = item.split("\\s+".toRegex())
                if (pieces.size != 5) {
                    throw IllegalArgumentException("malformed recovery.fstab line: [$item]")
                }
                if (pieces[4].contains("voldmanaged=")) {
                    log.info("Ignore entries that are managed by vold: [$item]")
                    return@rs
                }
                val lengthOption = pieces[4].split(",").filter { it.startsWith("length=") }
                val length = when (lengthOption.size) {
                    0 -> 0L
                    1 -> lengthOption[0].substring(7).toLong()
                    else -> throw IllegalArgumentException("multiple 'length=' in options")
                }

                val mountFlags = pieces[3]
                val mountContextFlags = mountFlags.split(",").filter { it.startsWith("context=") }
                val context = if (mountContextFlags.size == 1) mountContextFlags[0] else ""

                ret.put(pieces[1], Partition(pieces[1], pieces[2], pieces[0], length, context))
            }
            if (bSystemRoot) {
                if (ret.keys.contains("/system") || !ret.keys.contains("/")) {
                    throw IllegalArgumentException("not allowed")
                }
                val systemPartition = ret.get("/") as Partition
                systemPartition.mount_point = "/system"
                log.info("adding /system for system_as_root devices")
                ret.put("/system", systemPartition)
            }
            return ret
        }
    }
}
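loadInfoDict mirrors AOSP's common.LoadInfoDict: it layers misc_info.txt, the recovery fstab, and every partition's build.prop into a single Properties tree. A minimal lookup sketch, assuming a hypothetical target_files.zip on disk:

import cc.cfig.droid.ota.Common
import cc.cfig.droid.ota.Common.Companion.getBuildProp

fun main() {
    val info = Common.loadInfoDict("target_files.zip")  // hypothetical target-files package
    println(info.getBuildProp("ro.build.fingerprint"))  // reads the merged system build.prop
}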
@@ -0,0 +1,138 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import cfig.helper.Helper
import cfig.helper.ZipHelper
import chromeos_update_engine.UpdateMetadata
import org.slf4j.LoggerFactory
import java.io.File
import java.io.FileInputStream
import java.util.*
import chromeos_update_engine.UpdateMetadata.InstallOperation.Type
import java.io.ByteArrayInputStream

class DeltaGenerator {
    class ChunkProcessor(
        val name: String,
        val op: UpdateMetadata.InstallOperation,
        val blockSize: Int,
        val partFile: String,
    ) {
        fun ProcessChunk() {
            log.info("ChunkProcessor: $name")
            FileInputStream(partFile).use { fis ->
                val dst0 = op.getDstExtents(0)
                fis.skip(dst0.startBlock * blockSize)
                val data = ByteArray((dst0.numBlocks * blockSize).toInt())
                if (data.size != fis.read(data)) {
                    throw RuntimeException("$name: read size != expected size")
                }
                val bestOp = GenerateBestFullOperation(data)
                if (bestOp[0] as Boolean) {
                    log.info("bestType=" + bestOp[1] as Type + ", bestSize=" + bestOp[2] as Int)
                } else {
                    throw IllegalStateException("GenerateBestFullOperation fail")
                }
            }
        }

        companion object {
            fun GenerateBestFullOperation(inData: ByteArray): Array<Any> {
                val ret: Array<Any> = Array(3) { 0 }
                var bestType: Type = Type.REPLACE
                var bestSize: Int = inData.size
                //buffer MUST be valid
                if (inData.isEmpty()) {
                    ret[0] = false
                    return ret
                }
                //zero
                if (inData.all { it.toInt() == 0 }) {
                    bestType = Type.ZERO
                    log.info("raw=${inData.size}, ZERO")
                    ret[0] = true
                    ret[1] = bestType
                    return ret
                }
                //try xz
                File.createTempFile("pre", "suf").let { tempFile ->
                    tempFile.deleteOnExit()
                    ZipHelper.xz(tempFile.absolutePath, ByteArrayInputStream(inData))
                    log.debug("raw=${inData.size}, xz=" + tempFile.length())
                    if (bestSize > tempFile.length()) {
                        bestType = Type.REPLACE_XZ
                        bestSize = tempFile.length().toInt()
                    }
                }
                //try bz
                File.createTempFile("pre", "suf").let { tempFile ->
                    tempFile.deleteOnExit()
                    ZipHelper.bzip2(tempFile.absolutePath, ByteArrayInputStream(inData))
                    log.debug("raw=${inData.size}, bzip2=" + tempFile.length())
                    if (bestSize > tempFile.length()) {
                        bestType = Type.REPLACE_BZ
                        bestSize = tempFile.length().toInt()
                    }
                }
                ret[0] = true
                ret[1] = bestType
                ret[2] = bestSize
                return ret
            }
        }
    }

    class FullPayloadGenerator {
        fun GenerateOperations(partName: String, partFile: String) {
            val config = Properties().apply {
                put("full_chunk_size", 2 * 1024 * 1024)
                put("block_size", 4 * 1024)
            }
            val fullChunkSize = config.get("full_chunk_size") as Int
            val blockSize = config.get("block_size") as Int
            if (fullChunkSize % blockSize != 0) {
                throw IllegalArgumentException("BUG: illegal (chunk_size, block_size)=($fullChunkSize, $blockSize)")
            }
            val fileLen = File(partFile).length()
            log.warn("fcs=$fullChunkSize, file size=$fileLen")
            val partitionBlocks: Long = fileLen / blockSize
            val chunkBlocks: Long = (fullChunkSize / blockSize).toLong() //typically 512
            val numChunks = Helper.Companion.round_to_multiple(partitionBlocks, chunkBlocks) / chunkBlocks
            log.warn("partitionBlocks=$partitionBlocks, numChunks=$numChunks")
            for (i in 0 until numChunks) {
                val startBlock = i * chunkBlocks
                val numBlocks = minOf(chunkBlocks, partitionBlocks - i * chunkBlocks)
                val dstExtent = UpdateMetadata.Extent.newBuilder()
                    .setStartBlock(startBlock)
                    .setNumBlocks(numBlocks)
                    .build()
                val op = UpdateMetadata.InstallOperation.newBuilder()
                    .setType(Type.REPLACE)
                    .addDstExtents(dstExtent)
                    .build()
                log.info("op<${i}> $op")
                ChunkProcessor("$partName-operation-${i}/$numChunks", op, blockSize, partFile).ProcessChunk()
            }
        }

        fun appendData() {
            //TODO: not implemented yet
        }
    }

    companion object {
        val log = LoggerFactory.getLogger(DeltaGenerator::class.java.name)
    }
}
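GenerateBestFullOperation picks the cheapest full-operation encoding: ZERO for all-zero chunks, otherwise the smallest of raw REPLACE, REPLACE_XZ, and REPLACE_BZ. A sketch of the all-zero fast path, which returns before either compressor is tried:

import cc.cfig.droid.ota.DeltaGenerator
import chromeos_update_engine.UpdateMetadata.InstallOperation.Type

fun main() {
    val best = DeltaGenerator.ChunkProcessor.GenerateBestFullOperation(ByteArray(4096))
    check(best[0] as Boolean)            // buffer was valid
    check(best[1] as Type == Type.ZERO)  // 4 KiB of zeroes => ZERO operation
}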
@@ -0,0 +1,21 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

data class OtaOptions(
    var wipe_user_data: Boolean = false,
    var skip_postinstall: Boolean = false,
    var include_secondary: Boolean = false,
    var downgrade: Boolean = false
)
@@ -0,0 +1,299 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import cc.cfig.io.Struct
import cfig.helper.CryptoHelper.Hasher
import cfig.helper.Helper
import chromeos_update_engine.UpdateMetadata
import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.ObjectMapper
import com.google.protobuf.ByteString
import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor
import org.apache.commons.exec.PumpStreamHandler
import org.slf4j.LoggerFactory
import java.io.*
import java.nio.file.Files
import java.nio.file.Paths
import java.time.Instant
import java.time.ZoneId
import java.time.format.DateTimeFormatter

class Payload {
    var fileName: String = ""
    var header = PayloadHeader()
    var manifest: UpdateMetadata.DeltaArchiveManifest = UpdateMetadata.DeltaArchiveManifest.newBuilder().build()
    var metaSig: UpdateMetadata.Signatures = UpdateMetadata.Signatures.newBuilder().build()
    var metaSize: Int = 0
    var dataOffset: Long = 0L
    var dataSig: UpdateMetadata.Signatures = UpdateMetadata.Signatures.newBuilder().build()

    companion object {
        private val log = LoggerFactory.getLogger(Payload::class.java)
        val workDir = Helper.prop("payloadDir")

        fun parse(inFileName: String): Payload {
            val ret = Payload()
            ret.fileName = inFileName
            FileInputStream(inFileName).use { fis ->
                ret.header = PayloadHeader(fis)
                ret.metaSize = ret.header.headerSize + ret.header.manifestLen.toInt()
                ret.dataOffset = (ret.metaSize + ret.header.metaSigLen).toLong()
                //manifest
                ret.manifest = ByteArray(ret.header.manifestLen.toInt()).let { buf ->
                    fis.read(buf)
                    UpdateMetadata.DeltaArchiveManifest.parseFrom(buf)
                }
                //meta sig
                ret.metaSig = ByteArray(ret.header.metaSigLen).let { buf2 ->
                    fis.read(buf2)
                    UpdateMetadata.Signatures.parseFrom(buf2)
                }

                //data sig
                if (ret.manifest.hasSignaturesOffset()) {
                    log.debug("payload sig offset = " + ret.manifest.signaturesOffset)
                    log.debug("payload sig size = " + ret.manifest.signaturesSize)
                    fis.skip(ret.manifest.signaturesOffset)
                    ret.dataSig = ByteArray(ret.manifest.signaturesSize.toInt()).let { buf ->
                        fis.read(buf)
                        UpdateMetadata.Signatures.parseFrom(buf)
                    }
                } else {
                    log.warn("payload has no signatures")
                }
            } //end-of-fis

            run { //CHECK_EQ(payload.size(), signatures_offset + manifest.signatures_size())
                val calculatedSize = ret.header.headerSize + ret.header.manifestLen + ret.header.metaSigLen +
                        ret.manifest.signaturesOffset + ret.manifest.signaturesSize
                if (File(inFileName).length() == calculatedSize) {
                    log.info("payload.bin size info check PASS")
                } else {
                    throw IllegalStateException("calculated payload size doesn't match file size")
                }
            }

            val calcMetadataHash =
                Hasher.hash(inFileName, listOf(Pair(0L, ret.metaSize.toLong())), "sha-256")
            log.info("calc meta hash: " + Helper.toHexString(calcMetadataHash))
            val calcPayloadHash = Hasher.hash(
                inFileName, listOf(
                    Pair(0L, ret.metaSize.toLong()),
                    Pair(ret.metaSize.toLong() + ret.header.metaSigLen, ret.manifest.signaturesOffset)
                ), "sha-256"
            )
            check(calcPayloadHash.size == 32)
            log.info("calc payload hash: " + Helper.toHexString(calcPayloadHash))

            val readPayloadSignature = UpdateMetadata.Signatures.parseFrom(
                Helper.readFully(
                    inFileName,
                    ret.dataOffset + ret.manifest.signaturesOffset,
                    ret.manifest.signaturesSize.toInt()
                )
            )
            log.info("Found sig count: " + readPayloadSignature.signaturesCount)
            readPayloadSignature.signaturesList.forEach {
                log.info(it.data.toString())
                log.info("sig_data size = " + it.data.toByteArray().size)
                log.info(Helper.toHexString(it.data.toByteArray()))
                Files.write(Paths.get("sig_data"), it.data.toByteArray())
            }

            return ret
        }

        class PayloadVerifier {
            fun getRawHashFromSignature(sig_data: ByteString, pubkey: String, sigHash: ByteArray) {
                //TODO: not implemented yet
            }
        }

        fun displaySignatureBlob(sigName: String, sig: UpdateMetadata.Signatures): String {
            return StringBuilder().let { sb ->
                sb.append(String.format("%s signatures: (%d entries)\n", sigName, sig.signaturesCount))
                sig.signaturesList.forEach {
                    sb.append(String.format("  hex_data: (%d bytes)\n", it.data.size()))
                    sb.append("  Data: " + Helper.toHexString(it.data.toByteArray()) + "\n")
                }
                sb
            }.toString()
        }
    }

    fun printInfo() {
        val mi = ManifestInfo(
            blockSize = this.manifest.blockSize,
            minorVersion = this.manifest.minorVersion,
            maxTimeStamp = this.manifest.maxTimestamp,
            signatureOffset = this.manifest.signaturesOffset,
            signatureSize = this.manifest.signaturesSize,
            partialUpdate = this.manifest.hasPartialUpdate(),
            partsToUpdate = this.manifest.partitionsList.map {
                ManifestInfo.PartitionToUpdate(
                    it.partitionName, it.operationsCount,
                    if (it.hasRunPostinstall()) it.runPostinstall else null,
                    if (it.hasPostinstallPath()) it.postinstallPath else null
                )
            },
            enableSnapshot = this.manifest.dynamicPartitionMetadata.hasSnapshotEnabled(),
            dynamicGroups = this.manifest.dynamicPartitionMetadata.groupsList.map {
                ManifestInfo.DynamicPartGroup(name = it.name, size = it.size, partName = it.partitionNamesList)
            })
        ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File("$workDir/header.json"), this.header)
        log.info("header info dumped to $workDir/header.json")
        ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File("$workDir/manifest.json"), mi)
        log.info("manifest info dumped to $workDir/manifest.json")

        val signatureFile = "$workDir/signatures.txt"
        FileOutputStream(signatureFile, false).use { fos ->
            fos.writer().use { fWriter ->
                fWriter.write("<Metadata> signatures: offset=" + this.header.manifestLen + ", size=" + this.header.metaSigLen + "\n")
                fWriter.write(Payload.displaySignatureBlob("<Metadata>", this.metaSig))
                fWriter.write("<Payload> signatures: offset=" + manifest.signaturesOffset + ", size=" + manifest.signaturesSize + "\n")
                fWriter.write((Payload.displaySignatureBlob("<Payload>", this.dataSig)))
            }
        }
        log.info("signature info dumped to $signatureFile")
    }

    private fun decompress(inBytes: ByteArray, opType: UpdateMetadata.InstallOperation.Type): ByteArray {
        val baosO = ByteArrayOutputStream()
        val baosE = ByteArrayOutputStream()
        val bais = ByteArrayInputStream(inBytes)
        DefaultExecutor().let { exec ->
            exec.streamHandler = PumpStreamHandler(baosO, baosE, bais)
            val cmd = when (opType) {
                UpdateMetadata.InstallOperation.Type.REPLACE_XZ -> CommandLine("xzcat")
                UpdateMetadata.InstallOperation.Type.REPLACE_BZ -> CommandLine("bzcat")
                UpdateMetadata.InstallOperation.Type.REPLACE -> return inBytes
                else -> throw IllegalArgumentException(opType.toString())
            }
            cmd.addArgument("-")
            exec.execute(cmd)
        }
        return baosO.toByteArray()
    }

    private fun unpackInternal(ras: RandomAccessFile, pu: UpdateMetadata.PartitionUpdate, logPrefix: String = "") {
        log.info(String.format("[%s] extracting %13s.img (%d ops)", logPrefix, pu.partitionName, pu.operationsCount))
        FileOutputStream("$workDir/${pu.partitionName}.img").use { outFile ->
            val ops = pu.operationsList.toMutableList().apply {
                sortBy { it.getDstExtents(0).startBlock }
            }
            ops.forEach { op ->
                log.debug(pu.partitionName + ": " + (op.getDstExtents(0).startBlock * this.manifest.blockSize) + ", size=" + op.dataLength)
                val piece = ByteArray(op.dataLength.toInt()).let {
                    ras.seek(this.dataOffset + op.dataOffset)
                    ras.read(it)
                    it
                }
                outFile.write(decompress(piece, op.type))
            }
        }
    }

    fun setUp() {
        File(workDir).let {
            if (it.exists()) {
                log.info("Removing $workDir")
                it.deleteRecursively()
            }
            log.info("Creating $workDir")
            it.mkdirs()
        }
    }

    fun unpack() {
        RandomAccessFile(this.fileName, "r").use { ras ->
            var currentNum = 1
            val totalNum = this.manifest.partitionsCount
            val parts = this.manifest.partitionsList.map { it.partitionName }
            log.info("There are $totalNum partitions $parts")
            log.info("dumping images to $workDir")
            this.manifest.partitionsList.forEach { pu ->
                unpackInternal(ras, pu, String.format("%2d/%d", currentNum, totalNum))
                currentNum++
            }
        }
    }

    data class PayloadHeader(
        var version: Long = 0,
        var manifestLen: Long = 0,
        var metaSigLen: Int = 0,
        var headerSize: Int = 0
    ) {
        private val magic = "CrAU"
        private val FORMAT_STRING = ">4sqq" //magic, version, manifestLen
        private val CHROMEOS_MAJOR_PAYLOAD_VERSION = 1L
        private val BRILLO_MAJOR_PAYLOAD_VERSION = 2L
        val typeOfVersion: String
            get() = when (version) {
                CHROMEOS_MAJOR_PAYLOAD_VERSION -> "chromeOs"
                BRILLO_MAJOR_PAYLOAD_VERSION -> "brillo"
                else -> throw IllegalArgumentException()
            }

        constructor(fis: InputStream) : this() {
            val info = Struct(FORMAT_STRING).unpack(fis)
            check((info[0] as String) == magic) { "${info[0]} is not payload magic" }
            version = info[1] as Long
            manifestLen = info[2] as Long
            headerSize = Struct(FORMAT_STRING).calcSize()

            if (version == BRILLO_MAJOR_PAYLOAD_VERSION) {
                headerSize += Int.SIZE_BYTES
                metaSigLen = Struct(">i").unpack(fis)[0] as Int
            }
        }
    }

    @JsonInclude(JsonInclude.Include.NON_NULL)
    data class ManifestInfo(
        var blockSize: Int? = null,
        var minorVersion: Int? = null,
        var maxTimeStamp: Long = 0L,
        var maxTimeReadable: String? = null,
        var partialUpdate: Boolean? = null,
        val signatureOffset: Long? = null,
        val signatureSize: Long? = null,
        var dynamicGroups: List<DynamicPartGroup> = listOf(),
        var enableSnapshot: Boolean? = null,
        var partsToUpdate: List<PartitionToUpdate> = listOf()
    ) {
        init {
            val ldt = Instant.ofEpochMilli(maxTimeStamp * 1000)
                .atZone(ZoneId.systemDefault())
                .toLocalDateTime()
            maxTimeReadable = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(ldt) + " (${ZoneId.systemDefault()})"
        }

        @JsonInclude(JsonInclude.Include.NON_NULL)
        data class PartitionToUpdate(
            var name: String = "",
            var ops: Int = 0,
            var runPostInstall: Boolean? = null,
            var postInstallPath: String? = null
        )

        data class DynamicPartGroup(
            var name: String = "",
            var size: Long = 0L,
            var partName: List<String> = listOf()
        )
    }
}
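PayloadHeader encodes the Brillo (major version 2) layout: a 4-byte "CrAU" magic, an 8-byte version, an 8-byte manifest length, a 4-byte metadata-signature length, then the manifest, the metadata signature, and the data blobs that InstallOperation.dataOffset indexes. A worked offset calculation mirroring parse(), with a hypothetical 1000-byte manifest and 267-byte metadata signature:

fun main() {
    val headerSize = 4 + 8 + 8 + 4           // magic + version + manifestLen + metaSigLen (v2)
    val manifestLen = 1000
    val metaSigLen = 267
    val metaSize = headerSize + manifestLen  // 1024: the range covered by the metadata hash
    val dataOffset = metaSize + metaSigLen   // 1291: base that op.dataOffset is relative to
    println("metaSize=$metaSize, dataOffset=$dataOffset")
}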
@@ -0,0 +1,247 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import cc.cfig.io.Struct
import cfig.helper.Helper.Companion.check_call
import cfig.helper.ZipHelper.Companion.dumpEntry
import org.apache.commons.compress.archivers.zip.ZipFile
import org.slf4j.LoggerFactory
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardOpenOption
import java.util.*
import kotlin.system.exitProcess

@OptIn(ExperimentalUnsignedTypes::class)
class PayloadGenerator {
    private val log = LoggerFactory.getLogger(PayloadGenerator::class.java)
    val workDir = "build/staging_ota"
    val signedPayload = "$workDir/signed-payload.bin"
    val propertiesFile = "$workDir/payload-properties.txt"

    fun generate(maxTs: String, targetFile: String, payload: String) {
        ("brillo_update_payload generate" +
                " --max_timestamp $maxTs" +
                " --target_image $targetFile" +
                " --payload $payload").check_call()
    }

    fun sign(inSigner: PayloadSigner, options: OtaOptions) {
        // 1. Generate hashes of the payload and metadata files
        ("brillo_update_payload hash" +
                " --unsigned_payload $workDir/payload.bin" +
                " --signature_size " + inSigner.keySize +
                " --metadata_hash_file $workDir/meta.hash" +
                " --payload_hash_file $workDir/payload.hash").check_call()

        // 2. Sign the hashes.
        inSigner.sign("$workDir/meta.hash", "$workDir/signed-meta.hash")
        inSigner.sign("$workDir/payload.hash", "$workDir/signed-payload.hash")

        // 3. Insert the signatures back into the payload file.
        ("brillo_update_payload sign" +
                " --unsigned_payload $workDir/payload.bin" +
                " --payload $signedPayload" +
                " --signature_size " + inSigner.keySize +
                " --metadata_signature_file $workDir/signed-meta.hash" +
                " --payload_signature_file $workDir/signed-payload.hash").check_call()

        // 4. Dump the signed payload properties.
        ("brillo_update_payload properties" +
                " --payload $signedPayload" +
                " --properties_file $propertiesFile").check_call()

        // 5. Append extra properties.
        if (options.wipe_user_data) {
            FileOutputStream(propertiesFile, true).use {
                it.write("POWERWASH=1\n".toByteArray())
            }
        }
        if (options.include_secondary) {
            FileOutputStream(propertiesFile, true).use {
                it.write("SWITCH_SLOT_ON_REBOOT=0\n".toByteArray())
            }
        }
    }

    fun tryToDumpEntry(inputFile: ZipFile, entryItem: String, outFile: String) {
        val entry = inputFile.getEntry(entryItem)
        if (entry != null) {
            inputFile.dumpEntry(entry.name, File(outFile))
        } else {
            log.info("$entryItem not found")
        }
    }

    fun generateMine(maxTs: String, inTargetFile: String, payload: String, infoDict: Properties) {
        val targetFile = ZipFile(inTargetFile)
        val abPartitions =
            String(targetFile.getInputStream(targetFile.getEntry("META/ab_partitions.txt")).readBytes())
                .lines().filter { it.isNotBlank() }
        log.info("Dumping ${abPartitions.size} images from target file ...")

        abPartitions.forEach { part ->
            val partEntryName = listOfNotNull(
                targetFile.getEntry("IMAGES/$part.img"), targetFile.getEntry("RADIO/$part.img")
            ).let { parts ->
                if (parts.size != 1) {
                    log.error("Found multiple images for partition $part")
                    exitProcess(1)
                }
                parts[0].name
            }
            //dump image
            targetFile.dumpEntry(partEntryName, File("$workDir/$part.img"))

            run { //unsparse image
                Struct(">I").unpack(FileInputStream("$workDir/$part.img")).let { fileHeader ->
                    //sparse magic is 0xed26ff3a little-endian on disk, read big-endian as 0x3aff26ed
                    if (fileHeader[0] as UInt == 0x3aff26ed.toUInt()) {
                        log.debug("$part is sparse, convert to raw image")
                        "simg2img $workDir/$part.img $workDir/tmp.img".check_call()
                        File("$workDir/tmp.img").renameTo(File("$workDir/$part.img"))
                    }
                }
            }

            run { //dump map file
                val mapFile = targetFile.getEntry(partEntryName.replace(".img", ".map"))
                if (mapFile != null) {
                    log.debug("$part.map found, dump it to $workDir/$part.map")
                    targetFile.dumpEntry(mapFile.name, File("$workDir/$part.map"))
                } else {
                    log.debug("$part.map not found")
                }
            }
            File("$workDir/$part.img").let { partFile ->
                val partSize = partFile.length()
                if (partSize % 4096 != 0L) {
                    log.info("Padding $workDir/$part.img ...")
                    Files.write(
                        Paths.get("$workDir/$part.img"),
                        ByteArray(4096 - (partSize % 4096).toInt()),
                        StandardOpenOption.APPEND
                    )
                }
            }
        }
        targetFile.dumpEntry("META/postinstall_config.txt", File("$workDir/postinstall_config.txt"))
        targetFile.dumpEntry("META/dynamic_partitions_info.txt", File("$workDir/dynamic_partitions_info.txt"))
        tryToDumpEntry(targetFile, "META/apex_info.pb", "$workDir/apex_info.pb")
        targetFile.close()

        data class DeltaGenParam(
            var partitionNames: String = "",
            var newImages: String = "",
            var newMapFiles: String = "",
            var newPostInstallConfig: String = "",
            var dynamicPartitionInfo: String = "",
            var apexInfoFile: String = "",
            var partitionTimeStamps: String = "",
        )

        //partition timestamps
        val pTs: MutableList<Pair<String, String>> = mutableListOf()
        Common.PARTITIONS_WITH_BUILD_PROP.forEach { part ->
            val item: Pair<String, String?> = Pair(part,
                when (part) {
                    "boot" -> {
                        log.info("boot:" + infoDict.get("$part.build.prop") as Properties)
                        (infoDict.get("$part.build.prop") as Properties).getProperty("ro.${part}image.build.date.utc")
                    }
                    else -> (infoDict.get("$part.build.prop") as Properties).getProperty("ro.${part}.build.date.utc")
                })
            if (item.second != null) {
                pTs.add(item as Pair<String, String>)
            }
        }

        val dp = DeltaGenParam().apply {
            partitionNames = abPartitions.reduce { acc, s -> "$acc:$s" }
            newImages = abPartitions.map { "$workDir/$it.img" }.reduce { acc, s -> "$acc:$s" }
            newMapFiles = abPartitions
                .map { if (File("$workDir/$it.map").exists()) "$workDir/$it.map" else "" }
                .reduce { acc, s -> "$acc:$s" }
            newPostInstallConfig = "$workDir/postinstall_config.txt"
            dynamicPartitionInfo = "$workDir/dynamic_partitions_info.txt"
            if (File("$workDir/apex_info.pb").exists()) {
                apexInfoFile = "$workDir/apex_info.pb"
            }
            partitionTimeStamps = pTs.map { it.first + ":" + it.second }.reduce { acc, s -> "$s,$acc" }
        }

        ("delta_generator" +
                " --out_file=$payload" +
                " --partition_names=${dp.partitionNames}" +
                " --new_partitions=${dp.newImages}" +
                " --new_mapfiles=${dp.newMapFiles}" +
                " --major_version=2" +
                " --max_timestamp=$maxTs" +
                " --partition_timestamps=${dp.partitionTimeStamps}" +
                " --new_postinstall_config_file=${dp.newPostInstallConfig}" +
                " --dynamic_partition_info_file=${dp.dynamicPartitionInfo}" +
                if (dp.apexInfoFile.isNotBlank()) " --apex_info_file=${dp.apexInfoFile}" else ""
                ).check_call()
    }

    fun signMine(inSigner: PayloadSigner, options: OtaOptions) {
        //1: hash and meta of payload
        ("delta_generator" +
                " --in_file=$workDir/payload.bin.mine" +
                " --signature_size=${inSigner.keySize}" +
                " --out_hash_file=$workDir/payload.hash.mine" +
                " --out_metadata_hash_file=$workDir/meta.hash.mine").check_call()
        //Helper.assertFileEquals("$workDir/meta.hash", "$workDir/meta.hash.mine")
        //Helper.assertFileEquals("$workDir/payload.hash", "$workDir/payload.hash.mine")

        //2: sign hash and meta
        inSigner.sign("$workDir/meta.hash.mine", "$workDir/signed-meta.hash.mine")
        inSigner.sign("$workDir/payload.hash.mine", "$workDir/signed-payload.hash.mine")
        //Helper.assertFileEquals("$workDir/signed-meta.hash", "$workDir/signed-meta.hash.mine")
        //Helper.assertFileEquals("$workDir/payload.hash", "$workDir/payload.hash.mine")

        //3: hash, meta, payload.bin -> signed-payload.bin
        ("delta_generator" +
                " --in_file=$workDir/payload.bin.mine" +
                " --signature_size=" + inSigner.keySize +
                " --payload_signature_file=$workDir/signed-payload.hash.mine" +
                " --metadata_signature_file=$workDir/signed-meta.hash.mine" +
                " --out_file=$signedPayload.mine").check_call()
        //Helper.assertFileEquals(signedPayload, "$signedPayload.mine")

        //4: payload-properties.txt
        ("delta_generator" +
                " --in_file=$signedPayload.mine" +
                " --properties_file=$propertiesFile.mine").check_call()
        //Helper.assertFileEquals(propertiesFile, "$propertiesFile.mine")

        //5: payload-properties.txt appending
        if (options.wipe_user_data) {
            FileOutputStream("$propertiesFile.mine", true).use {
                it.write("POWERWASH=1\n".toByteArray())
            }
        }
        if (options.include_secondary) {
            FileOutputStream("$propertiesFile.mine", true).use {
                it.write("SWITCH_SLOT_ON_REBOOT=0\n".toByteArray())
            }
        }
    }
}
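The generate/sign pair reproduces the brillo_update_payload flow: build an unsigned payload, hash payload and metadata, sign both hashes, splice the signatures back in, and dump payload-properties.txt. A minimal end-to-end sketch, assuming brillo_update_payload and openssl on PATH plus a hypothetical target_files.zip and timestamp:

import cc.cfig.droid.ota.OtaOptions
import cc.cfig.droid.ota.PayloadGenerator
import cc.cfig.droid.ota.PayloadSigner

fun main() {
    val gen = PayloadGenerator()
    // hypothetical arguments: a UTC build timestamp and a target-files package
    gen.generate(maxTs = "1640995200", targetFile = "target_files.zip", payload = gen.workDir + "/payload.bin")
    gen.sign(PayloadSigner(), OtaOptions(wipe_user_data = false))
}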
@@ -0,0 +1,62 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cc.cfig.droid.ota

import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor
import org.slf4j.LoggerFactory
import java.io.File

class PayloadSigner {
    private val log = LoggerFactory.getLogger(PayloadSigner::class.java)
    var keySize = 0
    private val workDir = "build/staging_ota"
    val signingKey = "signing.key"
    val privKey = "aosp/security/testkey.pk8"
    val modulusFile = "$workDir/modulus.file"

    init {
        CommandLine.parse("openssl pkcs8 -in $privKey -inform DER -nocrypt -out $workDir/$signingKey").let { cmd ->
            log.info(cmd.toString())
            DefaultExecutor().execute(cmd)
        }

        CommandLine.parse("openssl rsa -inform PEM -in $workDir/$signingKey -modulus -noout -out $modulusFile").let { cmd ->
            log.info(cmd.toString())
            DefaultExecutor().execute(cmd)
        }

        val modulusString = File(modulusFile).readText()
        log.info(modulusString)
        val MODULUS_PREFIX = "Modulus="
        if (!modulusString.startsWith(MODULUS_PREFIX)) {
            throw IllegalArgumentException("Invalid modulus string")
        }
        keySize = modulusString.substring(MODULUS_PREFIX.length).length / 2
        log.info("key size = $keySize bytes")
        if (keySize !in listOf(256, 512)) {
            throw IllegalArgumentException("Unsupported key size")
        }
    }

    fun sign(inFile: String, outFile: String) {
        CommandLine.parse("openssl pkeyutl -sign").let { cmd ->
            cmd.addArguments("-inkey $workDir/$signingKey -pkeyopt digest:sha256")
            cmd.addArguments("-in $inFile")
            cmd.addArguments("-out $outFile")
            log.info(cmd.toString())
            DefaultExecutor().execute(cmd)
        }
    }
}
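The signature size is recovered from the openssl modulus dump: every modulus byte prints as two hex digits, so the key size is half the hex length, e.g. RSA-2048 yields a 512-character modulus and keySize = 256. A worked sketch with a hypothetical modulus line:

fun main() {
    val modulusLine = "Modulus=" + "AB".repeat(256)   // hypothetical 2048-bit RSA modulus
    val keySize = modulusLine.removePrefix("Modulus=").length / 2
    check(keySize == 256)                             // 256-byte signatures, as the signing flow expects
}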
@@ -0,0 +1,52 @@
// Copyright 2022 yuyezhong@gmail.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cfig.packable

import cc.cfig.droid.ota.Payload
import org.slf4j.LoggerFactory

class PayloadBinParser : IPackable {
    override val loopNo: Int = 0
    private val log = LoggerFactory.getLogger(PayloadBinParser::class.java)

    override fun capabilities(): List<String> {
        return listOf("^payload\\.bin$")
    }

    override fun unpack(fileName: String) {
        cleanUp()
        Payload.parse(fileName).let { pl ->
            pl.setUp()
            pl.printInfo()
            pl.unpack()
        }
    }

    override fun pack(fileName: String) {
    }

    override fun `@verify`(fileName: String) {
        super.`@verify`(fileName)
    }

    override fun pull(fileName: String, deviceName: String) {
        super.pull(fileName, deviceName)
    }

    fun clean(fileName: String) {
    }

    override fun flash(fileName: String, deviceName: String) {
    }
}