mass update

dependent modules:
 aosp update
 kotlin: 1.4.0 -> 1.4.10
 unify helpers between projects
 commons.compress 1.20
bbootimage:
 fix page size error in vendor_boot
 suppress compile warnings
lz4:
 change lz4 compress parameter if lz4 is not up-to-date
 use external lz4 to decompress kernel for Ubuntu <=18.04
cpio:
 use java cpio to decompress ramdisk
 add skipcpio to read concatenated cpio
for/win
cfig 5 years ago
parent fed5daeeba
commit ddd1ad5ef2
No known key found for this signature in database
GPG Key ID: B104C307F0FDABB7

@ -1,9 +1,9 @@
# Android_boot_image_editor # Android_boot_image_editor
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/fa6a49bb22b84307b12e7a8878867c1e)](https://app.codacy.com/manual/cfig97/Android_boot_image_editor?utm_source=github.com&utm_medium=referral&utm_content=cfig/Android_boot_image_editor&utm_campaign=Badge_Grade_Dashboard)
[![Build Status](https://travis-ci.org/cfig/Android_boot_image_editor.svg?branch=master)](https://travis-ci.org/cfig/Android_boot_image_editor) [![Build Status](https://travis-ci.org/cfig/Android_boot_image_editor.svg?branch=master)](https://travis-ci.org/cfig/Android_boot_image_editor)
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/fa6a49bb22b84307b12e7a8878867c1e)](https://app.codacy.com/manual/cfig97/Android_boot_image_editor?utm_source=github.com&utm_medium=referral&utm_content=cfig/Android_boot_image_editor&utm_campaign=Badge_Grade_Dashboard)
[![License](http://img.shields.io/:license-apache-blue.svg?style=flat-square)](http://www.apache.org/licenses/LICENSE-2.0.html) [![License](http://img.shields.io/:license-apache-blue.svg?style=flat-square)](http://www.apache.org/licenses/LICENSE-2.0.html)
A tool for reverse engineering Android ROM images. (working on ![Linux](doc/linux24.png) and ![Mac](doc/apple24.png)) A tool for reverse engineering Android ROM images. (working on ![Linux](doc/linux24.png)(Ubuntu 18.04+) and ![Mac](doc/apple24.png))
## Getting Started ## Getting Started

@ -2385,12 +2385,13 @@ class Avb(object):
misc_image.seek(self.AB_MISC_METADATA_OFFSET) misc_image.seek(self.AB_MISC_METADATA_OFFSET)
misc_image.write(ab_data) misc_image.write(ab_data)
def info_image(self, image_filename, output): def info_image(self, image_filename, output, atx):
"""Implements the 'info_image' command. """Implements the 'info_image' command.
Arguments: Arguments:
image_filename: Image file to get information from (file object). image_filename: Image file to get information from (file object).
output: Output file to write human-readable information to (file object). output: Output file to write human-readable information to (file object).
atx: If True, show information about Android Things eXtension (ATX).
""" """
image = ImageHandler(image_filename, read_only=True) image = ImageHandler(image_filename, read_only=True)
o = output o = output
@ -2443,6 +2444,31 @@ class Avb(object):
if num_printed == 0: if num_printed == 0:
o.write(' (none)\n') o.write(' (none)\n')
if atx and header.public_key_metadata_size:
o.write('Android Things eXtension (ATX):\n')
key_metadata_offset = (header.SIZE +
header.authentication_data_block_size +
header.public_key_metadata_offset)
key_metadata_blob = vbmeta_blob[key_metadata_offset: key_metadata_offset
+ header.public_key_metadata_size]
version, pik, psk = struct.unpack('<I1620s1620s', key_metadata_blob)
o.write(' Metadata version: {}\n'.format(version))
def print_atx_certificate(cert):
version, public_key, subject, usage, key_version, _signature = \
struct.unpack('<I1032s32s32sQ512s', cert)
o.write(' Version: {}\n'.format(version))
o.write(' Public key (sha1): {}\n'.format(
hashlib.sha1(public_key).hexdigest()))
o.write(' Subject: {}\n'.format(subject.hex()))
o.write(' Usage: {}\n'.format(usage.hex()))
o.write(' Key version: {}\n'.format(key_version))
o.write(' Product Intermediate Key:\n')
print_atx_certificate(pik)
o.write(' Product Signing Key:\n')
print_atx_certificate(psk)
def verify_image(self, image_filename, key_path, expected_chain_partitions, def verify_image(self, image_filename, key_path, expected_chain_partitions,
follow_chain_partitions, accept_zeroed_hashtree): follow_chain_partitions, accept_zeroed_hashtree):
"""Implements the 'verify_image' command. """Implements the 'verify_image' command.
@ -4428,6 +4454,10 @@ class AvbTool(object):
help='Write info to file', help='Write info to file',
type=argparse.FileType('wt'), type=argparse.FileType('wt'),
default=sys.stdout) default=sys.stdout)
sub_parser.add_argument('--atx',
help=('Show information about Android Things '
'eXtension (ATX).'),
action='store_true')
sub_parser.set_defaults(func=self.info_image) sub_parser.set_defaults(func=self.info_image)
sub_parser = subparsers.add_parser( sub_parser = subparsers.add_parser(
@ -4765,7 +4795,7 @@ class AvbTool(object):
def info_image(self, args): def info_image(self, args):
"""Implements the 'info_image' sub-command.""" """Implements the 'info_image' sub-command."""
self.avb.info_image(args.image.name, args.output) self.avb.info_image(args.image.name, args.output, args.atx)
def verify_image(self, args): def verify_image(self, args):
"""Implements the 'verify_image' sub-command.""" """Implements the 'verify_image' sub-command."""

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7 #!/usr/bin/env python
# #
# Copyright (C) 2018 The Android Open Source Project # Copyright (C) 2018 The Android Open Source Project
# #
@ -100,25 +100,19 @@ def dump_configs(input_bytes):
return o return o
def try_decompress_bytes(cmd, input_bytes): def try_decompress(cmd, search_bytes, input_bytes):
idx = input_bytes.find(search_bytes)
if idx < 0:
return None
idx = 0
sp = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, sp = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE) stderr=subprocess.PIPE)
o, _ = sp.communicate(input=input_bytes) o, _ = sp.communicate(input=input_bytes[idx:])
# ignore errors # ignore errors
return o return o
def try_decompress(cmd, search_bytes, input_bytes):
idx = 0
while True:
idx = input_bytes.find(search_bytes, idx)
if idx < 0:
raise StopIteration()
yield try_decompress_bytes(cmd, input_bytes[idx:])
idx += 1
def decompress_dump(func, input_bytes): def decompress_dump(func, input_bytes):
""" """
Run func(input_bytes) first; and if that fails (returns value evaluates to Run func(input_bytes) first; and if that fails (returns value evaluates to
@ -128,15 +122,15 @@ def decompress_dump(func, input_bytes):
if o: if o:
return o return o
for cmd, search_bytes in COMPRESSION_ALGO: for cmd, search_bytes in COMPRESSION_ALGO:
for decompressed in try_decompress(cmd, search_bytes, input_bytes): decompressed = try_decompress(cmd, search_bytes, input_bytes)
if decompressed: if decompressed:
o = decompress_dump(func, decompressed) o = func(decompressed)
if o: if o:
return o return o
# Force decompress the whole file even if header doesn't match # Force decompress the whole file even if header doesn't match
decompressed = try_decompress_bytes(cmd, input_bytes) decompressed = try_decompress(cmd, b"", input_bytes)
if decompressed: if decompressed:
o = decompress_dump(func, decompressed) o = func(decompressed)
if o: if o:
return o return o

@ -0,0 +1 @@
https://github.com/dracutdevs/dracut/tree/master/skipcpio

@ -0,0 +1,122 @@
/* dracut-install.c -- install files and executables
Copyright (C) 2012 Harald Hoyer
Copyright (C) 2012 Red Hat, Inc. All rights reserved.
This program is free software: you can redistribute it and/or modify
under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; If not, see <http://www.gnu.org/licenses/>.
*/
#define PROGRAM_VERSION_STRING "1"
#ifndef _GNU_SOURCE
#define _GNU_SOURCE
#endif
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#define CPIO_END "TRAILER!!!"
#define CPIO_ENDLEN (sizeof(CPIO_END)-1)
static char buf[CPIO_ENDLEN * 2 + 1];
/*
 * Print a file to stdout, skipping a leading cpio archive if present.
 *
 * If the file begins with the ASCII cpio magic "070701", scan forward for
 * the trailer record ("TRAILER!!!"), skip the zero padding that follows it,
 * and copy only the remainder of the file to stdout.  If the file is not a
 * cpio archive, or no trailer is found, the whole file is copied verbatim.
 *
 * Exit status: EXIT_SUCCESS on completion; exit(1) on usage/open/read error.
 */
int main(int argc, char **argv)
{
    FILE *f;
    size_t s;

    /* Exactly one argument: the file to read. */
    if (argc != 2) {
        fprintf(stderr, "Usage: %s <file>\n", argv[0]);
        exit(1);
    }
    f = fopen(argv[1], "r");
    if (f == NULL) {
        fprintf(stderr, "Cannot open file '%s'\n", argv[1]);
        exit(1);
    }
    /* Probe the first 6 bytes for the cpio magic.
     * NOTE(review): s is size_t (unsigned), so "s <= 0" only ever means
     * "s == 0" here and in the loops below — confirm intent upstream. */
    s = fread(buf, 6, 1, f);
    if (s <= 0) {
        fprintf(stderr, "Read error from file '%s'\n", argv[1]);
        fclose(f);
        exit(1);
    }
    fseek(f, 0, SEEK_SET);
    /* check, if this is a cpio archive */
    if (buf[0] == '0' && buf[1] == '7' && buf[2] == '0' && buf[3] == '7' && buf[4] == '0' && buf[5] == '1') {
        long pos = 0;
        /* Search for CPIO_END */
        do {
            char *h;
            fseek(f, pos, SEEK_SET);
            /* Keep the buffer NUL-terminated so strstr() cannot overrun. */
            buf[sizeof(buf) - 1] = 0;
            /* Read two trailer-lengths per step; successive windows overlap
             * by CPIO_ENDLEN so a trailer straddling a window edge is found. */
            s = fread(buf, CPIO_ENDLEN, 2, f);
            if (s <= 0)
                break;
            h = strstr(buf, CPIO_END);
            if (h) {
                /* Reposition just past the trailer string. */
                pos = (h - buf) + pos + CPIO_ENDLEN;
                fseek(f, pos, SEEK_SET);
                break;
            }
            pos += CPIO_ENDLEN;
        } while (!feof(f));
        if (feof(f)) {
            /* CPIO_END not found, just cat the whole file */
            fseek(f, 0, SEEK_SET);
        } else {
            /* skip zeros */
            while (!feof(f)) {
                size_t i;
                buf[sizeof(buf) - 1] = 0;
                s = fread(buf, 1, sizeof(buf) - 1, f);
                if (s <= 0)
                    break;
                /* Advance past NUL padding bytes. */
                for (i = 0; (i < s) && (buf[i] == 0); i++) ;
                /* NOTE(review): when i == s (entire read was zeros) this tests
                 * buf[s], which may hold stale data from a previous read when
                 * s < sizeof(buf) - 1 — confirm against upstream dracut. */
                if (buf[i] != 0) {
                    /* First non-padding byte: the payload starts here. */
                    pos += i;
                    fseek(f, pos, SEEK_SET);
                    break;
                }
                pos += s;
            }
        }
    }
    /* cat out the rest */
    while (!feof(f)) {
        s = fread(buf, 1, sizeof(buf), f);
        if (s <= 0)
            break;
        s = fwrite(buf, 1, s, stdout);
        if (s <= 0)
            break;
    }
    fclose(f);
    return EXIT_SUCCESS;
}

@ -1,4 +1,4 @@
#! /usr/bin/env python2.7 #! /usr/bin/env python
# Copyright 2017, The Android Open Source Project # Copyright 2017, The Android Open Source Project
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");

@ -1,8 +1,8 @@
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins { plugins {
kotlin("jvm") version "1.4.0" kotlin("jvm") version "1.4.10"
kotlin("plugin.serialization") version "1.4.0" kotlin("plugin.serialization") version "1.4.10"
application application
} }
@ -16,14 +16,13 @@ dependencies {
implementation("org.slf4j:slf4j-simple:1.7.30") implementation("org.slf4j:slf4j-simple:1.7.30")
implementation("org.slf4j:slf4j-api:1.7.30") implementation("org.slf4j:slf4j-api:1.7.30")
implementation("com.fasterxml.jackson.core:jackson-annotations:2.11.2") implementation("com.fasterxml.jackson.core:jackson-annotations:2.11.3")
implementation("com.fasterxml.jackson.core:jackson-databind:2.11.2") implementation("com.fasterxml.jackson.core:jackson-databind:2.11.3")
implementation("org.jetbrains.kotlinx:kotlinx-serialization-core:1.0.0-RC")
implementation("com.google.guava:guava:18.0") implementation("com.google.guava:guava:18.0")
implementation("org.apache.commons:commons-exec:1.3") implementation("org.apache.commons:commons-exec:1.3")
implementation("org.apache.commons:commons-compress:1.16.1") implementation("org.apache.commons:commons-compress:1.20")
implementation("org.tukaani:xz:1.8") implementation("org.tukaani:xz:1.8")
implementation("commons-codec:commons-codec:1.11") implementation("commons-codec:commons-codec:1.15")
implementation("junit:junit:4.12") implementation("junit:junit:4.12")
implementation("org.bouncycastle:bcprov-jdk15on:1.57") implementation("org.bouncycastle:bcprov-jdk15on:1.57")
implementation("de.vandermeer:asciitable:0.3.2") implementation("de.vandermeer:asciitable:0.3.2")

@ -7,7 +7,8 @@ import avb.blob.AuxBlob
import avb.blob.Footer import avb.blob.Footer
import avb.blob.Header import avb.blob.Header
import avb.desc.* import avb.desc.*
import cfig.Helper.Companion.paddingWith import cfig.helper.Helper
import cfig.helper.Helper.Companion.paddingWith
import cfig.io.Struct3 import cfig.io.Struct3
import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.commons.codec.binary.Hex import org.apache.commons.codec.binary.Hex
@ -26,6 +27,7 @@ class Avb {
private val MAX_VBMETA_SIZE = 64 * 1024 private val MAX_VBMETA_SIZE = 64 * 1024
private val MAX_FOOTER_SIZE = 4096 private val MAX_FOOTER_SIZE = 4096
private val BLOCK_SIZE = 4096 private val BLOCK_SIZE = 4096
private val DEBUG = false
//migrated from: avbtool::Avb::addHashFooter //migrated from: avbtool::Avb::addHashFooter
fun addHashFooter(image_file: String, fun addHashFooter(image_file: String,
@ -59,7 +61,9 @@ class Avb {
val vbmetaBlob = packVbMeta(newAvbInfo) val vbmetaBlob = packVbMeta(newAvbInfo)
log.debug("vbmeta_blob: " + Helper.toHexString(vbmetaBlob)) log.debug("vbmeta_blob: " + Helper.toHexString(vbmetaBlob))
if (DEBUG) {
Helper.dumpToFile("hashDescriptor.vbmeta.blob", vbmetaBlob) Helper.dumpToFile("hashDescriptor.vbmeta.blob", vbmetaBlob)
}
// image + padding // image + padding
val imgPaddingNeeded = Helper.round_to_multiple(newImageSize, BLOCK_SIZE) - newImageSize val imgPaddingNeeded = Helper.round_to_multiple(newImageSize, BLOCK_SIZE) - newImageSize
@ -196,7 +200,7 @@ class Avb {
} }
// aux - desc // aux - desc
var descriptors = listOf<Any>() var descriptors: List<Any>
if (vbMetaHeader.descriptors_size > 0) { if (vbMetaHeader.descriptors_size > 0) {
FileInputStream(image_file).use { fis -> FileInputStream(image_file).use { fis ->
fis.skip(descStartOffset) fis.skip(descStartOffset)

@ -1,7 +1,7 @@
package avb.blob package avb.blob
import avb.alg.Algorithms import avb.alg.Algorithms
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
import java.security.MessageDigest import java.security.MessageDigest

@ -2,9 +2,8 @@ package avb.blob
import avb.alg.Algorithm import avb.alg.Algorithm
import avb.desc.* import avb.desc.*
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.annotation.JsonIgnoreProperties import com.fasterxml.jackson.annotation.JsonIgnoreProperties
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
import java.nio.file.Files import java.nio.file.Files

@ -1,6 +1,6 @@
package avb.desc package avb.desc
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import java.io.InputStream import java.io.InputStream
import java.security.MessageDigest import java.security.MessageDigest

@ -1,7 +1,7 @@
package avb.desc package avb.desc
import avb.blob.Header import avb.blob.Header
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import org.apache.commons.codec.binary.Hex import org.apache.commons.codec.binary.Hex
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
@ -76,6 +76,7 @@ class HashDescriptor(var flags: Int = 0,
hasher.update(this.salt) hasher.update(this.salt)
hasher.update(File(image_file).readBytes()) hasher.update(File(image_file).readBytes())
val digest = hasher.digest() val digest = hasher.digest()
log.info("digest:" + Helper.toHexString(digest))
} }
fun update(image_file: String, use_persistent_digest: Boolean = false): HashDescriptor { fun update(image_file: String, use_persistent_digest: Boolean = false): HashDescriptor {

@ -1,7 +1,7 @@
package avb.desc package avb.desc
import avb.blob.Header import avb.blob.Header
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import java.io.InputStream import java.io.InputStream
import java.util.* import java.util.*

@ -1,6 +1,6 @@
package avb.desc package avb.desc
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import java.io.InputStream import java.io.InputStream

@ -1,6 +1,6 @@
package avb.desc package avb.desc
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import java.io.InputStream import java.io.InputStream

@ -1,6 +1,6 @@
package avb.desc package avb.desc
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import org.apache.commons.codec.binary.Hex import org.apache.commons.codec.binary.Hex
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory

@ -1,8 +1,10 @@
package cfig.bootimg package cfig.bootimg
import cfig.EnvironmentVerifier import cfig.EnvironmentVerifier
import cfig.Helper
import cfig.dtb_util.DTC import cfig.dtb_util.DTC
import cfig.helper.Helper
import cfig.helper.Helper.Companion.check_call
import cfig.helper.ZipHelper
import cfig.io.Struct3.InputStreamExt.Companion.getInt import cfig.io.Struct3.InputStreamExt.Companion.getInt
import cfig.kernel_util.KernelExtractor import cfig.kernel_util.KernelExtractor
import org.apache.commons.exec.CommandLine import org.apache.commons.exec.CommandLine
@ -110,14 +112,14 @@ class Common {
var ret = "gz" var ret = "gz"
Helper.extractFile(s.srcFile, s.dumpFile, s.offset.toLong(), s.length) Helper.extractFile(s.srcFile, s.dumpFile, s.offset.toLong(), s.length)
when { when {
Helper.isGZ(s.dumpFile) -> { ZipHelper.isGZ(s.dumpFile) -> {
File(s.dumpFile).renameTo(File(s.dumpFile + ".gz")) File(s.dumpFile).renameTo(File(s.dumpFile + ".gz"))
Helper.unGnuzipFile(s.dumpFile + ".gz", s.dumpFile) ZipHelper.unGnuzipFile(s.dumpFile + ".gz", s.dumpFile)
} }
Helper.isLZ4(s.dumpFile) -> { ZipHelper.isLZ4(s.dumpFile) -> {
log.info("ramdisk is compressed lz4") log.info("ramdisk is compressed lz4")
File(s.dumpFile).renameTo(File(s.dumpFile + ".lz4")) File(s.dumpFile).renameTo(File(s.dumpFile + ".lz4"))
Helper.decompressLZ4(s.dumpFile + ".lz4", s.dumpFile) ZipHelper.decompressLZ4Ext(s.dumpFile + ".lz4", s.dumpFile)
ret = "lz4" ret = "lz4"
} }
else -> { else -> {
@ -201,10 +203,10 @@ class Common {
} }
when { when {
ramdiskGz.endsWith(".gz") -> { ramdiskGz.endsWith(".gz") -> {
Helper.gnuZipFile2(ramdiskGz, ByteArrayInputStream(outputStream.toByteArray())) ZipHelper.gnuZipFile2(ramdiskGz, ByteArrayInputStream(outputStream.toByteArray()))
} }
ramdiskGz.endsWith(".lz4") -> { ramdiskGz.endsWith(".lz4") -> {
Helper.compressLZ4(ramdiskGz, ByteArrayInputStream(outputStream.toByteArray())) ZipHelper.compressLZ4(ramdiskGz, ByteArrayInputStream(outputStream.toByteArray()))
} }
else -> { else -> {
throw IllegalArgumentException("$ramdiskGz is not supported") throw IllegalArgumentException("$ramdiskGz is not supported")
@ -249,7 +251,7 @@ class Common {
} }
} }
fun unpackRamdisk(ramdisk: String, root: String) { private fun unpackRamdisk(ramdisk: String, root: String) {
val rootFile = File(root).apply { val rootFile = File(root).apply {
if (exists()) { if (exists()) {
log.info("Cleaning [$root] before ramdisk unpacking") log.info("Cleaning [$root] before ramdisk unpacking")
@ -258,11 +260,9 @@ class Common {
mkdirs() mkdirs()
} }
DefaultExecutor().let { exe -> //("cpio -idmv -F " + File(ramdisk).canonicalPath).check_call(rootFile.canonicalPath)
exe.workingDirectory = rootFile ZipHelper.decompressCPIO(File(ramdisk).canonicalPath, rootFile.canonicalPath, File(ramdisk).canonicalPath + ".filelist")
exe.execute(CommandLine.parse("cpio -i -m -F " + File(ramdisk).canonicalPath)) log.info(" ramdisk extracted : $ramdisk -> $rootFile")
log.info(" ramdisk extracted : $ramdisk -> ${rootFile}")
}
} }
fun probeHeaderVersion(fileName: String): Int { fun probeHeaderVersion(fileName: String): Int {

@ -4,7 +4,7 @@ import avb.AVBInfo
import avb.alg.Algorithms import avb.alg.Algorithms
import cfig.Avb import cfig.Avb
import cfig.Avb.Companion.getJsonFileName import cfig.Avb.Companion.getJsonFileName
import cfig.Helper import cfig.helper.Helper
import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.commons.exec.CommandLine import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor import org.apache.commons.exec.DefaultExecutor

@ -1,6 +1,6 @@
package cfig.bootimg.v2 package cfig.bootimg.v2
import cfig.Helper import cfig.helper.Helper
import cfig.bootimg.Common import cfig.bootimg.Common
import cfig.io.Struct3 import cfig.io.Struct3
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory

@ -1,12 +1,11 @@
package cfig.bootimg.v2 package cfig.bootimg.v2
import cfig.Avb import cfig.Avb
import cfig.Helper import cfig.helper.Helper
import cfig.bootimg.Common import cfig.bootimg.Common
import cfig.bootimg.Common.Companion.deleleIfExists import cfig.bootimg.Common.Companion.deleleIfExists
import cfig.bootimg.Common.Slice import cfig.bootimg.Common.Slice
import cfig.bootimg.Signer import cfig.bootimg.Signer
import cfig.bootimg.v3.BootV3
import cfig.packable.VBMetaParser import cfig.packable.VBMetaParser
import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.ObjectMapper
import de.vandermeer.asciitable.AsciiTable import de.vandermeer.asciitable.AsciiTable

@ -1,7 +1,7 @@
package cfig.bootimg.v3 package cfig.bootimg.v3
import cfig.Avb import cfig.Avb
import cfig.Helper import cfig.helper.Helper
import cfig.bootimg.Common.Companion.deleleIfExists import cfig.bootimg.Common.Companion.deleleIfExists
import cfig.bootimg.Common.Companion.getPaddingSize import cfig.bootimg.Common.Companion.getPaddingSize
import cfig.bootimg.Signer import cfig.bootimg.Signer

@ -1,9 +1,9 @@
package cfig.bootimg.v3 package cfig.bootimg.v3
import cfig.Avb import cfig.Avb
import cfig.Helper
import cfig.bootimg.Common.Companion.deleleIfExists import cfig.bootimg.Common.Companion.deleleIfExists
import cfig.bootimg.Signer import cfig.bootimg.Signer
import cfig.helper.Helper
import cfig.packable.VBMetaParser import cfig.packable.VBMetaParser
import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.ObjectMapper
import de.vandermeer.asciitable.AsciiTable import de.vandermeer.asciitable.AsciiTable
@ -101,10 +101,13 @@ data class VendorBoot(var info: MiscInfo = MiscInfo(),
} }
//data //data
log.info("Writing data ...") log.info("Writing data ...")
val bf = ByteBuffer.allocate(1024 * 1024 * 128)//assume total SIZE small than 64MB //assume total SIZE is smaller than 64MB
bf.order(ByteOrder.LITTLE_ENDIAN) val bf = ByteBuffer.allocate(1024 * 1024 * 128).let {
C.writePaddedFile(bf, this.ramdisk.file, this.info.pageSize) it.order(ByteOrder.LITTLE_ENDIAN)
C.writePaddedFile(bf, this.dtb.file, this.info.pageSize) C.writePaddedFile(it, this.ramdisk.file, this.info.pageSize)
C.writePaddedFile(it, this.dtb.file, this.info.pageSize)
it
}
//write //write
FileOutputStream("${this.info.output}.clear", true).use { fos -> FileOutputStream("${this.info.output}.clear", true).use { fos ->
fos.write(bf.array(), 0, bf.position()) fos.write(bf.array(), 0, bf.position())
@ -210,8 +213,8 @@ data class VendorBoot(var info: MiscInfo = MiscInfo(),
private fun toCommandLine(): CommandLine { private fun toCommandLine(): CommandLine {
return CommandLine(Helper.prop("mkbootimg")) return CommandLine(Helper.prop("mkbootimg"))
.addArgument("--vendor_ramdisk").addArgument(this.ramdisk.file) .addArgument("--vendor_ramdisk").addArgument(ramdisk.file)
.addArgument("--dtb").addArgument(this.dtb.file) .addArgument("--dtb").addArgument(dtb.file)
.addArgument("--vendor_cmdline").addArgument(info.cmdline, false) .addArgument("--vendor_cmdline").addArgument(info.cmdline, false)
.addArgument("--header_version").addArgument(info.headerVersion.toString()) .addArgument("--header_version").addArgument(info.headerVersion.toString())
.addArgument("--base").addArgument("0") .addArgument("--base").addArgument("0")
@ -219,6 +222,7 @@ data class VendorBoot(var info: MiscInfo = MiscInfo(),
.addArgument("--kernel_offset").addArgument(info.kernelLoadAddr.toString()) .addArgument("--kernel_offset").addArgument(info.kernelLoadAddr.toString())
.addArgument("--ramdisk_offset").addArgument(ramdisk.loadAddr.toString()) .addArgument("--ramdisk_offset").addArgument(ramdisk.loadAddr.toString())
.addArgument("--dtb_offset").addArgument(dtb.loadAddr.toString()) .addArgument("--dtb_offset").addArgument(dtb.loadAddr.toString())
.addArgument("--pagesize").addArgument(info.pageSize.toString())
.addArgument("--vendor_boot") .addArgument("--vendor_boot")
} }
} }

@ -1,5 +1,6 @@
package cfig package cfig.helper
import cfig.KeyUtil
import cfig.io.Struct3 import cfig.io.Struct3
import com.google.common.math.BigIntegerMath import com.google.common.math.BigIntegerMath
import org.apache.commons.codec.binary.Hex import org.apache.commons.codec.binary.Hex
@ -14,8 +15,12 @@ import org.slf4j.LoggerFactory
import java.io.* import java.io.*
import java.math.BigInteger import java.math.BigInteger
import java.math.RoundingMode import java.math.RoundingMode
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.file.Files import java.nio.file.Files
import java.nio.file.Paths import java.nio.file.Paths
import java.nio.file.attribute.PosixFilePermission
import java.security.MessageDigest
import java.util.* import java.util.*
import java.util.zip.GZIPInputStream import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream import java.util.zip.GZIPOutputStream
@ -91,138 +96,6 @@ class Helper {
return data return data
} }
fun isGZ(compressedFile: String): Boolean {
return try {
GZIPInputStream(FileInputStream(compressedFile)).use { }
true
} catch (e: ZipException) {
false
}
}
fun isXZ(compressedFile: String): Boolean {
return try {
XZCompressorInputStream(FileInputStream(compressedFile)).use { }
true
} catch (e: ZipException) {
false
}
}
fun isLZ4(compressedFile: String): Boolean {
return try {
"lz4 -t $compressedFile".check_call()
true
} catch (e: Exception) {
false
}
}
fun decompressLZ4(lz4File: String, outFile: String) {
"lz4 -d -fv $lz4File $outFile".check_call()
}
fun compressLZ4(lz4File: String, inputStream: InputStream) {
val fos = FileOutputStream(File(lz4File))
val baosE = ByteArrayOutputStream()
DefaultExecutor().let { exec ->
exec.streamHandler = PumpStreamHandler(fos, baosE, inputStream)
val cmd = CommandLine.parse("lz4 -l -12 --favor-decSpeed")
log.info(cmd.toString())
exec.execute(cmd)
}
baosE.toByteArray().let {
if (it.isNotEmpty()) {
log.warn(String(it))
}
}
fos.close()
}
@Throws(IOException::class)
fun gnuZipFile(compressedFile: String, decompressedFile: String) {
val buffer = ByteArray(1024)
FileOutputStream(compressedFile).use { fos ->
GZIPOutputStream(fos).use { gos ->
FileInputStream(decompressedFile).use { fis ->
var bytesRead: Int
while (true) {
bytesRead = fis.read(buffer)
if (bytesRead <= 0) break
gos.write(buffer, 0, bytesRead)
}
gos.finish()
log.info("gzip done: $decompressedFile -> $compressedFile")
}//file-input-stream
}//gzip-output-stream
}//file-output-stream
}
@Throws(IOException::class)
fun unGnuzipFile(compressedFile: String, decompressedFile: String) {
val buffer = ByteArray(1024)
FileInputStream(compressedFile).use { fileIn ->
//src
GZIPInputStream(fileIn).use { gZIPInputStream ->
//src
FileOutputStream(decompressedFile).use { fileOutputStream ->
var bytesRead: Int
while (true) {
bytesRead = gZIPInputStream.read(buffer)
if (bytesRead <= 0) break
fileOutputStream.write(buffer, 0, bytesRead)
}
log.info("decompress(gz) done: $compressedFile -> $decompressedFile")
}
}
}
}
/*
caution: about gzip header - OS (Operating System)
According to https://docs.oracle.com/javase/8/docs/api/java/util/zip/package-summary.html and
GZIP spec RFC-1952(http://www.ietf.org/rfc/rfc1952.txt), gzip files created from java.util.zip.GZIPOutputStream
will mark the OS field with
0 - FAT filesystem (MS-DOS, OS/2, NT/Win32)
But default image built from Android source code has the OS field:
3 - Unix
This MAY not be a problem, at least we didn't find it till now.
*/
@Throws(IOException::class)
@Deprecated("this function misses features")
fun gnuZipFile(compressedFile: String, fis: InputStream) {
val buffer = ByteArray(1024)
FileOutputStream(compressedFile).use { fos ->
GZIPOutputStream(fos).use { gos ->
var bytesRead: Int
while (true) {
bytesRead = fis.read(buffer)
if (bytesRead <= 0) break
gos.write(buffer, 0, bytesRead)
}
log.info("compress(gz) done: $compressedFile")
}
}
}
fun gnuZipFile2(compressedFile: String, fis: InputStream) {
val buffer = ByteArray(1024)
val p = GzipParameters()
p.operatingSystem = 3
FileOutputStream(compressedFile).use { fos ->
GzipCompressorOutputStream(fos, p).use { gos ->
var bytesRead: Int
while (true) {
bytesRead = fis.read(buffer)
if (bytesRead <= 0) break
gos.write(buffer, 0, bytesRead)
}
log.info("compress(gz) done: $compressedFile")
}
}
}
fun extractFile(fileName: String, outImgName: String, offset: Long, length: Int) { fun extractFile(fileName: String, outImgName: String, offset: Long, length: Int) {
if (0 == length) { if (0 == length) {
return return
@ -354,12 +227,23 @@ class Helper {
log.info("Dumping data to $dumpFile done") log.info("Dumping data to $dumpFile done")
} }
fun String.check_call(): Boolean { fun String.deleteIfExists() {
if (File(this).exists()) {
log.info("deleting $this")
File(this).delete()
}
}
fun String.check_call(inWorkdir: String? = null): Boolean {
val ret: Boolean val ret: Boolean
try { try {
val cmd = CommandLine.parse(this) val cmd = CommandLine.parse(this)
log.info(cmd.toString()) log.run {
DefaultExecutor().execute(cmd) info("CMD: $cmd, workDir: $inWorkdir")
}
val exec = DefaultExecutor()
inWorkdir?.let { exec.workingDirectory = File(it) }
exec.execute(cmd)
ret = true ret = true
} catch (e: java.lang.IllegalArgumentException) { } catch (e: java.lang.IllegalArgumentException) {
log.error("$e: can not parse command: [$this]") log.error("$e: can not parse command: [$this]")
@ -381,10 +265,143 @@ class Helper {
it.streamHandler = PumpStreamHandler(outputStream) it.streamHandler = PumpStreamHandler(outputStream)
it.execute(CommandLine.parse(this)) it.execute(CommandLine.parse(this))
} }
log.info(outputStream.toString()) log.info(outputStream.toString().trim())
return outputStream.toString().trim() return outputStream.toString().trim()
} }
/**
 * Runs the receiver string as an external command, capturing its output.
 *
 * Unlike [powerRun], no exception is caught here: a failing command
 * propagates the executor's exception to the caller.
 *
 * @return array of two streams: index 0 = captured stdout, index 1 = captured stderr.
 */
fun String.pumpRun(): Array<ByteArrayOutputStream> {
    val outStream = ByteArrayOutputStream()
    val errStream = ByteArrayOutputStream()
    log.info("CMD: $this")
    DefaultExecutor().let {
        // Redirect the child process's stdout/stderr into the byte buffers.
        it.streamHandler = PumpStreamHandler(outStream, errStream)
        it.execute(CommandLine.parse(this))
    }
    log.info("stdout [$outStream]")
    log.info("stderr [$errStream]")
    return arrayOf(outStream, errStream)
}
/**
 * Executes a pre-parsed [CommandLine], optionally feeding [inputStream] to its stdin.
 *
 * Execution failures ([ExecuteException]) are logged and reported via the
 * returned success flag instead of being rethrown.
 *
 * @param cmdline the command to run.
 * @param inputStream data piped to the process's stdin, or null for none.
 * @return array of [success: Boolean, stdout: ByteArray, stderr: ByteArray].
 */
fun powerRun3(cmdline: CommandLine, inputStream: InputStream?): Array<Any> {
    var ret = true
    val outStream = ByteArrayOutputStream()
    val errStream = ByteArrayOutputStream()
    log.info("CMD: $cmdline")
    try {
        DefaultExecutor().let {
            // Wire stdin from the caller and capture stdout/stderr.
            it.streamHandler = PumpStreamHandler(outStream, errStream, inputStream)
            it.execute(cmdline)
        }
    } catch (e: ExecuteException) {
        log.error("fail to execute [${cmdline}]")
        ret = false
    }
    log.debug("stdout [$outStream]")
    log.debug("stderr [$errStream]")
    return arrayOf(ret, outStream.toByteArray(), errStream.toByteArray())
}
/**
 * Executes a command string, optionally feeding [inputStream] to its stdin.
 *
 * Same contract as [powerRun3] but parses the command from a string.
 * Execution failures ([ExecuteException]) are logged and reported via the
 * returned success flag instead of being rethrown.
 *
 * @param cmd the command line to parse and run.
 * @param inputStream data piped to the process's stdin, or null for none.
 * @return array of [success: Boolean, stdout: ByteArray, stderr: ByteArray].
 */
fun powerRun2(cmd: String, inputStream: InputStream?): Array<Any> {
    var ret = true
    val outStream = ByteArrayOutputStream()
    val errStream = ByteArrayOutputStream()
    log.info("CMD: $cmd")
    try {
        DefaultExecutor().let {
            // Wire stdin from the caller and capture stdout/stderr.
            it.streamHandler = PumpStreamHandler(outStream, errStream, inputStream)
            it.execute(CommandLine.parse(cmd))
        }
    } catch (e: ExecuteException) {
        log.error("fail to execute [$cmd]")
        ret = false
    }
    log.debug("stdout [$outStream]")
    log.debug("stderr [$errStream]")
    return arrayOf(ret, outStream.toByteArray(), errStream.toByteArray())
}
/**
 * Executes a command string, optionally feeding [inputStream] to its stdin.
 *
 * Best-effort variant: an [ExecuteException] is logged and otherwise
 * swallowed, so callers get whatever output was produced with no success
 * indication (use [powerRun2]/[powerRun3] when the flag is needed).
 *
 * @param cmd the command line to parse and run.
 * @param inputStream data piped to the process's stdin, or null for none.
 * @return array of [stdout: ByteArray, stderr: ByteArray].
 */
fun powerRun(cmd: String, inputStream: InputStream?): Array<ByteArray> {
    val outStream = ByteArrayOutputStream()
    val errStream = ByteArrayOutputStream()
    log.info("CMD: $cmd")
    try {
        DefaultExecutor().let {
            // Wire stdin from the caller and capture stdout/stderr.
            it.streamHandler = PumpStreamHandler(outStream, errStream, inputStream)
            it.execute(CommandLine.parse(cmd))
        }
    } catch (e: ExecuteException) {
        log.error("fail to execute [$cmd]")
    }
    log.debug("stdout [$outStream]")
    log.debug("stderr [$errStream]")
    return arrayOf(outStream.toByteArray(), errStream.toByteArray())
}
/**
 * Computes a single SHA-1 digest over a sequence of files and their sizes.
 *
 * For each non-null entry the digest absorbs the file's full contents
 * followed by its size as a 4-byte little-endian integer; a null entry
 * contributes a 4-byte little-endian zero instead.
 *
 * NOTE(review): the size is written via length().toInt(), which truncates
 * files larger than Int.MAX_VALUE bytes — confirm this matches the format
 * being hashed against.
 *
 * @param inFiles file paths to hash, in order; null entries are allowed.
 * @return the 20-byte SHA-1 digest.
 */
fun hashFileAndSize(vararg inFiles: String?): ByteArray {
    val md = MessageDigest.getInstance("SHA1")
    for (item in inFiles) {
        if (null == item) {
            // Null placeholder: contribute only a zero size field.
            md.update(ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN)
                    .putInt(0)
                    .array())
            log.debug("update null $item: " + toHexString((md.clone() as MessageDigest).digest()))
        } else {
            val currentFile = File(item)
            // Stream the file through the digest in 1 KiB chunks.
            FileInputStream(currentFile).use { iS ->
                var byteRead: Int
                val dataRead = ByteArray(1024)
                while (true) {
                    byteRead = iS.read(dataRead)
                    if (-1 == byteRead) {
                        break
                    }
                    md.update(dataRead, 0, byteRead)
                }
                log.debug("update file $item: " + toHexString((md.clone() as MessageDigest).digest()))
                // Append the file size as a 4-byte little-endian int.
                md.update(ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN)
                        .putInt(currentFile.length().toInt())
                        .array())
                log.debug("update SIZE $item: " + toHexString((md.clone() as MessageDigest).digest()))
            }
        }
    }
    return md.digest()
}
/**
 * Verifies [file1] and [file2] have identical content+size hashes.
 * @throws UnknownError when the hashes differ
 */
fun assertFileEquals(file1: String, file2: String) {
    val hash1 = hashFileAndSize(file1)
    val hash2 = hashFileAndSize(file2)
    log.info("$file1 hash ${toHexString(hash1)}, $file2 hash ${toHexString(hash2)}")
    if (!hash1.contentEquals(hash2)) {
        log.error("Hash verification failed")
        throw UnknownError("Do not know why hash verification fails, maybe a bug")
    }
    log.info("Hash verification passed: ${toHexString(hash1)}")
}
/**
 * Converts a numeric Unix file mode into the matching set of
 * [PosixFilePermission]s by scanning its nine permission bits.
 * @throws IOException if the mode carries setuid/setgid/sticky bits,
 *         which PosixFilePermission cannot represent
 */
fun modeToPermissions(inMode: Int): Set<PosixFilePermission> {
    val PERMISSIONS_MASK = 4095      // 07777
    // setgid/setuid/sticky are not supported.
    val MAX_SUPPORTED_MODE = 511     // 0777
    var bits = inMode and PERMISSIONS_MASK
    if (bits and MAX_SUPPORTED_MODE != bits) {
        throw IOException("Invalid mode: $bits")
    }
    val allPermissions = PosixFilePermission.values()
    val result: MutableSet<PosixFilePermission> = EnumSet.noneOf(PosixFilePermission::class.java)
    // lowest bit maps to the LAST enum constant (OTHERS_EXECUTE), and so on upward
    for (i in allPermissions.indices) {
        if (bits and 1 == 1) {
            result.add(allPermissions[allPermissions.size - 1 - i])
        }
        bits = bits shr 1
    }
    return result
}
private val log = LoggerFactory.getLogger("Helper") private val log = LoggerFactory.getLogger("Helper")
} }
} }

@ -0,0 +1,416 @@
package cfig.helper
import cfig.helper.Helper.Companion.check_call
import cfig.helper.Helper.Companion.check_output
import cfig.io.Struct3
import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream
import org.apache.commons.compress.archivers.zip.*
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream
import org.apache.commons.compress.compressors.gzip.GzipParameters
import org.apache.commons.compress.compressors.lz4.FramedLZ4CompressorInputStream
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream
import org.apache.commons.compress.utils.IOUtils
import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor
import org.apache.commons.exec.PumpStreamHandler
import org.slf4j.LoggerFactory
import java.io.*
import java.lang.IllegalArgumentException
import java.net.URI
import java.nio.file.FileSystems
import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream
import java.util.zip.ZipException
import kotlin.reflect.full.declaredFunctions
import kotlin.reflect.jvm.isAccessible
/**
 * Archive helpers used by the image (un)packing flows: zip read/write/edit,
 * gzip, external and framed LZ4, XZ detection, and cpio extraction.
 * All helpers live on the companion object.
 */
class ZipHelper {
    /** Describes one zip entry ([name], raw [data], compression [method]) for [zipEdit]. */
    class ZipEntryRecipe(val data: ByteArray, val name: String, val method: ZipMethod)

    companion object {
        private val log = LoggerFactory.getLogger("ZipHelper")

        /**
         * Extracts all regular-file entries of [fileName] into [outDir],
         * creating parent directories as needed.
         * @throws IllegalArgumentException on directory or symlink entries
         * NOTE(review): the ZipArchiveInputStream and each per-entry
         * FileOutputStream are never closed — consider use {} blocks.
         */
        fun unZipFile2(fileName: String, outDir: String) {
            val zis = ZipArchiveInputStream(BufferedInputStream(FileInputStream(fileName)))
            while (true) {
                val entry = zis.nextZipEntry ?: break
                val entryOut = File(outDir + "/" + entry.name)
                when {
                    entry.isDirectory -> {
                        log.error("Found dir : " + entry.name)
                        throw IllegalArgumentException("this should not happen")
                    }
                    entry.isUnixSymlink -> {
                        log.error("Found link: " + entry.name)
                        throw IllegalArgumentException("this should not happen")
                    }
                    else -> {
                        if (entry.name.contains("/")) {
                            log.debug("Createing dir: " + entryOut.parentFile.canonicalPath)
                            entryOut.parentFile.mkdirs()
                        }
                        log.info("Unzipping " + entry.name)
                        IOUtils.copy(zis, FileOutputStream(entryOut))
                    }
                }
            }
        }

        /*
            https://github.com/python/cpython/blob/3.8/Lib/zipfile.py
            The "local file header" structure, magic number, size, and indices
            (section V.A in the format document)
            structFileHeader = "<4s2B4HL2L2H"
            stringFileHeader = b"PK\003\004"
            sizeFileHeader = struct.calcsize(structFileHeader)
         */
        /**
         * Returns the absolute file offset of this entry's payload:
         * local header offset + fixed local header size + extra field size + name length.
         * Reflection is used because getLocalHeaderOffset() is not public.
         * NOTE(review): this.name.length counts chars, not encoded bytes — for
         * non-ASCII entry names the stored name may be longer; confirm.
         */
        fun ZipArchiveEntry.getEntryOffset(): Long {
            val zipFileHeaderSize = Struct3("<4s2B4HL2L2H").calcSize()
            val funGetLocalHeaderOffset = ZipArchiveEntry::class.declaredFunctions.filter { funcItem ->
                funcItem.name == "getLocalHeaderOffset"
            }[0]
            funGetLocalHeaderOffset.isAccessible = true
            val headerOffset = funGetLocalHeaderOffset.call(this) as Long
            val offset: Long = headerOffset + zipFileHeaderSize + this.localFileDataExtra.size + this.name.length
            log.debug("headerOffset = $headerOffset")
            log.debug("calcSize: $zipFileHeaderSize")
            return offset
        }

        /** Extracts single entry [entryName] of [inFile] into [outFile]. */
        fun dumpZipEntry(inFile: String, entryName: String, outFile: String) {
            log.info("dumping: $inFile#$entryName -> $outFile")
            val zf = ZipFile(inFile)
            val entry = zf.getEntry(entryName)
            FileOutputStream(outFile).use { outStream ->
                zf.getInputStream(entry).copyTo(outStream)
            }
            zf.close()
        }

        /** Returns an InputStream over entry [entryName]; caller owns both streams. */
        fun getEntryStream(zipFile: ZipFile, entryName: String): InputStream {
            return zipFile.getInputStream(zipFile.getEntry(entryName))
        }

        /** Like [dumpEntry] but silently skips when [entryName] is absent. */
        fun ZipFile.dumpEntryIfExists(entryName: String, outFile: File) {
            val entry = this.getEntry(entryName)
            if (entry != null) {
                log.info("dumping entry: $entryName -> $outFile")
                FileOutputStream(outFile).use { outStream ->
                    this.getInputStream(entry).copyTo(outStream)
                }
            } else {
                log.info("dumping entry: $entryName : entry not found, skip")
            }
        }

        /** Extracts [entryName] into [outFile]; NPEs if the entry is missing. */
        fun ZipFile.dumpEntry(entryName: String, outFile: File) {
            log.info("dumping entry: $entryName -> $outFile")
            val entry = this.getEntry(entryName)
            FileOutputStream(outFile).use { outStream ->
                this.getInputStream(entry).copyTo(outStream)
            }
        }

        /** String-path overload of [dumpEntry]. */
        fun ZipFile.dumpEntry(entryName: String, outFile: String) {
            log.info("dumping entry: $entryName -> $outFile")
            val entry = this.getEntry(entryName)
            FileOutputStream(outFile).use { outStream ->
                this.getInputStream(entry).copyTo(outStream)
            }
        }

        /**
         * Appends [inFile] as entry [entryName].
         * NOTE(review): entry.method is set to ZipMethod.ordinal — assumes the
         * project enum's ordinals match the zip method ids (STORED=0, ...);
         * confirm against ZipMethod's declaration.
         * NOTE(review): the Files.newInputStream is never closed.
         */
        fun ZipArchiveOutputStream.packFile(inFile: File, entryName: String, zipMethod: ZipMethod = ZipMethod.DEFLATED) {
            log.info("packing $entryName($zipMethod) from file $inFile (size=${inFile.length()} ...")
            val entry = ZipArchiveEntry(inFile, entryName)
            entry.method = zipMethod.ordinal
            this.putArchiveEntry(entry)
            IOUtils.copy(Files.newInputStream(inFile.toPath()), this)
            this.closeArchiveEntry()
        }

        /** Appends the in-memory buffer [inBuf] as entry [entryName]. */
        fun ZipArchiveOutputStream.packEntry(inBuf: ByteArray, entryName: String, zipMethod: ZipMethod = ZipMethod.DEFLATED) {
            log.info("packing $entryName($zipMethod) from memory data (size=${inBuf.size}...")
            val entry = ZipArchiveEntry(entryName)
            entry.method = zipMethod.ordinal
            this.putArchiveEntry(entry)
            IOUtils.copy(ByteArrayInputStream(inBuf), this)
            this.closeArchiveEntry()
        }

        /** Appends [inStream]'s contents as entry [entryName]; stream is not closed here. */
        fun ZipArchiveOutputStream.packStream(inStream: InputStream, entryName: String, zipMethod: ZipMethod = ZipMethod.DEFLATED) {
            log.info("packing $entryName($zipMethod) from input stream (size=unknown...")
            val entry = ZipArchiveEntry(entryName)
            entry.method = zipMethod.ordinal
            this.putArchiveEntry(entry)
            IOUtils.copy(inStream, this)
            this.closeArchiveEntry()
        }

        /** Deletes [entryName] from [zipFile] in place via the JDK zip filesystem. */
        fun zipDelete(zipFile: File, entryName: String) {
            val zipProperties = mutableMapOf("create" to "false")
            val zipURI = URI.create("jar:file:" + zipFile.canonicalPath)
            FileSystems.newFileSystem(zipURI, zipProperties).use { zipfs ->
                val entryPath = zipfs.getPath(entryName)
                log.info("deleting " + entryPath.toUri() + " from ZIP File ${zipFile.name}")
                Files.delete(entryPath)
            }
        }

        /**
         * Copies every entry of [inFile] into a new archive [outFile].
         * NOTE(review): the ZipArchiveOutputStream's FileOutputStream and the
         * per-entry input streams are never explicitly closed.
         */
        fun zipClone(inFile: String, outFile: String) {
            ZipFile(inFile).use { zf ->
                val zaos = ZipArchiveOutputStream(FileOutputStream(outFile))
                val e = zf.entries
                while (e.hasMoreElements()) {
                    val entry = e.nextElement()
                    zaos.putArchiveEntry(entry)
                    IOUtils.copy(zf.getInputStream(entry), zaos)
                    zaos.closeArchiveEntry()
                }
                zaos.finish()
                zaos.close()
            }
        }

        /**
         * Rewrites [inFile] so that the entry described by [entryRecipe] exists
         * with the recipe's data: adds it if absent, replaces it if present,
         * and clones all other entries. Works through a temp file that is
         * moved over the original on success.
         */
        fun zipEdit(inFile: String, entryRecipe: ZipEntryRecipe) {
            val tmpFile = File.createTempFile("edit.", ".zip")
            log.info("transforming $inFile --> $tmpFile ...")
            ZipFile(inFile).use { zf ->
                val zaos = ZipArchiveOutputStream(tmpFile)
                val e = zf.entries
                if (zf.getEntry(entryRecipe.name) == null) {
                    log.info("adding new entry [${entryRecipe.name}(${entryRecipe.method})] into [${tmpFile.canonicalPath}]")
                    val entry = ZipArchiveEntry(entryRecipe.name)
                    entry.method = entryRecipe.method.ordinal
                    zaos.putArchiveEntry(entry)
                    IOUtils.copy(ByteArrayInputStream(entryRecipe.data), zaos)
                }

                while (e.hasMoreElements()) {
                    val entry = e.nextElement()
                    zaos.putArchiveEntry(entry)
                    if (entry.name == entryRecipe.name) {
                        log.info("modifying existent entry [${entryRecipe.name}(${entryRecipe.method})] into [${tmpFile.canonicalPath}]")
                        IOUtils.copy(ByteArrayInputStream(entryRecipe.data), zaos)
                    } else {
                        log.debug("cloning entry ${entry.name} ...")
                        IOUtils.copy(zf.getInputStream(entry), zaos)
                    }
                    zaos.closeArchiveEntry()
                }

                zaos.finish()
                zaos.close()
            }
            log.info("transforming $inFile --> ${tmpFile.name} done")
            Files.move(tmpFile.toPath(), File(inFile).toPath(), StandardCopyOption.REPLACE_EXISTING)
            log.info("renaming ${tmpFile.canonicalPath} --> $inFile done")
        }

        /** True when [compressedFile] opens cleanly as gzip (header probe). */
        fun isGZ(compressedFile: String): Boolean {
            return try {
                GZIPInputStream(FileInputStream(compressedFile)).use { }
                true
            } catch (e: ZipException) {
                false
            }
        }

        /** True when [compressedFile] opens cleanly as XZ (header probe). */
        fun isXZ(compressedFile: String): Boolean {
            return try {
                XZCompressorInputStream(FileInputStream(compressedFile)).use { }
                true
            } catch (e: ZipException) {
                false
            }
        }

        /** True when the external `lz4 -t` integrity test passes on [compressedFile]. */
        fun isLZ4(compressedFile: String): Boolean {
            return try {
                "lz4 -t $compressedFile".check_call()
                true
            } catch (e: Exception) {
                false
            }
        }

        /** Decompresses [lz4File] into [outFile] via the external lz4 binary. */
        fun decompressLZ4Ext(lz4File: String, outFile: String) {
            "lz4 -d -fv $lz4File $outFile".check_call()
        }

        /**
         * Compresses [inputStream] into [lz4File] with the external lz4 binary
         * (legacy frame, level 12). `--favor-decSpeed` is added only when the
         * installed lz4 is new enough (old releases report versions like "r131,").
         */
        fun compressLZ4(lz4File: String, inputStream: InputStream) {
            FileOutputStream(File(lz4File)).use { fos ->
                val baosE = ByteArrayOutputStream()
                DefaultExecutor().let { exec ->
                    exec.streamHandler = PumpStreamHandler(fos, baosE, inputStream)
                    val cmd = CommandLine.parse("lz4 -l -12")
                    if ("lz4 --version".check_output().contains("r\\d+,".toRegex())) {
                        log.warn("lz4 version obsolete, needs update")
                    } else {
                        cmd.addArgument("--favor-decSpeed")
                    }
                    log.info(cmd.toString())
                    exec.execute(cmd)
                }
                // anything the child wrote to stderr is surfaced as a warning
                baosE.toByteArray().let {
                    if (it.isNotEmpty()) {
                        log.warn(String(it))
                    }
                }
            }
        }

        /** Decompresses an LZ4-framed file [framedLz4] into [outFile] in pure Java. */
        fun decompressLZ4(framedLz4: String, outFile: String) {
            FramedLZ4CompressorInputStream(
                    Files.newInputStream(Paths.get(framedLz4))).use { zIn ->
                Files.newOutputStream(Paths.get(outFile)).use { out ->
                    log.info("decompress lz4: $framedLz4 -> $outFile")
                    val buffer = ByteArray(8192)
                    var n: Int
                    while (-1 != zIn.read(buffer).also { n = it }) {
                        out.write(buffer, 0, n)
                    }
                }
            }
        }

        /**
         * Gzip-compresses [decompressedFile] into [compressedFile].
         * Note the argument order: output first, input second.
         */
        @Throws(IOException::class)
        fun gnuZipFile(compressedFile: String, decompressedFile: String) {
            val buffer = ByteArray(1024)
            FileOutputStream(compressedFile).use { fos ->
                GZIPOutputStream(fos).use { gos ->
                    FileInputStream(decompressedFile).use { fis ->
                        var bytesRead: Int
                        while (true) {
                            bytesRead = fis.read(buffer)
                            if (bytesRead <= 0) break
                            gos.write(buffer, 0, bytesRead)
                        }
                        gos.finish()
                        log.info("gzip done: $decompressedFile -> $compressedFile")
                    }//file-input-stream
                }//gzip-output-stream
            }//file-output-stream
        }

        /** Decompresses gzip file [compressedFile] into [decompressedFile]. */
        @Throws(IOException::class)
        fun unGnuzipFile(compressedFile: String, decompressedFile: String) {
            val buffer = ByteArray(1024)
            FileInputStream(compressedFile).use { fileIn ->
                //src
                GZIPInputStream(fileIn).use { gZIPInputStream ->
                    //src
                    FileOutputStream(decompressedFile).use { fileOutputStream ->
                        var bytesRead: Int
                        while (true) {
                            bytesRead = gZIPInputStream.read(buffer)
                            if (bytesRead <= 0) break
                            fileOutputStream.write(buffer, 0, bytesRead)
                        }
                        log.info("decompress(gz) done: $compressedFile -> $decompressedFile")
                    }
                }
            }
        }

        /*
            caution: about gzip header - OS (Operating System)

            According to https://docs.oracle.com/javase/8/docs/api/java/util/zip/package-summary.html and
            GZIP spec RFC-1952(http://www.ietf.org/rfc/rfc1952.txt), gzip files created from java.util.zip.GZIPOutputStream
            will mark the OS field with
                0 - FAT filesystem (MS-DOS, OS/2, NT/Win32)
            But default image built from Android source code has the OS field:
                3 - Unix
            This MAY not be a problem, at least we didn't find it till now.
         */
        /** Gzip-compresses [fis] into [compressedFile]; see gnuZipFile2 for the OS-field caveat. */
        @Throws(IOException::class)
        @Deprecated("this function misses features")
        fun gnuZipFile(compressedFile: String, fis: InputStream) {
            val buffer = ByteArray(1024)
            FileOutputStream(compressedFile).use { fos ->
                GZIPOutputStream(fos).use { gos ->
                    var bytesRead: Int
                    while (true) {
                        bytesRead = fis.read(buffer)
                        if (bytesRead <= 0) break
                        gos.write(buffer, 0, bytesRead)
                    }
                    log.info("compress(gz) done: $compressedFile")
                }
            }
        }

        /**
         * Gzip-compresses [fis] into [compressedFile] with the gzip header's
         * OS field forced to 3 (Unix), matching images built by AOSP.
         */
        fun gnuZipFile2(compressedFile: String, fis: InputStream) {
            val buffer = ByteArray(1024)
            val p = GzipParameters()
            p.operatingSystem = 3 //3: Unix
            FileOutputStream(compressedFile).use { fos ->
                GzipCompressorOutputStream(fos, p).use { gos ->
                    var bytesRead: Int
                    while (true) {
                        bytesRead = fis.read(buffer)
                        if (bytesRead <= 0) break
                        gos.write(buffer, 0, bytesRead)
                    }
                    log.info("compress(gz) done: $compressedFile")
                }
            }
        }

        /**
         * Extracts the cpio archive [cpioFile] into a freshly recreated [outDir],
         * restoring file/dir permissions, symlinks and modification times.
         * When [fileList] is given, a one-line summary per entry is written there.
         * NOTE(review): cis.read(buffer) is not guaranteed to fill the buffer in
         * one call — consider a read-fully loop; cis is also never closed.
         */
        fun decompressCPIO(cpioFile: String, outDir: String, fileList: String? = null) {
            run { //clean up
                if (File(outDir).exists()) {
                    log.info("Cleaning $outDir ...")
                    File(outDir).deleteRecursively()
                }
                File(outDir).mkdir()
            }
            val cis = CpioArchiveInputStream(FileInputStream(cpioFile))
            val fileListDump = if (fileList != null) FileOutputStream(fileList) else null

            // transient record used only for logging / fileList dumping
            data class CpioEntryInfo(var type: String = "", var mode: String = "",
                                     var uid_gid: String = "", var name: String = "",
                                     var size: Long = 0, var linkTarget: String = "")
            while (true) {
                val entry = cis.nextCPIOEntry ?: break
                val entryInfo = CpioEntryInfo(name = entry.name,
                        size = entry.size,
                        mode = String.format("%6s", java.lang.Long.toOctalString(entry.mode)),
                        uid_gid = "${entry.uid}/${entry.gid}")
                if (!cis.canReadEntryData(entry)) {
                    throw RuntimeException("can not read entry ??")
                }
                val buffer = ByteArray(entry.size.toInt())
                cis.read(buffer)
                val outEntryName = File(outDir + "/" + entry.name).path
                when {
                    entry.isRegularFile -> {
                        entryInfo.type = "REG"
                        File(outEntryName).writeBytes(buffer)
                        // low 12 bits of the cpio mode carry the permission bits
                        Files.setPosixFilePermissions(Paths.get(outEntryName),
                                Helper.modeToPermissions((entry.mode and 0xfff).toInt()))
                    }
                    entry.isSymbolicLink -> {
                        entryInfo.type = "LNK"
                        entryInfo.linkTarget = String(buffer)
                        Files.createSymbolicLink(Paths.get(outEntryName), Paths.get(String(buffer)))
                    }
                    entry.isDirectory -> {
                        entryInfo.type = "DIR"
                        File(outEntryName).mkdir()
                        Files.setPosixFilePermissions(Paths.get(outEntryName),
                                Helper.modeToPermissions((entry.mode and 0xfff).toInt()))
                    }
                    else -> throw IllegalArgumentException("??? type unknown")
                }
                File(outEntryName).setLastModified(entry.time)
                log.debug(entryInfo.toString() + (", read " + cis.bytesRead))
                fileListDump?.write((entryInfo.toString() + ", read " + cis.bytesRead + "\n").toByteArray())
            }
            fileListDump?.close()
        }
    }
}

@ -19,6 +19,7 @@ class Reboot {
const val lastRebootReasonKey = "persist.sys.boot.reason" const val lastRebootReasonKey = "persist.sys.boot.reason"
private fun doReboot(cmd: RB_TYPE, reason: String, rebootTarget: String) { private fun doReboot(cmd: RB_TYPE, reason: String, rebootTarget: String) {
log.info("DoReboot: cmd=$cmd, reason=$reason, tgt=$rebootTarget")
val reasons = reason.split(",").toTypedArray() val reasons = reason.split(",").toTypedArray()
val props = Properties() val props = Properties()
props.setProperty(lastRebootReasonKey, reason) props.setProperty(lastRebootReasonKey, reason)

@ -1,7 +1,7 @@
package cfig.kernel_util package cfig.kernel_util
import cfig.EnvironmentVerifier import cfig.EnvironmentVerifier
import cfig.Helper import cfig.helper.Helper
import org.apache.commons.exec.CommandLine import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor import org.apache.commons.exec.DefaultExecutor
import org.slf4j.Logger import org.slf4j.Logger

@ -3,7 +3,7 @@ package cfig.packable
import avb.AVBInfo import avb.AVBInfo
import avb.blob.Footer import avb.blob.Footer
import cfig.Avb import cfig.Avb
import cfig.Helper import cfig.helper.Helper
import cfig.bootimg.Common.Companion.probeHeaderVersion import cfig.bootimg.Common.Companion.probeHeaderVersion
import cfig.bootimg.v2.BootV2 import cfig.bootimg.v2.BootV2
import cfig.bootimg.v3.BootV3 import cfig.bootimg.v3.BootV3
@ -95,8 +95,8 @@ class BootImgParser() : IPackable {
private val log = LoggerFactory.getLogger(BootImgParser::class.java) private val log = LoggerFactory.getLogger(BootImgParser::class.java)
fun updateVbmeta(fileName: String) { fun updateVbmeta(fileName: String) {
log.info("Updating vbmeta.img side by side ...")
if (File("vbmeta.img").exists()) { if (File("vbmeta.img").exists()) {
log.info("Updating vbmeta.img side by side ...")
val partitionName = ObjectMapper().readValue(File(Avb.getJsonFileName(fileName)), AVBInfo::class.java).let { val partitionName = ObjectMapper().readValue(File(Avb.getJsonFileName(fileName)), AVBInfo::class.java).let {
it.auxBlob!!.hashDescriptors.get(0).partition_name it.auxBlob!!.hashDescriptors.get(0).partition_name
} }
@ -118,6 +118,8 @@ class BootImgParser() : IPackable {
this.auxBlob!!.hashDescriptors.add(hd) this.auxBlob!!.hashDescriptors.add(hd)
} }
Avb().packVbMetaWithPadding("vbmeta.img", mainVBMeta) Avb().packVbMetaWithPadding("vbmeta.img", mainVBMeta)
} else {
log.info("no companion vbmeta.img")
} }
} }
} }

@ -1,8 +1,8 @@
package cfig.packable package cfig.packable
import cfig.EnvironmentVerifier import cfig.EnvironmentVerifier
import cfig.Helper
import cfig.dtb_util.DTC import cfig.dtb_util.DTC
import cfig.helper.Helper
import org.apache.commons.exec.CommandLine import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor import org.apache.commons.exec.DefaultExecutor
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory

@ -1,8 +1,8 @@
package cfig.packable package cfig.packable
import cfig.Helper import cfig.helper.Helper
import cfig.Helper.Companion.check_call import cfig.helper.Helper.Companion.check_call
import cfig.Helper.Companion.check_output import cfig.helper.Helper.Companion.check_output
import org.slf4j.Logger import org.slf4j.Logger
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
import java.io.File import java.io.File

@ -1,7 +1,7 @@
package cfig.packable package cfig.packable
import cfig.Avb import cfig.Avb
import cfig.Helper import cfig.helper.Helper
import java.io.File import java.io.File
@OptIn(ExperimentalUnsignedTypes::class) @OptIn(ExperimentalUnsignedTypes::class)

@ -1,6 +1,6 @@
package cfig.packable package cfig.packable
import cfig.Helper import cfig.helper.Helper
import cfig.bootimg.v3.VendorBoot import cfig.bootimg.v3.VendorBoot
import cfig.packable.BootImgParser.Companion.updateVbmeta import cfig.packable.BootImgParser.Companion.updateVbmeta
import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.ObjectMapper

@ -3,7 +3,7 @@ package cfig.sparse_util
import cfig.EnvironmentVerifier import cfig.EnvironmentVerifier
import cfig.packable.IPackable import cfig.packable.IPackable
import org.slf4j.LoggerFactory import org.slf4j.LoggerFactory
import cfig.Helper.Companion.check_call import cfig.helper.Helper.Companion.check_call
@OptIn(ExperimentalUnsignedTypes::class) @OptIn(ExperimentalUnsignedTypes::class)
class SparseImgParser : IPackable { class SparseImgParser : IPackable {

@ -1,11 +1,14 @@
import avb.alg.Algorithms import avb.alg.Algorithms
import cfig.Helper
import cfig.KeyUtil import cfig.KeyUtil
import cfig.helper.Helper
import com.google.common.math.BigIntegerMath import com.google.common.math.BigIntegerMath
import org.apache.commons.codec.binary.Hex import org.apache.commons.codec.binary.Hex
import org.apache.commons.compress.compressors.lz4.FramedLZ4CompressorInputStream
import org.bouncycastle.jce.provider.BouncyCastleProvider import org.bouncycastle.jce.provider.BouncyCastleProvider
import org.junit.Assert.* import org.junit.Assert.assertEquals
import org.junit.Test import org.junit.Test
import org.slf4j.LoggerFactory
import java.io.*
import java.math.BigInteger import java.math.BigInteger
import java.math.RoundingMode import java.math.RoundingMode
import java.nio.file.Files import java.nio.file.Files
@ -17,14 +20,12 @@ import java.security.Signature
import java.security.spec.PKCS8EncodedKeySpec import java.security.spec.PKCS8EncodedKeySpec
import java.security.spec.X509EncodedKeySpec import java.security.spec.X509EncodedKeySpec
import javax.crypto.Cipher import javax.crypto.Cipher
import java.security.spec.RSAPublicKeySpec
import java.security.PublicKey
import java.security.spec.RSAPrivateKeySpec
import java.security.PrivateKey
@OptIn(ExperimentalUnsignedTypes::class) @OptIn(ExperimentalUnsignedTypes::class)
class HelperTest { class HelperTest {
private val log = LoggerFactory.getLogger(HelperTest::class.java)
@Test @Test
fun rawSignTest() { fun rawSignTest() {
val data = Hex.decodeHex("0001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff003031300d0609608648016503040201050004206317a4c8d86accc8258c1ac23ef0ebd18bc33010d7afb43b241802646360b4ab") val data = Hex.decodeHex("0001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff003031300d0609608648016503040201050004206317a4c8d86accc8258c1ac23ef0ebd18bc33010d7afb43b241802646360b4ab")

@ -1,7 +1,6 @@
package avb.alg package avb.alg
import avb.alg.Algorithms import cfig.helper.Helper
import cfig.Helper
import org.junit.Assert import org.junit.Assert
import org.junit.Test import org.junit.Test

@ -1,4 +1,4 @@
import cfig.Helper import cfig.helper.Helper
import cfig.io.Struct3 import cfig.io.Struct3
import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.ObjectMapper
import org.junit.Assert import org.junit.Assert

@ -5,8 +5,8 @@ import org.apache.commons.exec.DefaultExecutor
import org.apache.commons.exec.PumpStreamHandler import org.apache.commons.exec.PumpStreamHandler
val GROUP_ANDROID = "android" val GROUP_ANDROID = "android"
if (parseGradleVersion(gradle.gradleVersion) < 5) { if (parseGradleVersion(gradle.gradleVersion) < 6) {
logger.error("ERROR: Gradle Version MUST >= 5.0, current is {}", gradle.gradleVersion) logger.error("ERROR: Gradle Version MUST >= 6.0, current is {}", gradle.gradleVersion)
throw RuntimeException("ERROR: Gradle Version") throw RuntimeException("ERROR: Gradle Version")
} else { } else {
logger.info("Gradle Version {}", gradle.gradleVersion) logger.info("Gradle Version {}", gradle.gradleVersion)

@ -83,6 +83,7 @@ def verifySingleDir(inResourceDir, inImageDir):
log.warning("calling %s" % pyFile) log.warning("calling %s" % pyFile)
subprocess.check_call(pyFile, shell = True) subprocess.check_call(pyFile, shell = True)
cleanUp() cleanUp()
log.info("Leave %s" % os.path.join(resDir, imgDir))
def decompressXZ(inFile, outFile): def decompressXZ(inFile, outFile):
with lzma.open(inFile) as f: with lzma.open(inFile) as f:
@ -114,6 +115,8 @@ def main():
verifySingleDir(resDir, "Q_preview_blueline_qpp2.190228.023") verifySingleDir(resDir, "Q_preview_blueline_qpp2.190228.023")
# 10 # 10
verifySingleDir(resDir, "10.0.0_coral-qq1d.200205.002") verifySingleDir(resDir, "10.0.0_coral-qq1d.200205.002")
# 11
verifySingleDir(resDir, "11.0.0_redfin.rd1a.200810.021.a1")
log.info(successLogo) log.info(successLogo)

@ -1 +1 @@
Subproject commit 9aa59964ee1d0c828c8655ba7916f162c7721703 Subproject commit de9ef14f57d6c39031710d015a4b3d13132bf5d7
Loading…
Cancel
Save