support add_hash_footer
parent
001d61ba7a
commit
f21e851abd
@ -0,0 +1,71 @@
|
||||
import java.io.*
|
||||
import org.apache.commons.exec.CommandLine
|
||||
import org.apache.commons.exec.DefaultExecutor
|
||||
import org.apache.commons.exec.PumpStreamHandler
|
||||
import com.fasterxml.jackson.databind.ObjectMapper
|
||||
|
||||
fun adbCmd(cmd: String): String {
|
||||
val outputStream = ByteArrayOutputStream()
|
||||
val exec = DefaultExecutor()
|
||||
exec.streamHandler = PumpStreamHandler(outputStream)
|
||||
val cmdline = "adb shell $cmd"
|
||||
//println(cmdline)
|
||||
exec.execute(CommandLine.parse(cmdline))
|
||||
//println(outputStream)
|
||||
return outputStream.toString().trim()
|
||||
}
|
||||
|
||||
val cpufreqDir = "/sys/devices/system/cpu/cpufreq/policy0"
|
||||
val interactGov = "/sys/devices/system/cpu/cpufreq/interactive"
|
||||
|
||||
val scaling_governor = adbCmd("cat $cpufreqDir/scaling_governor")
|
||||
val avail_governer = adbCmd("cat $cpufreqDir/scaling_available_governors")
|
||||
val avail_freq = adbCmd("cat $cpufreqDir/scaling_available_frequencies")
|
||||
println("Available governers: " + avail_governer)
|
||||
println("Available frequency: " + avail_freq)
|
||||
|
||||
val scaleMax = adbCmd("cat $cpufreqDir/scaling_max_freq")
|
||||
val scaleMin = adbCmd("cat $cpufreqDir/scaling_min_freq")
|
||||
println("scaling_X_freq: [$scaleMin, $scaleMax]")
|
||||
println("Current governer: $scaling_governor")
|
||||
|
||||
fun getInteractValue(k: String): String {
|
||||
return adbCmd("cat $interactGov/$k")
|
||||
}
|
||||
fun getInteractInt(k: String): Int {
|
||||
return Integer.decode(adbCmd("cat $interactGov/$k"))
|
||||
}
|
||||
|
||||
data class Boost(
|
||||
var boost: Int,
|
||||
var boostpulse_duration_ms: Int)
|
||||
val boostInfo = Boost(getInteractInt("boost"), getInteractInt("boostpulse_duration") / 1000)
|
||||
|
||||
data class HiSpeed(
|
||||
var load: Int,
|
||||
var above_delay_Ms: Int,
|
||||
var freq_GHz: Double)
|
||||
val hiSpeedInfo = HiSpeed(
|
||||
getInteractInt("go_hispeed_load"),
|
||||
getInteractInt("above_hispeed_delay") / 1000,
|
||||
getInteractInt("hispeed_freq") / 1000000.0)
|
||||
|
||||
data class InteractiveGov(
|
||||
var target_loads: Int,
|
||||
var boost: Boost,
|
||||
var hiSpeed: HiSpeed,
|
||||
var minSampleTimeMs: Int,
|
||||
var timerRateMs: Int,
|
||||
var timerSlackMs: Int,
|
||||
var io_is_busy: Int)
|
||||
|
||||
val info = InteractiveGov(
|
||||
getInteractInt("target_loads"),
|
||||
boostInfo,
|
||||
hiSpeedInfo,
|
||||
getInteractInt("min_sample_time") / 1000,
|
||||
getInteractInt("timer_rate") / 1000,
|
||||
getInteractInt("timer_slack") / 1000,
|
||||
getInteractInt("io_is_busy"))
|
||||
|
||||
println(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(info))
|
@ -0,0 +1,21 @@
|
||||
|
||||
item size in bytes position
|
||||
+--------------------------------+-------------------------+
|
||||
| Hash Descriptor | total 132 |
|
||||
| | |
|
||||
| - tag | 8 | --> +0
|
||||
| - num_bytes_following | 8 | --> +8
|
||||
| - hash algorithm | 8 | --> +16
|
||||
| - partition name | 32 |
|
||||
| - salt length | 4 |
|
||||
| - digest length | 4 |
|
||||
| - reserved | 60 |
|
||||
+--------------------------------+-------------------------+
|
||||
| Partition name | |
|
||||
+--------------------------------+-------------------------+
|
||||
| salt | |
|
||||
+--------------------------------+-------------------------+
|
||||
| digest | |
|
||||
+--------------------------------+-------------------------+
|
||||
| Padding | align by 8 |
|
||||
+--------------------------------+-------------------------+ --> +16 + num_bytes_following
|
@ -1,10 +0,0 @@
|
||||
diff --git a/avb/avbtool b/avb/avbtool
|
||||
index 2830e20..647d344 100755
|
||||
--- a/avb/avbtool
|
||||
+++ b/avb/avbtool
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/bin/env python
|
||||
+#!/usr/bin/env python2.7
|
||||
|
||||
# Copyright 2016, The Android Open Source Project
|
||||
#
|
@ -0,0 +1,359 @@
|
||||
package cfig.io;
|
||||
|
||||
import cfig.Helper;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
public class Struct {
|
||||
private static Logger log = LoggerFactory.getLogger(Struct.class);
|
||||
|
||||
public ByteOrder byteOrder = ByteOrder.LITTLE_ENDIAN;
|
||||
public List<Object[]> formats = new ArrayList<>();
|
||||
|
||||
public Struct(String formatString) {
|
||||
Matcher m = Pattern.compile("(\\d*)([a-zA-Z])").matcher(formatString);
|
||||
|
||||
if (formatString.startsWith(">") || formatString.startsWith("!")) {
|
||||
this.byteOrder = ByteOrder.BIG_ENDIAN;
|
||||
log.debug("Parsing BIG_ENDIAN format: " + formatString);
|
||||
} else {
|
||||
log.debug("Parsing LITTLE_ENDIAN format: " + formatString);
|
||||
}
|
||||
|
||||
while (m.find()) {
|
||||
boolean bExpand = true;
|
||||
int mul = 1;
|
||||
if (!m.group(1).isEmpty()) {
|
||||
mul = Integer.decode(m.group(1));
|
||||
}
|
||||
Object item[] = new Object[2];
|
||||
switch (m.group(2)) {
|
||||
case "x": {//byte 1
|
||||
item[0] = PadByte.class;
|
||||
bExpand = false;
|
||||
break;
|
||||
}
|
||||
case "b": {//byte 1
|
||||
item[0] = Byte.class;
|
||||
bExpand = false;
|
||||
break;
|
||||
}
|
||||
case "s": {//python: char 1
|
||||
item[0] = Character.class;
|
||||
bExpand = false;
|
||||
break;
|
||||
}
|
||||
case "h": {//2
|
||||
item[0] = Short.class;
|
||||
break;
|
||||
}
|
||||
case "H": {//2
|
||||
item[0] = UnsignedShort.class;
|
||||
break;
|
||||
}
|
||||
case "i":
|
||||
case "l": {//4
|
||||
item[0] = Integer.class;
|
||||
break;
|
||||
}
|
||||
case "I":
|
||||
case "L": {//4
|
||||
item[0] = UnsignedInt.class;
|
||||
break;
|
||||
}
|
||||
case "q": {//8
|
||||
item[0] = Long.class;
|
||||
break;
|
||||
}
|
||||
case "Q": {//8
|
||||
item[0] = UnsignedLong.class;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
throw new IllegalArgumentException("type [" + m.group(2) + "] not supported");
|
||||
}
|
||||
}
|
||||
if (bExpand) {
|
||||
item[1] = 1;
|
||||
for (int i = 0; i < mul; i++) {
|
||||
formats.add(item);
|
||||
}
|
||||
} else {
|
||||
item[1] = mul;
|
||||
formats.add(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Integer calcsize() {
|
||||
Integer ret = 0;
|
||||
for (Object[] format : formats) {
|
||||
if (format[0] == Byte.class || format[0] == Character.class || format[0] == PadByte.class) {
|
||||
ret += 1 * (int) format[1];
|
||||
continue;
|
||||
}
|
||||
if (format[0] == Short.class) {
|
||||
ret += 2 * (int) format[1];
|
||||
continue;
|
||||
}
|
||||
if (format[0] == UnsignedShort.class) {
|
||||
ret += 2 * (int) format[1];
|
||||
continue;
|
||||
}
|
||||
if (format[0] == Integer.class) {
|
||||
ret += 4 * (int) format[1];
|
||||
continue;
|
||||
}
|
||||
if (format[0] == UnsignedInt.class) {
|
||||
ret += 4 * (int) format[1];
|
||||
continue;
|
||||
}
|
||||
if (format[0] == Long.class || format[0] == UnsignedLong.class) {
|
||||
ret += 8 * (int) format[1];
|
||||
continue;
|
||||
}
|
||||
throw new IllegalArgumentException("Class [" + format[0] + "] not supported");
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
public void dump() {
|
||||
log.info("--- Format ---");
|
||||
log.info("Endian: " + this.byteOrder);
|
||||
for (Object[] formatItem : formats) {
|
||||
log.info(formatItem[0] + ":" + formatItem[1]);
|
||||
}
|
||||
log.info("--- Format ---");
|
||||
}
|
||||
|
||||
public List unpack(InputStream iS) throws IOException {
|
||||
List<Object> ret = new ArrayList<>();
|
||||
ByteBuffer bf = ByteBuffer.allocate(32);
|
||||
bf.order(this.byteOrder);
|
||||
for (Object[] format : this.formats) {
|
||||
//return 'null' for padding bytes
|
||||
if (format[0] == PadByte.class) {
|
||||
iS.skip((Integer) format[1]);
|
||||
ret.add(null);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (format[0] == Byte.class || format[0] == Character.class || format[0] == PadByte.class) {
|
||||
byte[] data = new byte[(Integer) format[1]];
|
||||
assertEquals((int) format[1], iS.read(data));
|
||||
ret.add(data);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (format[0] == Short.class) {
|
||||
byte[] data = new byte[2];
|
||||
assertEquals(2, iS.read(data));
|
||||
bf.clear();
|
||||
bf.put(data);
|
||||
bf.flip();
|
||||
ret.add(bf.getShort());
|
||||
continue;
|
||||
}
|
||||
|
||||
if (format[0] == UnsignedShort.class) {
|
||||
byte[] data = new byte[2];
|
||||
assertEquals(2, iS.read(data));
|
||||
log.debug("UnsignedShort: " + Helper.Companion.toHexString(data));
|
||||
bf.clear();
|
||||
if (this.byteOrder == ByteOrder.LITTLE_ENDIAN) {
|
||||
bf.put(data);
|
||||
bf.put(new byte[2]); //complete high bits with 0
|
||||
} else {
|
||||
bf.put(new byte[2]); //complete high bits with 0
|
||||
bf.put(data);
|
||||
}
|
||||
bf.flip();
|
||||
ret.add(bf.getInt());
|
||||
continue;
|
||||
}
|
||||
|
||||
if (format[0] == Integer.class) {
|
||||
byte[] data = new byte[4];
|
||||
assertEquals(4, iS.read(data));
|
||||
log.debug("Integer: " + Helper.Companion.toHexString(data));
|
||||
bf.clear();
|
||||
bf.put(data);
|
||||
bf.flip();
|
||||
ret.add(bf.getInt());
|
||||
continue;
|
||||
}
|
||||
|
||||
if (format[0] == UnsignedInt.class) {
|
||||
byte[] data = new byte[4];
|
||||
assertEquals(4, iS.read(data));
|
||||
bf.clear();
|
||||
log.debug("UnsignedInt: " + Helper.Companion.toHexString(data));
|
||||
if (this.byteOrder == ByteOrder.LITTLE_ENDIAN) {
|
||||
bf.put(data);
|
||||
bf.put(new byte[4]); //complete high bits with 0
|
||||
} else {
|
||||
bf.put(new byte[4]); //complete high bits with 0
|
||||
bf.put(data);
|
||||
}
|
||||
bf.flip();
|
||||
ret.add(bf.getLong());
|
||||
continue;
|
||||
}
|
||||
|
||||
//TODO: maybe exceeds limits of Long.class ?
|
||||
if (format[0] == Long.class || format[0] == UnsignedLong.class) {
|
||||
byte[] data = new byte[8];
|
||||
assertEquals(8, iS.read(data));
|
||||
bf.clear();
|
||||
bf.put(data);
|
||||
bf.flip();
|
||||
ret.add(bf.getLong());
|
||||
continue;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("Class [" + format[0] + "] not supported");
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
public byte[] pack(Object... args) {
|
||||
if (args.length != this.formats.size()) {
|
||||
throw new IllegalArgumentException("argument size " + args.length +
|
||||
" doesn't match format size " + this.formats.size());
|
||||
}
|
||||
ByteBuffer bf = ByteBuffer.allocate(this.calcsize());
|
||||
bf.order(this.byteOrder);
|
||||
for (int i = 0; i < args.length; i++) {
|
||||
Object arg = args[i];
|
||||
Class<?> format = (Class<?>) formats.get(i)[0];
|
||||
Integer size = (int) formats.get(i)[1];
|
||||
log.debug("Index[" + i + "], fmt = " + format + ", arg = " + arg + ", multi = " + size);
|
||||
|
||||
//padding
|
||||
if (format == PadByte.class) {
|
||||
byte b[] = new byte[size];
|
||||
if (arg == null) {
|
||||
Arrays.fill(b, (byte) 0);
|
||||
} else if (arg instanceof Byte) {
|
||||
Arrays.fill(b, (byte) arg);
|
||||
} else if (arg instanceof Integer) {
|
||||
Arrays.fill(b, ((Integer) arg).byteValue());
|
||||
} else {
|
||||
throw new IllegalArgumentException("Index[" + i + "] Unsupported arg [" + arg + "] with type [" + format + "]");
|
||||
}
|
||||
bf.put(b);
|
||||
continue;
|
||||
}
|
||||
|
||||
//signed byte
|
||||
if (arg instanceof byte[]) {
|
||||
bf.put((byte[]) arg);
|
||||
int paddingSize = size - ((byte[]) arg).length;
|
||||
if (0 < paddingSize) {
|
||||
byte padBytes[] = new byte[size - ((byte[]) arg).length];
|
||||
Arrays.fill(padBytes, (byte) 0);
|
||||
bf.put(padBytes);
|
||||
} else if (0 > paddingSize) {
|
||||
log.error("container size " + size + ", value size " + ((byte[]) arg).length);
|
||||
throw new IllegalArgumentException("Index[" + i + "] arg [" + arg + "] with type [" + format + "] size overflow");
|
||||
} else {
|
||||
//perfect match
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
//unsigned byte
|
||||
if (arg instanceof int[] && format == Byte.class) {
|
||||
for (int v : (int[]) arg) {
|
||||
if (v > 255 || v < 0) {
|
||||
throw new IllegalArgumentException("Index[" + i + "] Unsupported [int array] arg [" + arg + "] with type [" + format + "]");
|
||||
}
|
||||
bf.put((byte) v);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg instanceof Short) {
|
||||
bf.putShort((short) arg);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg instanceof Integer) {
|
||||
if (format == Integer.class) {
|
||||
bf.putInt((int) arg);
|
||||
} else if (format == UnsignedShort.class) {
|
||||
ByteBuffer bf2 = ByteBuffer.allocate(4);
|
||||
bf2.order(this.byteOrder);
|
||||
bf2.putInt((int) arg);
|
||||
bf2.flip();
|
||||
if (this.byteOrder == ByteOrder.LITTLE_ENDIAN) {//LE
|
||||
bf.putShort(bf2.getShort());
|
||||
bf2.getShort();//discard
|
||||
} else {//BE
|
||||
bf2.getShort();//discard
|
||||
bf.putShort(bf2.getShort());
|
||||
}
|
||||
} else if (format == UnsignedInt.class) {
|
||||
if ((Integer) arg < 0) {
|
||||
throw new IllegalArgumentException("Index[" + i + "] Unsupported [Integer] arg [" + arg + "] with type [" + format + "]");
|
||||
}
|
||||
bf.putInt((int) arg);
|
||||
} else {
|
||||
throw new IllegalArgumentException("Index[" + i + "] Unsupported [Integer] arg [" + arg + "] with type [" + format + "]");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg instanceof Long) {
|
||||
//XXX: maybe run into issue if we meet REAL Unsigned Long
|
||||
if (format == Long.class || format == UnsignedLong.class) {
|
||||
bf.putLong((long) arg);
|
||||
} else if (format == UnsignedInt.class) {
|
||||
if ((Long) arg < 0L || (Long) arg > (Integer.MAX_VALUE * 2L + 1)) {
|
||||
throw new IllegalArgumentException("Index[" + i + "] Unsupported [Long] arg [" + arg + "] with type [" + format + "]");
|
||||
}
|
||||
ByteBuffer bf2 = ByteBuffer.allocate(8);
|
||||
bf2.order(this.byteOrder);
|
||||
bf2.putLong((long) arg);
|
||||
bf2.flip();
|
||||
if (this.byteOrder == ByteOrder.LITTLE_ENDIAN) {//LE
|
||||
bf.putInt(bf2.getInt());
|
||||
bf2.getInt();//discard
|
||||
} else {//BE
|
||||
bf2.getInt();//discard
|
||||
bf.putInt(bf2.getInt());
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("Index[" + i + "] Unsupported arg [" + arg + "] with type [" + format + "]");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
log.debug("Pack Result:" + Helper.Companion.toHexString(bf.array()));
|
||||
return bf.array();
|
||||
}
|
||||
|
||||
public static class UnsignedInt {
|
||||
}
|
||||
|
||||
public static class UnsignedLong {
|
||||
}
|
||||
|
||||
public static class UnsignedShort {
|
||||
}
|
||||
|
||||
public static class PadByte {
|
||||
}
|
||||
}
|
@ -0,0 +1,51 @@
|
||||
package cfig
|
||||
|
||||
import avb.*
|
||||
import avb.desc.*
|
||||
import org.bouncycastle.util.encoders.Hex
|
||||
|
||||
/*
|
||||
a wonderfaul base64 encoder/decoder: https://cryptii.com/base64-to-hex
|
||||
*/
|
||||
class AVBInfo(var header: Header? = null,
|
||||
var authBlob: AuthBlob? = null,
|
||||
var auxBlob: AuxBlob? = null,
|
||||
var footer: Footer? = null) {
|
||||
data class AuthBlob(
|
||||
var offset: Long = 0L,
|
||||
var size: Long = 0L,
|
||||
var hash: String? = null,
|
||||
var signature: String? = null)
|
||||
|
||||
data class AuxBlob(
|
||||
var pubkey: PubKeyInfo? = null,
|
||||
var pubkeyMeta: PubKeyMetadataInfo? = null,
|
||||
var hashTreeDescriptor: MutableList<HashTreeDescriptor> = mutableListOf(),
|
||||
var hashDescriptors: MutableList<HashDescriptor> = mutableListOf(),
|
||||
var kernelCmdlineDescriptor: MutableList<KernelCmdlineDescriptor> = mutableListOf(),
|
||||
var unknownDescriptors: MutableList<UnknownDescriptor> = mutableListOf()
|
||||
) {
|
||||
data class PubKeyInfo(
|
||||
var offset: Long = 0L,
|
||||
var size: Long = 0L,
|
||||
var pubkey: ByteArray = byteArrayOf()
|
||||
)
|
||||
|
||||
data class PubKeyMetadataInfo(
|
||||
var offset: Long = 0L,
|
||||
var size: Long = 0L
|
||||
)
|
||||
|
||||
fun encodeDescriptors(): ByteArray {
|
||||
var descList: MutableList<Descriptor> = mutableListOf()
|
||||
this.hashTreeDescriptor.forEach { descList.add(it) }
|
||||
this.hashDescriptors.forEach { descList.add(it) }
|
||||
this.kernelCmdlineDescriptor.forEach { descList.add(it) }
|
||||
this.unknownDescriptors.forEach { descList.add(it) }
|
||||
descList.sortBy { it.sequence }
|
||||
var ret = byteArrayOf()
|
||||
descList.forEach { ret = Helper.join(ret, it.encode()) }
|
||||
return ret
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,405 @@
|
||||
package cfig
|
||||
|
||||
import avb.*
|
||||
import avb.alg.Algorithms
|
||||
import avb.desc.*
|
||||
import cfig.io.Struct
|
||||
import com.fasterxml.jackson.databind.ObjectMapper
|
||||
import org.bouncycastle.util.encoders.Hex
|
||||
import org.slf4j.LoggerFactory
|
||||
import java.io.*
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Paths
|
||||
import java.nio.file.StandardOpenOption
|
||||
import java.security.MessageDigest
|
||||
|
||||
class Avb {
|
||||
val MAX_VBMETA_SIZE = 64 * 1024
|
||||
val MAX_FOOTER_SIZE = 4096
|
||||
val BLOCK_SIZE = 4096
|
||||
|
||||
private var required_libavb_version_minor = 0
|
||||
|
||||
fun add_hash_footer(image_file: String,
|
||||
partition_size: Long,
|
||||
use_persistent_digest: Boolean,
|
||||
do_not_use_ab: Boolean,
|
||||
salt: String,
|
||||
hash_algorithm: String,
|
||||
partition_name: String,
|
||||
rollback_index: Long,
|
||||
common_algorithm: String,
|
||||
common_key_path: String) {
|
||||
var original_image_size = 0L
|
||||
//required libavb version
|
||||
if (use_persistent_digest || do_not_use_ab) {
|
||||
required_libavb_version_minor = 1
|
||||
}
|
||||
log.info("Required_libavb_version: 1.$required_libavb_version_minor")
|
||||
|
||||
// SIZE + metadata (footer + vbmeta struct)
|
||||
val max_metadata_size = MAX_VBMETA_SIZE + MAX_FOOTER_SIZE
|
||||
if (partition_size < max_metadata_size) {
|
||||
throw IllegalArgumentException("Parition SIZE of $partition_size is too small. " +
|
||||
"Needs to be at least $max_metadata_size")
|
||||
}
|
||||
val max_image_size = partition_size - max_metadata_size
|
||||
log.info("max_image_size: $max_image_size")
|
||||
|
||||
if (partition_size % 4096L != 0L) {
|
||||
throw IllegalArgumentException("Partition SIZE of $partition_size is not a multiple of the image block SIZE 4096")
|
||||
}
|
||||
|
||||
val fis = FileInputStream(image_file)
|
||||
fis.skip(File(image_file).length() - 64)
|
||||
try {
|
||||
val footer = Footer(fis)
|
||||
original_image_size = footer.originalImageSize
|
||||
FileOutputStream(File(image_file), true).channel.use {
|
||||
log.info("truncate $image_file to its original SIZE ${footer.originalImageSize}")
|
||||
it.truncate(footer.originalImageSize)
|
||||
}
|
||||
} catch (e: IllegalArgumentException) {
|
||||
log.info("original image doesn't have footer")
|
||||
original_image_size = File(image_file).length()
|
||||
}
|
||||
|
||||
val saltByteArray = Helper.fromHexString(salt)
|
||||
val digest = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm)).apply {
|
||||
update(saltByteArray)
|
||||
update(File(image_file).readBytes())
|
||||
}.digest()
|
||||
log.info("Digest: " + Helper.toHexString(digest))
|
||||
|
||||
val hd = HashDescriptor()
|
||||
hd.image_size = File(image_file).length()
|
||||
hd.hash_algorithm = hash_algorithm.toByteArray()
|
||||
hd.partition_name = partition_name
|
||||
hd.salt = saltByteArray
|
||||
hd.flags = 0
|
||||
if (do_not_use_ab) hd.flags = hd.flags or 1
|
||||
if (!use_persistent_digest) hd.digest = digest
|
||||
log.info("encoded hash descriptor:" + String(Hex.encode(hd.encode())))
|
||||
val vbmeta_blob = generateVbMetaBlob(common_algorithm,
|
||||
common_key_path,
|
||||
null,
|
||||
arrayOf(hd as Descriptor),
|
||||
null,
|
||||
rollback_index,
|
||||
0,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
null,
|
||||
false,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
0)
|
||||
log.debug("vbmeta_blob: " + Helper.toHexString(vbmeta_blob))
|
||||
|
||||
if (hd.image_size % BLOCK_SIZE != 0L) {
|
||||
val padding_needed = BLOCK_SIZE - (hd.image_size % BLOCK_SIZE)
|
||||
FileOutputStream(image_file, true).use { fos ->
|
||||
fos.write(ByteArray(padding_needed.toInt()))
|
||||
}
|
||||
log.info("$image_file padded: ${hd.image_size} -> ${File(image_file).length()}")
|
||||
} else {
|
||||
log.info("$image_file doesn't need padding")
|
||||
}
|
||||
val vbmeta_offset = hd.image_size
|
||||
val padding_needed = Helper.round_to_multiple(vbmeta_blob.size.toLong(), BLOCK_SIZE) - vbmeta_blob.size
|
||||
val vbmeta_blob_with_padding = Helper.join(vbmeta_blob, Struct("${padding_needed}x").pack(null))
|
||||
FileOutputStream(image_file, true).use { fos ->
|
||||
fos.write(vbmeta_blob_with_padding)
|
||||
}
|
||||
val vbmeta_end_offset = vbmeta_offset + vbmeta_blob_with_padding.size
|
||||
FileOutputStream(image_file, true).use { fos ->
|
||||
fos.write(Struct("${partition_size - vbmeta_end_offset - 1 * BLOCK_SIZE}x").pack(null))
|
||||
}
|
||||
|
||||
val footer = Footer()
|
||||
footer.originalImageSize = original_image_size
|
||||
footer.vbMetaOffset = vbmeta_offset
|
||||
footer.vbMetaSize = vbmeta_blob.size.toLong()
|
||||
val footer_blob = footer.encode()
|
||||
val footer_blob_with_padding = Helper.join(
|
||||
Struct("${BLOCK_SIZE - Footer.SIZE}x").pack(null), footer_blob)
|
||||
log.info("footer:" + Helper.toHexString(footer_blob))
|
||||
log.info(footer.toString())
|
||||
FileOutputStream(image_file, true).use { fos ->
|
||||
fos.write(footer_blob_with_padding)
|
||||
}
|
||||
}
|
||||
|
||||
fun generateVbMetaBlob(algorithm_name: String,
|
||||
key_path: String?,
|
||||
public_key_metadata_path: String?,
|
||||
descriptors: Array<Descriptor>,
|
||||
chain_partitions: String?,
|
||||
inRollbackIndex: Long,
|
||||
inFlags: Long,
|
||||
props: String?,
|
||||
props_from_file: String?,
|
||||
kernel_cmdlines: String?,
|
||||
setup_rootfs_from_kernel: Boolean,
|
||||
ht_desc_to_setup: String?,
|
||||
include_descriptors_from_image: Boolean,
|
||||
signing_helper: String?,
|
||||
signing_helper_with_files: String?,
|
||||
release_string: String?,
|
||||
append_to_release_string: Boolean,
|
||||
required_libavb_version_minor: Int): ByteArray {
|
||||
//encoded descriptors
|
||||
var encodedDesc: ByteArray = byteArrayOf()
|
||||
descriptors.forEach { encodedDesc = Helper.join(encodedDesc, it.encode()) }
|
||||
//algorithm
|
||||
val alg = Algorithms.get(algorithm_name)!!
|
||||
//encoded pubkey
|
||||
val encodedKey = Blob.encodePubKey(alg, Files.readAllBytes(Paths.get(key_path)))
|
||||
|
||||
//3 - whole aux blob
|
||||
val auxBlob = Blob.getAuxDataBlob(encodedDesc, encodedKey)
|
||||
|
||||
//1 - whole header blob
|
||||
val headerBlob = Header().apply {
|
||||
bump_required_libavb_version_minor(required_libavb_version_minor)
|
||||
auxiliary_data_block_size = auxBlob.size.toLong()
|
||||
|
||||
authentication_data_block_size = Helper.round_to_multiple(
|
||||
(alg.hash_num_bytes + alg.signature_num_bytes).toLong(), 64)
|
||||
|
||||
algorithm_type = alg.algorithm_type.toLong()
|
||||
|
||||
hash_offset = 0
|
||||
hash_size = alg.hash_num_bytes.toLong()
|
||||
|
||||
signature_offset = alg.hash_num_bytes.toLong()
|
||||
signature_size = alg.signature_num_bytes.toLong()
|
||||
|
||||
public_key_offset = descriptors_size
|
||||
public_key_size = encodedKey.size.toLong()
|
||||
|
||||
//TODO: support pubkey metadata
|
||||
public_key_metadata_size = 0
|
||||
public_key_metadata_offset = public_key_offset + public_key_size
|
||||
|
||||
descriptors_offset = 0
|
||||
descriptors_size = encodedDesc.size.toLong()
|
||||
|
||||
rollback_index = inRollbackIndex
|
||||
flags = inFlags
|
||||
}.encode()
|
||||
|
||||
//2 - auth blob
|
||||
var authBlob = Blob.getAuthBlob(headerBlob, auxBlob, algorithm_name, key_path)
|
||||
|
||||
return Helper.join(headerBlob, authBlob, auxBlob)
|
||||
}
|
||||
|
||||
fun parseVbMeta(image_file: String): AVBInfo {
|
||||
log.info("parsing $image_file ...")
|
||||
val jsonFile = getJsonFileName(image_file)
|
||||
var footer: Footer? = null
|
||||
var vbMetaOffset = 0L
|
||||
FileInputStream(image_file).use { fis ->
|
||||
fis.skip(File(image_file).length() - Footer.SIZE)
|
||||
try {
|
||||
footer = Footer(fis)
|
||||
vbMetaOffset = footer!!.vbMetaOffset
|
||||
log.info("$image_file: $footer")
|
||||
} catch (e: IllegalArgumentException) {
|
||||
log.info("image $image_file has no AVB Footer")
|
||||
}
|
||||
}
|
||||
|
||||
var vbMetaHeader = Header()
|
||||
FileInputStream(image_file).use { fis ->
|
||||
fis.skip(vbMetaOffset)
|
||||
vbMetaHeader = Header(fis)
|
||||
}
|
||||
log.info(vbMetaHeader.toString())
|
||||
log.debug(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(vbMetaHeader))
|
||||
|
||||
val authBlockOffset = vbMetaOffset + Header.SIZE
|
||||
val auxBlockOffset = authBlockOffset + vbMetaHeader.authentication_data_block_size
|
||||
val descStartOffset = auxBlockOffset + vbMetaHeader.descriptors_offset
|
||||
|
||||
val ai = AVBInfo()
|
||||
ai.footer = footer
|
||||
ai.auxBlob = AVBInfo.AuxBlob()
|
||||
ai.header = vbMetaHeader
|
||||
if (vbMetaHeader.public_key_size > 0L) {
|
||||
ai.auxBlob!!.pubkey = AVBInfo.AuxBlob.PubKeyInfo()
|
||||
ai.auxBlob!!.pubkey!!.offset = vbMetaHeader.public_key_offset
|
||||
ai.auxBlob!!.pubkey!!.size = vbMetaHeader.public_key_size
|
||||
}
|
||||
if (vbMetaHeader.public_key_metadata_size > 0L) {
|
||||
ai.auxBlob!!.pubkeyMeta = AVBInfo.AuxBlob.PubKeyMetadataInfo()
|
||||
ai.auxBlob!!.pubkeyMeta!!.offset = vbMetaHeader.public_key_metadata_offset
|
||||
ai.auxBlob!!.pubkeyMeta!!.size = vbMetaHeader.public_key_metadata_size
|
||||
}
|
||||
|
||||
var descriptors: List<Any> = mutableListOf()
|
||||
if (vbMetaHeader.descriptors_size > 0) {
|
||||
FileInputStream(image_file).use { fis ->
|
||||
fis.skip(descStartOffset)
|
||||
descriptors = UnknownDescriptor.parseDescriptors2(fis, vbMetaHeader.descriptors_size)
|
||||
}
|
||||
|
||||
descriptors.forEach {
|
||||
log.debug(it.toString())
|
||||
}
|
||||
}
|
||||
|
||||
if (vbMetaHeader.public_key_size > 0) {
|
||||
FileInputStream(image_file).use { fis ->
|
||||
fis.skip(auxBlockOffset)
|
||||
fis.skip(vbMetaHeader.public_key_offset)
|
||||
ai.auxBlob!!.pubkey!!.pubkey = ByteArray(vbMetaHeader.public_key_size.toInt())
|
||||
fis.read(ai.auxBlob!!.pubkey!!.pubkey)
|
||||
log.debug("Parsed Pub Key: " + String(Hex.encode(ai.auxBlob!!.pubkey!!.pubkey)))
|
||||
}
|
||||
}
|
||||
|
||||
if (vbMetaHeader.public_key_metadata_size > 0) {
|
||||
FileInputStream(image_file).use { fis ->
|
||||
fis.skip(vbMetaOffset)
|
||||
fis.skip(Header.SIZE.toLong())
|
||||
fis.skip(vbMetaHeader.public_key_metadata_offset)
|
||||
val ba = ByteArray(vbMetaHeader.public_key_metadata_size.toInt())
|
||||
fis.read(ba)
|
||||
log.debug("Parsed Pub Key Metadata: " + String(Hex.encode(ba)))
|
||||
}
|
||||
}
|
||||
|
||||
if (vbMetaHeader.authentication_data_block_size > 0) {
|
||||
FileInputStream(image_file).use { fis ->
|
||||
fis.skip(vbMetaOffset)
|
||||
fis.skip(Header.SIZE.toLong())
|
||||
fis.skip(vbMetaHeader.hash_offset)
|
||||
val ba = ByteArray(vbMetaHeader.hash_size.toInt())
|
||||
fis.read(ba)
|
||||
log.debug("Parsed Auth Hash (Header & Aux Blob): " + Hex.encode(ba))
|
||||
val bb = ByteArray(vbMetaHeader.signature_size.toInt())
|
||||
fis.read(bb)
|
||||
log.debug("Parsed Auth Signature (of hash): " + String(Hex.encode(bb)))
|
||||
|
||||
ai.authBlob = AVBInfo.AuthBlob()
|
||||
ai.authBlob!!.offset = authBlockOffset
|
||||
ai.authBlob!!.size = vbMetaHeader.authentication_data_block_size
|
||||
ai.authBlob!!.hash = String(Hex.encode(ba))
|
||||
ai.authBlob!!.signature = String(Hex.encode(bb))
|
||||
}
|
||||
}
|
||||
|
||||
descriptors.forEach {
|
||||
when (it) {
|
||||
is HashDescriptor -> {
|
||||
ai.auxBlob!!.hashDescriptors.add(it)
|
||||
}
|
||||
is KernelCmdlineDescriptor -> {
|
||||
ai.auxBlob!!.kernelCmdlineDescriptor.add(it)
|
||||
}
|
||||
is HashTreeDescriptor -> {
|
||||
ai.auxBlob!!.hashTreeDescriptor.add(it)
|
||||
}
|
||||
is UnknownDescriptor -> {
|
||||
ai.auxBlob!!.unknownDescriptors.add(it)
|
||||
}
|
||||
else -> {
|
||||
throw IllegalArgumentException("invalid descriptor: $it")
|
||||
}
|
||||
}
|
||||
}
|
||||
val aiStr = ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(ai)
|
||||
log.debug(aiStr)
|
||||
ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File(jsonFile), ai)
|
||||
log.info("vbmeta info written to $jsonFile")
|
||||
|
||||
return ai
|
||||
}
|
||||
|
||||
fun packVbMeta(key_path: String, info: AVBInfo? = null): ByteArray {
|
||||
val ai = info ?: ObjectMapper().readValue(File(getJsonFileName("vbmeta.img")), AVBInfo::class.java)
|
||||
val alg = Algorithms.get(ai.header!!.algorithm_type.toInt())!!
|
||||
val encodedDesc = ai.auxBlob!!.encodeDescriptors()
|
||||
//encoded pubkey
|
||||
val encodedKey = Blob.encodePubKey(alg, Files.readAllBytes(Paths.get(key_path)))
|
||||
|
||||
//3 - whole aux blob
|
||||
var auxBlob = byteArrayOf()
|
||||
if (ai.header!!.auxiliary_data_block_size > 0) {
|
||||
if (encodedKey.contentEquals(ai.auxBlob!!.pubkey!!.pubkey)) {
|
||||
log.info("Using the same key as original vbmeta")
|
||||
} else {
|
||||
log.warn("Using different key from original vbmeta")
|
||||
}
|
||||
auxBlob = Blob.getAuxDataBlob(encodedDesc, encodedKey)
|
||||
} else {
|
||||
log.info("No aux blob")
|
||||
}
|
||||
|
||||
//1 - whole header blob
|
||||
val headerBlob = ai.header!!.apply {
|
||||
auxiliary_data_block_size = auxBlob.size.toLong()
|
||||
authentication_data_block_size = Helper.round_to_multiple(
|
||||
(alg.hash_num_bytes + alg.signature_num_bytes).toLong(), 64)
|
||||
|
||||
hash_offset = 0
|
||||
hash_size = alg.hash_num_bytes.toLong()
|
||||
|
||||
signature_offset = alg.hash_num_bytes.toLong()
|
||||
signature_size = alg.signature_num_bytes.toLong()
|
||||
|
||||
public_key_offset = descriptors_size
|
||||
public_key_size = encodedKey.size.toLong()
|
||||
|
||||
//TODO: support pubkey metadata
|
||||
public_key_metadata_size = 0
|
||||
public_key_metadata_offset = public_key_offset + public_key_size
|
||||
|
||||
descriptors_offset = 0
|
||||
descriptors_size = encodedDesc.size.toLong()
|
||||
}.encode()
|
||||
|
||||
//2 - auth blob
|
||||
var authBlob = byteArrayOf()
|
||||
if (ai.authBlob != null) {
|
||||
authBlob = Blob.getAuthBlob(headerBlob, auxBlob, alg.name, key_path)
|
||||
} else {
|
||||
log.info("No auth blob")
|
||||
}
|
||||
|
||||
return Helper.join(headerBlob, authBlob, auxBlob)
|
||||
}
|
||||
|
||||
fun packVbMetaWithPadding(key_path: String, info: AVBInfo? = null) {
|
||||
val rawBlob = packVbMeta(key_path, info)
|
||||
val paddingSize = Helper.round_to_multiple(rawBlob.size.toLong(), BLOCK_SIZE) - rawBlob.size
|
||||
val paddedBlob = Helper.join(rawBlob, Struct("${paddingSize}x").pack(null))
|
||||
log.info("raw vbmeta size ${rawBlob.size}, padding size $paddingSize, total blob size ${paddedBlob.size}")
|
||||
log.info("Writing padded vbmeta to file: vbmeta.img.signed")
|
||||
Files.write(Paths.get("vbmeta.img.signed"), paddedBlob, StandardOpenOption.CREATE)
|
||||
}
|
||||
|
||||
companion object {
|
||||
private val log = LoggerFactory.getLogger(Avb::class.java)
|
||||
val AVB_VERSION_MAJOR = 1
|
||||
val AVB_VERSION_MINOR = 1
|
||||
val AVB_VERSION_SUB = 0
|
||||
|
||||
//Keep in sync with libavb/avb_footer.h.
|
||||
val AVB_FOOTER_VERSION_MAJOR = 1
|
||||
val AVB_FOOTER_VERSION_MINOR = 0
|
||||
|
||||
fun getJsonFileName(image_file: String): String {
|
||||
val fileName = File(image_file).name
|
||||
// val jsonFile = fileName.substring(0, fileName.lastIndexOf(".")) + ".json"
|
||||
val jsonFile = "$fileName.avb.json"
|
||||
return UnifiedConfig.workDir + jsonFile
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,76 @@
|
||||
package cfig
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper
|
||||
import org.apache.commons.exec.CommandLine
|
||||
import org.apache.commons.exec.DefaultExecutor
|
||||
import org.slf4j.LoggerFactory
|
||||
import java.io.File
|
||||
|
||||
class Signer {
|
||||
companion object {
|
||||
private val log = LoggerFactory.getLogger(Signer::class.java)
|
||||
private val workDir = UnifiedConfig.workDir
|
||||
|
||||
fun sign(avbtool: String, bootSigner: String) {
|
||||
log.info("Loading config from ${workDir}bootimg.json")
|
||||
val cfg = ObjectMapper().readValue(File(workDir + "bootimg.json"), UnifiedConfig::class.java)
|
||||
val readBack = cfg.toArgs()
|
||||
val args = readBack[0] as ImgArgs
|
||||
val info = readBack[1] as ImgInfo
|
||||
|
||||
when (args.verifyType) {
|
||||
ImgArgs.VerifyType.VERIFY -> {
|
||||
log.info("Signing with verified-boot 1.0 style")
|
||||
val sig = ObjectMapper().readValue(
|
||||
mapToJson(info.signature as LinkedHashMap<*, *>), ImgInfo.VeritySignature::class.java)
|
||||
DefaultExecutor().execute(CommandLine.parse("java -jar $bootSigner " +
|
||||
"${sig.path} ${args.output}.clear ${sig.verity_pk8} ${sig.verity_pem} ${args.output}.signed"))
|
||||
}
|
||||
ImgArgs.VerifyType.AVB -> {
|
||||
log.info("Adding hash_footer with verified-boot 2.0 style")
|
||||
val sig = ObjectMapper().readValue(
|
||||
mapToJson(info.signature as LinkedHashMap<*, *>), ImgInfo.AvbSignature::class.java)
|
||||
File(args.output + ".clear").copyTo(File(args.output + ".signed"))
|
||||
val cmdlineStr = "$avbtool add_hash_footer " +
|
||||
"--image ${args.output}.signed " +
|
||||
"--partition_size ${sig.imageSize} " +
|
||||
"--salt ${sig.salt} " +
|
||||
"--partition_name ${sig.partName} " +
|
||||
"--hash_algorithm ${sig.hashAlgorithm} " +
|
||||
"--algorithm ${sig.algorithm} " +
|
||||
"--key avb/avb_test_data/testkey_rsa4096.pem"
|
||||
log.warn(cmdlineStr)
|
||||
DefaultExecutor().execute(CommandLine.parse(cmdlineStr))
|
||||
verifyAVBIntegrity(args, avbtool)
|
||||
|
||||
File(args.output + ".clear").copyTo(File(args.output + ".signed2"))
|
||||
Avb().add_hash_footer(args.output + ".signed2",
|
||||
sig.imageSize!!.toLong(),
|
||||
false, false,
|
||||
salt = sig.salt,
|
||||
hash_algorithm = sig.hashAlgorithm!!,
|
||||
partition_name = sig.partName!!,
|
||||
rollback_index = 0,
|
||||
common_algorithm = sig.algorithm!!,
|
||||
common_key_path = "avb/avb_test_data/testkey_rsa4096.pem")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun verifyAVBIntegrity(args: ImgArgs, avbtool: String) {
|
||||
val tgt = args.output + ".signed"
|
||||
log.info("Verifying AVB: $tgt")
|
||||
DefaultExecutor().execute(CommandLine.parse("$avbtool verify_image --image $tgt"))
|
||||
log.info("Verifying image passed: $tgt")
|
||||
}
|
||||
|
||||
fun mapToJson(m: LinkedHashMap<*, *>): String {
|
||||
val sb = StringBuilder()
|
||||
m.forEach { k, v ->
|
||||
if (sb.isNotEmpty()) sb.append(", ")
|
||||
sb.append("\"$k\": \"$v\"")
|
||||
}
|
||||
return "{ $sb }"
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,64 @@
|
||||
package avb
|
||||
|
||||
import avb.alg.Algorithm
|
||||
import avb.alg.Algorithms
|
||||
import cfig.Helper
|
||||
import cfig.io.Struct
|
||||
import org.junit.Assert
|
||||
import org.slf4j.LoggerFactory
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Paths
|
||||
import java.security.MessageDigest
|
||||
|
||||
class Blob {
|
||||
companion object {
|
||||
fun encodePubKey(alg: Algorithm, key: ByteArray): ByteArray {
|
||||
var encodedKey = byteArrayOf()
|
||||
if (alg.public_key_num_bytes > 0) {
|
||||
encodedKey = Helper.encodeRSAkey(key)
|
||||
log.info("encodePubKey(): size = ${alg.public_key_num_bytes}, algorithm key size: ${encodedKey.size}")
|
||||
Assert.assertEquals(alg.public_key_num_bytes, encodedKey.size)
|
||||
} else {
|
||||
log.info("encodePubKey(): No key to use")
|
||||
}
|
||||
return encodedKey
|
||||
}
|
||||
|
||||
//TODO: support pkmd_blob
|
||||
//encoded_descriptors + encoded_key + pkmd_blob + (padding)
|
||||
fun getAuxDataBlob(encodedDesc: ByteArray, encodedKey: ByteArray): ByteArray {
|
||||
val auxSize = Helper.round_to_multiple(
|
||||
encodedDesc.size + encodedKey.size /* encoded key */ + 0L /* pkmd_blob */,
|
||||
64)
|
||||
return Struct("${auxSize}b").pack(Helper.join(encodedDesc, encodedKey))
|
||||
}
|
||||
|
||||
fun getAuthBlob(header_data_blob: ByteArray,
|
||||
aux_data_blob: ByteArray,
|
||||
algorithm_name: String,
|
||||
key_path: String?): ByteArray {
|
||||
val alg = Algorithms.get(algorithm_name)!!
|
||||
val authBlockSize = Helper.round_to_multiple((alg.hash_num_bytes + alg.signature_num_bytes).toLong(), 64)
|
||||
if (authBlockSize == 0L) {
|
||||
log.info("No auth blob")
|
||||
return byteArrayOf()
|
||||
}
|
||||
|
||||
//hash & signature
|
||||
var binaryHash: ByteArray = byteArrayOf()
|
||||
var binarySignature: ByteArray = byteArrayOf()
|
||||
if (algorithm_name != "NONE") {
|
||||
val hasher = MessageDigest.getInstance(Helper.pyAlg2java(alg.hash_name))
|
||||
binaryHash = hasher.apply {
|
||||
update(header_data_blob)
|
||||
update(aux_data_blob)
|
||||
}.digest()
|
||||
binarySignature = Helper.rawSign(key_path!!, Helper.join(alg.padding, binaryHash))
|
||||
}
|
||||
val authData = Helper.join(binaryHash, binarySignature)
|
||||
return Helper.join(authData, Struct("${authBlockSize - authData.size}x").pack(0))
|
||||
}
|
||||
|
||||
private val log = LoggerFactory.getLogger(Blob::class.java)
|
||||
}
|
||||
}
|
@ -0,0 +1,16 @@
|
||||
package avb
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator
|
||||
import com.fasterxml.jackson.databind.JsonSerializer
|
||||
import com.fasterxml.jackson.databind.SerializerProvider
|
||||
import org.bouncycastle.util.encoders.Hex
|
||||
|
||||
class ByteArraySerializer: JsonSerializer<ByteArray>() {
|
||||
override fun serialize(value: ByteArray?, gen: JsonGenerator?, serializers: SerializerProvider?) {
|
||||
if (value != null) {
|
||||
gen!!.writeString(String(Hex.encode(value!!)))
|
||||
} else {
|
||||
gen!!.writeString("")
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,50 @@
|
||||
package avb
|
||||
|
||||
import cfig.io.Struct
|
||||
import org.junit.Assert
|
||||
import java.io.InputStream
|
||||
|
||||
data class Footer constructor(
|
||||
var versionMajor: Long = FOOTER_VERSION_MAJOR,
|
||||
var versionMinor: Long = FOOTER_VERSION_MINOR,
|
||||
var originalImageSize: Long = 0L,
|
||||
var vbMetaOffset: Long = 0L,
|
||||
var vbMetaSize: Long = 0L
|
||||
) {
|
||||
companion object {
|
||||
const val MAGIC = "AVBf"
|
||||
const val SIZE = 64
|
||||
const val RESERVED = 28
|
||||
const val FOOTER_VERSION_MAJOR = 1L
|
||||
const val FOOTER_VERSION_MINOR = 0L
|
||||
private const val FORMAT_STRING = "!4s2L3Q${RESERVED}x"
|
||||
|
||||
init {
|
||||
Assert.assertEquals(SIZE, Struct(FORMAT_STRING).calcsize())
|
||||
}
|
||||
}
|
||||
|
||||
@Throws(IllegalArgumentException::class)
|
||||
constructor(iS: InputStream) : this() {
|
||||
val info = Struct(FORMAT_STRING).unpack(iS)
|
||||
Assert.assertEquals(7, info.size)
|
||||
if (!MAGIC.toByteArray().contentEquals(info[0] as ByteArray)) {
|
||||
throw IllegalArgumentException("stream doesn't look like valid AVB Footer")
|
||||
}
|
||||
versionMajor = info[1] as Long
|
||||
versionMinor = info[2] as Long
|
||||
originalImageSize = info[3] as Long
|
||||
vbMetaOffset = info[4] as Long
|
||||
vbMetaSize = info[5] as Long
|
||||
}
|
||||
|
||||
fun encode(): ByteArray {
|
||||
return Struct(FORMAT_STRING).pack(Footer.MAGIC.toByteArray(),
|
||||
this.versionMajor,
|
||||
this.versionMinor,
|
||||
this.originalImageSize,
|
||||
this.vbMetaOffset,
|
||||
this.vbMetaSize,
|
||||
null)
|
||||
}
|
||||
}
|
@ -0,0 +1,90 @@
|
||||
package avb
|
||||
|
||||
import cfig.Avb
|
||||
import cfig.Helper
|
||||
import cfig.io.Struct
|
||||
import org.junit.Assert
|
||||
import java.io.InputStream
|
||||
|
||||
data class Header(
|
||||
var required_libavb_version_major: Int = Avb.AVB_VERSION_MAJOR,
|
||||
var required_libavb_version_minor: Int = 0,
|
||||
var authentication_data_block_size: Long = 0L,
|
||||
var auxiliary_data_block_size: Long = 0L,
|
||||
var algorithm_type: Long = 0L,
|
||||
var hash_offset: Long = 0L,
|
||||
var hash_size: Long = 0L,
|
||||
var signature_offset: Long = 0L,
|
||||
var signature_size: Long = 0L,
|
||||
var public_key_offset: Long = 0L,
|
||||
var public_key_size: Long = 0L,
|
||||
var public_key_metadata_offset: Long = 0L,
|
||||
var public_key_metadata_size: Long = 0L,
|
||||
var descriptors_offset: Long = 0L,
|
||||
var descriptors_size: Long = 0L,
|
||||
var rollback_index: Long = 0L,
|
||||
var flags: Long = 0,
|
||||
var release_string: String = "avbtool ${Avb.AVB_VERSION_MAJOR}.${Avb.AVB_VERSION_MINOR}.${Avb.AVB_VERSION_SUB}") {
|
||||
fun bump_required_libavb_version_minor(minor: Int) {
|
||||
this.required_libavb_version_minor = maxOf(required_libavb_version_minor, minor)
|
||||
}
|
||||
|
||||
@Throws(IllegalArgumentException::class)
|
||||
constructor(iS: InputStream) : this() {
|
||||
val info = Struct(FORMAT_STRING).unpack(iS)
|
||||
Assert.assertEquals(22, info.size)
|
||||
if (!(info[0] as ByteArray).contentEquals(magic.toByteArray())) {
|
||||
throw IllegalArgumentException("stream doesn't look like valid VBMeta Header")
|
||||
}
|
||||
this.required_libavb_version_major = (info[1] as Long).toInt()
|
||||
this.required_libavb_version_minor = (info[2] as Long).toInt()
|
||||
this.authentication_data_block_size = info[3] as Long
|
||||
this.auxiliary_data_block_size = info[4] as Long
|
||||
this.algorithm_type = info[5] as Long
|
||||
this.hash_offset = info[6] as Long
|
||||
this.hash_size = info[7] as Long
|
||||
this.signature_offset = info[8] as Long
|
||||
this.signature_size = info[9] as Long
|
||||
this.public_key_offset = info[10] as Long
|
||||
this.public_key_size = info[11] as Long
|
||||
this.public_key_metadata_offset = info[12] as Long
|
||||
this.public_key_metadata_size = info[13] as Long
|
||||
this.descriptors_offset = info[14] as Long
|
||||
this.descriptors_size = info[15] as Long
|
||||
this.rollback_index = info[16] as Long
|
||||
this.flags = info[17] as Long
|
||||
//padding
|
||||
this.release_string = Helper.toCString(info[19] as ByteArray)
|
||||
}
|
||||
|
||||
fun encode(): ByteArray {
|
||||
return Struct(FORMAT_STRING).pack(
|
||||
magic.toByteArray(),
|
||||
this.required_libavb_version_major, this.required_libavb_version_minor,
|
||||
this.authentication_data_block_size, this.auxiliary_data_block_size,
|
||||
this.algorithm_type,
|
||||
this.hash_offset, this.hash_size,
|
||||
this.signature_offset, this.signature_size,
|
||||
this.public_key_offset, this.public_key_size,
|
||||
this.public_key_metadata_offset, this.public_key_metadata_size,
|
||||
this.descriptors_offset, this.descriptors_size,
|
||||
this.rollback_index,
|
||||
this.flags,
|
||||
null,
|
||||
this.release_string.toByteArray(),
|
||||
null,
|
||||
null)
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val magic: String = "AVB0"
|
||||
const val SIZE = 256
|
||||
const val REVERSED0 = 4
|
||||
const val REVERSED = 80
|
||||
const val FORMAT_STRING = ("!4s2L2QL11QL${REVERSED0}x47sx" + "${REVERSED}x")
|
||||
|
||||
init {
|
||||
Assert.assertEquals(SIZE, Struct(FORMAT_STRING).calcsize())
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
package avb.alg
|
||||
|
||||
data class Algorithm(
|
||||
val name: String = "NONE",
|
||||
val algorithm_type: Int = 0,
|
||||
val hash_name: String = "",
|
||||
val hash_num_bytes: Int = 0,
|
||||
val signature_num_bytes: Int = 0,
|
||||
val public_key_num_bytes: Int = 0,
|
||||
val padding: ByteArray = byteArrayOf())
|
@ -0,0 +1,128 @@
|
||||
package avb.alg
|
||||
|
||||
import cfig.io.Struct
|
||||
|
||||
class Algorithms {
|
||||
companion object {
|
||||
private val algMap = mutableMapOf<String, Algorithm>()
|
||||
fun get(name: String): Algorithm? {
|
||||
return algMap[name]
|
||||
}
|
||||
|
||||
fun get(algorithm_type: Int): Algorithm? {
|
||||
for (item in algMap) {
|
||||
if (item.value.algorithm_type == algorithm_type) {
|
||||
return item.value
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
init {
|
||||
val NONE = Algorithm(name = "NONE")
|
||||
|
||||
val SHA256_RSA2048 = Algorithm(
|
||||
algorithm_type = 1,
|
||||
name = "SHA256_RSA2048",
|
||||
hash_name = "sha256",
|
||||
hash_num_bytes = 32,
|
||||
signature_num_bytes = 256,
|
||||
public_key_num_bytes = 8 + 2 * 2048 / 8,
|
||||
padding = Struct("2b202x1b19b").pack(
|
||||
byteArrayOf(0x00, 0x01),
|
||||
0xff,
|
||||
byteArrayOf(0x00),
|
||||
intArrayOf(0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
|
||||
0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05,
|
||||
0x00, 0x04, 0x20)))
|
||||
|
||||
val SHA256_RSA4096 = Algorithm(
|
||||
name = "SHA256_RSA4096",
|
||||
algorithm_type = 2,
|
||||
hash_name = "sha256",
|
||||
hash_num_bytes = 32,
|
||||
signature_num_bytes = 512,
|
||||
public_key_num_bytes = 8 + 2 * 4096 / 8,
|
||||
padding = Struct("2b458x1x19b").pack(
|
||||
byteArrayOf(0x00, 0x01),
|
||||
0xff,
|
||||
0x00,
|
||||
intArrayOf(0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
|
||||
0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05,
|
||||
0x00, 0x04, 0x20)
|
||||
)
|
||||
)
|
||||
|
||||
val SHA256_RSA8192 = Algorithm(
|
||||
name = "SHA256_RSA8192",
|
||||
algorithm_type = 3,
|
||||
hash_name = "sha256",
|
||||
hash_num_bytes = 32,
|
||||
signature_num_bytes = 1024,
|
||||
public_key_num_bytes = 8 + 2 * 8192 / 8,
|
||||
padding = Struct("2b970x1x19b").pack(
|
||||
intArrayOf(0x00, 0x01),
|
||||
0xff,
|
||||
0x00,
|
||||
intArrayOf(0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
|
||||
0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05,
|
||||
0x00, 0x04, 0x20)))
|
||||
|
||||
val SHA512_RSA2048 = Algorithm(
|
||||
name = "SHA512_RSA2048",
|
||||
algorithm_type = 4,
|
||||
hash_name = "sha512",
|
||||
hash_num_bytes = 64,
|
||||
signature_num_bytes = 256,
|
||||
public_key_num_bytes = 8 + 2 * 2048 / 8,
|
||||
padding = Struct("2b170x1x19b").pack(
|
||||
intArrayOf(0x00, 0x01),
|
||||
0xff,
|
||||
0x00,
|
||||
intArrayOf(0x30, 0x51, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
|
||||
0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03, 0x05,
|
||||
0x00, 0x04, 0x40)))
|
||||
|
||||
val SHA512_RSA4096 = Algorithm(
|
||||
name = "SHA512_RSA4096",
|
||||
algorithm_type = 5,
|
||||
hash_name = "sha512",
|
||||
hash_num_bytes = 64,
|
||||
signature_num_bytes = 512,
|
||||
public_key_num_bytes = 8 + 2 * 4096 / 8,
|
||||
padding = Struct("2b426x1x19b").pack(
|
||||
intArrayOf(0x00, 0x01),
|
||||
0xff,
|
||||
0x00,
|
||||
intArrayOf(0x30, 0x51, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
|
||||
0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03, 0x05,
|
||||
0x00, 0x04, 0x40)))
|
||||
|
||||
val SHA512_RSA8192 = Algorithm(
|
||||
name = "SHA512_RSA8192",
|
||||
algorithm_type = 6,
|
||||
hash_name = "sha512",
|
||||
hash_num_bytes = 64,
|
||||
signature_num_bytes = 1024,
|
||||
public_key_num_bytes = 8 + 2 * 8192 / 8,
|
||||
|
||||
padding = Struct("2b938x1x19b").pack(
|
||||
intArrayOf(0x00, 0x01),
|
||||
0xff,
|
||||
0x00,
|
||||
intArrayOf(0x30, 0x51, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
|
||||
0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03, 0x05,
|
||||
0x00, 0x04, 0x40)))
|
||||
|
||||
algMap[NONE.name] = NONE
|
||||
|
||||
algMap[SHA256_RSA2048.name] = SHA256_RSA2048
|
||||
algMap[SHA256_RSA4096.name] = SHA256_RSA4096
|
||||
algMap[SHA256_RSA8192.name] = SHA256_RSA8192
|
||||
|
||||
algMap[SHA512_RSA2048.name] = SHA512_RSA2048
|
||||
algMap[SHA512_RSA4096.name] = SHA512_RSA4096
|
||||
algMap[SHA512_RSA8192.name] = SHA512_RSA8192
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,5 @@
|
||||
package avb.desc
|
||||
|
||||
abstract class Descriptor(var tag: Long, var num_bytes_following: Long, var sequence: Int = 0) {
|
||||
abstract fun encode(): ByteArray
|
||||
}
|
@ -0,0 +1,76 @@
|
||||
package avb.desc
|
||||
|
||||
import cfig.Helper
|
||||
import cfig.io.Struct
|
||||
import org.junit.Assert
|
||||
import java.io.File
|
||||
import java.io.InputStream
|
||||
import java.security.MessageDigest
|
||||
|
||||
class HashDescriptor(var image_size: Long = 0L,
|
||||
var hash_algorithm: ByteArray = byteArrayOf(),
|
||||
var partition_name_len: Long = 0L,
|
||||
var salt_len: Long = 0L,
|
||||
var digest_len: Long = 0L,
|
||||
var flags: Long = 0L,
|
||||
var partition_name: String = "",
|
||||
var salt: ByteArray = byteArrayOf(),
|
||||
var digest: ByteArray = byteArrayOf()) : Descriptor(TAG, 0, 0) {
|
||||
constructor(data: InputStream, seq: Int = 0) : this() {
|
||||
val info = Struct(FORMAT_STRING).unpack(data)
|
||||
this.tag = info[0] as Long
|
||||
this.num_bytes_following = info[1] as Long
|
||||
this.image_size = info[2] as Long
|
||||
this.hash_algorithm = info[3] as ByteArray
|
||||
this.partition_name_len = info[4] as Long
|
||||
this.salt_len = info[5] as Long
|
||||
this.digest_len = info[6] as Long
|
||||
this.flags = info[7] as Long
|
||||
this.sequence = seq
|
||||
val expectedSize = Helper.round_to_multiple(SIZE - 16 + partition_name_len + salt_len + digest_len, 8)
|
||||
if (this.tag != TAG || expectedSize != this.num_bytes_following) {
|
||||
throw IllegalArgumentException("Given data does not look like a |hash| descriptor")
|
||||
}
|
||||
val payload = Struct("${this.partition_name_len}s${this.salt_len}b${this.digest_len}b").unpack(data)
|
||||
Assert.assertEquals(3, payload.size)
|
||||
this.partition_name = Helper.toCString(payload[0] as ByteArray)
|
||||
this.salt = payload[1] as ByteArray
|
||||
this.digest = payload[2] as ByteArray
|
||||
}
|
||||
|
||||
override fun encode(): ByteArray {
|
||||
val payload_bytes_following = SIZE + this.partition_name.length + this.salt.size + this.digest.size - 16L
|
||||
this.num_bytes_following = Helper.round_to_multiple(payload_bytes_following, 8)
|
||||
val padding_size = num_bytes_following - payload_bytes_following
|
||||
val desc = Struct(FORMAT_STRING).pack(
|
||||
TAG,
|
||||
this.num_bytes_following,
|
||||
this.image_size,
|
||||
this.hash_algorithm,
|
||||
this.partition_name.length,
|
||||
this.salt.size,
|
||||
this.digest.size,
|
||||
this.flags,
|
||||
null)
|
||||
val padding = Struct("${padding_size}x").pack(null)
|
||||
return Helper.join(desc, partition_name.toByteArray(), this.salt, this.digest, padding)
|
||||
}
|
||||
|
||||
fun verify(image_file: String) {
|
||||
val hasher = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm.toString()))
|
||||
hasher.update(this.salt)
|
||||
hasher.update(File(image_file).readBytes())
|
||||
val digest = hasher.digest()
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val TAG = 2L
|
||||
private const val RESERVED = 60
|
||||
private const val SIZE = 72 + RESERVED
|
||||
private const val FORMAT_STRING = "!3Q32s4L${RESERVED}s"
|
||||
}
|
||||
|
||||
override fun toString(): String {
|
||||
return "HashDescriptor(TAG=$TAG, image_size=$image_size, hash_algorithm=${Helper.toCString(hash_algorithm)}, flags=$flags, partition_name='$partition_name', salt=${Helper.toHexString(salt)}, digest=${Helper.toHexString(digest)})"
|
||||
}
|
||||
}
|
@ -0,0 +1,91 @@
|
||||
package avb.desc
|
||||
|
||||
import cfig.Helper
|
||||
import cfig.io.Struct
|
||||
import org.slf4j.LoggerFactory
|
||||
import java.io.InputStream
|
||||
import java.util.*
|
||||
|
||||
class HashTreeDescriptor(
|
||||
var dm_verity_version: Long = 0L,
|
||||
var image_size: Long = 0L,
|
||||
var tree_offset: Long = 0L,
|
||||
var tree_size: Long = 0L,
|
||||
var data_block_size: Long = 0L,
|
||||
var hash_block_size: Long = 0L,
|
||||
var fec_num_roots: Long = 0L,
|
||||
var fec_offset: Long = 0L,
|
||||
var fec_size: Long = 0L,
|
||||
var hash_algorithm: String = "",
|
||||
var partition_name: String = "",
|
||||
var salt: ByteArray = byteArrayOf(),
|
||||
var root_digest: ByteArray = byteArrayOf(),
|
||||
var flags: Long = 0L) : Descriptor(TAG, 0, 0) {
|
||||
constructor(data: InputStream, seq: Int = 0) : this() {
|
||||
this.sequence = seq
|
||||
val info = Struct(FORMAT_STRING).unpack(data)
|
||||
this.tag = info[0] as Long
|
||||
this.num_bytes_following = info[1] as Long
|
||||
this.dm_verity_version = info[2] as Long
|
||||
this.image_size = info[3] as Long
|
||||
this.tree_offset = info[4] as Long
|
||||
this.tree_size = info[5] as Long
|
||||
this.data_block_size = info[6] as Long
|
||||
this.hash_block_size = info[7] as Long
|
||||
this.fec_num_roots = info[8] as Long
|
||||
this.fec_offset = info[9] as Long
|
||||
this.fec_size = info[10] as Long
|
||||
this.hash_algorithm = Helper.toCString(info[11] as ByteArray)
|
||||
val partition_name_len = info[12] as Long
|
||||
val salt_len = info[13] as Long
|
||||
val root_digest_len = info[14] as Long
|
||||
this.flags = info[15] as Long
|
||||
val expectedSize = Helper.round_to_multiple(SIZE - 16 + partition_name_len + salt_len + root_digest_len, 8)
|
||||
if (this.tag != TAG || this.num_bytes_following != expectedSize) {
|
||||
throw IllegalArgumentException("Given data does not look like a hashtree descriptor")
|
||||
}
|
||||
|
||||
val info2 = Struct("${partition_name_len}s${salt_len}s${root_digest_len}s").unpack(data)
|
||||
this.partition_name = Helper.toCString(info2[0] as ByteArray)
|
||||
this.salt = info2[1] as ByteArray
|
||||
this.root_digest = info2[2] as ByteArray
|
||||
}
|
||||
|
||||
override fun encode(): ByteArray {
|
||||
this.num_bytes_following = SIZE + this.partition_name.length + this.salt.size + this.root_digest.size - 16
|
||||
val nbf_with_padding = Helper.round_to_multiple(this.num_bytes_following, 8)
|
||||
val padding_size = nbf_with_padding - this.num_bytes_following
|
||||
val desc = Struct(FORMAT_STRING).pack(
|
||||
TAG,
|
||||
nbf_with_padding,
|
||||
this.dm_verity_version,
|
||||
this.image_size,
|
||||
this.tree_offset,
|
||||
this.tree_size,
|
||||
this.data_block_size,
|
||||
this.hash_block_size,
|
||||
this.fec_num_roots,
|
||||
this.fec_offset,
|
||||
this.fec_size,
|
||||
this.hash_algorithm.toByteArray(),
|
||||
this.partition_name.length,
|
||||
this.salt.size,
|
||||
this.root_digest.size,
|
||||
this.flags,
|
||||
null)
|
||||
val padding = Struct("${padding_size}x").pack(null)
|
||||
return Helper.join(desc, this.partition_name.toByteArray(), this.salt, this.root_digest, padding)
|
||||
}
|
||||
|
||||
override fun toString(): String {
|
||||
return "HashTreeDescriptor(dm_verity_version=$dm_verity_version, image_size=$image_size, tree_offset=$tree_offset, tree_size=$tree_size, data_block_size=$data_block_size, hash_block_size=$hash_block_size, fec_num_roots=$fec_num_roots, fec_offset=$fec_offset, fec_size=$fec_size, hash_algorithm='$hash_algorithm', partition_name='$partition_name', salt=${Arrays.toString(salt)}, root_digest=${Arrays.toString(root_digest)}, flags=$flags)"
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val TAG = 1L
|
||||
private const val RESERVED = 60L
|
||||
private const val SIZE = 120 + RESERVED
|
||||
private const val FORMAT_STRING = "!2QL3Q3L2Q32s4L${RESERVED}s"
|
||||
private val log = LoggerFactory.getLogger(HashTreeDescriptor::class.java)
|
||||
}
|
||||
}
|
@ -0,0 +1,51 @@
|
||||
package avb.desc
|
||||
|
||||
import cfig.Helper
|
||||
import cfig.io.Struct
|
||||
import org.junit.Assert
|
||||
import java.io.InputStream
|
||||
|
||||
class KernelCmdlineDescriptor(
        var flags: Long = 0,
        var cmdlineLength: Long = 0,
        var cmdline: String = "") : Descriptor(TAG, 0, 0) {
    @Throws(IllegalArgumentException::class)
    constructor(data: InputStream, seq: Int = 0) : this() {
        val info = Struct(FORMAT_STRING).unpack(data)
        this.tag = info[0] as Long
        this.num_bytes_following = info[1] as Long
        this.flags = info[2] as Long
        this.cmdlineLength = info[3] as Long
        this.sequence = seq
        val expectedSize = Helper.round_to_multiple(SIZE - 16 + this.cmdlineLength, 8)
        if ((this.tag != TAG) || (this.num_bytes_following != expectedSize)) {
            throw IllegalArgumentException("Given data does not look like a kernel cmdline descriptor")
        }
        this.cmdline = Helper.toCString(Struct("${this.cmdlineLength}s").unpack(data)[0] as ByteArray)
    }

    override fun encode(): ByteArray {
        val num_bytes_following = SIZE - 16 + cmdline.toByteArray().size
        val nbf_with_padding = Helper.round_to_multiple(num_bytes_following.toLong(), 8)
        val padding_size = nbf_with_padding - num_bytes_following
        val desc = Struct(FORMAT_STRING).pack(
                TAG,
                nbf_with_padding,
                this.flags,
                cmdline.toByteArray().size)
        val padding = Struct("${padding_size}x").pack(null)
        return Helper.join(desc, cmdline.toByteArray(), padding)
    }

    companion object {
        const val TAG = 3L
        const val SIZE = 24
        const val FORMAT_STRING = "!2Q2L" // tag, num_bytes_following (descriptor header), flags, cmdline length (bytes)
        const val flagHashTreeEnabled = 1
        const val flagHashTreeDisabled = 2

        init {
            Assert.assertEquals(SIZE, Struct(FORMAT_STRING).calcsize())
        }
    }
}
@ -0,0 +1,30 @@
package avb.desc

import cfig.Helper
import cfig.io.Struct

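// Property descriptor (tag 0): a free-form key/value pair. Both strings are stored
// NUL-terminated, which is the "+ 2" in num_bytes_following and the two ByteArray(1)
// separators appended in encode().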
class PropertyDescriptor(
        var key: String = "",
        var value: String = "") : Descriptor(TAG, 0, 0) {
    override fun encode(): ByteArray {
        this.num_bytes_following = SIZE + this.key.length + this.value.length + 2 - 16
        val nbf_with_padding = Helper.round_to_multiple(this.num_bytes_following, 8)
        val padding_size = nbf_with_padding - num_bytes_following
        val padding = Struct("${padding_size}x").pack(0)
        val desc = Struct(FORMAT_STRING).pack(
                TAG,
                nbf_with_padding,
                this.key.length,
                this.value.length)
        return Helper.join(desc,
                this.key.toByteArray(), ByteArray(1),
                this.value.toByteArray(), ByteArray(1),
                padding)
    }

    companion object {
        val TAG = 0L
        val SIZE = 32L
        val FORMAT_STRING = "!4Q"
    }
}
@ -0,0 +1,107 @@
package avb.desc

import cfig.Helper
import cfig.io.Struct
import org.bouncycastle.util.encoders.Hex
import org.junit.Assert
import org.slf4j.LoggerFactory
import java.io.ByteArrayInputStream
import java.io.InputStream

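// Fallback for any descriptor type: keeps the raw (tag, num_bytes_following) header plus payload
// so unknown descriptors survive a decode/encode round trip; analyze() upgrades known tags to
// their concrete classes.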
class UnknownDescriptor(var data: ByteArray = byteArrayOf()) : Descriptor(0, 0, 0) {
    @Throws(IllegalArgumentException::class)
    constructor(stream: InputStream, seq: Int = 0) : this() {
        this.sequence = seq
        val info = Struct(FORMAT).unpack(stream)
        this.tag = info[0] as Long
        this.num_bytes_following = info[1] as Long
        log.debug("UnknownDescriptor: tag = $tag, len = ${this.num_bytes_following}")
        this.data = ByteArray(this.num_bytes_following.toInt())
        if (this.num_bytes_following.toInt() != stream.read(data)) {
            throw IllegalArgumentException("descriptor SIZE mismatch")
        }
    }

    override fun encode(): ByteArray {
        return Helper.join(Struct(FORMAT).pack(this.tag, this.data.size.toLong()), data)
    }

    override fun toString(): String {
        return "UnknownDescriptor(tag=$tag, SIZE=${data.size}, data=${Hex.toHexString(data)})"
    }

    fun analyze(): Any {
        return when (this.tag) {
            1L -> {
                HashTreeDescriptor(ByteArrayInputStream(this.encode()), this.sequence)
            }
            2L -> {
                HashDescriptor(ByteArrayInputStream(this.encode()), this.sequence)
            }
            3L -> {
                KernelCmdlineDescriptor(ByteArrayInputStream(this.encode()), this.sequence)
            }
            else -> {
                this
            }
        }
    }

    companion object {
        private const val SIZE = 16
        private const val FORMAT = "!QQ"
        private val log = LoggerFactory.getLogger(UnknownDescriptor::class.java)

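        // Reads descriptors back to back until exactly totalSize bytes are consumed;
        // overshooting totalSize means the stream is corrupt and raises IllegalStateException.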
        fun parseDescriptors(stream: InputStream, totalSize: Long): List<UnknownDescriptor> {
            log.debug("Parse descriptors stream, SIZE = $totalSize")
            val ret: MutableList<UnknownDescriptor> = mutableListOf()
            var currentSize = 0L
            while (true) {
                val desc = UnknownDescriptor(stream)
                currentSize += desc.data.size + SIZE
                log.debug("current SIZE = $currentSize")
                ret.add(desc)
                if (currentSize == totalSize) {
                    log.debug("parse descriptor done")
                    break
                } else if (currentSize > totalSize) {
                    log.error("Read more than expected")
                    throw IllegalStateException("Read more than expected")
                } else {
                    log.debug(desc.toString())
                    log.debug("read another descriptor")
                }
            }
            return ret
        }

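        // Same walk as parseDescriptors(), but each raw descriptor is passed through analyze()
        // so callers get typed HashTree/Hash/KernelCmdline descriptors where possible.
        // Hypothetical usage (the auxBlockSize name is illustrative, not part of this code base):
        //   UnknownDescriptor.parseDescriptors2(stream, auxBlockSize).forEach { println(it) }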
        fun parseDescriptors2(stream: InputStream, totalSize: Long): List<Any> {
            log.info("Parse descriptors stream, SIZE = $totalSize")
            val ret: MutableList<Any> = mutableListOf()
            var currentSize = 0L
            var seq = 0
            while (true) {
                val desc = UnknownDescriptor(stream, ++seq)
                currentSize += desc.data.size + SIZE
                log.debug("current SIZE = $currentSize")
                log.debug(desc.toString())
                ret.add(desc.analyze())
                if (currentSize == totalSize) {
                    log.debug("parse descriptor done")
                    break
                } else if (currentSize > totalSize) {
                    log.error("Read more than expected")
                    throw IllegalStateException("Read more than expected")
                } else {
                    log.debug(desc.toString())
                    log.debug("read another descriptor")
                }
            }
            return ret
        }

        init {
            Assert.assertEquals(SIZE, Struct(FORMAT).calcsize())
        }
    }
}
@ -0,0 +1,30 @@
import avb.desc.UnknownDescriptor
import avb.desc.HashDescriptor
import org.bouncycastle.util.encoders.Hex
import org.junit.Test

import org.junit.Assert.*
import org.slf4j.LoggerFactory
import java.io.ByteArrayInputStream

class AvbTest {
    private val log = LoggerFactory.getLogger(AvbTest::class.java)

    @Test
    fun readDescriptors() {
        //output by "xxd -p <file>"
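        // tag = 2 (hash descriptor), num_bytes_following = 0xb8, partition name "boot" (626f6f74)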
        val descStr = "000000000000000200000000000000b800000000017b9000736861323536" +
                "000000000000000000000000000000000000000000000000000000000004" +
                "000000200000002000000000000000000000000000000000000000000000" +
                "000000000000000000000000000000000000000000000000000000000000" +
                "000000000000000000000000626f6f7428f6d60b554d9532bd45874ab0cd" +
                "cb2219c4f437c9350f484fa189a881878ab6156408cd763ff119635ec9db" +
                "2a9656e220fa1dc27e26e59bd3d85025b412ffc3"
        val desc = UnknownDescriptor(ByteArrayInputStream(Hex.decode(descStr)))
        val hashdDesc = HashDescriptor(ByteArrayInputStream(Hex.decode(descStr)))
        log.info(desc.toString())
        log.info(hashdDesc.toString())
        val descAnalyzed = desc.analyze()
        assertTrue(descAnalyzed is HashDescriptor)
    }
}
File diff suppressed because one or more lines are too long
@ -0,0 +1,37 @@
package avb

import org.bouncycastle.util.encoders.Hex
import org.junit.Test

import org.junit.Assert.*
import java.io.ByteArrayInputStream

class FooterTest {

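    // The AVB footer sits in the last 64 bytes of the partition image, hence skip(size - 64) below.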
    @Test
    fun readAVBfooter() {
        val footerBytes = this.javaClass.classLoader.getResourceAsStream("taimen.avbfooter").readBytes()
        ByteArrayInputStream(footerBytes).use {
            it.skip(footerBytes.size - 64L)
            val footer = Footer(it)
            println(footer.toString())
            assertEquals(1, footer.versionMajor)
            assertEquals(0, footer.versionMinor)
            assertEquals(512, footer.vbMetaSize)
            assertEquals(28983296, footer.vbMetaOffset)
            assertEquals(28983296, footer.originalImageSize)
        }
    }

    @Test
    fun readInvalidFooterShouldFail() {
        val vbmetaHeaderStr = "4156423000000001000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c8000000000000000000000000000000c80000000000000000000000000000000000000000000000c800000000000000000000000000000000617662746f6f6c20312e312e3000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
        ByteArrayInputStream(Hex.decode(vbmetaHeaderStr)).use {
            try {
                Footer(it)
                assertEquals("Should never reach here", true, false)
            } catch (e: IllegalArgumentException) {
            }
        }
    }
}
@ -0,0 +1,15 @@
package avb

import org.bouncycastle.util.encoders.Hex
import org.junit.Test
import java.io.ByteArrayInputStream

class HeaderTest {

    @Test
    fun readHeader() {
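        // Raw vbmeta header: starts with the "AVB0" magic (41564230) and embeds the
        // release string "avbtool 1.1.0" (617662746f6f6c20312e312e30).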
        val vbmetaHeaderStr = "4156423000000001000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c8000000000000000000000000000000c80000000000000000000000000000000000000000000000c800000000000000000000000000000000617662746f6f6c20312e312e3000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
        val header = Header(ByteArrayInputStream(Hex.decode(vbmetaHeaderStr)))
        println(header.toString())
    }
}
@ -0,0 +1,14 @@
package avb.alg

import cfig.io.Struct
import org.junit.Test

import org.junit.Assert.*
import java.nio.ByteBuffer

class AlgorithmTest {

    @Test
    fun getName() {
    }
}
@ -0,0 +1,16 @@
package avb.alg

import avb.alg.Algorithms
import cfig.Helper
import org.junit.Assert
import org.junit.Test

class AlgorithmsTest {
    @Test
    fun test() {
        val alg = Algorithms.get("NONE")!!

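        // Expected padding = PKCS#1 v1.5 prefix (0001 ff..ff 00) followed by the ASN.1
        // DigestInfo header for SHA-256, sized for a 4096-bit RSA key.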
        Assert.assertEquals(Helper.toHexString(Algorithms.get("SHA256_RSA4096")!!.padding),
                "0001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff003031300d060960864801650304020105000420")
    }
}
@ -0,0 +1,19 @@
package avb.desc

import org.bouncycastle.util.encoders.Hex
import org.junit.Assert
import org.junit.Test
import org.slf4j.LoggerFactory
import java.io.ByteArrayInputStream

class HashDescriptorTest {
    private val log = LoggerFactory.getLogger(HashDescriptorTest::class.java)

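    // Round-trip: decode a real hash descriptor (partition "boot"), re-encode it, and expect
    // byte-identical output.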
    @Test
    fun parseHashDescriptor() {
        val descStr = "000000000000000200000000000000b80000000001ba4000736861323536000000000000000000000000000000000000000000000000000000000004000000200000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000626f6f74fbfb8e13c8082e0a16582163ad5075668903cc1237c6c007fed69de05957432103ae125531271eeeb83662cbe21543e3025f2d65268fb6b53c8718a90e3b03c7"
        val desc = HashDescriptor(ByteArrayInputStream(Hex.decode(descStr)))
        log.info(desc.toString())
        Assert.assertEquals(descStr, String(Hex.encode(desc.encode())))
    }
}
@ -0,0 +1,28 @@
package avb.desc

import com.fasterxml.jackson.databind.ObjectMapper
import org.bouncycastle.util.encoders.Hex
import org.junit.Test

import org.junit.Assert.*
import java.io.ByteArrayInputStream

class HashTreeDescriptorTest {

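    // Round-trips two real hashtree descriptors (partitions "system" and "vendor") through
    // decode/encode and checks the bytes are preserved exactly.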
    @Test
    fun encode() {
        val treeStr1 = "000000000000000100000000000000e000000001000000009d787000000000009d78700000000000013d9000000010000000100000000002000000009eb60000000000000141400073686131000000000000000000000000000000000000000000000000000000000000000600000020000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000073797374656d28f6d60b554d9532bd45874ab0cdcb2219c4f437c9350f484fa189a881878ab609c2b0ad5852fc0f4a2d03ef9d2be5372e2bd1390000"
        val treeStr2 = "000000000000000100000000000000e000000001000000001ec09000000000001ec0900000000000003e2000000010000000100000000002000000001efeb00000000000003ec00073686131000000000000000000000000000000000000000000000000000000000000000600000020000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000076656e646f7228f6d60b554d9532bd45874ab0cdcb2219c4f437c9350f484fa189a881878ab698cea1ea79a3fa7277255355d42f19af3378b0110000"

        val tree1 = HashTreeDescriptor(ByteArrayInputStream(Hex.decode(treeStr1)), 0)
        println(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(tree1))
        assertEquals(treeStr1, String(Hex.encode(tree1.encode())))

        val reDecoded = HashTreeDescriptor(ByteArrayInputStream(tree1.encode()), 0)
        println(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(reDecoded))

        val tree2 = HashTreeDescriptor(ByteArrayInputStream(Hex.decode(treeStr2)), 0)
        println(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(tree2))
        assertEquals(treeStr2, String(Hex.encode(tree2.encode())))
    }
}
@ -0,0 +1,22 @@
package avb.desc

import org.bouncycastle.util.encoders.Hex
import org.junit.Assert.assertEquals
import org.junit.Test
import org.slf4j.LoggerFactory
import java.io.ByteArrayInputStream

class KernelCmdlineDescriptorTest {
    private val log = LoggerFactory.getLogger(KernelCmdlineDescriptorTest::class.java)

    @Test
    fun encode() {
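        // cmdStr1: flags = 1 (flagHashTreeEnabled), the dm-verity "dm=..." setup line;
        // cmdStr2: flags = 2 (flagHashTreeDisabled), the plain "root=PARTUUID=..." cmdline.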
        val cmdStr1 = "000000000000000300000000000001a8000000010000019b646d3d22312076726f6f74206e6f6e6520726f20312c3020353135393939322076657269747920312050415254555549443d2428414e44524f49445f53595354454d5f5041525455554944292050415254555549443d2428414e44524f49445f53595354454d5f504152545555494429203430393620343039362036343439393920363434393939207368613120303963326230616435383532666330663461326430336566396432626535333732653262643133392032386636643630623535346439353332626434353837346162306364636232323139633466343337633933353066343834666131383961383831383738616236203130202428414e44524f49445f5645524954595f4d4f4445292069676e6f72655f7a65726f5f626c6f636b73207573655f6665635f66726f6d5f6465766963652050415254555549443d2428414e44524f49445f53595354454d5f504152545555494429206665635f726f6f74732032206665635f626c6f636b7320363530303830206665635f7374617274203635303038302220726f6f743d2f6465762f646d2d300000000000"
        val cmdStr2 = "000000000000000300000000000000300000000200000028726f6f743d50415254555549443d2428414e44524f49445f53595354454d5f504152545555494429"
        val cmd1 = KernelCmdlineDescriptor(ByteArrayInputStream(Hex.decode(cmdStr1)), 0)
        assertEquals(cmdStr1, String(Hex.encode(cmd1.encode())))

        val cmd2 = KernelCmdlineDescriptor(ByteArrayInputStream(Hex.decode(cmdStr2)), 0)
        assertEquals(cmdStr2, String(Hex.encode(cmd2.encode())))
    }
}
@ -0,0 +1,28 @@
package avb.desc

import org.bouncycastle.util.encoders.Hex
import org.junit.Test

import org.slf4j.LoggerFactory
import java.io.ByteArrayInputStream

class UnknownDescriptorTest {
    private val log = LoggerFactory.getLogger(UnknownDescriptorTest::class.java)

    @Test
    fun readDescriptors() {
        //output by "xxd -p <file>"
        val descStr = "000000000000000200000000000000b800000000017b9000736861323536" +
                "000000000000000000000000000000000000000000000000000000000004" +
                "000000200000002000000000000000000000000000000000000000000000" +
                "000000000000000000000000000000000000000000000000000000000000" +
                "000000000000000000000000626f6f7428f6d60b554d9532bd45874ab0cd" +
                "cb2219c4f437c9350f484fa189a881878ab6156408cd763ff119635ec9db" +
                "2a9656e220fa1dc27e26e59bd3d85025b412ffc3"
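        // Two copies of the same descriptor are concatenated, so parseDescriptors() should
        // return two entries.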
        val descBA = Hex.decode(descStr + descStr)
        val descList = UnknownDescriptor.parseDescriptors(ByteArrayInputStream(descBA), descBA.size.toLong())
        descList.forEach {
            log.info(it.toString())
        }
    }
}
@ -0,0 +1,107 @@
import cfig.Avb
import cfig.Helper
import cfig.io.Struct
import org.junit.Assert
import org.junit.Test

import org.junit.Assert.*
import java.io.ByteArrayInputStream

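// The Struct format strings mirror Python's struct module: '<' little-endian, '>'/'!' big-endian,
// a leading count repeats the next item, 's' is a byte string, 'x' is a pad byte.
// e.g. "!2Q2L" = two big-endian u64 plus two big-endian u32 = 24 bytes.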
class StructTest {
    @Test
    fun constructTest() {
        assertEquals(16, Struct("<2i4b4b").calcsize())
        assertEquals(16, Struct("<Q8b").calcsize())
        assertEquals(2, Struct(">h").calcsize())
        assertEquals(3, Struct(">3s").calcsize())
        assertEquals(4, Struct("!Hh").calcsize())

        try {
            Struct("abcd")
            throw Exception("should not reach here")
        } catch (e: IllegalArgumentException) {
        }
    }

    @Test
    fun integerLE() {
        //int (4B)
        assertTrue(Struct("<2i").pack(1, 7321).contentEquals(Helper.fromHexString("01000000991c0000")))
        val ret = Struct("<2i").unpack(ByteArrayInputStream(Helper.fromHexString("01000000991c0000")))
        assertEquals(2, ret.size)
        assertTrue(ret[0] is Int)
        assertTrue(ret[1] is Int)
        assertEquals(1, ret[0] as Int)
        assertEquals(7321, ret[1] as Int)

        //unsigned int (4B)
        assertTrue(Struct("<I").pack(2L).contentEquals(Helper.fromHexString("02000000")))
        assertTrue(Struct("<I").pack(2).contentEquals(Helper.fromHexString("02000000")))
        //greater than Int.MAX_VALUE
        assertTrue(Struct("<I").pack(2147483748L).contentEquals(Helper.fromHexString("64000080")))
        assertTrue(Struct("<I").pack(2147483748).contentEquals(Helper.fromHexString("64000080")))
        try {
            Struct("<I").pack(-12)
            throw Exception("should not reach here")
        } catch (e: IllegalArgumentException) {
        }

        //negative int
        assertTrue(Struct("<i").pack(-333).contentEquals(Helper.fromHexString("b3feffff")))
    }

    @Test
    fun integerBE() {
        run {
            assertTrue(Struct(">2i").pack(1, 7321).contentEquals(Helper.fromHexString("0000000100001c99")))
            val ret = Struct(">2i").unpack(ByteArrayInputStream(Helper.fromHexString("0000000100001c99")))
            assertEquals(1, ret[0] as Int)
            assertEquals(7321, ret[1] as Int)
        }

        run {
            assertTrue(Struct("!i").pack(-333).contentEquals(Helper.fromHexString("fffffeb3")))
            val ret2 = Struct("!i").unpack(ByteArrayInputStream(Helper.fromHexString("fffffeb3")))
            assertEquals(-333, ret2[0] as Int)
        }
    }

    @Test
    fun byteArrayTest() {
        //byte array
        assertTrue(Struct("<4b").pack(byteArrayOf(-128, 2, 55, 127)).contentEquals(Helper.fromHexString("8002377f")))
        assertTrue(Struct("<4b").pack(intArrayOf(0, 55, 202, 0xff)).contentEquals(Helper.fromHexString("0037caff")))
        try {
            Struct("b").pack(intArrayOf(256))
            throw Exception("should not reach here")
        } catch (e: IllegalArgumentException) {
        }
        try {
            Struct("b").pack(intArrayOf(-1))
            throw Exception("should not reach here")
        } catch (e: IllegalArgumentException) {
        }
    }

    @Test
    fun packCombinedTest() {
        assertTrue(Struct("<2i4b4b").pack(
                1, 7321, byteArrayOf(1, 2, 3, 4), byteArrayOf(200.toByte(), 201.toByte(), 202.toByte(), 203.toByte()))!!
                .contentEquals(Helper.fromHexString("01000000991c000001020304c8c9cacb")))
        assertTrue(Struct("<2i4b4b").pack(
                1, 7321, byteArrayOf(1, 2, 3, 4), intArrayOf(200, 201, 202, 203))!!
                .contentEquals(Helper.fromHexString("01000000991c000001020304c8c9cacb")))
    }

    @Test
    fun paddingTest() {
        assertTrue(Struct("b2x").pack(byteArrayOf(0x13), null).contentEquals(Helper.fromHexString("130000")))
        assertTrue(Struct("b2xi").pack(byteArrayOf(0x13), null, 55).contentEquals(Helper.fromHexString("13000037000000")))
    }

    @Test
    fun stringTest() {
        assertTrue(Struct("5s").pack("Good".toByteArray()).contentEquals(Helper.fromHexString("476f6f6400")))
        assertTrue(Struct("5s1b").pack("Good".toByteArray(), byteArrayOf(13)).contentEquals(Helper.fromHexString("476f6f64000d")))
    }
}
@ -0,0 +1 @@
org.slf4j.simpleLogger.defaultLogLevel = debug
Binary file not shown.