fully support @verify

ChainDesc: verify public key and all
    {ChainDesc|HashDesc|HashTreeDesc} inside
HashDesc: verify hash
HashTreeDesc: verify whole hash tree and root hash
cfig, 4 years ago
parent 2d5a7b7019, commit e66477ce65
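For orientation: every parser now inherits a reflection-invoked `@verify` entry point (see IPackable below), which boils down to two calls. A minimal sketch of the flow, assuming a local boot.img (the file name is only an example):

    import cfig.Avb

    fun main() {
        // parse header/auth/aux blobs and dump them as JSON
        val ai = Avb().parseVbMeta("boot.img", true)
        // walk every chain/hash/hashtree descriptor; element [0] is the
        // overall Boolean verdict, [1] accumulates failure reasons as a String
        val ret = Avb().verify(ai, "boot.img")
        println("verified=${ret[0]}, reason='${ret[1]}'")
    }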

Avb.kt

@@ -17,13 +17,13 @@ import org.apache.commons.codec.binary.Hex
 import org.apache.commons.exec.CommandLine
 import org.apache.commons.exec.DefaultExecutor
 import org.slf4j.LoggerFactory
+import java.io.ByteArrayInputStream
 import java.io.File
 import java.io.FileInputStream
 import java.io.FileOutputStream
 import java.nio.file.Files
 import java.nio.file.Paths
 import java.nio.file.StandardOpenOption
-import java.security.PrivateKey
 @OptIn(ExperimentalUnsignedTypes::class)
 class Avb {
@@ -160,7 +160,7 @@ class Avb {
     }
     fun parseVbMeta(image_file: String, dumpFile: Boolean = true): AVBInfo {
-        log.info("parsing $image_file ...")
+        log.info("parseVbMeta($image_file) ...")
         val jsonFile = getJsonFileName(image_file)
         var footer: Footer? = null
         var vbMetaOffset: Long = 0
@@ -177,17 +177,18 @@ class Avb {
         }
         // header
-        var vbMetaHeader: Header
+        val rawHeaderBlob = ByteArray(Header.SIZE).apply {
             FileInputStream(image_file).use { fis ->
                 fis.skip(vbMetaOffset)
-                vbMetaHeader = Header(fis)
+                fis.read(this)
+            }
         }
+        val vbMetaHeader = Header(ByteArrayInputStream(rawHeaderBlob))
         log.debug(vbMetaHeader.toString())
         log.debug(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(vbMetaHeader))
         val authBlockOffset = vbMetaOffset + Header.SIZE
         val auxBlockOffset = authBlockOffset + vbMetaHeader.authentication_data_block_size
-        val descStartOffset = auxBlockOffset + vbMetaHeader.descriptors_offset
         val ai = AVBInfo(vbMetaHeader, null, AuxBlob(), footer)
@@ -212,30 +213,37 @@ class Avb {
             }
         }
+        // aux
+        val rawAuxBlob = ByteArray(vbMetaHeader.auxiliary_data_block_size.toInt()).apply {
+            FileInputStream(image_file).use { fis ->
+                fis.skip(auxBlockOffset)
+                fis.read(this)
+            }
+        }
         // aux - desc
         var descriptors: List<Any>
         if (vbMetaHeader.descriptors_size > 0) {
-            FileInputStream(image_file).use { fis ->
-                fis.skip(descStartOffset)
-                descriptors = UnknownDescriptor.parseDescriptors2(fis, vbMetaHeader.descriptors_size)
+            ByteArrayInputStream(rawAuxBlob).use { bis ->
+                bis.skip(vbMetaHeader.descriptors_offset)
+                descriptors = UnknownDescriptor.parseDescriptors2(bis, vbMetaHeader.descriptors_size)
             }
             descriptors.forEach {
                 log.debug(it.toString())
                 when (it) {
                     is PropertyDescriptor -> {
-                        ai.auxBlob!!.propertyDescriptor.add(it)
+                        ai.auxBlob!!.propertyDescriptors.add(it)
                     }
                     is HashDescriptor -> {
                         ai.auxBlob!!.hashDescriptors.add(it)
                     }
                     is KernelCmdlineDescriptor -> {
-                        ai.auxBlob!!.kernelCmdlineDescriptor.add(it)
+                        ai.auxBlob!!.kernelCmdlineDescriptors.add(it)
                     }
                     is HashTreeDescriptor -> {
-                        ai.auxBlob!!.hashTreeDescriptor.add(it)
+                        ai.auxBlob!!.hashTreeDescriptors.add(it)
                     }
                     is ChainPartitionDescriptor -> {
-                        ai.auxBlob!!.chainPartitionDescriptor.add(it)
+                        ai.auxBlob!!.chainPartitionDescriptors.add(it)
                     }
                     is UnknownDescriptor -> {
                         ai.auxBlob!!.unknownDescriptors.add(it)
@@ -252,11 +260,10 @@ class Avb {
         ai.auxBlob!!.pubkey!!.offset = vbMetaHeader.public_key_offset
         ai.auxBlob!!.pubkey!!.size = vbMetaHeader.public_key_size
-        FileInputStream(image_file).use { fis ->
-            fis.skip(auxBlockOffset)
-            fis.skip(vbMetaHeader.public_key_offset)
+        ByteArrayInputStream(rawAuxBlob).use { bis ->
+            bis.skip(vbMetaHeader.public_key_offset)
             ai.auxBlob!!.pubkey!!.pubkey = ByteArray(vbMetaHeader.public_key_size.toInt())
-            fis.read(ai.auxBlob!!.pubkey!!.pubkey)
+            bis.read(ai.auxBlob!!.pubkey!!.pubkey)
             log.debug("Parsed Pub Key: " + Hex.encodeHexString(ai.auxBlob!!.pubkey!!.pubkey))
         }
     }
@@ -266,64 +273,115 @@ class Avb {
             ai.auxBlob!!.pubkeyMeta!!.offset = vbMetaHeader.public_key_metadata_offset
             ai.auxBlob!!.pubkeyMeta!!.size = vbMetaHeader.public_key_metadata_size
-            FileInputStream(image_file).use { fis ->
-                fis.skip(auxBlockOffset)
-                fis.skip(vbMetaHeader.public_key_metadata_offset)
+            ByteArrayInputStream(rawAuxBlob).use { bis ->
+                bis.skip(vbMetaHeader.public_key_metadata_offset)
                 ai.auxBlob!!.pubkeyMeta!!.pkmd = ByteArray(vbMetaHeader.public_key_metadata_size.toInt())
-                fis.read(ai.auxBlob!!.pubkeyMeta!!.pkmd)
+                bis.read(ai.auxBlob!!.pubkeyMeta!!.pkmd)
                 log.debug("Parsed Pub Key Metadata: " + Helper.toHexString(ai.auxBlob!!.pubkeyMeta!!.pkmd))
             }
         }
+        if (dumpFile) {
+            ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File(jsonFile), ai)
+            log.info("parseVbMeta($image_file) done. Result: $jsonFile")
+        } else {
+            log.debug("vbmeta info of [$image_file] has been analyzed, no dumping")
+        }
+        return ai
+    }
+    fun verify(ai: AVBInfo, image_file: String, parent: String = ""): Array<Any> {
+        val ret: Array<Any> = arrayOf(true, "")
+        val localParent = if (parent.isEmpty()) image_file else parent
+        //header
+        val rawHeaderBlob = ByteArray(Header.SIZE).apply {
+            FileInputStream(image_file).use { fis ->
+                ai.footer?.let {
+                    fis.skip(it.vbMetaOffset)
+                }
+                fis.read(this)
+            }
+        }
+        // aux
+        val rawAuxBlob = ByteArray(ai.header!!.auxiliary_data_block_size.toInt()).apply {
+            FileInputStream(image_file).use { fis ->
+                val vbOffset = if (ai.footer == null) 0 else ai.footer!!.vbMetaOffset
+                fis.skip(vbOffset + Header.SIZE + ai.header!!.authentication_data_block_size)
+                fis.read(this)
+            }
+        }
         //integrity check
         val declaredAlg = Algorithms.get(ai.header!!.algorithm_type)
         if (declaredAlg!!.public_key_num_bytes > 0) {
             if (AuxBlob.encodePubKey(declaredAlg).contentEquals(ai.auxBlob!!.pubkey!!.pubkey)) {
-                log.info("VERIFY: signed with dev key: " + declaredAlg.defaultKey)
+                log.info("VERIFY($localParent): signed with dev key: " + declaredAlg.defaultKey)
             } else {
-                log.info("VERIFY: signed with release key")
+                log.info("VERIFY($localParent): signed with release key")
             }
-            val headerBlob = ByteArray(Header.SIZE).apply {
-                FileInputStream(image_file).use { fis ->
-                    fis.skip(vbMetaOffset)
-                    fis.read(this)
-                }
-            }
-            val auxBlob = ByteArray(vbMetaHeader.auxiliary_data_block_size.toInt()).apply {
-                FileInputStream(image_file).use { fis ->
-                    fis.skip(auxBlockOffset)
-                    fis.read(this)
-                }
-            }
-            val calcHash = Helper.join(declaredAlg.padding, AuthBlob.calcHash(headerBlob, auxBlob, declaredAlg.name))
+            val calcHash = Helper.join(declaredAlg.padding, AuthBlob.calcHash(rawHeaderBlob, rawAuxBlob, declaredAlg.name))
             val readHash = Helper.join(declaredAlg.padding, Helper.fromHexString(ai.authBlob!!.hash!!))
             if (calcHash.contentEquals(readHash)) {
-                log.info("VERIFY: vbmeta hash... PASS")
+                log.info("VERIFY($localParent->AuthBlob): verify hash... PASS")
                 val readPubKey = KeyHelper.decodeRSAkey(ai.auxBlob!!.pubkey!!.pubkey)
                 val hashFromSig = KeyHelper2.rawRsa(readPubKey, Helper.fromHexString(ai.authBlob!!.signature!!))
                 if (hashFromSig.contentEquals(readHash)) {
-                    log.info("VERIFY: vbmeta signature... PASS")
+                    log.info("VERIFY($localParent->AuthBlob): verify signature... PASS")
                 } else {
+                    ret[0] = false
+                    ret[1] = ret[1] as String + " verify signature fail;"
                     log.warn("read=" + Helper.toHexString(readHash) + ", calc=" + Helper.toHexString(calcHash))
-                    log.warn("VERIFY: vbmeta signature... FAIL")
+                    log.warn("VERIFY($localParent->AuthBlob): verify signature... FAIL")
                 }
             } else {
+                ret[0] = false
+                ret[1] = ret[1] as String + " verify hash fail"
                 log.warn("read=" + ai.authBlob!!.hash!! + ", calc=" + Helper.toHexString(calcHash))
-                log.warn("VERIFY: vbmeta hash... FAIL")
+                log.warn("VERIFY($localParent->AuthBlob): verify hash... FAIL")
             }
         } else {
-            log.warn("no signing key for current algorithm")
+            log.warn("VERIFY($localParent->AuthBlob): algorithm=[${declaredAlg.name}], no signature, skip")
        }
-        if (dumpFile) {
-            ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(File(jsonFile), ai)
-            log.info("vbmeta info of [$image_file] has been analyzed")
-            log.info("vbmeta info written to $jsonFile")
-        } else {
-            log.warn("vbmeta info of [$image_file] has been analyzed, no dummping")
-        }
-        return ai
+        val morePath = System.getenv("more")
+        val morePrefix = if (!morePath.isNullOrBlank()) "$morePath/" else ""
+        ai.auxBlob!!.chainPartitionDescriptors.forEach {
+            val vRet = it.verify(listOf(morePrefix + it.partition_name + ".img", it.partition_name + ".img"),
+                image_file + "->Chain[${it.partition_name}]")
+            if (vRet[0] as Boolean) {
+                log.info("VERIFY($localParent->Chain[${it.partition_name}]): " + "PASS")
+            } else {
+                ret[0] = false
+                ret[1] = ret[1] as String + "; " + vRet[1] as String
+                log.info("VERIFY($localParent->Chain[${it.partition_name}]): " + vRet[1] as String + "... FAIL")
+            }
+        }
+        ai.auxBlob!!.hashDescriptors.forEach {
+            val vRet = it.verify(listOf(morePrefix + it.partition_name + ".img", it.partition_name + ".img"),
+                image_file + "->HashDescriptor[${it.partition_name}]")
+            if (vRet[0] as Boolean) {
+                log.info("VERIFY($localParent->HashDescriptor[${it.partition_name}]): ${it.hash_algorithm} " + "PASS")
+            } else {
+                ret[0] = false
+                ret[1] = ret[1] as String + "; " + vRet[1] as String
+                log.info("VERIFY($localParent->HashDescriptor[${it.partition_name}]): ${it.hash_algorithm} " + vRet[1] as String + "... FAIL")
+            }
+        }
+        ai.auxBlob!!.hashTreeDescriptors.forEach {
+            val vRet = it.verify(listOf(morePrefix + it.partition_name + ".img", it.partition_name + ".img"),
+                image_file + "->HashTreeDescriptor[${it.partition_name}]")
+            if (vRet[0] as Boolean) {
+                log.info("VERIFY($localParent->HashTreeDescriptor[${it.partition_name}]): ${it.hash_algorithm} " + "PASS")
+            } else {
+                ret[0] = false
+                ret[1] = ret[1] as String + "; " + vRet[1] as String
+                log.info("VERIFY($localParent->HashTreeDescriptor[${it.partition_name}]): ${it.hash_algorithm} " + vRet[1] as String + "... FAIL")
+            }
+        }
+        return ret
     }
     private fun packVbMeta(info: AVBInfo? = null, image_file: String? = null): ByteArray {
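Note the two conventions introduced here: every verify() returns Array<Any> with element 0 the Boolean verdict and element 1 an accumulated failure string, and the chained/hashed partition images are searched first under the directory named by the `more` environment variable, then in the current directory.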

AuthBlob.kt

@@ -1,6 +1,5 @@
 package avb.blob
-import avb.alg.Algorithm
 import avb.alg.Algorithms
 import cfig.helper.Helper
 import cfig.helper.KeyHelper
@@ -63,7 +62,7 @@ data class AuthBlob(
         //hash & signature
         val binaryHash = calcHash(header_data_blob, aux_data_blob, algorithm_name)
-        var binarySignature = calcSignature(binaryHash, algorithm_name)
+        val binarySignature = calcSignature(binaryHash, algorithm_name)
         val authData = Helper.join(binaryHash, binarySignature)
         return Helper.join(authData, Struct3("${authBlockSize - authData.size}x").pack(0))
     }
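For reference, the authentication block built above is laid out as hash followed by signature, zero-padded up to authentication_data_block_size; with SHA256_RSA4096, for example, that is a 32-byte hash followed by a 512-byte signature (sizes per the AVB algorithm table).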

AuxBlob.kt

@@ -4,26 +4,24 @@ import avb.alg.Algorithm
 import avb.desc.*
 import cfig.helper.Helper
 import cfig.helper.KeyHelper
-import cfig.helper.KeyHelper2
 import cfig.io.Struct3
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties
 import org.bouncycastle.asn1.pkcs.RSAPrivateKey
 import org.slf4j.LoggerFactory
-import java.io.ByteArrayInputStream
 import java.nio.file.Files
 import java.nio.file.Paths
 @OptIn(ExperimentalUnsignedTypes::class)
 @JsonIgnoreProperties("descriptorSize")
 class AuxBlob(
     var pubkey: PubKeyInfo? = null,
     var pubkeyMeta: PubKeyMetadataInfo? = null,
-    var propertyDescriptor: MutableList<PropertyDescriptor> = mutableListOf(),
-    var hashTreeDescriptor: MutableList<HashTreeDescriptor> = mutableListOf(),
+    var propertyDescriptors: MutableList<PropertyDescriptor> = mutableListOf(),
+    var hashTreeDescriptors: MutableList<HashTreeDescriptor> = mutableListOf(),
     var hashDescriptors: MutableList<HashDescriptor> = mutableListOf(),
-    var kernelCmdlineDescriptor: MutableList<KernelCmdlineDescriptor> = mutableListOf(),
-    var chainPartitionDescriptor: MutableList<ChainPartitionDescriptor> = mutableListOf(),
+    var kernelCmdlineDescriptors: MutableList<KernelCmdlineDescriptor> = mutableListOf(),
+    var chainPartitionDescriptors: MutableList<ChainPartitionDescriptor> = mutableListOf(),
     var unknownDescriptors: MutableList<UnknownDescriptor> = mutableListOf()) {
     val descriptorSize: Int
         get(): Int {
@@ -44,12 +42,12 @@ class AuxBlob(
     private fun encodeDescriptors(): ByteArray {
         return mutableListOf<Descriptor>().let { descList ->
-            arrayOf(this.propertyDescriptor, //tag 0
-                    this.hashTreeDescriptor, //tag 1
+            arrayOf(this.propertyDescriptors, //tag 0
+                    this.hashTreeDescriptors, //tag 1
                     this.hashDescriptors, //tag 2
-                    this.kernelCmdlineDescriptor, //tag 3
-                    this.chainPartitionDescriptor, //tag 4
+                    this.kernelCmdlineDescriptors, //tag 3
+                    this.chainPartitionDescriptors, //tag 4
                     this.unknownDescriptors //tag X
             ).forEach { typedList ->
                 typedList.forEach { descList.add(it) }
             }

ChainPartitionDescriptor.kt

@@ -1,10 +1,12 @@
 package avb.desc
+import cfig.Avb
 import cfig.helper.Helper
 import cfig.io.Struct3
+import java.io.File
 import java.io.InputStream
 import java.security.MessageDigest
-import java.util.*
+import org.slf4j.LoggerFactory
 @OptIn(ExperimentalUnsignedTypes::class)
 class ChainPartitionDescriptor(
@@ -37,6 +39,7 @@ class ChainPartitionDescriptor(
         const val RESERVED = 64
         const val SIZE = 28L + RESERVED
         const val FORMAT_STRING = "!2Q3L"
+        private val log = LoggerFactory.getLogger(ChainPartitionDescriptor::class.java)
     }
     constructor(data: InputStream, seq: Int = 0) : this() {
@@ -50,20 +53,40 @@ class ChainPartitionDescriptor(
         this.rollback_index_location = (info[2] as UInt).toInt()
         this.partition_name_len = (info[3] as UInt).toInt()
         this.public_key_len = (info[4] as UInt).toInt()
-        val expectedSize = Helper.round_to_multiple(
-                SIZE - 16 + this.partition_name_len + this.public_key_len, 8)
-        if (this.tag != TAG || this.num_bytes_following != expectedSize.toLong()) {
+        val expectedSize = Helper.round_to_multiple(SIZE - 16 + this.partition_name_len + this.public_key_len, 8)
+        if (this.tag != TAG || this.num_bytes_following != expectedSize) {
             throw IllegalArgumentException("Given data does not look like a chain/delegation descriptor")
         }
         val info2 = Struct3("${this.partition_name_len}s${this.public_key_len}b").unpack(data)
         this.partition_name = info2[0] as String
         this.pubkey = info2[1] as ByteArray
-        val md = MessageDigest.getInstance("SHA1")
-        md.update(this.pubkey)
-        this.pubkey_sha1 = Helper.toHexString(md.digest())
+        val md = MessageDigest.getInstance("SHA1").let {
+            it.update(this.pubkey)
+            it.digest()
+        }
+        this.pubkey_sha1 = Helper.toHexString(md)
+    }
+    fun verify(image_files: List<String>, parent: String = ""): Array<Any> {
+        val ret: Array<Any> = arrayOf(false, "file not found")
+        for (item in image_files) {
+            if (File(item).exists()) {
+                val subAi = Avb().parseVbMeta(item, false)
+                if (pubkey.contentEquals(subAi.auxBlob!!.pubkey!!.pubkey)) {
+                    log.info("VERIFY($parent): public key matches, PASS")
+                    return Avb().verify(subAi, item, parent)
+                } else {
+                    log.info("VERIFY($parent): public key mismatch, FAIL")
+                    ret[1] = "public key mismatch"
+                    return ret
+                }
+            }
+        }
+        log.info("VERIFY($parent): " + ret[1] as String + "... FAIL")
+        return ret
     }
     override fun toString(): String {
-        return "ChainPartitionDescriptor(partition=${this.partition_name}, pubkey=${Arrays.toString(this.pubkey)}"
+        return "ChainPartitionDescriptor(partition=${this.partition_name}, pubkey=${this.pubkey.contentToString()}"
     }
 }
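Chain verification therefore cascades: the descriptor's embedded public key must equal the key actually found in the chained partition's own vbmeta, and only then is Avb().verify() re-entered on that image, so a single @verify walks the entire trust tree rooted at the top-level vbmeta.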

HashDescriptor.kt

@@ -71,12 +71,28 @@ class HashDescriptor(var flags: Int = 0,
         return Helper.join(desc, partition_name.toByteArray(), this.salt, this.digest, padding)
     }
-    fun verify(image_file: String) {
-        val hasher = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm))
-        hasher.update(this.salt)
-        hasher.update(File(image_file).readBytes())
-        val digest = hasher.digest()
-        log.info("digest:" + Helper.toHexString(digest))
+    fun verify(image_files: List<String>, parent: String = ""): Array<Any> {
+        val ret: Array<Any> = arrayOf(false, "file not found")
+        for (item in image_files) {
+            if (File(item).exists()) {
+                val hasher = MessageDigest.getInstance(Helper.pyAlg2java(hash_algorithm))
+                hasher.update(this.salt)
+                FileInputStream(item).use { fis ->
+                    val data = ByteArray(this.image_size.toInt())
+                    fis.read(data)
+                    hasher.update(data)
+                }
+                val dg = hasher.digest()
+                if (dg.contentEquals(this.digest)) {
+                    ret[0] = true
+                    ret[1] = "PASS"
+                } else {
+                    ret[1] = "hash mismatch"
+                }
+                return ret
+            }
+        }
+        return ret
     }
     fun update(image_file: String, use_persistent_digest: Boolean = false): HashDescriptor {
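The check above hashes salt plus the first image_size bytes of the partition image and compares the result against the stored digest. A standalone sketch of the same check (checkHashDescriptor is a hypothetical helper, not part of the commit; unlike the code above it loops on read(), which is allowed to return short counts):

    import java.io.FileInputStream
    import java.security.MessageDigest

    fun checkHashDescriptor(image: String, salt: ByteArray, imageSize: Int,
                            expected: ByteArray, jcaAlg: String = "SHA-256"): Boolean {
        val md = MessageDigest.getInstance(jcaAlg)
        md.update(salt)                          // digest = H(salt || image[0 until imageSize])
        FileInputStream(image).use { fis ->
            val buf = ByteArray(64 * 1024)
            var remaining = imageSize
            while (remaining > 0) {
                val n = fis.read(buf, 0, minOf(buf.size, remaining))
                if (n < 0) break                 // truncated image
                md.update(buf, 0, n)
                remaining -= n
            }
        }
        return md.digest().contentEquals(expected)
    }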

HashTreeDescriptor.kt

@@ -2,26 +2,30 @@ package avb.desc
 import avb.blob.Header
 import cfig.helper.Helper
+import cfig.helper.KeyHelper2
 import cfig.io.Struct3
-import java.io.InputStream
+import org.slf4j.LoggerFactory
+import java.io.*
+import java.security.MessageDigest
 import java.util.*
 @OptIn(ExperimentalUnsignedTypes::class)
 class HashTreeDescriptor(
     var flags: Int = 0,
     var dm_verity_version: Int = 0,
     var image_size: Long = 0,
     var tree_offset: Long = 0,
     var tree_size: Long = 0,
     var data_block_size: Int = 0,
     var hash_block_size: Int = 0,
     var fec_num_roots: Int = 0,
     var fec_offset: Long = 0,
     var fec_size: Long = 0,
     var hash_algorithm: String = "",
     var partition_name: String = "",
     var salt: ByteArray = byteArrayOf(),
-    var root_digest: ByteArray = byteArrayOf()) : Descriptor(TAG, 0, 0) {
+    var root_digest: ByteArray = byteArrayOf()
+) : Descriptor(TAG, 0, 0) {
     var flagsInterpretation: String = ""
         get() {
             var ret = ""
@@ -52,7 +56,8 @@ class HashTreeDescriptor(
         val salt_len = info[13] as UInt
         val root_digest_len = info[14] as UInt
         this.flags = (info[15] as UInt).toInt()
-        val expectedSize = Helper.round_to_multiple(SIZE.toUInt() - 16U + partition_name_len + salt_len + root_digest_len, 8U)
+        val expectedSize =
+            Helper.round_to_multiple(SIZE.toUInt() - 16U + partition_name_len + salt_len + root_digest_len, 8U)
         if (this.tag != TAG || this.num_bytes_following != expectedSize.toLong()) {
             throw IllegalArgumentException("Given data does not look like a hashtree descriptor")
         }
@@ -68,29 +73,148 @@ class HashTreeDescriptor(
         val nbf_with_padding = Helper.round_to_multiple(this.num_bytes_following.toLong(), 8)
         val padding_size = nbf_with_padding - this.num_bytes_following.toLong()
         val desc = Struct3(FORMAT_STRING).pack(
             TAG,
             nbf_with_padding.toULong(),
             this.dm_verity_version,
             this.image_size,
             this.tree_offset,
             this.tree_size,
             this.data_block_size,
             this.hash_block_size,
             this.fec_num_roots,
             this.fec_offset,
             this.fec_size,
             this.hash_algorithm,
             this.partition_name.length,
             this.salt.size,
             this.root_digest.size,
             this.flags,
-            null)
+            null
+        )
         val padding = Struct3("${padding_size}x").pack(null)
         return Helper.join(desc, this.partition_name.toByteArray(), this.salt, this.root_digest, padding)
     }
+    fun verify(fileNames: List<String>, parent: String = ""): Array<Any> {
+        for (item in fileNames) {
+            if (File(item).exists()) {
+                val trimmedHash = this.genMerkleTree(item, "hash.tree")
+                val readTree = ByteArray(this.tree_size.toInt())
+                FileInputStream(item).use { fis ->
+                    fis.skip(this.tree_offset)
+                    fis.read(readTree)
+                }
+                val ourHtHash = KeyHelper2.sha256(File("hash.tree").readBytes())
+                val diskHtHash = KeyHelper2.sha256(readTree)
+                if (!ourHtHash.contentEquals(diskHtHash)) {
+                    return arrayOf(false, "MerkleTree corrupted")
+                } else {
+                    log.info("VERIFY($parent): MerkleTree integrity check... PASS")
+                }
+                if (!this.root_digest.contentEquals(trimmedHash)) {
+                    return arrayOf(false, "MerkleTree root hash mismatch")
+                } else {
+                    log.info("VERIFY($parent): MerkleTree root hash check... PASS")
+                }
+                return arrayOf(true, "")
+            }
+        }
+        return arrayOf(false, "file not found")
+    }
+    private fun calcSingleHashSize(padded: Boolean = false): Int {
+        val digSize = MessageDigest.getInstance(KeyHelper2.pyAlg2java(this.hash_algorithm)).digest().size
+        val padSize = Helper.round_to_pow2(digSize.toLong()) - digSize
+        return (digSize + (if (padded) padSize else 0)).toInt()
+    }
+    private fun calcStreamHashSize(inStreamSize: Long, inBlockSize: Int): Long {
+        val blockCount = (inStreamSize + inBlockSize - 1) / inBlockSize
+        return Helper.round_to_multiple(blockCount * calcSingleHashSize(true), inBlockSize)
+    }
+    fun hashStream(
+        inputStream: InputStream,
+        streamSz: Long,
+        blockSz: Int
+    ): ByteArray {
+        val hashSize = calcStreamHashSize(streamSz, blockSz)
+        val bos = ByteArrayOutputStream(hashSize.toInt())
+        run hashing@{
+            val padSz = calcSingleHashSize(true) - calcSingleHashSize(false)
+            val padding = Struct3("${padSz}x").pack(0)
+            var totalRead = 0L
+            while (true) {
+                val data = ByteArray(blockSz)
+                MessageDigest.getInstance(KeyHelper2.pyAlg2java(this.hash_algorithm)).let {
+                    val bytesRead = inputStream.read(data)
+                    if (bytesRead <= 0) {
+                        return@hashing
+                    }
+                    totalRead += bytesRead
+                    if (totalRead > streamSz) {
+                        return@hashing
+                    }
+                    it.update(this.salt)
+                    it.update(data)
+                    val dg = it.digest()
+                    bos.write(dg)
+                    bos.write(padding)
+                    //log.info(Helper.toHexString(dg))
+                }
+            }
+        }//hashing
+        if (hashSize > bos.size()) {
+            bos.write(Struct3("${hashSize - bos.size()}x").pack(0))
+        }
+        return bos.toByteArray()
+    }
+    fun genMerkleTree(fileName: String, treeFile: String? = null): ByteArray {
+        log.info("generate Merkle tree()")
+        val plannedTree = calcMerkleTree(this.image_size, this.hash_block_size, calcSingleHashSize(true))
+        val calcRootHash: ByteArray
+        treeFile?.let { File(treeFile).let { if (it.exists()) it.delete() } }
+        val raf = if (treeFile.isNullOrBlank()) null else RandomAccessFile(treeFile, "rw")
+        val l0: ByteArray
+        log.info("Hashing Level #${plannedTree.size}..." + plannedTree.get(plannedTree.size - 1))
+        FileInputStream(fileName).use { fis ->
+            l0 = hashStream(fis, this.image_size, this.data_block_size)
+        }
+        if (DEBUG) FileOutputStream("hash.file" + plannedTree.size).use { it.write(l0) }
+        raf?.seek(plannedTree.get(plannedTree.size - 1).hashOffset)
+        raf?.write(l0)
+        var dataToHash: ByteArray = l0
+        var i = plannedTree.size - 1
+        while (true) {
+            val levelHash = hashStream(dataToHash.inputStream(), dataToHash.size.toLong(), this.hash_block_size)
+            if (DEBUG) FileOutputStream("hash.file$i").use { it.write(levelHash) }
+            if (dataToHash.size <= this.hash_block_size) {
+                log.debug("Got root hash: " + Helper.toHexString(levelHash))
+                calcRootHash = levelHash
+                break
+            }
+            log.info("Hashing Level #$i..." + plannedTree.get(i - 1))
+            raf?.seek(plannedTree.get(i - 1).hashOffset)
+            raf?.write(levelHash)
+            dataToHash = levelHash
+            i--
+        }
+        raf?.close()
+        raf?.let { log.info("MerkleTree(${this.partition_name}) saved to $treeFile") }
+        return calcRootHash.sliceArray(0 until calcSingleHashSize(false))
+    }
     override fun toString(): String {
-        return "HashTreeDescriptor(dm_verity_version=$dm_verity_version, image_size=$image_size, tree_offset=$tree_offset, tree_size=$tree_size, data_block_size=$data_block_size, hash_block_size=$hash_block_size, fec_num_roots=$fec_num_roots, fec_offset=$fec_offset, fec_size=$fec_size, hash_algorithm='$hash_algorithm', partition_name='$partition_name', salt=${Arrays.toString(salt)}, root_digest=${Arrays.toString(root_digest)}, flags=$flags)"
+        return "HashTreeDescriptor(dm_verity_version=$dm_verity_version, image_size=$image_size, " +
+                "tree_offset=$tree_offset, tree_size=$tree_size, data_block_size=$data_block_size, " +
+                "hash_block_size=$hash_block_size, fec_num_roots=$fec_num_roots, fec_offset=$fec_offset, " +
+                "fec_size=$fec_size, hash_algorithm='$hash_algorithm', partition_name='$partition_name', " +
+                "salt=${salt.contentToString()}, root_digest=${Arrays.toString(root_digest)}, flags=$flags)"
     }
     companion object {
@@ -98,5 +222,51 @@ class HashTreeDescriptor(
         private const val RESERVED = 60L
         private const val SIZE = 120 + RESERVED
         private const val FORMAT_STRING = "!2QL3Q3L2Q32s4L${RESERVED}x"
+        private val log = LoggerFactory.getLogger(HashTreeDescriptor::class.java)
+        private const val DEBUG = false
+        class MerkleTree(
+            var dataSize: Long = 0,
+            var dataBlockCount: Long = 0,
+            var hashSize: Long = 0,
+            var hashOffset: Long = 0
+        ) {
+            override fun toString(): String {
+                return String.format(
+                    "MT{data: %10s(%6s blocks), hash: %7s @%-5s}",
+                    dataSize, dataBlockCount, hashSize, hashOffset
+                )
+            }
+        }
+        fun calcMerkleTree(fileSize: Long, blockSize: Int, digestSize: Int): List<MerkleTree> {
+            var levelDataSize: Long = fileSize
+            var levelNo = 0
+            val tree: MutableList<MerkleTree> = mutableListOf()
+            while (true) {
+                //raw data in pages of blockSize
+                val blockCount = (levelDataSize + blockSize - 1) / blockSize
+                if (1L == blockCount) {
+                    break
+                }
+                //digest size in pages of blockSize
+                val hashSize = Helper.round_to_multiple(blockCount * digestSize, blockSize)
+                tree.add(0, MerkleTree(levelDataSize, blockCount, hashSize))
+                levelDataSize = hashSize
+                levelNo++
+            }
+            for (i in 1 until tree.size) {
+                tree[i].hashOffset = tree[i - 1].hashOffset + tree[i - 1].hashSize
+            }
+            tree.forEachIndexed { index, merkleTree ->
+                log.info("Level #${index + 1}: $merkleTree")
+            }
+            val treeSize = tree.sumOf { it.hashSize }
+            log.info("tree size: $treeSize(" + Helper.humanReadableByteCountBin(treeSize) + ")")
+            return tree
+        }
     }
 }
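As a worked example of calcMerkleTree (the numbers come from the new x1 test below): fileSize=120721408, blockSize=4096, digestSize=32 yields three hash levels, topmost level first:

    Level #1: data 8192 B (2 blocks)          -> hash 4096 B   @ offset 0
    Level #2: data 946176 B (231 blocks)      -> hash 8192 B   @ offset 4096
    Level #3: data 120721408 B (29473 blocks) -> hash 946176 B @ offset 12288

Each level's hash area is round_to_multiple(blockCount * 32, 4096); the total tree size is 4096 + 8192 + 946176 = 958464 bytes (936.0 KiB), and storing the top level first matches the on-disk layout AVB/dm-verity expect.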

Signer.kt

@@ -44,7 +44,7 @@ class Signer {
                 addArguments("--key ${alg.defaultKey}")
             }
             newAvbInfo.auxBlob?.let { newAuxblob ->
-                newAuxblob.propertyDescriptor.forEach { newProp ->
+                newAuxblob.propertyDescriptors.forEach { newProp ->
                     addArguments(arrayOf("--prop", "${newProp.key}:${newProp.value}"))
                 }
             }

BootImgParser.kt

@@ -86,6 +86,10 @@ class BootImgParser() : IPackable {
         }
     }
+    override fun `@verify`(fileName: String) {
+        super.`@verify`(fileName)
+    }
     override fun pull(fileName: String, deviceName: String) {
         super.pull(fileName, deviceName)
     }

DtboParser.kt

@@ -1,8 +1,11 @@
 package cfig.packable
+import avb.blob.Footer
 import cfig.EnvironmentVerifier
 import cfig.dtb_util.DTC
 import cfig.helper.Helper
+import cfig.Avb
+import com.fasterxml.jackson.databind.ObjectMapper
 import org.apache.commons.exec.CommandLine
 import org.apache.commons.exec.DefaultExecutor
 import org.slf4j.LoggerFactory
@@ -75,6 +78,23 @@ class DtboParser(val workDir: File) : IPackable {
         execInDirectory(cmd, this.workDir)
     }
+    override fun `@verify`(fileName: String) {
+        super.`@verify`(fileName)
+    }
+    // invoked solely by reflection
+    fun `@footer`(fileName: String) {
+        FileInputStream(fileName).use { fis ->
+            fis.skip(File(fileName).length() - Footer.SIZE)
+            try {
+                val footer = Footer(fis)
+                log.info("\n" + ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(footer))
+            } catch (e: IllegalArgumentException) {
+                log.info("image $fileName has no AVB Footer")
+            }
+        }
+    }
     private fun execInDirectory(cmd: CommandLine, inWorkDir: File) {
         DefaultExecutor().let {
             it.workingDirectory = inWorkDir

IPackable.kt

@@ -1,5 +1,6 @@
 package cfig.packable
+import cfig.Avb
 import cfig.helper.Helper
 import cfig.helper.Helper.Companion.check_call
 import cfig.helper.Helper.Companion.check_output
@@ -46,6 +47,12 @@ interface IPackable {
         "adb shell rm /cache/file.to.pull".check_call()
     }
+    // invoked solely by reflection
+    fun `@verify`(fileName: String) {
+        val ai = Avb().parseVbMeta(fileName, true)
+        Avb().verify(ai, fileName)
+    }
     fun cleanUp() {
         val workDir = Helper.prop("workDir")
         if (File(workDir).exists()) File(workDir).deleteRecursively()

VBMetaParser.kt

@@ -31,6 +31,10 @@ class VBMetaParser: IPackable {
         super.flash("$fileName.signed", stem)
     }
+    override fun `@verify`(fileName: String) {
+        super.`@verify`(fileName)
+    }
     override fun pull(fileName: String, deviceName: String) {
         super.pull(fileName, deviceName)
     }

VendorBootParser.kt

@@ -34,6 +34,10 @@ class VendorBootParser : IPackable {
         Avb.updateVbmeta(fileName)
     }
+    override fun `@verify`(fileName: String) {
+        super.`@verify`(fileName)
+    }
     override fun pull(fileName: String, deviceName: String) {
         super.pull(fileName, deviceName)
     }

SparseImgParser.kt

@@ -4,6 +4,11 @@ import cfig.EnvironmentVerifier
 import cfig.packable.IPackable
 import org.slf4j.LoggerFactory
 import cfig.helper.Helper.Companion.check_call
+import java.io.FileInputStream
+import java.io.File
+import com.fasterxml.jackson.databind.ObjectMapper
+import avb.blob.Footer
+import cfig.Avb
 @OptIn(ExperimentalUnsignedTypes::class)
 class SparseImgParser : IPackable {
@@ -32,6 +37,23 @@ class SparseImgParser : IPackable {
         img2simg("$fileName.unsparse", "$fileName.new")
     }
+    // invoked solely by reflection
+    fun `@footer`(fileName: String) {
+        FileInputStream(fileName).use { fis ->
+            fis.skip(File(fileName).length() - Footer.SIZE)
+            try {
+                val footer = Footer(fis)
+                log.info("\n" + ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(footer))
+            } catch (e: IllegalArgumentException) {
+                log.info("image $fileName has no AVB Footer")
+            }
+        }
+    }
+    override fun `@verify`(fileName: String) {
+        super.`@verify`(fileName)
+    }
     private fun simg2img(sparseIn: String, flatOut: String) {
         log.info("parsing Android sparse image $sparseIn ...")
         "$simg2imgBin $sparseIn $flatOut".check_call()

HashTreeDescriptorTest.kt

@@ -1,19 +1,22 @@
 package avb.desc
+import cfig.helper.KeyHelper2
 import com.fasterxml.jackson.databind.ObjectMapper
 import org.apache.commons.codec.binary.Hex
-import org.junit.Assert.assertEquals
 import org.junit.Test
+import org.junit.Assert.*
 import java.io.ByteArrayInputStream
+import java.security.MessageDigest
 @OptIn(ExperimentalUnsignedTypes::class)
 class HashTreeDescriptorTest {
     @Test
     fun encode() {
-        val treeStr1 = "000000000000000100000000000000e000000001000000009d787000000000009d78700000000000013d9000000010000000100000000002000000009eb60000000000000141400073686131000000000000000000000000000000000000000000000000000000000000000600000020000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000073797374656d28f6d60b554d9532bd45874ab0cdcb2219c4f437c9350f484fa189a881878ab609c2b0ad5852fc0f4a2d03ef9d2be5372e2bd1390000"
-        val treeStr2 = "000000000000000100000000000000e000000001000000001ec09000000000001ec0900000000000003e2000000010000000100000000002000000001efeb00000000000003ec00073686131000000000000000000000000000000000000000000000000000000000000000600000020000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000076656e646f7228f6d60b554d9532bd45874ab0cdcb2219c4f437c9350f484fa189a881878ab698cea1ea79a3fa7277255355d42f19af3378b0110000"
+        val treeStr1 =
+            "000000000000000100000000000000e000000001000000009d787000000000009d78700000000000013d9000000010000000100000000002000000009eb60000000000000141400073686131000000000000000000000000000000000000000000000000000000000000000600000020000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000073797374656d28f6d60b554d9532bd45874ab0cdcb2219c4f437c9350f484fa189a881878ab609c2b0ad5852fc0f4a2d03ef9d2be5372e2bd1390000"
+        val treeStr2 =
+            "000000000000000100000000000000e000000001000000001ec09000000000001ec0900000000000003e2000000010000000100000000002000000001efeb00000000000003ec00073686131000000000000000000000000000000000000000000000000000000000000000600000020000000140000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000076656e646f7228f6d60b554d9532bd45874ab0cdcb2219c4f437c9350f484fa189a881878ab698cea1ea79a3fa7277255355d42f19af3378b0110000"
         val tree1 = HashTreeDescriptor(ByteArrayInputStream(Hex.decodeHex(treeStr1)), 0)
         println(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(tree1))
@@ -26,4 +29,11 @@ class HashTreeDescriptorTest {
         println(ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(tree2))
         assertEquals(treeStr2, Hex.encodeHexString(tree2.encode()))
     }
+    @Test
+    fun x1() {
+        HashTreeDescriptor.calcMerkleTree(120721408, 4096, 32)
+        println(MessageDigest.getInstance(KeyHelper2.pyAlg2java("sha1")).digest().size)
+        println(MessageDigest.getInstance(KeyHelper2.pyAlg2java("sha256")).digest().size)
+    }
 }
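For reference, x1 should log the three tree levels worked out above and then print 20 and 32, the digest lengths of SHA-1 and SHA-256.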

Helper.kt

@@ -12,6 +12,9 @@ import java.nio.ByteOrder
 import java.nio.file.attribute.PosixFilePermission
 import java.security.MessageDigest
 import java.util.*
+import kotlin.math.pow
+import java.text.StringCharacterIterator
+import java.text.CharacterIterator
 @OptIn(ExperimentalUnsignedTypes::class)
 class Helper {
@@ -123,6 +126,10 @@ class Helper {
             }
         }
+        fun round_to_pow2(num: Long): Long {
+            return 2.0.pow((num - 1).toBigInteger().bitLength().toDouble()).toLong()
+        }
         fun pyAlg2java(alg: String): String {
             return when (alg) {
                 "sha1" -> "sha-1"
@@ -256,9 +263,11 @@ class Helper {
             val md = MessageDigest.getInstance("SHA1")
             for (item in inFiles) {
                 if (null == item) {
-                    md.update(ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN)
+                    md.update(
+                        ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN)
                             .putInt(0)
-                            .array())
+                            .array()
+                    )
                     log.debug("update null $item: " + toHexString((md.clone() as MessageDigest).digest()))
                 } else {
                     val currentFile = File(item)
@@ -273,9 +282,11 @@ class Helper {
                         md.update(dataRead, 0, byteRead)
                     }
                     log.debug("update file $item: " + toHexString((md.clone() as MessageDigest).digest()))
-                    md.update(ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN)
+                    md.update(
+                        ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN)
                             .putInt(currentFile.length().toInt())
-                            .array())
+                            .array()
+                    )
                     log.debug("update SIZE $item: " + toHexString((md.clone() as MessageDigest).digest()))
                 }
             }
@@ -316,6 +327,26 @@ class Helper {
             return result
         }
+        /*
+            https://stackoverflow.com/questions/3758606/how-can-i-convert-byte-size-into-a-human-readable-format-in-java
+         */
+        fun humanReadableByteCountBin(bytes: Long): String {
+            val absB = if (bytes == Long.MIN_VALUE) Long.MAX_VALUE else Math.abs(bytes)
+            if (absB < 1024) {
+                return "$bytes B"
+            }
+            var value = absB
+            val ci: CharacterIterator = StringCharacterIterator("KMGTPE")
+            var i = 40
+            while (i >= 0 && absB > 0xfffccccccccccccL shr i) {
+                value = value shr 10
+                ci.next()
+                i -= 10
+            }
+            value *= java.lang.Long.signum(bytes).toLong()
+            return String.format("%.1f %ciB", value / 1024.0, ci.current())
+        }
         private val log = LoggerFactory.getLogger("Helper")
     }
 }
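Two quick sanity checks for the new helpers (a hypothetical snippet, not part of the commit):

    // round_to_pow2 returns the next power of two >= num, used to pad a
    // digest to a power-of-two slot in the hash tree
    check(Helper.round_to_pow2(20) == 32L)   // a SHA-1 digest (20 B) occupies 32 B
    check(Helper.round_to_pow2(33) == 64L)
    // binary units, e.g. the 958464-byte tree from the example above;
    // exact string depends on the default locale
    println(Helper.humanReadableByteCountBin(958464))  // "936.0 KiB"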
