Mirror of https://github.com/revanced/revanced-cli.git (synced 2024-11-12 06:39:24 +01:00)
feat: more efficient zipalign
commit a942a57364 (parent 718df54bdc)
src/main/kotlin/app/revanced/utils/signing/align/ZipAligner.kt
@@ -1,63 +1,29 @@
 package app.revanced.utils.signing.align

-import app.revanced.utils.signing.align.stream.MultiOutputStream
-import app.revanced.utils.signing.align.stream.PeekingFakeStream
-import java.io.BufferedOutputStream
+import app.revanced.utils.signing.align.zip.ZipFile
 import java.io.File
-import java.util.*
-import java.util.zip.ZipEntry
-import java.util.zip.ZipFile
-import java.util.zip.ZipOutputStream

 internal object ZipAligner {
-    fun align(input: File, output: File, alignment: Int = 4) {
-        val zipFile = ZipFile(input)
+    const val DEFAULT_ALIGNMENT = 4
+    const val LIBRARY_ALIGNMENT = 4096

-        val entries: Enumeration<out ZipEntry?> = zipFile.entries()
+    fun align(input: File, output: File) {
+        val inputZip = ZipFile(input)
+        val outputZip = ZipFile(output)

-        // fake
-        val peekingFakeStream = PeekingFakeStream()
-        val fakeOutputStream = ZipOutputStream(peekingFakeStream)
-        // real
-        val zipOutputStream = ZipOutputStream(BufferedOutputStream(output.outputStream()))
+        for (entry in inputZip.entries) {
+            val data = inputZip.getDataForEntry(entry)

-        val multiOutputStream = MultiOutputStream(
-            listOf(
-                fakeOutputStream, // fake, used to add the data to the fake stream
-                zipOutputStream // real
-            )
-        )
+            if (entry.compression == 0.toUShort()) {
+                val alignment = if (entry.fileName.endsWith(".so")) LIBRARY_ALIGNMENT else DEFAULT_ALIGNMENT

-        var bias = 0
-        while (entries.hasMoreElements()) {
-            var padding = 0
-
-            val entry: ZipEntry = entries.nextElement()!!
-            // fake, used to calculate the file offset of the entry
-            fakeOutputStream.putNextEntry(entry)
-
-            if (entry.size == entry.compressedSize) {
-                val fileOffset = peekingFakeStream.peek()
-                val newOffset = fileOffset + bias
-                padding = ((alignment - (newOffset % alignment)) % alignment).toInt()
-
-                // real
-                entry.extra = if (entry.extra == null) ByteArray(padding)
-                else Arrays.copyOf(entry.extra, entry.extra.size + padding)
+                outputZip.addEntryAligned(entry, data, alignment)
+            } else {
+                outputZip.addEntry(entry, data)
             }
-
-            zipOutputStream.putNextEntry(entry)
-            zipFile.getInputStream(entry).copyTo(multiOutputStream)
-
-            // fake, used to add remaining bytes
-            fakeOutputStream.closeEntry()
-            // real
-            zipOutputStream.closeEntry()
-
-            bias += padding
         }

-        zipFile.close()
-        zipOutputStream.close()
+        outputZip.finish()
     }
 }
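For orientation, a minimal usage sketch of the reworked aligner, assuming it is called from inside the same module (ZipAligner is internal); the file names here are hypothetical and not part of the commit:

import app.revanced.utils.signing.align.ZipAligner
import java.io.File

fun main() {
    // Hypothetical input/output paths, for illustration only.
    val unalignedApk = File("patched-unaligned.apk")
    val alignedApk = File("patched-aligned.apk")

    // Copies every entry of the input archive into the output archive,
    // padding uncompressed entries to 4 bytes and *.so entries to 4096 bytes.
    ZipAligner.align(unalignedApk, alignedApk)
}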
src/main/kotlin/app/revanced/utils/signing/align/stream/MultiOutputStream.kt (deleted)
@@ -1,20 +0,0 @@
-package app.revanced.utils.signing.align.stream
-
-import java.io.OutputStream
-
-internal class MultiOutputStream(
-    private val streams: Iterable<OutputStream>,
-) : OutputStream() {
-    override fun write(b: ByteArray, off: Int, len: Int) = streams.forEach {
-        it.write(b, off, len)
-    }
-
-    override fun write(b: ByteArray) = streams.forEach {
-        it.write(b)
-    }
-
-    override fun write(b: Int) = streams.forEach {
-        it.write(b)
-    }
-
-}
src/main/kotlin/app/revanced/utils/signing/align/stream/PeekingFakeStream.kt (deleted)
@@ -1,21 +0,0 @@
-package app.revanced.utils.signing.align.stream
-
-import java.io.OutputStream
-
-internal class PeekingFakeStream : OutputStream() {
-    private var numberOfBytes: Long = 0
-
-    fun peek() = numberOfBytes
-
-    override fun write(b: Int) {
-        numberOfBytes++
-    }
-
-    override fun write(b: ByteArray) {
-        numberOfBytes += b.size
-    }
-
-    override fun write(b: ByteArray, offset: Int, len: Int) {
-        numberOfBytes += len
-    }
-}
New file (package app.revanced.utils.signing.align.zip, unsigned little-endian I/O helpers)
@@ -0,0 +1,33 @@
+package app.revanced.utils.signing.align.zip
+
+import java.io.DataInput
+import java.io.DataOutput
+import java.nio.ByteBuffer
+
+fun UInt.toLittleEndian() =
+    (((this.toInt() and 0xff000000.toInt()) shr 24) or ((this.toInt() and 0x00ff0000) shr 8) or ((this.toInt() and 0x0000ff00) shl 8) or (this.toInt() shl 24)).toUInt()
+
+fun UShort.toLittleEndian() = (this.toUInt() shl 16).toLittleEndian().toUShort()
+
+fun UInt.toBigEndian() = (((this.toInt() and 0xff) shl 24) or ((this.toInt() and 0xff00) shl 8)
+        or ((this.toInt() and 0x00ff0000) ushr 8) or (this.toInt() ushr 24)).toUInt()
+
+fun UShort.toBigEndian() = (this.toUInt() shl 16).toBigEndian().toUShort()
+
+fun ByteBuffer.getUShort() = this.getShort().toUShort()
+fun ByteBuffer.getUInt() = this.getInt().toUInt()
+
+fun ByteBuffer.putUShort(ushort: UShort) = this.putShort(ushort.toShort())
+fun ByteBuffer.putUInt(uint: UInt) = this.putInt(uint.toInt())
+
+fun DataInput.readUShort() = this.readShort().toUShort()
+fun DataInput.readUInt() = this.readInt().toUInt()
+
+fun DataOutput.writeUShort(ushort: UShort) = this.writeShort(ushort.toInt())
+fun DataOutput.writeUInt(uint: UInt) = this.writeInt(uint.toInt())
+
+fun DataInput.readUShortLE() = this.readUShort().toBigEndian()
+fun DataInput.readUIntLE() = this.readUInt().toBigEndian()
+
+fun DataOutput.writeUShortLE(ushort: UShort) = this.writeUShort(ushort.toLittleEndian())
+fun DataOutput.writeUIntLE(uint: UInt) = this.writeUInt(uint.toLittleEndian())
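A short sketch (not part of the commit) of what these helpers do on the wire: DataOutput.writeInt is big-endian, so writeUIntLE byte-swaps first, and writing the end-of-central-directory signature 0x06054b50 produces the little-endian byte sequence 50 4b 05 06 that the ZIP format expects; readUIntLE applies the inverse swap when reading.

import app.revanced.utils.signing.align.zip.readUIntLE
import app.revanced.utils.signing.align.zip.writeUIntLE
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.DataInputStream
import java.io.DataOutputStream

fun main() {
    val out = ByteArrayOutputStream()
    DataOutputStream(out).writeUIntLE(0x06054b50u)

    // Prints "50 4b 05 06", the bytes a ZIP end-of-central-directory record starts with.
    println(out.toByteArray().joinToString(" ") { "%02x".format(it.toInt() and 0xff) })

    // readUIntLE swaps the bytes back, recovering the original value.
    val roundTripped = DataInputStream(ByteArrayInputStream(out.toByteArray())).readUIntLE()
    println(roundTripped == 0x06054b50u) // true
}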
src/main/kotlin/app/revanced/utils/signing/align/zip/ZipFile.kt (new file, 128 lines)
@@ -0,0 +1,128 @@
+package app.revanced.utils.signing.align.zip
+
+import app.revanced.utils.signing.align.zip.structures.ZipEndRecord
+import app.revanced.utils.signing.align.zip.structures.ZipEntry
+import java.io.File
+import java.io.RandomAccessFile
+import java.nio.ByteBuffer
+import java.nio.channels.FileChannel
+
+class ZipFile(val file: File) {
+    var entries: MutableList<ZipEntry> = mutableListOf()
+
+    private val filePointer: RandomAccessFile = RandomAccessFile(file, "rw")
+
+    init {
+        // if the file isn't empty, try to load its entries
+        if (file.length() > 0) {
+            val endRecord = findEndRecord()
+
+            if (endRecord.diskNumber > 0u || endRecord.totalEntries != endRecord.diskEntries)
+                throw IllegalArgumentException("Multi-file archives are not supported")
+
+            entries = readEntries(endRecord).toMutableList()
+        }
+
+        // seek back to the start for writing
+        filePointer.seek(0)
+    }
+
+    private fun findEndRecord(): ZipEndRecord {
+        // scan from the end towards the start, since the end record sits at the end of the file
+        for (i in filePointer.length() - 1 downTo 0) {
+            filePointer.seek(i)
+            // possible beginning of the signature
+            if (filePointer.readByte() == 0x50.toByte()) {
+                // seek back to read the full int
+                filePointer.seek(i)
+                val possibleSignature = filePointer.readUIntLE()
+                if (possibleSignature == ZipEndRecord.ECD_SIGNATURE) {
+                    filePointer.seek(i)
+                    return ZipEndRecord.fromECD(filePointer)
+                }
+            }
+        }
+
+        throw Exception("Couldn't find end record")
+    }
+
+    private fun readEntries(endRecord: ZipEndRecord): List<ZipEntry> {
+        filePointer.seek(endRecord.centralDirectoryStartOffset.toLong())
+
+        val numberOfEntries = endRecord.diskEntries.toInt()
+
+        return buildList(numberOfEntries) {
+            for (i in 1..numberOfEntries) {
+                add(ZipEntry.fromCDE(filePointer).also {
+                    // for some reason the local extra field can differ from the central one
+                    it.readLocalExtra(
+                        filePointer.channel.map(
+                            FileChannel.MapMode.READ_ONLY,
+                            it.localHeaderOffset.toLong() + 28,
+                            2
+                        )
+                    )
+                })
+            }
+        }
+    }
+
+    private fun writeCDE() {
+        val CDEStart = filePointer.channel.position().toUInt()
+
+        entries.forEach {
+            filePointer.channel.write(it.toCDE())
+        }
+
+        val endRecord = ZipEndRecord(
+            0u,
+            0u,
+            entries.count().toUShort(),
+            entries.count().toUShort(),
+            filePointer.channel.position().toUInt() - CDEStart,
+            CDEStart,
+            ""
+        )
+
+        filePointer.channel.write(endRecord.toECD())
+    }
+
+    fun addEntry(entry: ZipEntry, data: ByteBuffer) {
+        entry.localHeaderOffset = filePointer.channel.position().toUInt()
+
+        filePointer.channel.write(entry.toLFH())
+        filePointer.channel.write(data)
+
+        entries.add(entry)
+    }
+
+    fun addEntryAligned(entry: ZipEntry, data: ByteBuffer, alignment: Int) {
+        // calculate where the entry's data would end up
+        val dataOffset = filePointer.filePointer + entry.LFHSize
+
+        val mod = dataOffset % alignment
+
+        // wrong alignment
+        if (mod != 0L) {
+            // add padding at the end of the extra field
+            entry.localExtraField =
+                entry.localExtraField.copyOf((entry.localExtraField.size + (alignment - mod)).toInt())
+        }
+
+        addEntry(entry, data)
+    }
+
+    fun getDataForEntry(entry: ZipEntry): ByteBuffer {
+        return filePointer.channel.map(
+            FileChannel.MapMode.READ_ONLY,
+            entry.dataOffset.toLong(),
+            entry.compressedSize.toLong()
+        )
+    }
+
+    fun finish() {
+        writeCDE()
+        filePointer.close()
+    }
+}
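One detail worth spelling out (an explanatory sketch, not code from the commit): the `localHeaderOffset.toLong() + 28` in readEntries points at the extra-field-length word of the fixed 30-byte local file header, which is why exactly 2 bytes are mapped there.

// Local file header layout (fixed 30 bytes, per the ZIP specification):
//   0  signature (4)          14  crc-32 (4)
//   4  version needed (2)     18  compressed size (4)
//   6  flags (2)              22  uncompressed size (4)
//   8  compression (2)        26  file name length (2)
//  10  mod. time (2)          28  extra field length (2)
//  12  mod. date (2)          30  file name, extra field, then entry data
//
// Hypothetical helper mirroring the constant used in ZipFile.readEntries().
fun extraFieldLengthOffset(localHeaderOffset: Long): Long = localHeaderOffset + 28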
src/main/kotlin/app/revanced/utils/signing/align/zip/structures/ZipEndRecord.kt (new file)
@@ -0,0 +1,77 @@
+package app.revanced.utils.signing.align.zip.structures
+
+import app.revanced.utils.signing.align.zip.putUInt
+import app.revanced.utils.signing.align.zip.putUShort
+import app.revanced.utils.signing.align.zip.readUIntLE
+import app.revanced.utils.signing.align.zip.readUShortLE
+import java.io.DataInput
+import java.nio.ByteBuffer
+import java.nio.ByteOrder
+
+data class ZipEndRecord(
+    val diskNumber: UShort,
+    val startingDiskNumber: UShort,
+    val diskEntries: UShort,
+    val totalEntries: UShort,
+    val centralDirectorySize: UInt,
+    val centralDirectoryStartOffset: UInt,
+    val fileComment: String,
+) {
+    companion object {
+        const val ECD_HEADER_SIZE = 22
+        const val ECD_SIGNATURE = 0x06054b50u
+
+        fun fromECD(input: DataInput): ZipEndRecord {
+            val signature = input.readUIntLE()
+
+            if (signature != ECD_SIGNATURE)
+                throw IllegalArgumentException("Input doesn't start with end record signature")
+
+            val diskNumber = input.readUShortLE()
+            val startingDiskNumber = input.readUShortLE()
+            val diskEntries = input.readUShortLE()
+            val totalEntries = input.readUShortLE()
+            val centralDirectorySize = input.readUIntLE()
+            val centralDirectoryStartOffset = input.readUIntLE()
+            val fileCommentLength = input.readUShortLE()
+            var fileComment = ""
+
+            if (fileCommentLength > 0u) {
+                val fileCommentBytes = ByteArray(fileCommentLength.toInt())
+                input.readFully(fileCommentBytes)
+                fileComment = fileCommentBytes.toString(Charsets.UTF_8)
+            }
+
+            return ZipEndRecord(
+                diskNumber,
+                startingDiskNumber,
+                diskEntries,
+                totalEntries,
+                centralDirectorySize,
+                centralDirectoryStartOffset,
+                fileComment
+            )
+        }
+    }
+
+    fun toECD(): ByteBuffer {
+        val commentBytes = fileComment.toByteArray(Charsets.UTF_8)
+
+        val buffer = ByteBuffer.allocate(ECD_HEADER_SIZE + commentBytes.size).also { it.order(ByteOrder.LITTLE_ENDIAN) }
+
+        buffer.putUInt(ECD_SIGNATURE)
+        buffer.putUShort(diskNumber)
+        buffer.putUShort(startingDiskNumber)
+        buffer.putUShort(diskEntries)
+        buffer.putUShort(totalEntries)
+        buffer.putUInt(centralDirectorySize)
+        buffer.putUInt(centralDirectoryStartOffset)
+        buffer.putUShort(commentBytes.size.toUShort())
+
+        buffer.put(commentBytes)
+
+        buffer.flip()
+        return buffer
+    }
+}
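As a sanity check on the symmetry of fromECD and toECD, a small round-trip sketch (assuming the classes from this commit are on the classpath):

import app.revanced.utils.signing.align.zip.structures.ZipEndRecord
import java.io.ByteArrayInputStream
import java.io.DataInputStream

fun main() {
    // An end record for an empty, single-disk archive with no comment.
    val record = ZipEndRecord(
        diskNumber = 0u,
        startingDiskNumber = 0u,
        diskEntries = 0u,
        totalEntries = 0u,
        centralDirectorySize = 0u,
        centralDirectoryStartOffset = 0u,
        fileComment = ""
    )

    // toECD() returns a little-endian ByteBuffer, flipped and ready to write.
    val buffer = record.toECD()
    val bytes = ByteArray(buffer.remaining()).also { buffer.get(it) }

    // fromECD() consumes a DataInput positioned at the signature and rebuilds the record.
    val parsed = ZipEndRecord.fromECD(DataInputStream(ByteArrayInputStream(bytes)))
    println(parsed == record) // true: the record survives a round trip
}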
src/main/kotlin/app/revanced/utils/signing/align/zip/structures/ZipEntry.kt (new file)
@@ -0,0 +1,170 @@
+package app.revanced.utils.signing.align.zip.structures
+
+import app.revanced.utils.signing.align.zip.getUShort
+import app.revanced.utils.signing.align.zip.putUInt
+import app.revanced.utils.signing.align.zip.putUShort
+import app.revanced.utils.signing.align.zip.readUIntLE
+import app.revanced.utils.signing.align.zip.readUShortLE
+import java.io.DataInput
+import java.nio.ByteBuffer
+import java.nio.ByteOrder
+
+data class ZipEntry(
+    val version: UShort,
+    val versionNeeded: UShort,
+    val flags: UShort,
+    val compression: UShort,
+    val modificationTime: UShort,
+    val modificationDate: UShort,
+    val crc32: UInt,
+    val compressedSize: UInt,
+    val uncompressedSize: UInt,
+    val diskNumber: UShort,
+    val internalAttributes: UShort,
+    val externalAttributes: UInt,
+    var localHeaderOffset: UInt,
+    val fileName: String,
+    val extraField: ByteArray,
+    val fileComment: String,
+    var localExtraField: ByteArray = ByteArray(0), // kept separate for alignment
+) {
+    val LFHSize: Int
+        get() = LFH_HEADER_SIZE + fileName.toByteArray(Charsets.UTF_8).size + localExtraField.size
+
+    val dataOffset: UInt
+        get() = localHeaderOffset + LFHSize.toUInt()
+
+    companion object {
+        const val CDE_HEADER_SIZE = 46
+        const val CDE_SIGNATURE = 0x02014b50u
+
+        const val LFH_HEADER_SIZE = 30
+        const val LFH_SIGNATURE = 0x04034b50u
+
+        fun fromCDE(input: DataInput): ZipEntry {
+            val signature = input.readUIntLE()
+
+            if (signature != CDE_SIGNATURE)
+                throw IllegalArgumentException("Input doesn't start with central directory entry signature")
+
+            val version = input.readUShortLE()
+            val versionNeeded = input.readUShortLE()
+            var flags = input.readUShortLE()
+            val compression = input.readUShortLE()
+            val modificationTime = input.readUShortLE()
+            val modificationDate = input.readUShortLE()
+            val crc32 = input.readUIntLE()
+            val compressedSize = input.readUIntLE()
+            val uncompressedSize = input.readUIntLE()
+            val fileNameLength = input.readUShortLE()
+            var fileName = ""
+            val extraFieldLength = input.readUShortLE()
+            var extraField = ByteArray(extraFieldLength.toInt())
+            val fileCommentLength = input.readUShortLE()
+            var fileComment = ""
+            val diskNumber = input.readUShortLE()
+            val internalAttributes = input.readUShortLE()
+            val externalAttributes = input.readUIntLE()
+            val localHeaderOffset = input.readUIntLE()
+
+            val variableFieldsLength = fileNameLength.toInt() + extraFieldLength.toInt() + fileCommentLength.toInt()
+
+            if (variableFieldsLength > 0) {
+                val fileNameBytes = ByteArray(fileNameLength.toInt())
+                input.readFully(fileNameBytes)
+                fileName = fileNameBytes.toString(Charsets.UTF_8)
+
+                input.readFully(extraField)
+
+                val fileCommentBytes = ByteArray(fileCommentLength.toInt())
+                input.readFully(fileCommentBytes)
+                fileComment = fileCommentBytes.toString(Charsets.UTF_8)
+            }
+
+            flags = (flags and 0b1000u.inv().toUShort()) // clear the data descriptor flag, as data descriptors are not used
+
+            return ZipEntry(
+                version,
+                versionNeeded,
+                flags,
+                compression,
+                modificationTime,
+                modificationDate,
+                crc32,
+                compressedSize,
+                uncompressedSize,
+                diskNumber,
+                internalAttributes,
+                externalAttributes,
+                localHeaderOffset,
+                fileName,
+                extraField,
+                fileComment,
+            )
+        }
+    }
+
+    fun readLocalExtra(buffer: ByteBuffer) {
+        buffer.order(ByteOrder.LITTLE_ENDIAN)
+        localExtraField = ByteArray(buffer.getUShort().toInt())
+    }
+
+    fun toLFH(): ByteBuffer {
+        val nameBytes = fileName.toByteArray(Charsets.UTF_8)
+
+        val buffer = ByteBuffer.allocate(LFH_HEADER_SIZE + nameBytes.size + localExtraField.size)
+            .also { it.order(ByteOrder.LITTLE_ENDIAN) }
+
+        buffer.putUInt(LFH_SIGNATURE)
+        buffer.putUShort(versionNeeded)
+        buffer.putUShort(flags)
+        buffer.putUShort(compression)
+        buffer.putUShort(modificationTime)
+        buffer.putUShort(modificationDate)
+        buffer.putUInt(crc32)
+        buffer.putUInt(compressedSize)
+        buffer.putUInt(uncompressedSize)
+        buffer.putUShort(nameBytes.size.toUShort())
+        buffer.putUShort(localExtraField.size.toUShort())
+
+        buffer.put(nameBytes)
+        buffer.put(localExtraField)
+
+        buffer.flip()
+        return buffer
+    }
+
+    fun toCDE(): ByteBuffer {
+        val nameBytes = fileName.toByteArray(Charsets.UTF_8)
+        val commentBytes = fileComment.toByteArray(Charsets.UTF_8)
+
+        val buffer = ByteBuffer.allocate(CDE_HEADER_SIZE + nameBytes.size + extraField.size + commentBytes.size)
+            .also { it.order(ByteOrder.LITTLE_ENDIAN) }
+
+        buffer.putUInt(CDE_SIGNATURE)
+        buffer.putUShort(version)
+        buffer.putUShort(versionNeeded)
+        buffer.putUShort(flags)
+        buffer.putUShort(compression)
+        buffer.putUShort(modificationTime)
+        buffer.putUShort(modificationDate)
+        buffer.putUInt(crc32)
+        buffer.putUInt(compressedSize)
+        buffer.putUInt(uncompressedSize)
+        buffer.putUShort(nameBytes.size.toUShort())
+        buffer.putUShort(extraField.size.toUShort())
+        buffer.putUShort(commentBytes.size.toUShort())
+        buffer.putUShort(diskNumber)
+        buffer.putUShort(internalAttributes)
+        buffer.putUInt(externalAttributes)
+        buffer.putUInt(localHeaderOffset)
+
+        buffer.put(nameBytes)
+        buffer.put(extraField)
+        buffer.put(commentBytes)
+
+        buffer.flip()
+        return buffer
+    }
+}
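To make the alignment arithmetic concrete, a stand-alone sketch (not from the commit) of the padding rule that ZipFile.addEntryAligned applies by growing localExtraField, which in turn grows LFHSize and shifts dataOffset onto the requested boundary:

// Bytes of padding needed so that an entry whose data would start at dataOffset
// lands on a multiple of alignment (mirrors ZipFile.addEntryAligned).
fun paddingFor(dataOffset: Long, alignment: Int): Int {
    val mod = dataOffset % alignment
    return if (mod == 0L) 0 else (alignment - mod).toInt()
}

fun main() {
    println(paddingFor(1234, 4))    // 2    -> data starts at 1236
    println(paddingFor(1234, 4096)) // 2862 -> data starts at 4096
    println(paddingFor(4096, 4096)) // 0    -> already aligned
}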