Add uncompressed_{length,crc32} columns to the containers table

There's no real use for these yet, but they might be useful with NXT
caches.

We don't need a compressed_length column because it's easy to determine
the length of a BYTEA column within the database.
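
For context, a minimal sketch of what that looks like, assuming
PostgreSQL (which the BYTEA and ON COMMIT DROP syntax elsewhere in this
change suggests); the compressedLength helper below is purely
illustrative and not part of this change:

    import java.sql.Connection

    // Hypothetical helper: recover the compressed size on demand from the
    // existing BYTEA column instead of storing it. In PostgreSQL, length()
    // (or octet_length()) on a BYTEA value returns its size in bytes.
    private fun compressedLength(connection: Connection, containerId: Long): Int? {
        return connection.prepareStatement(
            "SELECT length(data) FROM containers WHERE id = ?"
        ).use { stmt ->
            stmt.setLong(1, containerId)
            stmt.executeQuery().use { rows ->
                if (rows.next()) rows.getInt(1) else null
            }
        }
    }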

Signed-off-by: Graham <gpe@openrs2.org>
Graham 4 years ago
parent 177376f47e
commit 3289af5ddf
  1. archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt (95 lines changed)
  2. archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt (79 lines changed)
  3. archive/src/main/kotlin/org/openrs2/archive/key/KeyBruteForcer.kt (22 lines changed)
  4. archive/src/main/resources/org/openrs2/archive/V1__init.sql (2 lines changed)

archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
@@ -3,7 +3,6 @@ package org.openrs2.archive.cache
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufUtil
import io.netty.buffer.DefaultByteBufHolder
import io.netty.buffer.Unpooled
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
@@ -34,33 +33,44 @@ public class CacheImporter @Inject constructor(
private val alloc: ByteBufAllocator
) {
public abstract class Container(
data: ByteBuf,
public val encrypted: Boolean
) : DefaultByteBufHolder(data) {
public val bytes: ByteArray = ByteBufUtil.getBytes(data, data.readerIndex(), data.readableBytes(), false)
public val crc32: Int = data.crc32()
private val compressed: ByteBuf,
private val uncompressed: ByteBuf?
) {
public val bytes: ByteArray =
ByteBufUtil.getBytes(compressed, compressed.readerIndex(), compressed.readableBytes(), false)
public val crc32: Int = compressed.crc32()
public val whirlpool: ByteArray = Whirlpool.whirlpool(bytes)
public val encrypted: Boolean = uncompressed == null
public val uncompressedLen: Int? = uncompressed?.readableBytes()
public val uncompressedCrc32: Int? = uncompressed?.crc32()
public fun release() {
compressed.release()
uncompressed?.release()
}
}
private class MasterIndex(
val index: Js5MasterIndex,
data: ByteBuf,
) : Container(data, false)
compressed: ByteBuf,
uncompressed: ByteBuf
) : Container(compressed, uncompressed)
public class Index(
archive: Int,
public val index: Js5Index,
data: ByteBuf,
) : Group(Js5Archive.ARCHIVESET, archive, data, index.version, false, false)
compressed: ByteBuf,
uncompressed: ByteBuf
) : Group(Js5Archive.ARCHIVESET, archive, compressed, uncompressed, index.version, false)
public open class Group(
public val archive: Int,
public val group: Int,
data: ByteBuf,
compressed: ByteBuf,
uncompressed: ByteBuf?,
public val version: Int,
public val versionTruncated: Boolean,
encrypted: Boolean
) : Container(data, encrypted)
public val versionTruncated: Boolean
) : Container(compressed, uncompressed)
public suspend fun import(
store: Store,
@@ -78,7 +88,7 @@ public class CacheImporter @Inject constructor(
// import master index
val masterIndex = createMasterIndex(store)
try {
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, false)
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
} finally {
masterIndex.release()
}
@@ -142,13 +152,13 @@ public class CacheImporter @Inject constructor(
description: String?
) {
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf)
val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf, uncompressed)
database.execute { connection ->
prepare(connection)
val gameId = getGameId(connection, game)
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, false)
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
}
}
}
@@ -156,6 +166,7 @@ public class CacheImporter @Inject constructor(
public suspend fun importMasterIndexAndGetIndexes(
masterIndex: Js5MasterIndex,
buf: ByteBuf,
uncompressed: ByteBuf,
gameId: Int,
build: Int,
timestamp: Instant,
@@ -177,7 +188,16 @@ public class CacheImporter @Inject constructor(
stmt.execute()
}
addMasterIndex(connection, MasterIndex(masterIndex, buf), gameId, build, timestamp, name, null, true)
addMasterIndex(
connection,
MasterIndex(masterIndex, buf, uncompressed),
gameId,
build,
timestamp,
name,
description = null,
overwrite = true
)
connection.prepareStatement(
"""
@@ -239,10 +259,15 @@ public class CacheImporter @Inject constructor(
}
}
public suspend fun importIndexAndGetMissingGroups(archive: Int, index: Js5Index, buf: ByteBuf): List<Int> {
public suspend fun importIndexAndGetMissingGroups(
archive: Int,
index: Js5Index,
buf: ByteBuf,
uncompressed: ByteBuf
): List<Int> {
return database.execute { connection ->
prepare(connection)
addIndex(connection, Index(archive, index, buf))
addIndex(connection, Index(archive, index, buf, uncompressed))
connection.prepareStatement(
"""
@@ -321,8 +346,8 @@ public class CacheImporter @Inject constructor(
alloc.buffer().use { uncompressed ->
index.write(uncompressed)
Js5Compression.compress(uncompressed, Js5CompressionType.UNCOMPRESSED).use { buf ->
return MasterIndex(index, buf.retain())
Js5Compression.compress(uncompressed.slice(), Js5CompressionType.UNCOMPRESSED).use { buf ->
return MasterIndex(index, buf.retain(), uncompressed.retain())
}
}
}
@@ -499,7 +524,6 @@ public class CacheImporter @Inject constructor(
store.read(archive, group).use { buf ->
var version = VersionTrailer.strip(buf) ?: return null
var versionTruncated = true
val encrypted = Js5Compression.isEncrypted(buf.slice())
/*
* Grab the non-truncated version from the Js5Index if we can
@@ -513,7 +537,14 @@ public class CacheImporter @Inject constructor(
}
}
return Group(archive, group, buf.retain(), version, versionTruncated, encrypted)
// TODO(gpe): avoid uncompressing twice (we do it in isEncrypted and uncompress)
val uncompressed = if (Js5Compression.isEncrypted(buf.slice())) {
null
} else {
Js5Compression.uncompress(buf.slice())
}
return Group(archive, group, buf.retain(), uncompressed, version, versionTruncated)
}
} catch (ex: IOException) {
return null
@@ -552,7 +583,7 @@ public class CacheImporter @Inject constructor(
private fun readIndex(store: Store, archive: Int): Index {
return store.read(Js5Archive.ARCHIVESET, archive).use { buf ->
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
Index(archive, Js5Index.read(uncompressed), buf.retain())
Index(archive, Js5Index.read(uncompressed.slice()), buf.retain(), uncompressed.retain())
}
}
}
@@ -671,6 +702,8 @@ public class CacheImporter @Inject constructor(
index INTEGER NOT NULL,
crc32 INTEGER NOT NULL,
whirlpool BYTEA NOT NULL,
uncompressed_length INTEGER NULL,
uncompressed_crc32 INTEGER NULL,
data BYTEA NOT NULL,
encrypted BOOLEAN NOT NULL
) ON COMMIT DROP
@@ -695,8 +728,8 @@ public class CacheImporter @Inject constructor(
connection.prepareStatement(
"""
INSERT INTO tmp_containers (index, crc32, whirlpool, data, encrypted)
VALUES (?, ?, ?, ?, ?)
INSERT INTO tmp_containers (index, crc32, whirlpool, data, uncompressed_length, uncompressed_crc32, encrypted)
VALUES (?, ?, ?, ?, ?, ?, ?)
""".trimIndent()
).use { stmt ->
for ((i, container) in containers.withIndex()) {
@@ -704,7 +737,9 @@ public class CacheImporter @Inject constructor(
stmt.setInt(2, container.crc32)
stmt.setBytes(3, container.whirlpool)
stmt.setBytes(4, container.bytes)
stmt.setBoolean(5, container.encrypted)
stmt.setObject(5, container.uncompressedLen, Types.INTEGER)
stmt.setObject(6, container.uncompressedCrc32, Types.INTEGER)
stmt.setBoolean(7, container.encrypted)
stmt.addBatch()
}
@@ -713,8 +748,8 @@ public class CacheImporter @Inject constructor(
connection.prepareStatement(
"""
INSERT INTO containers (crc32, whirlpool, data, encrypted)
SELECT t.crc32, t.whirlpool, t.data, t.encrypted
INSERT INTO containers (crc32, whirlpool, data, uncompressed_length, uncompressed_crc32, encrypted)
SELECT t.crc32, t.whirlpool, t.data, t.uncompressed_length, t.uncompressed_crc32, t.encrypted
FROM tmp_containers t
LEFT JOIN containers c ON c.whirlpool = t.whirlpool
WHERE c.whirlpool IS NULL

archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
@@ -133,13 +133,20 @@ public class Js5ChannelHandler(
throw Exception("Group checksum invalid")
}
// TODO(gpe): avoid uncompressing twice (we do it in isEncrypted and uncompress)
val uncompressed = if (Js5Compression.isEncrypted(response.data.slice())) {
null
} else {
Js5Compression.uncompress(response.data.slice())
}
groups += CacheImporter.Group(
response.archive,
response.group,
response.data.retain(),
uncompressed,
entry.version,
versionTruncated = false,
Js5Compression.isEncrypted(response.data.slice())
versionTruncated = false
)
}
@@ -160,26 +167,34 @@ public class Js5ChannelHandler(
}
private fun processMasterIndex(buf: ByteBuf) {
masterIndex = Js5Compression.uncompress(buf.slice()).use { uncompressed ->
Js5MasterIndex.read(uncompressed, masterIndexFormat)
}
val rawIndexes = runBlocking {
val name = "Downloaded from $hostname:$port"
importer.importMasterIndexAndGetIndexes(masterIndex!!, buf, gameId, build, Instant.now(), name)
}
try {
indexes = arrayOfNulls(rawIndexes.size)
for ((archive, index) in rawIndexes.withIndex()) {
if (index != null) {
processIndex(archive, index)
} else {
request(Js5Archive.ARCHIVESET, archive)
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
masterIndex = Js5MasterIndex.read(uncompressed.slice(), masterIndexFormat)
val rawIndexes = runBlocking {
val name = "Downloaded from $hostname:$port"
importer.importMasterIndexAndGetIndexes(
masterIndex!!,
buf,
uncompressed,
gameId,
build,
timestamp = Instant.now(),
name
)
}
try {
indexes = arrayOfNulls(rawIndexes.size)
for ((archive, index) in rawIndexes.withIndex()) {
if (index != null) {
processIndex(archive, index)
} else {
request(Js5Archive.ARCHIVESET, archive)
}
}
} finally {
rawIndexes.filterNotNull().forEach(ByteBuf::release)
}
} finally {
rawIndexes.filterNotNull().forEach(ByteBuf::release)
}
}
@@ -189,20 +204,20 @@ public class Js5ChannelHandler(
throw Exception("Index checksum invalid")
}
val index = Js5Compression.uncompress(buf.slice()).use { uncompressed ->
Js5Index.read(uncompressed)
}
indexes[archive] = index
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
val index = Js5Index.read(uncompressed.slice())
indexes[archive] = index
if (index.version != entry.version) {
throw Exception("Index version invalid")
}
if (index.version != entry.version) {
throw Exception("Index version invalid")
}
val groups = runBlocking {
importer.importIndexAndGetMissingGroups(archive, index, buf)
}
for (group in groups) {
request(archive, group)
val groups = runBlocking {
importer.importIndexAndGetMissingGroups(archive, index, buf, uncompressed)
}
for (group in groups) {
request(archive, group)
}
}
}

archive/src/main/kotlin/org/openrs2/archive/key/KeyBruteForcer.kt
@@ -1,6 +1,7 @@
package org.openrs2.archive.key
import io.netty.buffer.Unpooled
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.cache.Js5Compression
import org.openrs2.crypto.XteaKey
@@ -202,9 +203,7 @@ public class KeyBruteForcer @Inject constructor(
val containerId = rows.getLong(1)
val data = rows.getBytes(2)
if (validateKey(connection, data, key, keyId, containerId)) {
break
}
validateKey(connection, data, key, keyId, containerId)
}
}
}
@@ -268,15 +267,28 @@ public class KeyBruteForcer @Inject constructor(
return false
}
// TODO(gpe): avoid uncompressing twice (we do it here and in isKeyValid)
var len = 0
var crc32 = 0
Unpooled.wrappedBuffer(data).use { buf ->
Js5Compression.uncompress(buf, key).use { uncompressed ->
len = uncompressed.readableBytes()
crc32 = uncompressed.crc32()
}
}
connection.prepareStatement(
"""
UPDATE containers
SET key_id = ?
SET key_id = ?, uncompressed_length = ?, uncompressed_crc32 = ?
WHERE id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, keyId)
stmt.setLong(2, containerId)
stmt.setInt(2, len)
stmt.setInt(3, crc32)
stmt.setLong(4, containerId)
stmt.execute()
}

archive/src/main/resources/org/openrs2/archive/V1__init.sql
@@ -31,6 +31,8 @@ CREATE TABLE containers (
crc32 INTEGER NOT NULL,
whirlpool BYTEA UNIQUE NOT NULL,
data BYTEA NOT NULL,
uncompressed_length INTEGER NULL,
uncompressed_crc32 INTEGER NULL,
encrypted BOOLEAN NOT NULL,
key_id BIGINT NULL REFERENCES keys (id)
);
