Add uncompressed_{length,crc32} columns to the containers table

There's no real use for these yet, but they might be useful with NXT
caches.

We don't need a compressed_length column because it's easy to determine
the length of a BYTEA column within the database.

Signed-off-by: Graham <gpe@openrs2.org>
Graham 4 years ago
parent 177376f47e
commit 3289af5ddf
  1. 95
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
  2. 35
      archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
  3. 22
      archive/src/main/kotlin/org/openrs2/archive/key/KeyBruteForcer.kt
  4. 2
      archive/src/main/resources/org/openrs2/archive/V1__init.sql

@@ -3,7 +3,6 @@ package org.openrs2.archive.cache
import io.netty.buffer.ByteBuf import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufUtil import io.netty.buffer.ByteBufUtil
import io.netty.buffer.DefaultByteBufHolder
import io.netty.buffer.Unpooled import io.netty.buffer.Unpooled
import org.openrs2.buffer.crc32 import org.openrs2.buffer.crc32
import org.openrs2.buffer.use import org.openrs2.buffer.use
@@ -34,33 +33,44 @@ public class CacheImporter @Inject constructor(
private val alloc: ByteBufAllocator private val alloc: ByteBufAllocator
) { ) {
public abstract class Container( public abstract class Container(
data: ByteBuf, private val compressed: ByteBuf,
public val encrypted: Boolean private val uncompressed: ByteBuf?
) : DefaultByteBufHolder(data) { ) {
public val bytes: ByteArray = ByteBufUtil.getBytes(data, data.readerIndex(), data.readableBytes(), false) public val bytes: ByteArray =
public val crc32: Int = data.crc32() ByteBufUtil.getBytes(compressed, compressed.readerIndex(), compressed.readableBytes(), false)
public val crc32: Int = compressed.crc32()
public val whirlpool: ByteArray = Whirlpool.whirlpool(bytes) public val whirlpool: ByteArray = Whirlpool.whirlpool(bytes)
public val encrypted: Boolean = uncompressed == null
public val uncompressedLen: Int? = uncompressed?.readableBytes()
public val uncompressedCrc32: Int? = uncompressed?.crc32()
public fun release() {
compressed.release()
uncompressed?.release()
}
} }
private class MasterIndex( private class MasterIndex(
val index: Js5MasterIndex, val index: Js5MasterIndex,
data: ByteBuf, compressed: ByteBuf,
) : Container(data, false) uncompressed: ByteBuf
) : Container(compressed, uncompressed)
public class Index( public class Index(
archive: Int, archive: Int,
public val index: Js5Index, public val index: Js5Index,
data: ByteBuf, compressed: ByteBuf,
) : Group(Js5Archive.ARCHIVESET, archive, data, index.version, false, false) uncompressed: ByteBuf
) : Group(Js5Archive.ARCHIVESET, archive, compressed, uncompressed, index.version, false)
public open class Group( public open class Group(
public val archive: Int, public val archive: Int,
public val group: Int, public val group: Int,
data: ByteBuf, compressed: ByteBuf,
uncompressed: ByteBuf?,
public val version: Int, public val version: Int,
public val versionTruncated: Boolean, public val versionTruncated: Boolean
encrypted: Boolean ) : Container(compressed, uncompressed)
) : Container(data, encrypted)
public suspend fun import( public suspend fun import(
store: Store, store: Store,
@@ -78,7 +88,7 @@ public class CacheImporter @Inject constructor(
// import master index // import master index
val masterIndex = createMasterIndex(store) val masterIndex = createMasterIndex(store)
try { try {
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, false) addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
} finally { } finally {
masterIndex.release() masterIndex.release()
} }
@@ -142,13 +152,13 @@ public class CacheImporter @Inject constructor(
description: String? description: String?
) { ) {
Js5Compression.uncompress(buf.slice()).use { uncompressed -> Js5Compression.uncompress(buf.slice()).use { uncompressed ->
val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf) val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf, uncompressed)
database.execute { connection -> database.execute { connection ->
prepare(connection) prepare(connection)
val gameId = getGameId(connection, game) val gameId = getGameId(connection, game)
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, false) addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
} }
} }
} }
@@ -156,6 +166,7 @@ public class CacheImporter @Inject constructor(
public suspend fun importMasterIndexAndGetIndexes( public suspend fun importMasterIndexAndGetIndexes(
masterIndex: Js5MasterIndex, masterIndex: Js5MasterIndex,
buf: ByteBuf, buf: ByteBuf,
uncompressed: ByteBuf,
gameId: Int, gameId: Int,
build: Int, build: Int,
timestamp: Instant, timestamp: Instant,
@@ -177,7 +188,16 @@ public class CacheImporter @Inject constructor(
stmt.execute() stmt.execute()
} }
addMasterIndex(connection, MasterIndex(masterIndex, buf), gameId, build, timestamp, name, null, true) addMasterIndex(
connection,
MasterIndex(masterIndex, buf, uncompressed),
gameId,
build,
timestamp,
name,
description = null,
overwrite = true
)
connection.prepareStatement( connection.prepareStatement(
""" """
@@ -239,10 +259,15 @@ public class CacheImporter @Inject constructor(
} }
} }
public suspend fun importIndexAndGetMissingGroups(archive: Int, index: Js5Index, buf: ByteBuf): List<Int> { public suspend fun importIndexAndGetMissingGroups(
archive: Int,
index: Js5Index,
buf: ByteBuf,
uncompressed: ByteBuf
): List<Int> {
return database.execute { connection -> return database.execute { connection ->
prepare(connection) prepare(connection)
addIndex(connection, Index(archive, index, buf)) addIndex(connection, Index(archive, index, buf, uncompressed))
connection.prepareStatement( connection.prepareStatement(
""" """
@@ -321,8 +346,8 @@ public class CacheImporter @Inject constructor(
alloc.buffer().use { uncompressed -> alloc.buffer().use { uncompressed ->
index.write(uncompressed) index.write(uncompressed)
Js5Compression.compress(uncompressed, Js5CompressionType.UNCOMPRESSED).use { buf -> Js5Compression.compress(uncompressed.slice(), Js5CompressionType.UNCOMPRESSED).use { buf ->
return MasterIndex(index, buf.retain()) return MasterIndex(index, buf.retain(), uncompressed.retain())
} }
} }
} }
@@ -499,7 +524,6 @@ public class CacheImporter @Inject constructor(
store.read(archive, group).use { buf -> store.read(archive, group).use { buf ->
var version = VersionTrailer.strip(buf) ?: return null var version = VersionTrailer.strip(buf) ?: return null
var versionTruncated = true var versionTruncated = true
val encrypted = Js5Compression.isEncrypted(buf.slice())
/* /*
* Grab the non-truncated version from the Js5Index if we can * Grab the non-truncated version from the Js5Index if we can
@@ -513,7 +537,14 @@ public class CacheImporter @Inject constructor(
} }
} }
return Group(archive, group, buf.retain(), version, versionTruncated, encrypted) // TODO(gpe): avoid uncompressing twice (we do it in isEncrypted and uncompress)
val uncompressed = if (Js5Compression.isEncrypted(buf.slice())) {
null
} else {
Js5Compression.uncompress(buf.slice())
}
return Group(archive, group, buf.retain(), uncompressed, version, versionTruncated)
} }
} catch (ex: IOException) { } catch (ex: IOException) {
return null return null
@@ -552,7 +583,7 @@ public class CacheImporter @Inject constructor(
private fun readIndex(store: Store, archive: Int): Index { private fun readIndex(store: Store, archive: Int): Index {
return store.read(Js5Archive.ARCHIVESET, archive).use { buf -> return store.read(Js5Archive.ARCHIVESET, archive).use { buf ->
Js5Compression.uncompress(buf.slice()).use { uncompressed -> Js5Compression.uncompress(buf.slice()).use { uncompressed ->
Index(archive, Js5Index.read(uncompressed), buf.retain()) Index(archive, Js5Index.read(uncompressed.slice()), buf.retain(), uncompressed.retain())
} }
} }
} }
@@ -671,6 +702,8 @@ public class CacheImporter @Inject constructor(
index INTEGER NOT NULL, index INTEGER NOT NULL,
crc32 INTEGER NOT NULL, crc32 INTEGER NOT NULL,
whirlpool BYTEA NOT NULL, whirlpool BYTEA NOT NULL,
uncompressed_length INTEGER NULL,
uncompressed_crc32 INTEGER NULL,
data BYTEA NOT NULL, data BYTEA NOT NULL,
encrypted BOOLEAN NOT NULL encrypted BOOLEAN NOT NULL
) ON COMMIT DROP ) ON COMMIT DROP
@@ -695,8 +728,8 @@ public class CacheImporter @Inject constructor(
connection.prepareStatement( connection.prepareStatement(
""" """
INSERT INTO tmp_containers (index, crc32, whirlpool, data, encrypted) INSERT INTO tmp_containers (index, crc32, whirlpool, data, uncompressed_length, uncompressed_crc32, encrypted)
VALUES (?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?)
""".trimIndent() """.trimIndent()
).use { stmt -> ).use { stmt ->
for ((i, container) in containers.withIndex()) { for ((i, container) in containers.withIndex()) {
@@ -704,7 +737,9 @@ public class CacheImporter @Inject constructor(
stmt.setInt(2, container.crc32) stmt.setInt(2, container.crc32)
stmt.setBytes(3, container.whirlpool) stmt.setBytes(3, container.whirlpool)
stmt.setBytes(4, container.bytes) stmt.setBytes(4, container.bytes)
stmt.setBoolean(5, container.encrypted) stmt.setObject(5, container.uncompressedLen, Types.INTEGER)
stmt.setObject(6, container.uncompressedCrc32, Types.INTEGER)
stmt.setBoolean(7, container.encrypted)
stmt.addBatch() stmt.addBatch()
} }
@@ -713,8 +748,8 @@ public class CacheImporter @Inject constructor(
connection.prepareStatement( connection.prepareStatement(
""" """
INSERT INTO containers (crc32, whirlpool, data, encrypted) INSERT INTO containers (crc32, whirlpool, data, uncompressed_length, uncompressed_crc32, encrypted)
SELECT t.crc32, t.whirlpool, t.data, t.encrypted SELECT t.crc32, t.whirlpool, t.data, t.uncompressed_length, t.uncompressed_crc32, t.encrypted
FROM tmp_containers t FROM tmp_containers t
LEFT JOIN containers c ON c.whirlpool = t.whirlpool LEFT JOIN containers c ON c.whirlpool = t.whirlpool
WHERE c.whirlpool IS NULL WHERE c.whirlpool IS NULL

@@ -133,13 +133,20 @@ public class Js5ChannelHandler(
throw Exception("Group checksum invalid") throw Exception("Group checksum invalid")
} }
// TODO(gpe): avoid uncompressing twice (we do it in isEncrypted and uncompress)
val uncompressed = if (Js5Compression.isEncrypted(response.data.slice())) {
null
} else {
Js5Compression.uncompress(response.data.slice())
}
groups += CacheImporter.Group( groups += CacheImporter.Group(
response.archive, response.archive,
response.group, response.group,
response.data.retain(), response.data.retain(),
uncompressed,
entry.version, entry.version,
versionTruncated = false, versionTruncated = false
Js5Compression.isEncrypted(response.data.slice())
) )
} }
@@ -160,13 +167,20 @@ public class Js5ChannelHandler(
} }
private fun processMasterIndex(buf: ByteBuf) { private fun processMasterIndex(buf: ByteBuf) {
masterIndex = Js5Compression.uncompress(buf.slice()).use { uncompressed -> Js5Compression.uncompress(buf.slice()).use { uncompressed ->
Js5MasterIndex.read(uncompressed, masterIndexFormat) masterIndex = Js5MasterIndex.read(uncompressed.slice(), masterIndexFormat)
}
val rawIndexes = runBlocking { val rawIndexes = runBlocking {
val name = "Downloaded from $hostname:$port" val name = "Downloaded from $hostname:$port"
importer.importMasterIndexAndGetIndexes(masterIndex!!, buf, gameId, build, Instant.now(), name) importer.importMasterIndexAndGetIndexes(
masterIndex!!,
buf,
uncompressed,
gameId,
build,
timestamp = Instant.now(),
name
)
} }
try { try {
indexes = arrayOfNulls(rawIndexes.size) indexes = arrayOfNulls(rawIndexes.size)
@@ -182,6 +196,7 @@ public class Js5ChannelHandler(
rawIndexes.filterNotNull().forEach(ByteBuf::release) rawIndexes.filterNotNull().forEach(ByteBuf::release)
} }
} }
}
private fun processIndex(archive: Int, buf: ByteBuf) { private fun processIndex(archive: Int, buf: ByteBuf) {
val entry = masterIndex!!.entries[archive] val entry = masterIndex!!.entries[archive]
@@ -189,9 +204,8 @@ public class Js5ChannelHandler(
throw Exception("Index checksum invalid") throw Exception("Index checksum invalid")
} }
val index = Js5Compression.uncompress(buf.slice()).use { uncompressed -> Js5Compression.uncompress(buf.slice()).use { uncompressed ->
Js5Index.read(uncompressed) val index = Js5Index.read(uncompressed.slice())
}
indexes[archive] = index indexes[archive] = index
if (index.version != entry.version) { if (index.version != entry.version) {
@@ -199,12 +213,13 @@ public class Js5ChannelHandler(
} }
val groups = runBlocking { val groups = runBlocking {
importer.importIndexAndGetMissingGroups(archive, index, buf) importer.importIndexAndGetMissingGroups(archive, index, buf, uncompressed)
} }
for (group in groups) { for (group in groups) {
request(archive, group) request(archive, group)
} }
} }
}
private fun request(archive: Int, group: Int) { private fun request(archive: Int, group: Int) {
pendingRequests += Js5Request.Group(false, archive, group) pendingRequests += Js5Request.Group(false, archive, group)

@@ -1,6 +1,7 @@
package org.openrs2.archive.key package org.openrs2.archive.key
import io.netty.buffer.Unpooled import io.netty.buffer.Unpooled
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use import org.openrs2.buffer.use
import org.openrs2.cache.Js5Compression import org.openrs2.cache.Js5Compression
import org.openrs2.crypto.XteaKey import org.openrs2.crypto.XteaKey
@@ -202,9 +203,7 @@ public class KeyBruteForcer @Inject constructor(
val containerId = rows.getLong(1) val containerId = rows.getLong(1)
val data = rows.getBytes(2) val data = rows.getBytes(2)
if (validateKey(connection, data, key, keyId, containerId)) { validateKey(connection, data, key, keyId, containerId)
break
}
} }
} }
} }
@@ -268,15 +267,28 @@ public class KeyBruteForcer @Inject constructor(
return false return false
} }
// TODO(gpe): avoid uncompressing twice (we do it here and in isKeyValid)
var len = 0
var crc32 = 0
Unpooled.wrappedBuffer(data).use { buf ->
Js5Compression.uncompress(buf, key).use { uncompressed ->
len = uncompressed.readableBytes()
crc32 = uncompressed.crc32()
}
}
connection.prepareStatement( connection.prepareStatement(
""" """
UPDATE containers UPDATE containers
SET key_id = ? SET key_id = ?, uncompressed_length = ?, uncompressed_crc32 = ?
WHERE id = ? WHERE id = ?
""".trimIndent() """.trimIndent()
).use { stmt -> ).use { stmt ->
stmt.setLong(1, keyId) stmt.setLong(1, keyId)
stmt.setLong(2, containerId) stmt.setInt(2, len)
stmt.setInt(3, crc32)
stmt.setLong(4, containerId)
stmt.execute() stmt.execute()
} }

@@ -31,6 +31,8 @@ CREATE TABLE containers (
crc32 INTEGER NOT NULL, crc32 INTEGER NOT NULL,
whirlpool BYTEA UNIQUE NOT NULL, whirlpool BYTEA UNIQUE NOT NULL,
data BYTEA NOT NULL, data BYTEA NOT NULL,
uncompressed_length INTEGER NULL,
uncompressed_crc32 INTEGER NULL,
encrypted BOOLEAN NOT NULL, encrypted BOOLEAN NOT NULL,
key_id BIGINT NULL REFERENCES keys (id) key_id BIGINT NULL REFERENCES keys (id)
); );

Loading…
Cancel
Save