diff --git a/all/build.gradle.kts b/all/build.gradle.kts index 62e01a5d..5420a580 100644 --- a/all/build.gradle.kts +++ b/all/build.gradle.kts @@ -15,6 +15,7 @@ application { } dependencies { + implementation(project(":archive")) implementation(project(":bundler")) implementation(project(":compress-cli")) implementation(project(":crc32")) diff --git a/all/src/main/java/dev/openrs2/Command.kt b/all/src/main/java/dev/openrs2/Command.kt index 30316d7a..5edf94a3 100644 --- a/all/src/main/java/dev/openrs2/Command.kt +++ b/all/src/main/java/dev/openrs2/Command.kt @@ -2,6 +2,7 @@ package dev.openrs2 import com.github.ajalt.clikt.core.NoOpCliktCommand import com.github.ajalt.clikt.core.subcommands +import dev.openrs2.archive.ArchiveCommand import dev.openrs2.bundler.BundleCommand import dev.openrs2.compress.cli.CompressCommand import dev.openrs2.crc32.Crc32Command @@ -15,6 +16,7 @@ public fun main(args: Array): Unit = Command().main(args) public class Command : NoOpCliktCommand(name = "openrs2") { init { subcommands( + ArchiveCommand(), BundleCommand(), CompressCommand(), Crc32Command(), diff --git a/archive/build.gradle.kts b/archive/build.gradle.kts new file mode 100644 index 00000000..86290287 --- /dev/null +++ b/archive/build.gradle.kts @@ -0,0 +1,40 @@ +plugins { + `maven-publish` + application + kotlin("jvm") +} + +application { + mainClassName = "dev.openrs2.archive.ArchiveCommandKt" +} + +dependencies { + api("com.github.ajalt.clikt:clikt:${Versions.clikt}") + + implementation(project(":buffer")) + implementation(project(":cache")) + implementation(project(":db")) + implementation(project(":json")) + implementation(project(":util")) + implementation("com.google.guava:guava:${Versions.guava}") + implementation("org.flywaydb:flyway-core:${Versions.flyway}") + implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:${Versions.kotlinCoroutines}") + implementation("org.postgresql:postgresql:${Versions.postgres}") +} + +publishing { + publications.create("maven") { 
+ from(components["java"]) + + pom { + packaging = "jar" + name.set("OpenRS2 Archive") + description.set( + """ + Service for archiving clients, caches and XTEA keys in an + efficient deduplicated format. + """.trimIndent() + ) + } + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/ArchiveCommand.kt b/archive/src/main/java/dev/openrs2/archive/ArchiveCommand.kt new file mode 100644 index 00000000..28831b68 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/ArchiveCommand.kt @@ -0,0 +1,19 @@ +package dev.openrs2.archive + +import com.github.ajalt.clikt.core.NoOpCliktCommand +import com.github.ajalt.clikt.core.subcommands +import dev.openrs2.archive.cache.CacheCommand +import dev.openrs2.archive.key.KeyCommand +import dev.openrs2.archive.name.NameCommand + +public fun main(args: Array<String>): Unit = ArchiveCommand().main(args) + +public class ArchiveCommand : NoOpCliktCommand(name = "archive") { + init { + subcommands( + CacheCommand(), + KeyCommand(), + NameCommand() + ) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/ArchiveModule.kt b/archive/src/main/java/dev/openrs2/archive/ArchiveModule.kt new file mode 100644 index 00000000..fc292ac6 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/ArchiveModule.kt @@ -0,0 +1,18 @@ +package dev.openrs2.archive + +import com.google.inject.AbstractModule +import com.google.inject.Scopes +import dev.openrs2.buffer.BufferModule +import dev.openrs2.db.Database +import dev.openrs2.json.JsonModule + +public object ArchiveModule : AbstractModule() { + override fun configure() { + install(BufferModule) + install(JsonModule) + + bind(Database::class.java) + .toProvider(DatabaseProvider::class.java) + .`in`(Scopes.SINGLETON) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/DatabaseProvider.kt b/archive/src/main/java/dev/openrs2/archive/DatabaseProvider.kt new file mode 100644 index 00000000..f22a0119 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/DatabaseProvider.kt @@ 
-0,0 +1,25 @@ +package dev.openrs2.archive + +import dev.openrs2.db.Database +import dev.openrs2.db.PostgresDeadlockDetector +import javax.inject.Provider +import org.flywaydb.core.Flyway +import org.postgresql.ds.PGSimpleDataSource + +public class DatabaseProvider : Provider<Database> { + override fun get(): Database { + val dataSource = PGSimpleDataSource() + // TODO(gpe): make the URL configurable + dataSource.setUrl("jdbc:postgresql://localhost/runearchive?user=gpe&password=gpe") + + Flyway.configure() + .dataSource(dataSource) + .locations("classpath:/dev/openrs2/archive") + .load() + .migrate() + + // TODO(gpe): wrap dataSource with HikariCP? how do we close the pool? + + return Database(dataSource, deadlockDetector = PostgresDeadlockDetector) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/cache/CacheCommand.kt b/archive/src/main/java/dev/openrs2/archive/cache/CacheCommand.kt new file mode 100644 index 00000000..6793b34e --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/cache/CacheCommand.kt @@ -0,0 +1,13 @@ +package dev.openrs2.archive.cache + +import com.github.ajalt.clikt.core.NoOpCliktCommand +import com.github.ajalt.clikt.core.subcommands + +public class CacheCommand : NoOpCliktCommand(name = "cache") { + init { + subcommands( + ImportCommand(), + ExportCommand() + ) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/cache/CacheExporter.kt b/archive/src/main/java/dev/openrs2/archive/cache/CacheExporter.kt new file mode 100644 index 00000000..07d026be --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/cache/CacheExporter.kt @@ -0,0 +1,65 @@ +package dev.openrs2.archive.cache + +import dev.openrs2.buffer.use +import dev.openrs2.cache.Store +import dev.openrs2.db.Database +import io.netty.buffer.ByteBufAllocator +import io.netty.buffer.Unpooled +import javax.inject.Inject +import javax.inject.Singleton + +@Singleton +public class CacheExporter @Inject constructor( + private val database: Database, + private val alloc: 
ByteBufAllocator +) { + public suspend fun export(id: Long, store: Store) { + // TODO(gpe): think about what to do if there is a collision + database.execute { connection -> + connection.prepareStatement( + """ + SELECT 255::uint1, ci.archive_id::INTEGER, c.data, NULL + FROM cache_indexes ci + JOIN containers c ON c.id = ci.container_id + WHERE ci.cache_id = ? + UNION ALL + SELECT ci.archive_id, ig.group_id, c.data, g.truncated_version + FROM cache_indexes ci + JOIN index_groups ig ON ig.container_id = ci.container_id + JOIN groups g ON g.archive_id = ci.archive_id AND g.group_id = ig.group_id AND g.truncated_version = ig.version & 65535 + JOIN containers c ON c.id = g.container_id AND c.crc32 = ig.crc32 + WHERE ci.cache_id = ? + """.trimIndent() + ).use { stmt -> + stmt.fetchSize = BATCH_SIZE + stmt.setLong(1, id) + stmt.setLong(2, id) + + stmt.executeQuery().use { rows -> + alloc.buffer(2, 2).use { versionBuf -> + while (rows.next()) { + val archive = rows.getInt(1) + val group = rows.getInt(2) + val bytes = rows.getBytes(3) + val version = rows.getInt(4) + val versionNull = rows.wasNull() + + versionBuf.clear() + if (!versionNull) { + versionBuf.writeShort(version) + } + + Unpooled.wrappedBuffer(Unpooled.wrappedBuffer(bytes), versionBuf.retain()).use { buf -> + store.write(archive, group, buf) + } + } + } + } + } + } + } + + private companion object { + private val BATCH_SIZE = 1024 + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/cache/CacheImporter.kt b/archive/src/main/java/dev/openrs2/archive/cache/CacheImporter.kt new file mode 100644 index 00000000..9acbb9cc --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/cache/CacheImporter.kt @@ -0,0 +1,375 @@ +package dev.openrs2.archive.cache + +import dev.openrs2.buffer.crc32 +import dev.openrs2.buffer.use +import dev.openrs2.cache.Js5Archive +import dev.openrs2.cache.Js5Compression +import dev.openrs2.cache.Js5Index +import dev.openrs2.cache.Store +import dev.openrs2.cache.VersionTrailer 
+import dev.openrs2.crypto.Whirlpool +import dev.openrs2.db.Database +import io.netty.buffer.ByteBuf +import io.netty.buffer.ByteBufAllocator +import io.netty.buffer.ByteBufUtil +import io.netty.util.ReferenceCounted +import java.io.IOException +import java.sql.Connection +import java.sql.Types +import javax.inject.Inject +import javax.inject.Singleton + +@Singleton +public class CacheImporter @Inject constructor( + private val database: Database, + private val alloc: ByteBufAllocator +) { + private abstract class Container( + private val data: ByteBuf + ) : ReferenceCounted by data { + val bytes: ByteArray = ByteBufUtil.getBytes(data, data.readerIndex(), data.readableBytes(), false) + val crc32 = data.crc32() + val whirlpool = Whirlpool.whirlpool(bytes) + abstract val encrypted: Boolean + } + + private class Index( + val archive: Int, + val index: Js5Index, + data: ByteBuf + ) : Container(data) { + override val encrypted: Boolean = false + } + + private class Group( + val archive: Int, + val group: Int, + data: ByteBuf, + val version: Int, + override val encrypted: Boolean + ) : Container(data) + + public suspend fun import(store: Store) { + database.execute { connection -> + connection.prepareStatement( + """ + LOCK TABLE containers IN EXCLUSIVE MODE + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + // create temporary tables + connection.prepareStatement( + """ + CREATE TEMPORARY TABLE tmp_containers ( + index INTEGER NOT NULL, + crc32 INTEGER NOT NULL, + whirlpool BYTEA NOT NULL, + data BYTEA NOT NULL, + encrypted BOOLEAN NOT NULL + ) ON COMMIT DROP + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + // import indexes + val indexes = mutableListOf<Index>() + try { + for (archive in store.list(Js5Archive.ARCHIVESET)) { + indexes += readIndex(store, archive) + } + + val cacheId = addCache(connection, indexes) + + for (index in indexes) { + addIndex(connection, cacheId, index) + } + } finally { + indexes.forEach(Index::release) + } + + // import 
groups + val groups = mutableListOf<Group>() + try { + for (archive in store.list()) { + if (archive == Js5Archive.ARCHIVESET) { + continue + } + + for (id in store.list(archive)) { + val group = readGroup(store, archive, id) ?: continue + groups += group + + if (groups.size >= BATCH_SIZE) { + addGroups(connection, groups) + + groups.forEach(Group::release) + groups.clear() + } + } + } + + if (groups.isNotEmpty()) { + addGroups(connection, groups) + } + } finally { + groups.forEach(Group::release) + } + } + } + + private fun readGroup(store: Store, archive: Int, group: Int): Group? { + try { + store.read(archive, group).use { buf -> + val version = VersionTrailer.strip(buf) ?: return null + val encrypted = Js5Compression.isEncrypted(buf.slice()) + return Group(archive, group, buf.retain(), version, encrypted) + } + } catch (ex: IOException) { + return null + } + } + + private fun addContainer(connection: Connection, container: Container): Long { + return addContainers(connection, listOf(container)).single() + } + + private fun addContainers(connection: Connection, containers: List<Container>): List<Long> { + connection.prepareStatement( + """ + TRUNCATE TABLE tmp_containers + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + connection.prepareStatement( + """ + INSERT INTO tmp_containers (index, crc32, whirlpool, data, encrypted) + VALUES (?, ?, ?, ?, ?) 
+ """.trimIndent() + ).use { stmt -> + for ((i, container) in containers.withIndex()) { + stmt.setInt(1, i) + stmt.setInt(2, container.crc32) + stmt.setBytes(3, container.whirlpool) + stmt.setBytes(4, container.bytes) + stmt.setBoolean(5, container.encrypted) + stmt.addBatch() + } + + stmt.executeBatch() + } + + connection.prepareStatement( + """ + INSERT INTO containers (crc32, whirlpool, data, encrypted) + SELECT t.crc32, t.whirlpool, t.data, t.encrypted + FROM tmp_containers t + LEFT JOIN containers c ON c.whirlpool = t.whirlpool + WHERE c.whirlpool IS NULL + ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + val ids = mutableListOf<Long>() + + connection.prepareStatement( + """ + SELECT c.id + FROM tmp_containers t + JOIN containers c ON c.whirlpool = t.whirlpool + ORDER BY t.index ASC + """.trimIndent() + ).use { stmt -> + stmt.executeQuery().use { rows -> + while (rows.next()) { + ids += rows.getLong(1) + } + } + } + + check(ids.size == containers.size) + return ids + } + + private fun addGroups(connection: Connection, groups: List<Group>) { + val containerIds = addContainers(connection, groups) + + connection.prepareStatement( + """ + INSERT INTO groups (archive_id, group_id, container_id, truncated_version) + VALUES (?, ?, ?, ?) 
+ ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + for ((i, group) in groups.withIndex()) { + stmt.setInt(1, group.archive) + stmt.setInt(2, group.group) + stmt.setLong(3, containerIds[i]) + stmt.setInt(4, group.version) + stmt.addBatch() + } + + stmt.executeBatch() + } + } + + private fun readIndex(store: Store, archive: Int): Index { + return store.read(Js5Archive.ARCHIVESET, archive).use { buf -> + Js5Compression.uncompress(buf.slice()).use { uncompressed -> + Index(archive, Js5Index.read(uncompressed), buf.retain()) + } + } + } + + private fun addCache(connection: Connection, indexes: List): Long { + val len = indexes.size * (1 + Whirlpool.DIGESTBYTES) + val whirlpool = alloc.buffer(len, len).use { buf -> + for (index in indexes) { + buf.writeByte(index.archive) + buf.writeBytes(index.whirlpool) + } + + Whirlpool.whirlpool(ByteBufUtil.getBytes(buf, 0, buf.readableBytes(), false)) + } + + connection.prepareStatement( + """ + SELECT id + FROM caches + WHERE whirlpool = ? + """.trimIndent() + ).use { stmt -> + stmt.setBytes(1, whirlpool) + + stmt.executeQuery().use { rows -> + if (rows.next()) { + return rows.getLong(1) + } + } + } + + connection.prepareStatement( + """ + INSERT INTO caches (whirlpool) + VALUES (?) + ON CONFLICT DO NOTHING + RETURNING id + """.trimIndent() + ).use { stmt -> + stmt.setBytes(1, whirlpool) + + stmt.executeQuery().use { rows -> + if (rows.next()) { + rows.getLong(1) + } + } + } + + connection.prepareStatement( + """ + SELECT id + FROM caches + WHERE whirlpool = ? + """.trimIndent() + ).use { stmt -> + stmt.setBytes(1, whirlpool) + + stmt.executeQuery().use { rows -> + check(rows.next()) + return rows.getLong(1) + } + } + } + + // TODO(gpe): skip most of this function if we encounter a conflict? + private fun addIndex(connection: Connection, cacheId: Long, index: Index) { + val containerId = addContainer(connection, index) + + connection.prepareStatement( + """ + INSERT INTO indexes (container_id, version) + VALUES (?, ?) 
+ ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, containerId) + stmt.setInt(2, index.index.version) + stmt.execute() + } + + connection.prepareStatement( + """ + INSERT INTO cache_indexes (cache_id, archive_id, container_id) + VALUES (?, ?, ?) + ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, cacheId) + stmt.setInt(2, index.archive) + stmt.setLong(3, containerId) + stmt.execute() + } + + connection.prepareStatement( + """ + INSERT INTO index_groups (container_id, group_id, crc32, whirlpool, version, name_hash) + VALUES (?, ?, ?, ?, ?, ?) + ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + for (group in index.index) { + stmt.setLong(1, containerId) + stmt.setInt(2, group.id) + stmt.setInt(3, group.checksum) + stmt.setBytes(4, group.digest) + stmt.setInt(5, group.version) + + if (index.index.hasNames) { + stmt.setInt(6, group.nameHash) + } else { + stmt.setNull(6, Types.INTEGER) + } + + stmt.addBatch() + } + + stmt.executeBatch() + } + + connection.prepareStatement( + """ + INSERT INTO index_files (container_id, group_id, file_id, name_hash) + VALUES (?, ?, ?, ?) 
+ ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + for (group in index.index) { + for (file in group) { + stmt.setLong(1, containerId) + stmt.setInt(2, group.id) + stmt.setInt(3, file.id) + + if (index.index.hasNames) { + stmt.setInt(4, file.nameHash) + } else { + stmt.setNull(4, Types.INTEGER) + } + + stmt.addBatch() + } + } + + stmt.executeBatch() + } + } + + private companion object { + private val BATCH_SIZE = 1024 + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/cache/ExportCommand.kt b/archive/src/main/java/dev/openrs2/archive/cache/ExportCommand.kt new file mode 100644 index 00000000..73051805 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/cache/ExportCommand.kt @@ -0,0 +1,29 @@ +package dev.openrs2.archive.cache + +import com.github.ajalt.clikt.core.CliktCommand +import com.github.ajalt.clikt.parameters.arguments.argument +import com.github.ajalt.clikt.parameters.types.long +import com.github.ajalt.clikt.parameters.types.path +import com.google.inject.Guice +import dev.openrs2.archive.ArchiveModule +import dev.openrs2.cache.DiskStore +import kotlinx.coroutines.runBlocking + +public class ExportCommand : CliktCommand(name = "export") { + private val id by argument().long() + private val output by argument().path( + mustExist = true, + canBeFile = false, + mustBeReadable = true, + mustBeWritable = true + ) + + override fun run(): Unit = runBlocking { + val injector = Guice.createInjector(ArchiveModule) + val exporter = injector.getInstance(CacheExporter::class.java) + + DiskStore.create(output).use { store -> + exporter.export(id, store) + } + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/cache/ImportCommand.kt b/archive/src/main/java/dev/openrs2/archive/cache/ImportCommand.kt new file mode 100644 index 00000000..f6b062ca --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/cache/ImportCommand.kt @@ -0,0 +1,26 @@ +package dev.openrs2.archive.cache + +import 
com.github.ajalt.clikt.core.CliktCommand +import com.github.ajalt.clikt.parameters.arguments.argument +import com.github.ajalt.clikt.parameters.types.path +import com.google.inject.Guice +import dev.openrs2.archive.ArchiveModule +import dev.openrs2.cache.Store +import kotlinx.coroutines.runBlocking + +public class ImportCommand : CliktCommand(name = "import") { + private val input by argument().path( + mustExist = true, + canBeFile = false, + mustBeReadable = true + ) + + override fun run(): Unit = runBlocking { + val injector = Guice.createInjector(ArchiveModule) + val importer = injector.getInstance(CacheImporter::class.java) + + Store.open(input).use { store -> + importer.import(store) + } + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/BruteForceCommand.kt b/archive/src/main/java/dev/openrs2/archive/key/BruteForceCommand.kt new file mode 100644 index 00000000..c21b4be8 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/BruteForceCommand.kt @@ -0,0 +1,15 @@ +package dev.openrs2.archive.key + +import com.github.ajalt.clikt.core.CliktCommand +import com.google.inject.Guice +import dev.openrs2.archive.ArchiveModule +import kotlinx.coroutines.runBlocking + +public class BruteForceCommand : CliktCommand(name = "brute-force") { + override fun run(): Unit = runBlocking { + val injector = Guice.createInjector(ArchiveModule) + val bruteForcer = injector.getInstance(KeyBruteForcer::class.java) + + bruteForcer.bruteForce() + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/HexKeyReader.kt b/archive/src/main/java/dev/openrs2/archive/key/HexKeyReader.kt new file mode 100644 index 00000000..086e8e98 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/HexKeyReader.kt @@ -0,0 +1,13 @@ +package dev.openrs2.archive.key + +import dev.openrs2.crypto.XteaKey +import java.io.InputStream + +public object HexKeyReader : KeyReader { + override fun read(input: InputStream): Sequence { + return input.bufferedReader() + 
.lineSequence() + .map(XteaKey::fromHexOrNull) + .filterNotNull() + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/ImportCommand.kt b/archive/src/main/java/dev/openrs2/archive/key/ImportCommand.kt new file mode 100644 index 00000000..2f4d7827 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/ImportCommand.kt @@ -0,0 +1,22 @@ +package dev.openrs2.archive.key + +import com.github.ajalt.clikt.core.CliktCommand +import com.github.ajalt.clikt.parameters.arguments.argument +import com.github.ajalt.clikt.parameters.types.path +import com.google.inject.Guice +import dev.openrs2.archive.ArchiveModule +import kotlinx.coroutines.runBlocking + +public class ImportCommand : CliktCommand(name = "import") { + private val input by argument().path( + mustExist = true, + mustBeReadable = true + ) + + override fun run(): Unit = runBlocking { + val injector = Guice.createInjector(ArchiveModule) + val importer = injector.getInstance(KeyImporter::class.java) + + importer.import(input) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/JsonKeyReader.kt b/archive/src/main/java/dev/openrs2/archive/key/JsonKeyReader.kt new file mode 100644 index 00000000..fe39ec21 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/JsonKeyReader.kt @@ -0,0 +1,32 @@ +package dev.openrs2.archive.key + +import com.fasterxml.jackson.databind.ObjectMapper +import dev.openrs2.crypto.XteaKey +import dev.openrs2.json.Json +import java.io.InputStream +import javax.inject.Inject +import javax.inject.Singleton + +@Singleton +public class JsonKeyReader @Inject constructor( + @Json private val mapper: ObjectMapper +) : KeyReader { + override fun read(input: InputStream): Sequence { + return sequence { + for (mapSquare in mapper.readTree(input)) { + val key = mapSquare["key"] ?: mapSquare["keys"] ?: continue + + if (key.size() != 4) { + continue + } + + val k0 = key[0].asText().toIntOrNull() ?: continue + val k1 = key[1].asText().toIntOrNull() ?: continue + 
val k2 = key[2].asText().toIntOrNull() ?: continue + val k3 = key[3].asText().toIntOrNull() ?: continue + + yield(XteaKey(k0, k1, k2, k3)) + } + } + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/KeyBruteForcer.kt b/archive/src/main/java/dev/openrs2/archive/key/KeyBruteForcer.kt new file mode 100644 index 00000000..f0448706 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/KeyBruteForcer.kt @@ -0,0 +1,288 @@ +package dev.openrs2.archive.key + +import dev.openrs2.buffer.use +import dev.openrs2.cache.Js5Compression +import dev.openrs2.crypto.XteaKey +import dev.openrs2.db.Database +import io.netty.buffer.Unpooled +import java.sql.Connection +import java.sql.Types +import javax.inject.Inject +import javax.inject.Singleton + +@Singleton +public class KeyBruteForcer @Inject constructor( + private val database: Database +) { + /* + * The code for writing to the containers and keys tables ensures that the + * row IDs are allocated monotonically (by forbidding any other + * transactions from writing simultaneously with an EXCLUSIVE table lock). + * + * Rather than storing a list of (container, key) pairs which have yet to + * be tested (or have already been tested), which would take O(n*m) space, + * the containers/keys are tested in order. This means we only need to + * store the IDs of the last container/key tested. + * + * If the container/key combinations are represented as a matrix, it looks + * like the diagram below: + * + * containers -> + * +----------+----------+ + * k |##########| | + * e |##########| A | + * y |##########| | + * s +----------+----------+ + * | | | + * | | C | B | + * v | | | + * +----------+----------+ + * + * The shaded are represents combinations that have already been tried. + * + * When a new container is inserted, we test it against every key in the + * shaded area (quadrant A). + * + * When a new key is inserted, we test it against every container in the + * shaded area (quadrant C). 
+ * + * If keys and containers are inserted simultaneously, we take care to + * avoid testing them twice (quadrant B) by testing new containers against + * all keys but not vice-versa. + * + * This code can't tolerate new IDs being inserted while it runs, so it + * locks the tables in SHARE mode. This prevents the import process from + * running (which takes EXCLUSIVE locks) but allows other processes to read + * from the tables. + */ + public suspend fun bruteForce() { + database.execute { connection -> + connection.prepareStatement( + """ + LOCK TABLE containers, keys IN SHARE MODE + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + bruteForceNewContainers(connection) // A, B + bruteForceNewKeys(connection) // C + } + } + + private fun bruteForceNewContainers(connection: Connection) { + var lastContainerId: Long? + + connection.prepareStatement( + """ + SELECT last_container_id + FROM brute_force_iterator + FOR UPDATE + """.trimIndent() + ).use { stmt -> + stmt.executeQuery().use { rows -> + check(rows.next()) + + lastContainerId = rows.getLong(1) + if (rows.wasNull()) { + lastContainerId = null + } + } + } + + while (true) { + val pair = nextContainer(connection, lastContainerId) ?: break + val (containerId, data) = pair + + connection.prepareStatement( + """ + SELECT id, (key).k0, (key).k1, (key).k2, (key).k3 + FROM keys + """.trimIndent() + ).use { stmt -> + stmt.fetchSize = BATCH_SIZE + + stmt.executeQuery().use { rows -> + while (rows.next()) { + val keyId = rows.getLong(1) + + val k0 = rows.getInt(2) + val k1 = rows.getInt(3) + val k2 = rows.getInt(4) + val k3 = rows.getInt(5) + val key = XteaKey(k0, k1, k2, k3) + + val valid = Unpooled.wrappedBuffer(data).use { buf -> + Js5Compression.isKeyValid(buf, key) + } + + if (valid) { + connection.prepareStatement( + """ + UPDATE containers + SET key_id = ? + WHERE id = ? 
+ """.trimIndent() + ).use { stmt -> + stmt.setLong(1, keyId) + stmt.setLong(2, containerId) + stmt.execute() + } + break + } + } + } + + lastContainerId = containerId + } + } + + connection.prepareStatement( + """ + UPDATE brute_force_iterator + SET last_container_id = ? + """.trimIndent() + ).use { stmt -> + stmt.setObject(1, lastContainerId, Types.BIGINT) + stmt.execute() + } + } + + private fun nextContainer(connection: Connection, lastContainerId: Long?): Pair? { + connection.prepareStatement( + """ + SELECT id, data + FROM containers + WHERE (? IS NULL OR id > ?) AND encrypted AND key_id IS NULL + LIMIT 1 + """.trimIndent() + ).use { stmt -> + stmt.setObject(1, lastContainerId, Types.BIGINT) + stmt.setObject(2, lastContainerId, Types.BIGINT) + + stmt.executeQuery().use { rows -> + if (!rows.next()) { + return null + } + + val containerId = rows.getLong(1) + val data = rows.getBytes(2) + return Pair(containerId, data) + } + } + } + + private fun bruteForceNewKeys(connection: Connection) { + var lastKeyId: Long? + var lastContainerId: Long + + connection.prepareStatement( + """ + SELECT last_key_id, last_container_id + FROM brute_force_iterator + FOR UPDATE + """.trimIndent() + ).use { stmt -> + stmt.executeQuery().use { rows -> + check(rows.next()) + + lastKeyId = rows.getLong(1) + if (rows.wasNull()) { + lastKeyId = null + } + + lastContainerId = rows.getLong(2) + if (rows.wasNull()) { + return@bruteForceNewKeys + } + } + } + + while (true) { + val pair = nextKey(connection, lastKeyId) ?: break + val (keyId, key) = pair + + connection.prepareStatement( + """ + SELECT id, data + FROM containers + WHERE encrypted AND key_id IS NULL AND id < ? 
+ """.trimIndent() + ).use { stmt -> + stmt.fetchSize = BATCH_SIZE + stmt.setLong(1, lastContainerId) + + stmt.executeQuery().use { rows -> + while (rows.next()) { + val containerId = rows.getLong(1) + val data = rows.getBytes(2) + + val valid = Unpooled.wrappedBuffer(data).use { buf -> + Js5Compression.isKeyValid(buf, key) + } + + if (valid) { + connection.prepareStatement( + """ + UPDATE containers + SET key_id = ? + WHERE id = ? + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, keyId) + stmt.setLong(2, containerId) + stmt.execute() + } + } + } + } + } + + lastKeyId = keyId + } + + connection.prepareStatement( + """ + UPDATE brute_force_iterator + SET last_key_id = ? + """.trimIndent() + ).use { stmt -> + stmt.setObject(1, lastKeyId, Types.BIGINT) + stmt.execute() + } + } + + private fun nextKey(connection: Connection, lastKeyId: Long?): Pair? { + connection.prepareStatement( + """ + SELECT id, (key).k0, (key).k1, (key).k2, (key).k3 + FROM keys + WHERE ? IS NULL OR id > ? + LIMIT 1 + """.trimIndent() + ).use { stmt -> + stmt.setObject(1, lastKeyId, Types.BIGINT) + stmt.setObject(2, lastKeyId, Types.BIGINT) + + stmt.executeQuery().use { rows -> + if (!rows.next()) { + return null + } + + val keyId = rows.getLong(1) + + val k0 = rows.getInt(2) + val k1 = rows.getInt(3) + val k2 = rows.getInt(4) + val k3 = rows.getInt(5) + val key = XteaKey(k0, k1, k2, k3) + + return Pair(keyId, key) + } + } + } + + private companion object { + private const val BATCH_SIZE = 1024 + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/KeyCommand.kt b/archive/src/main/java/dev/openrs2/archive/key/KeyCommand.kt new file mode 100644 index 00000000..2205d70d --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/KeyCommand.kt @@ -0,0 +1,13 @@ +package dev.openrs2.archive.key + +import com.github.ajalt.clikt.core.NoOpCliktCommand +import com.github.ajalt.clikt.core.subcommands + +public class KeyCommand : NoOpCliktCommand(name = "key") { + init { + subcommands( 
+ BruteForceCommand(), + ImportCommand() + ) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/KeyImporter.kt b/archive/src/main/java/dev/openrs2/archive/key/KeyImporter.kt new file mode 100644 index 00000000..93f7bb55 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/KeyImporter.kt @@ -0,0 +1,96 @@ +package dev.openrs2.archive.key + +import dev.openrs2.crypto.XteaKey +import dev.openrs2.db.Database +import java.nio.file.Files +import java.nio.file.Path +import javax.inject.Inject +import javax.inject.Singleton + +@Singleton +public class KeyImporter @Inject constructor( + private val database: Database, + private val jsonKeyReader: JsonKeyReader +) { + public suspend fun import(path: Path) { + val keys = mutableSetOf() + + for (file in Files.walk(path)) { + if (!Files.isRegularFile(file)) { + continue + } + + val name = file.fileName.toString() + val reader = when { + name.endsWith(".hex") -> HexKeyReader + name.endsWith(".txt") -> TextKeyReader + name.endsWith(".json") -> jsonKeyReader + else -> continue + } + + Files.newInputStream(file).use { input -> + keys += reader.read(input) + } + } + + keys -= XteaKey.ZERO + + return import(keys) + } + + public suspend fun import(keys: Iterable) { + database.execute { connection -> + connection.prepareStatement( + """ + LOCK TABLE keys IN EXCLUSIVE MODE + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + connection.prepareStatement( + """ + CREATE TEMPORARY TABLE tmp_keys ( + key xtea_key NOT NULL + ) ON COMMIT DROP + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + + connection.prepareStatement( + """ + INSERT INTO tmp_keys (key) + VALUES (ROW(?, ?, ?, ?)) + """.trimIndent() + ).use { stmt -> + for (key in keys) { + if (key.isZero) { + continue + } + + stmt.setInt(1, key.k0) + stmt.setInt(2, key.k1) + stmt.setInt(3, key.k2) + stmt.setInt(4, key.k3) + stmt.addBatch() + } + + stmt.executeBatch() + } + + connection.prepareStatement( + """ + INSERT INTO keys (key) + SELECT 
t.key + FROM tmp_keys t + LEFT JOIN keys k ON k.key = t.key + WHERE k.key IS NULL + ON CONFLICT DO NOTHING + """.trimIndent() + ).use { stmt -> + stmt.execute() + } + } + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/KeyReader.kt b/archive/src/main/java/dev/openrs2/archive/key/KeyReader.kt new file mode 100644 index 00000000..a0344a9a --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/KeyReader.kt @@ -0,0 +1,8 @@ +package dev.openrs2.archive.key + +import dev.openrs2.crypto.XteaKey +import java.io.InputStream + +public interface KeyReader { + public fun read(input: InputStream): Sequence +} diff --git a/archive/src/main/java/dev/openrs2/archive/key/TextKeyReader.kt b/archive/src/main/java/dev/openrs2/archive/key/TextKeyReader.kt new file mode 100644 index 00000000..133e8a05 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/key/TextKeyReader.kt @@ -0,0 +1,17 @@ +package dev.openrs2.archive.key + +import dev.openrs2.crypto.XteaKey +import java.io.InputStream + +public object TextKeyReader : KeyReader { + override fun read(input: InputStream): Sequence { + val reader = input.bufferedReader() + + val k0 = reader.readLine().toIntOrNull() ?: return emptySequence() + val k1 = reader.readLine().toIntOrNull() ?: return emptySequence() + val k2 = reader.readLine().toIntOrNull() ?: return emptySequence() + val k3 = reader.readLine().toIntOrNull() ?: return emptySequence() + + return sequenceOf(XteaKey(k0, k1, k2, k3)) + } +} diff --git a/archive/src/main/java/dev/openrs2/archive/name/GenerateCommand.kt b/archive/src/main/java/dev/openrs2/archive/name/GenerateCommand.kt new file mode 100644 index 00000000..23e14611 --- /dev/null +++ b/archive/src/main/java/dev/openrs2/archive/name/GenerateCommand.kt @@ -0,0 +1,29 @@ +package dev.openrs2.archive.name + +import com.github.ajalt.clikt.core.CliktCommand +import com.google.inject.Guice +import dev.openrs2.archive.ArchiveModule +import kotlinx.coroutines.runBlocking + +public class 
GenerateCommand : CliktCommand(name = "generate") {
+    override fun run(): Unit = runBlocking {
+        val injector = Guice.createInjector(ArchiveModule)
+        val importer = injector.getInstance(NameImporter::class.java)
+
+        val names = mutableSetOf<String>()
+
+        for (x in 0..255) {
+            for (z in 0..255) {
+                for (prefix in PREFIXES) {
+                    names += "$prefix${x}_$z"
+                }
+            }
+        }
+
+        importer.import(names)
+    }
+
+    private companion object {
+        private val PREFIXES = setOf("m", "um", "l", "ul", "n")
+    }
+}
diff --git a/archive/src/main/java/dev/openrs2/archive/name/ImportCommand.kt b/archive/src/main/java/dev/openrs2/archive/name/ImportCommand.kt
new file mode 100644
index 00000000..f5155f60
--- /dev/null
+++ b/archive/src/main/java/dev/openrs2/archive/name/ImportCommand.kt
@@ -0,0 +1,22 @@
+package dev.openrs2.archive.name
+
+import com.github.ajalt.clikt.core.CliktCommand
+import com.github.ajalt.clikt.parameters.options.option
+import com.github.ajalt.clikt.parameters.types.defaultStdin
+import com.github.ajalt.clikt.parameters.types.inputStream
+import com.google.inject.Guice
+import dev.openrs2.archive.ArchiveModule
+import kotlinx.coroutines.runBlocking
+
+public class ImportCommand : CliktCommand(name = "import") {
+    private val input by option().inputStream().defaultStdin()
+
+    override fun run(): Unit = runBlocking {
+        val injector = Guice.createInjector(ArchiveModule)
+        val importer = injector.getInstance(NameImporter::class.java)
+
+        input.bufferedReader().useLines { lines ->
+            importer.import(lines.toSet())
+        }
+    }
+}
diff --git a/archive/src/main/java/dev/openrs2/archive/name/NameCommand.kt b/archive/src/main/java/dev/openrs2/archive/name/NameCommand.kt
new file mode 100644
index 00000000..33c29633
--- /dev/null
+++ b/archive/src/main/java/dev/openrs2/archive/name/NameCommand.kt
@@ -0,0 +1,13 @@
+package dev.openrs2.archive.name
+
+import com.github.ajalt.clikt.core.NoOpCliktCommand
+import com.github.ajalt.clikt.core.subcommands
+
+public class NameCommand :
NoOpCliktCommand(name = "name") {
+    init {
+        subcommands(
+            GenerateCommand(),
+            ImportCommand()
+        )
+    }
+}
diff --git a/archive/src/main/java/dev/openrs2/archive/name/NameImporter.kt b/archive/src/main/java/dev/openrs2/archive/name/NameImporter.kt
new file mode 100644
index 00000000..bbb5676f
--- /dev/null
+++ b/archive/src/main/java/dev/openrs2/archive/name/NameImporter.kt
@@ -0,0 +1,31 @@
+package dev.openrs2.archive.name
+
+import dev.openrs2.db.Database
+import dev.openrs2.util.krHashCode
+import javax.inject.Inject
+import javax.inject.Singleton
+
+@Singleton
+public class NameImporter @Inject constructor(
+    private val database: Database
+) {
+    public suspend fun import(names: Iterable<String>) {
+        database.execute { connection ->
+            connection.prepareStatement(
+                """
+                INSERT INTO names (hash, name)
+                VALUES (?, ?)
+                ON CONFLICT DO NOTHING
+                """.trimIndent()
+            ).use { stmt ->
+                for (name in names) {
+                    stmt.setInt(1, name.krHashCode())
+                    stmt.setString(2, name)
+                    stmt.addBatch()
+                }
+
+                stmt.executeBatch()
+            }
+        }
+    }
+}
diff --git a/archive/src/main/resources/dev/openrs2/archive/V1__init.sql b/archive/src/main/resources/dev/openrs2/archive/V1__init.sql
new file mode 100644
index 00000000..8071b038
--- /dev/null
+++ b/archive/src/main/resources/dev/openrs2/archive/V1__init.sql
@@ -0,0 +1,94 @@
+-- @formatter:off
+CREATE EXTENSION uint;
+
+CREATE TYPE xtea_key AS (
+    k0 INTEGER,
+    k1 INTEGER,
+    k2 INTEGER,
+    k3 INTEGER
+);
+
+CREATE TABLE keys (
+    id BIGSERIAL PRIMARY KEY NOT NULL,
+    key xtea_key UNIQUE NOT NULL CHECK ((key).k0 <> 0 OR (key).k1 <> 0 OR (key).k2 <> 0 OR (key).k3 <> 0)
+);
+
+CREATE TABLE containers (
+    id BIGSERIAL PRIMARY KEY NOT NULL,
+    crc32 INT NOT NULL,
+    whirlpool BYTEA UNIQUE NOT NULL,
+    data BYTEA NOT NULL,
+    encrypted BOOLEAN NOT NULL,
+    key_id BIGINT NULL REFERENCES keys (id)
+);
+
+CREATE INDEX ON containers USING HASH (crc32);
+
+CREATE TABLE brute_force_iterator (
+    last_container_id BIGINT NULL,
+    last_key_id BIGINT NULL
+);
+
+CREATE
UNIQUE INDEX ON brute_force_iterator ((TRUE)); + +INSERT INTO brute_force_iterator (last_container_id, last_key_id) +VALUES (NULL, NULL); + +CREATE TABLE groups ( + archive_id uint1 NOT NULL, + group_id INTEGER NOT NULL, + container_id BIGINT NOT NULL REFERENCES containers (id), + truncated_version uint2 NOT NULL, + PRIMARY KEY (archive_id, group_id, container_id, truncated_version) +); + +CREATE TABLE indexes ( + container_id BIGINT PRIMARY KEY NOT NULL REFERENCES containers (id), + version INTEGER NOT NULL +); + +CREATE TABLE index_groups ( + container_id BIGINT NOT NULL REFERENCES indexes (container_id), + group_id INTEGER NOT NULL, + crc32 INTEGER NOT NULL, + whirlpool BYTEA NULL, + version INTEGER NOT NULL, + name_hash INTEGER NULL, + PRIMARY KEY (container_id, group_id) +); + +CREATE TABLE index_files ( + container_id BIGINT NOT NULL, + group_id INTEGER NOT NULL, + file_id INTEGER NOT NULL, + name_hash INTEGER NULL, + PRIMARY KEY (container_id, group_id, file_id), + FOREIGN KEY (container_id, group_id) REFERENCES index_groups (container_id, group_id) +); + +CREATE TABLE caches ( + id BIGSERIAL PRIMARY KEY NOT NULL, + -- This doesn't correspond to a hash used by the client - it was just + -- convenient to re-use Whirlpool given we already use it elsewhere in the + -- codebase. + -- + -- It is a hash over the whirlpool hashes of a cache's Js5Indexes, used to + -- make it easier to identify an individual cache row in a relational + -- database. 
+ whirlpool BYTEA UNIQUE NOT NULL +); + +CREATE TABLE cache_indexes ( + cache_id BIGINT NOT NULL REFERENCES caches (id), + archive_id uint1 NOT NULL, + container_id BIGINT NOT NULL REFERENCES indexes (container_id), + PRIMARY KEY (cache_id, archive_id) +); + +CREATE TABLE names ( + hash INTEGER NOT NULL, + name TEXT PRIMARY KEY NOT NULL +); + +CREATE UNIQUE INDEX ON names (hash, name); +-- @formatter:on diff --git a/buildSrc/src/main/java/Versions.kt b/buildSrc/src/main/java/Versions.kt index 1ba5acae..71193f58 100644 --- a/buildSrc/src/main/java/Versions.kt +++ b/buildSrc/src/main/java/Versions.kt @@ -8,6 +8,7 @@ object Versions { const val dokka = "1.4.0" const val fastutil = "8.4.2" const val fernflower = "1.0.4-SNAPSHOT" + const val flyway = "6.5.6" const val guava = "29.0-jre" const val guice = "4.2.3" const val h2 = "1.4.200" @@ -24,6 +25,7 @@ object Versions { const val logback = "1.2.3" const val netty = "4.1.52.Final" const val openrs2Natives = "2.0.1" + const val postgres = "42.2.16" const val shadowPlugin = "6.0.0" const val versionsPlugin = "0.33.0" const val xz = "1.8" diff --git a/crypto/src/main/java/dev/openrs2/crypto/Xtea.kt b/crypto/src/main/java/dev/openrs2/crypto/Xtea.kt index 3dd77b98..d38c1a20 100644 --- a/crypto/src/main/java/dev/openrs2/crypto/Xtea.kt +++ b/crypto/src/main/java/dev/openrs2/crypto/Xtea.kt @@ -41,14 +41,24 @@ public class XteaKey( } public fun fromHex(s: String): XteaKey { - require(s.length == 32) - - val k0 = Integer.parseUnsignedInt(s, 0, 8, 16) - val k1 = Integer.parseUnsignedInt(s, 8, 16, 16) - val k2 = Integer.parseUnsignedInt(s, 16, 24, 16) - val k3 = Integer.parseUnsignedInt(s, 24, 32, 16) + return fromHexOrNull(s) ?: throw IllegalArgumentException() + } - return XteaKey(k0, k1, k2, k3) + public fun fromHexOrNull(s: String): XteaKey? 
{ + if (s.length != 32) { + return null + } + + try { + val k0 = Integer.parseUnsignedInt(s, 0, 8, 16) + val k1 = Integer.parseUnsignedInt(s, 8, 16, 16) + val k2 = Integer.parseUnsignedInt(s, 16, 24, 16) + val k3 = Integer.parseUnsignedInt(s, 24, 32, 16) + + return XteaKey(k0, k1, k2, k3) + } catch (ex: NumberFormatException) { + return null + } } } } diff --git a/settings.gradle.kts b/settings.gradle.kts index 802100e4..d189456a 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -2,6 +2,7 @@ rootProject.name = "openrs2" include( "all", + "archive", "asm", "buffer", "bundler",