diff --git a/archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt b/archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt
index ef99505e..39cf69bb 100644
--- a/archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt
+++ b/archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt
@@ -9,6 +9,7 @@ import org.openrs2.cache.Store
 import org.openrs2.crypto.XteaKey
 import org.openrs2.db.Database
 import java.time.Instant
+import java.util.Collections
 import java.util.SortedSet
 import javax.inject.Inject
 import javax.inject.Singleton
@@ -50,13 +51,16 @@ public class CacheExporter @Inject constructor(
 
     public data class Cache(
         val id: Int,
-        val game: String,
+        val games: SortedSet<String>,
         val builds: SortedSet<Int>,
         val timestamp: Instant?,
-        val name: String?,
-        val description: String?,
+        val names: SortedSet<String>,
+        val descriptions: List<String>,
+        val urls: SortedSet<String>,
         val stats: Stats?
-    )
+    ) {
+        val game: String = games.single()
+    }
 
     public data class Key(
         val archive: Int,
@@ -72,15 +76,23 @@
             connection.prepareStatement(
                 """
                 SELECT
-                    m.id, g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
-                    s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
+                    m.id,
+                    g.name,
+                    array_remove(array_agg(DISTINCT s.build ORDER BY s.build ASC), NULL),
+                    MIN(s.timestamp),
+                    array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL),
+                    ms.valid_indexes,
+                    ms.indexes,
+                    ms.valid_groups,
+                    ms.groups,
+                    ms.valid_keys,
+                    ms.keys
                 FROM master_indexes m
-                JOIN games g ON g.id = m.game_id
-                JOIN containers c ON c.id = m.container_id
-                LEFT JOIN master_index_builds b ON b.master_index_id = m.id
-                LEFT JOIN master_index_stats s ON s.master_index_id = m.id
-                GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
-                ORDER BY g.name ASC, MIN(b.build) ASC, m.timestamp ASC
+                LEFT JOIN sources s ON s.master_index_id = m.id
+                LEFT JOIN games g ON g.id = s.game_id
+                LEFT JOIN master_index_stats ms ON ms.master_index_id = m.id
+                GROUP BY m.id, g.name, ms.valid_indexes, ms.indexes, ms.valid_groups, ms.groups, ms.valid_keys, ms.keys
+                ORDER BY g.name ASC, MIN(s.build) ASC, MIN(s.timestamp) ASC
                 """.trimIndent()
             ).use { stmt ->
                 stmt.executeQuery().use { rows ->
@@ -91,7 +103,7 @@
                         val game = rows.getString(2)
                         val builds = rows.getArray(3).array as Array<Int>
                         val timestamp = rows.getTimestamp(4)?.toInstant()
-                        val name = rows.getString(5)
+                        val names = rows.getArray(5).array as Array<String>
 
                         val validIndexes = rows.getLong(6)
                         val stats = if (!rows.wasNull()) {
@@ -105,7 +117,16 @@ public class CacheExporter @Inject constructor(
                             null
                         }
 
-                        caches += Cache(id, game, builds.toSortedSet(), timestamp, name, description = null, stats)
+                        caches += Cache(
+                            id,
+                            sortedSetOf(game),
+                            builds.toSortedSet(),
+                            timestamp,
+                            names.toSortedSet(),
+                            emptyList(),
+                            Collections.emptySortedSet(),
+                            stats
+                        )
                     }
 
                     caches
@@ -119,15 +140,24 @@
             connection.prepareStatement(
                 """
                 SELECT
-                    g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
-                    m.description, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
+                    array_remove(array_agg(DISTINCT g.name ORDER BY g.name ASC), NULL),
+                    array_remove(array_agg(DISTINCT s.build ORDER BY s.build ASC), NULL),
+                    MIN(s.timestamp),
+                    array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL),
+                    array_remove(array_agg(s.description), NULL),
+                    array_remove(array_agg(DISTINCT s.url ORDER BY s.url ASC), NULL),
+                    ms.valid_indexes,
+                    ms.indexes,
+                    ms.valid_groups,
+                    ms.groups,
+                    ms.valid_keys,
+                    ms.keys
                 FROM master_indexes m
-                JOIN games g ON g.id = m.game_id
-                JOIN containers c ON c.id = m.container_id
-                LEFT JOIN master_index_builds b ON b.master_index_id = m.id
-                LEFT JOIN master_index_stats s ON s.master_index_id = m.id
+                LEFT JOIN sources s ON s.master_index_id = m.id
+                LEFT JOIN games g ON g.id = s.game_id
+                LEFT JOIN master_index_stats ms ON ms.master_index_id = m.id
                 WHERE m.id = ?
-                GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
+                GROUP BY m.id, ms.valid_indexes, ms.indexes, ms.valid_groups, ms.groups, ms.valid_keys, ms.keys
                 """.trimIndent()
            ).use { stmt ->
                stmt.setInt(1, id)
@@ -137,25 +167,35 @@
                         return@execute null
                     }
 
-                    val game = rows.getString(1)
+                    val games = rows.getArray(1).array as Array<String>
                     val builds = rows.getArray(2).array as Array<Int>
                     val timestamp = rows.getTimestamp(3)?.toInstant()
-                    val name = rows.getString(4)
-                    val description = rows.getString(5)
+                    val names = rows.getArray(4).array as Array<String>
+                    val descriptions = rows.getArray(5).array as Array<String>
+                    val urls = rows.getArray(6).array as Array<String>
 
-                    val validIndexes = rows.getLong(6)
+                    val validIndexes = rows.getLong(7)
                     val stats = if (!rows.wasNull()) {
-                        val indexes = rows.getLong(7)
-                        val validGroups = rows.getLong(8)
-                        val groups = rows.getLong(9)
-                        val validKeys = rows.getLong(10)
-                        val keys = rows.getLong(11)
+                        val indexes = rows.getLong(8)
+                        val validGroups = rows.getLong(9)
+                        val groups = rows.getLong(10)
+                        val validKeys = rows.getLong(11)
+                        val keys = rows.getLong(12)
                         Stats(validIndexes, indexes, validGroups, groups, validKeys, keys)
                     } else {
                         null
                    }
 
-                    return@execute Cache(id, game, builds.toSortedSet(), timestamp, name, description, stats)
+                    return@execute Cache(
+                        id,
+                        games.toSortedSet(),
+                        builds.toSortedSet(),
+                        timestamp,
+                        names.toSortedSet(),
+                        descriptions.toList(),
+                        urls.toSortedSet(),
+                        stats
+                    )
                 }
             }
         }
     }
diff --git a/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt b/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
index 5cbbd152..d78b0509 100644
--- a/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
+++ b/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
@@ -23,7 +23,6 @@ import java.sql.Connection
 import java.sql.SQLException
 import java.sql.Types
 import java.time.Instant
-import java.time.OffsetDateTime
 import java.time.ZoneOffset
 import javax.inject.Inject
 import javax.inject.Singleton
@@ -74,13 +73,25 @@
         public val versionTruncated: Boolean
     ) : Container(compressed, uncompressed)
 
+    private enum class SourceType {
+        DISK,
+        JS5REMOTE
+    }
+
+    public data class MasterIndexResult(
+        val masterIndexId: Int,
+        val sourceId: Int,
+        val indexes: List<ByteBuf?>
+    )
+
     public suspend fun import(
         store: Store,
         game: String,
         build: Int?,
         timestamp: Instant?,
         name: String?,
-        description: String?
+        description: String?,
+        url: String?
     ) {
         database.execute { connection ->
             prepare(connection)
@@ -89,12 +100,25 @@
             // import master index
             val masterIndex = createMasterIndex(store)
-            try {
-                addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
+            val masterIndexId = try {
+                addMasterIndex(connection, masterIndex)
             } finally {
                 masterIndex.release()
             }
 
+            // create source
+            val sourceId = addSource(
+                connection,
+                SourceType.DISK,
+                masterIndexId,
+                gameId,
+                build,
+                timestamp,
+                name,
+                description,
+                url
+            )
+
             // import indexes
             val indexes = arrayOfNulls<Js5Index>(Js5Archive.ARCHIVESET)
             val indexGroups = mutableListOf<Index>()
@@ -106,7 +130,7 @@
                 }
 
                 for (index in indexGroups) {
-                    addIndex(connection, index)
+                    addIndex(connection, sourceId, index)
                 }
             } finally {
                 indexGroups.forEach(Index::release)
@@ -127,7 +151,7 @@
                         groups += group
 
                         if (groups.size >= BATCH_SIZE) {
-                            addGroups(connection, groups)
+                            addGroups(connection, sourceId, groups)
 
                             groups.forEach(Group::release)
                             groups.clear()
@@ -136,7 +160,7 @@
             }
 
             if (groups.isNotEmpty()) {
-                addGroups(connection, groups)
+                addGroups(connection, sourceId, groups)
             }
         } finally {
             groups.forEach(Group::release)
@@ -151,7 +175,8 @@
         build: Int?,
         timestamp: Instant?,
         name: String?,
-        description: String?
+        description: String?,
+        url: String?
     ) {
         Js5Compression.uncompress(buf.slice()).use { uncompressed ->
             val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf, uncompressed)
@@ -160,7 +185,8 @@
                 prepare(connection)
 
                 val gameId = getGameId(connection, game)
-                addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
+                val masterIndexId = addMasterIndex(connection, masterIndex)
+                addSource(connection, SourceType.DISK, masterIndexId, gameId, build, timestamp, name, description, url)
             }
         }
     }
@@ -172,9 +198,8 @@
         gameId: Int,
         build: Int,
         lastId: Int?,
-        timestamp: Instant,
-        name: String,
-    ): Pair<Int, List<ByteBuf?>> {
+        timestamp: Instant
+    ): MasterIndexResult {
         return database.execute { connection ->
             prepare(connection)
@@ -191,15 +216,21 @@
                 stmt.execute()
             }
 
-            val id = addMasterIndex(
+            val masterIndexId = addMasterIndex(
+                connection,
+                MasterIndex(masterIndex, buf, uncompressed)
+            )
+
+            val sourceId = addSource(
                 connection,
-                MasterIndex(masterIndex, buf, uncompressed),
+                SourceType.JS5REMOTE,
+                masterIndexId,
                 gameId,
                 build,
                 timestamp,
-                name,
+                name = "Original",
                 description = null,
-                overwrite = true
+                url = null
             )
 
             /*
@@ -226,7 +257,7 @@
                 """.trimIndent()
             ).use { stmt ->
                 stmt.setObject(1, lastId, Types.INTEGER)
-                stmt.setInt(2, id)
+                stmt.setInt(2, masterIndexId)
 
                 stmt.executeQuery().use { rows ->
                     val indexes = mutableListOf<ByteBuf?>()
@@ -241,7 +272,7 @@
                     }
 
                     indexes.filterNotNull().forEach(ByteBuf::retain)
-                    return@execute Pair(id, indexes)
+                    return@execute MasterIndexResult(masterIndexId, sourceId, indexes)
                 } finally {
                     indexes.filterNotNull().forEach(ByteBuf::release)
                 }
@@ -251,6 +282,7 @@
 
     public suspend fun importIndexAndGetMissingGroups(
+        sourceId: Int,
         archive: Int,
         index: Js5Index,
         buf: ByteBuf,
@@ -259,7 +291,7 @@
     ): List<Int> {
         return database.execute { connection ->
             prepare(connection)
-            val id = addIndex(connection, Index(archive, index, buf, uncompressed))
+            val id = addIndex(connection, sourceId, Index(archive, index, buf, uncompressed))
 
             /*
              * In order to defend against (crc32, version) collisions, we only
@@ -304,14 +336,14 @@
         }
     }
 
-    public suspend fun importGroups(groups: List<Group>) {
+    public suspend fun importGroups(sourceId: Int, groups: List<Group>) {
         if (groups.isEmpty()) {
             return
         }
 
         database.execute { connection ->
             prepare(connection)
-            addGroups(connection, groups)
+            addGroups(connection, sourceId, groups)
         }
     }
 
@@ -329,27 +361,15 @@
     private fun addMasterIndex(
         connection: Connection,
-        masterIndex: MasterIndex,
-        gameId: Int,
-        build: Int?,
-        timestamp: Instant?,
-        name: String?,
-        description: String?,
-        overwrite: Boolean
+        masterIndex: MasterIndex
     ): Int {
         val containerId = addContainer(connection, masterIndex)
-        var masterIndexId: Int? = null
-
-        var newTimestamp: Instant?
-        var newName: String?
-        var newDescription: String?
 
         connection.prepareStatement(
             """
-            SELECT id, game_id, timestamp, name, description
+            SELECT id
             FROM master_indexes
             WHERE container_id = ? AND format = ?::master_index_format
-            FOR UPDATE
             """.trimIndent()
         ).use { stmt ->
             stmt.setLong(1, containerId)
@@ -357,161 +377,129 @@
 
             stmt.executeQuery().use { rows ->
                 if (rows.next()) {
-                    masterIndexId = rows.getInt(1)
+                    return rows.getInt(1)
                 }
+            }
+        }
 
-                if (masterIndexId != null) {
-                    val oldGameId = rows.getInt(2)
-
-                    val oldTimestamp: Instant? = rows.getTimestamp(3)?.toInstant()
-                    val oldName: String? = rows.getString(4)
-                    val oldDescription: String? = rows.getString(5)
+        val masterIndexId: Int
 
-                    check(oldGameId == gameId)
+        connection.prepareStatement(
+            """
+            INSERT INTO master_indexes (container_id, format)
+            VALUES (?, ?::master_index_format)
+            RETURNING id
+            """.trimIndent()
+        ).use { stmt ->
+            stmt.setLong(1, containerId)
+            stmt.setString(2, masterIndex.index.format.name.toLowerCase())
 
-                    if (oldTimestamp != null && timestamp != null) {
-                        newTimestamp = if (oldTimestamp.isBefore(timestamp)) {
-                            oldTimestamp
-                        } else {
-                            timestamp
-                        }
-                    } else if (oldTimestamp != null) {
-                        newTimestamp = oldTimestamp
-                    } else {
-                        newTimestamp = timestamp
-                    }
+            stmt.executeQuery().use { rows ->
+                check(rows.next())
+                masterIndexId = rows.getInt(1)
+            }
+        }
 
-                    if (overwrite) {
-                        newName = name
-                    } else if (oldName != null && name != null && oldName != name) {
-                        newName = "$oldName/$name"
-                    } else if (oldName != null) {
-                        newName = oldName
-                    } else {
-                        newName = name
-                    }
+        connection.prepareStatement(
+            """
+            INSERT INTO master_index_archives (
+                master_index_id, archive_id, crc32, version, whirlpool, groups, total_uncompressed_length
+            )
+            VALUES (?, ?, ?, ?, ?, ?, ?)
+ """.trimIndent() + ).use { stmt -> + for ((i, entry) in masterIndex.index.entries.withIndex()) { + stmt.setInt(1, masterIndexId) + stmt.setInt(2, i) + stmt.setInt(3, entry.checksum) - if (overwrite) { - newDescription = description - } else if (oldDescription != null && description != null && oldDescription != description) { - newDescription = "$oldDescription\n\n$description" - } else if (oldDescription != null) { - newDescription = oldDescription - } else { - newDescription = description - } + if (masterIndex.index.format >= MasterIndexFormat.VERSIONED) { + stmt.setInt(4, entry.version) } else { - newTimestamp = timestamp - newName = name - newDescription = description + stmt.setInt(4, 0) } - } - } - if (masterIndexId != null) { - connection.prepareStatement( - """ - UPDATE master_indexes - SET timestamp = ?, name = ?, description = ? - WHERE id = ? - """.trimIndent() - ).use { stmt -> - if (newTimestamp != null) { - val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC) - stmt.setObject(1, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE) + if (masterIndex.index.format >= MasterIndexFormat.DIGESTS) { + stmt.setBytes(5, entry.digest ?: ByteArray(Whirlpool.DIGESTBYTES)) } else { - stmt.setNull(1, Types.TIMESTAMP_WITH_TIMEZONE) + stmt.setNull(5, Types.BINARY) } - stmt.setString(2, newName) - stmt.setString(3, newDescription) - stmt.setInt(4, masterIndexId!!) - - stmt.execute() - } - } else { - connection.prepareStatement( - """ - INSERT INTO master_indexes (container_id, format, game_id, timestamp, name, description) - VALUES (?, ?::master_index_format, ?, ?, ?, ?) - RETURNING id - """.trimIndent() - ).use { stmt -> - stmt.setLong(1, containerId) - stmt.setString(2, masterIndex.index.format.name.toLowerCase()) - stmt.setInt(3, gameId) - - if (newTimestamp != null) { - val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC) - stmt.setObject(4, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE) + if (masterIndex.index.format >= MasterIndexFormat.LENGTHS) { + stmt.setInt(6, entry.groups) + stmt.setInt(7, entry.totalUncompressedLength) } else { - stmt.setNull(4, Types.TIMESTAMP_WITH_TIMEZONE) + stmt.setNull(6, Types.INTEGER) + stmt.setNull(7, Types.INTEGER) } - stmt.setString(5, newName) - stmt.setString(6, newDescription) - - stmt.executeQuery().use { rows -> - check(rows.next()) - masterIndexId = rows.getInt(1) - } + stmt.addBatch() } + stmt.executeBatch() + } + + return masterIndexId + } + + private fun addSource( + connection: Connection, + type: SourceType, + masterIndexId: Int, + gameId: Int, + build: Int?, + timestamp: Instant?, + name: String?, + description: String?, + url: String? + ): Int { + if (type == SourceType.JS5REMOTE && build != null) { connection.prepareStatement( """ - INSERT INTO master_index_archives ( - master_index_id, archive_id, crc32, version, whirlpool, groups, total_uncompressed_length - ) - VALUES (?, ?, ?, ?, ?, ?, ?) + SELECT id + FROM sources + WHERE type = 'js5remote' AND master_index_id = ? AND game_id = ? AND build = ? """.trimIndent() ).use { stmt -> - for ((i, entry) in masterIndex.index.entries.withIndex()) { - stmt.setInt(1, masterIndexId!!) 
-                    stmt.setInt(2, i)
-                    stmt.setInt(3, entry.checksum)
-
-                    if (masterIndex.index.format >= MasterIndexFormat.VERSIONED) {
-                        stmt.setInt(4, entry.version)
-                    } else {
-                        stmt.setInt(4, 0)
-                    }
+                stmt.setInt(1, masterIndexId)
+                stmt.setInt(2, gameId)
+                stmt.setInt(3, build)
 
-                    if (masterIndex.index.format >= MasterIndexFormat.DIGESTS) {
-                        stmt.setBytes(5, entry.digest ?: ByteArray(Whirlpool.DIGESTBYTES))
-                    } else {
-                        stmt.setNull(5, Types.BINARY)
-                    }
+                stmt.executeQuery().use { rows ->
+                    if (rows.next()) {
+                        return rows.getInt(1)
+                    }
 
-                    if (masterIndex.index.format >= MasterIndexFormat.LENGTHS) {
-                        stmt.setInt(6, entry.groups)
-                        stmt.setInt(7, entry.totalUncompressedLength)
-                    } else {
-                        stmt.setNull(6, Types.INTEGER)
-                        stmt.setNull(7, Types.INTEGER)
                     }
-
-                    stmt.addBatch()
                 }
-
-                stmt.executeBatch()
             }
+        }
 
-            if (build != null) {
-                connection.prepareStatement(
-                    """
-                    INSERT INTO master_index_builds (master_index_id, build)
-                    VALUES (?, ?)
-                    ON CONFLICT DO NOTHING
-                    """.trimIndent()
-                ).use { stmt ->
-                    stmt.setInt(1, masterIndexId!!)
-                    stmt.setInt(2, build)
-                    stmt.execute()
+        connection.prepareStatement(
+            """
+            INSERT INTO sources (type, master_index_id, game_id, build, timestamp, name, description, url)
+            VALUES (?::source_type, ?, ?, ?, ?, ?, ?, ?)
+            RETURNING id
+            """.trimIndent()
+        ).use { stmt ->
+            stmt.setString(1, type.toString().toLowerCase())
+            stmt.setInt(2, masterIndexId)
+            stmt.setInt(3, gameId)
+            stmt.setObject(4, build, Types.INTEGER)
+
+            if (timestamp != null) {
+                stmt.setObject(5, timestamp.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
+            } else {
+                stmt.setNull(5, Types.TIMESTAMP_WITH_TIMEZONE)
             }
-        }
 
-        return masterIndexId!!
+            stmt.setString(6, name)
+            stmt.setString(7, description)
+            stmt.setString(8, url)
+
+            stmt.executeQuery().use { rows ->
+                check(rows.next())
+                return rows.getInt(1)
+            }
+        }
     }
 
     private fun readGroup(store: Store, archive: Int, index: Js5Index?, group: Int): Group? {
@@ -547,7 +535,7 @@
         }
     }
 
-    private fun addGroups(connection: Connection, groups: List<Group>): List<Long> {
+    private fun addGroups(connection: Connection, sourceId: Int, groups: List<Group>): List<Long> {
         val containerIds = addContainers(connection, groups)
 
         connection.prepareStatement(
@@ -569,11 +557,31 @@
             stmt.executeBatch()
         }
 
+        connection.prepareStatement(
+            """
+            INSERT INTO source_groups (source_id, archive_id, group_id, version, version_truncated, container_id)
+            VALUES (?, ?, ?, ?, ?, ?)
+            ON CONFLICT DO NOTHING
+            """.trimIndent()
+        ).use { stmt ->
+            for ((i, group) in groups.withIndex()) {
+                stmt.setInt(1, sourceId)
+                stmt.setInt(2, group.archive)
+                stmt.setInt(3, group.group)
+                stmt.setInt(4, group.version)
+                stmt.setBoolean(5, group.versionTruncated)
+                stmt.setLong(6, containerIds[i])
+                stmt.addBatch()
+            }
+
+            stmt.executeBatch()
+        }
+
         return containerIds
     }
 
-    private fun addGroup(connection: Connection, group: Group): Long {
-        return addGroups(connection, listOf(group)).single()
+    private fun addGroup(connection: Connection, sourceId: Int, group: Group): Long {
+        return addGroups(connection, sourceId, listOf(group)).single()
     }
 
     private fun readIndex(store: Store, archive: Int): Index {
@@ -584,8 +592,8 @@
         }
     }
 
-    private fun addIndex(connection: Connection, index: Index): Long {
-        val containerId = addGroup(connection, index)
+    private fun addIndex(connection: Connection, sourceId: Int, index: Index): Long {
+        val containerId = addGroup(connection, sourceId, index)
         val savepoint = connection.setSavepoint()
 
         connection.prepareStatement(
diff --git a/archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt
index 67e496e0..2c0aadb4 100644
--- a/archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt
+++ b/archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt
@@ -17,6 +17,7 @@ public class ImportCommand : CliktCommand(name = "import") {
     private val timestamp by option().instant()
     private val name by option()
     private val description by option()
+    private val url by option()
 
     private val game by argument()
     private val input by argument().path(
@@ -30,7 +31,7 @@
         val importer = injector.getInstance(CacheImporter::class.java)
 
         Store.open(input).use { store ->
-            importer.import(store, game, build, timestamp, name, description)
+            importer.import(store, game, build, timestamp, name, description, url)
         }
     }
 }
diff --git a/archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt
index 6ee05bed..e3ca8efe 100644
--- a/archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt
+++ b/archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt
@@ -21,6 +21,7 @@ public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index
     private val timestamp by option().instant()
     private val name by option()
     private val description by option()
+    private val url by option()
 
     private val game by argument()
     private val format by argument().enum<MasterIndexFormat>()
@@ -35,7 +36,7 @@
         val importer = injector.getInstance(CacheImporter::class.java)
 
         Unpooled.wrappedBuffer(Files.readAllBytes(input)).use { buf ->
-            importer.importMasterIndex(buf, format, game, build, timestamp, name, description)
+            importer.importMasterIndex(buf, format, game, build, timestamp, name, description, url)
        }
    }
}
diff --git a/archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt b/archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
index 9e232165..d0ca5bf6 100644
--- a/archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
+++ b/archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
@@ -53,6 +53,7 @@ public class Js5ChannelHandler(
     private val inFlightRequests = mutableSetOf<Js5Request.Group>()
     private val pendingRequests = ArrayDeque<Js5Request.Group>()
     private var masterIndexId: Int = 0
+    private var sourceId: Int = 0
     private var masterIndex: Js5MasterIndex? = null
     private lateinit var indexes: Array<Js5Index?>
     private val groups = mutableListOf<CacheImporter.Group>()
@@ -161,7 +162,7 @@
 
         if (groups.size >= CacheImporter.BATCH_SIZE || complete) {
             runBlocking {
-                importer.importGroups(groups)
+                importer.importGroups(sourceId, groups)
             }
 
             releaseGroups()
@@ -182,7 +183,7 @@
         Js5Compression.uncompress(buf.slice()).use { uncompressed ->
             masterIndex = Js5MasterIndex.read(uncompressed.slice(), masterIndexFormat)
 
-            val (id, rawIndexes) = runBlocking {
+            val (masterIndexId, sourceId, rawIndexes) = runBlocking {
                 importer.importMasterIndexAndGetIndexes(
                     masterIndex!!,
                     buf,
@@ -190,10 +191,13 @@
                     gameId,
                     build,
                     lastMasterIndexId,
-                    timestamp = Instant.now(),
-                    name = "Original"
+                    timestamp = Instant.now()
                 )
             }
 
+            this.masterIndexId = masterIndexId
+            this.sourceId = sourceId
+
             try {
                 indexes = arrayOfNulls(rawIndexes.size)
@@ -207,8 +211,6 @@
             } finally {
                 rawIndexes.filterNotNull().forEach(ByteBuf::release)
             }
-
-            masterIndexId = id
         }
     }
 
@@ -228,7 +230,7 @@
         }
 
         val groups = runBlocking {
-            importer.importIndexAndGetMissingGroups(archive, index, buf, uncompressed, lastMasterIndexId)
+            importer.importIndexAndGetMissingGroups(sourceId, archive, index, buf, uncompressed, lastMasterIndexId)
         }
 
         for (group in groups) {
             request(archive, group)
diff --git a/archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql b/archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql
index f9eec7cd..f668698a 100644
--- a/archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql
+++ b/archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql
@@ -107,21 +107,11 @@ CREATE TABLE master_indexes (
     id SERIAL PRIMARY KEY NOT NULL,
     container_id BIGINT NOT NULL REFERENCES containers (id),
     format master_index_format NOT NULL,
-    game_id INTEGER NOT NULL REFERENCES games (id),
-    timestamp TIMESTAMPTZ NULL,
-    name TEXT NULL,
-    description TEXT NULL,
     UNIQUE (container_id, format)
 );
 
 ALTER TABLE games ADD COLUMN last_master_index_id INT NULL REFERENCES master_indexes (id);
 
-CREATE TABLE master_index_builds (
-    master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
-    build INTEGER NOT NULL,
-    PRIMARY KEY (master_index_id, build)
-);
-
 CREATE TABLE master_index_archives (
     master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
     archive_id uint1 NOT NULL,
@@ -133,6 +123,38 @@ CREATE TABLE master_index_archives (
     PRIMARY KEY (master_index_id, archive_id)
 );
 
+CREATE TYPE source_type AS ENUM (
+    'disk',
+    'js5remote'
+);
+
+CREATE TABLE sources (
+    id SERIAL PRIMARY KEY NOT NULL,
+    type source_type NOT NULL,
+    master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
+    game_id INTEGER NOT NULL REFERENCES games (id),
+    build INTEGER NULL,
+    timestamp TIMESTAMPTZ NULL,
+    name TEXT NULL,
+    description TEXT NULL,
+    url TEXT NULL
+);
+
+CREATE INDEX ON sources (master_index_id);
+CREATE UNIQUE INDEX ON sources (master_index_id, game_id, build) WHERE type = 'js5remote';
+
+CREATE TABLE source_groups (
+    source_id INTEGER NOT NULL REFERENCES sources (id),
+    archive_id uint1 NOT NULL,
+    group_id INTEGER NOT NULL,
+    version INTEGER NOT NULL,
+    version_truncated BOOLEAN NOT NULL,
+    container_id BIGINT NOT NULL REFERENCES containers (id),
+    PRIMARY KEY (source_id, archive_id, group_id)
+);
+
+CREATE INDEX ON source_groups (archive_id, group_id, version, version_truncated, container_id);
+
 CREATE TABLE names (
     hash INTEGER NOT NULL,
     name TEXT PRIMARY KEY NOT NULL
diff --git a/archive/src/main/resources/org/openrs2/archive/templates/caches/index.html b/archive/src/main/resources/org/openrs2/archive/templates/caches/index.html
index bfd6d965..52f509ea 100644
--- a/archive/src/main/resources/org/openrs2/archive/templates/caches/index.html
+++ b/archive/src/main/resources/org/openrs2/archive/templates/caches/index.html
@@ -38,7 +38,7 @@
                     <th>Game</th>
                 </tr>
             </thead>
             <tbody>
                 <tr th:each="cache : ${caches}">
-                    <td th:text="${cache.game}">runescape</td>
+                    <td th:text="${#strings.setJoin(cache.games, ', ')}">runescape</td>
                     <td th:text="${#strings.setJoin(cache.builds, ', ')}">Build(s)</td>
diff --git a/archive/src/main/resources/org/openrs2/archive/templates/caches/show.html b/archive/src/main/resources/org/openrs2/archive/templates/caches/show.html
--- a/archive/src/main/resources/org/openrs2/archive/templates/caches/show.html
+++ b/archive/src/main/resources/org/openrs2/archive/templates/caches/show.html
@@ -26,11 +26,21 @@
             <tbody>
                 <tr>
                     <th>Name</th>
-                    <td th:text="${cache.name}"></td>
+                    <td th:text="${#strings.setJoin(cache.names, ', ')}"></td>
                 </tr>
                 <tr>
                     <th>Description</th>
-                    <td th:text="${cache.description}"></td>
+                    <td>
+                        <p th:each="description : ${cache.descriptions}" th:text="${description}"></p>
+                    </td>
+                </tr>
+                <tr>
+                    <th>URL(s)</th>
+                    <td>
+                        <ul>
+                            <li th:each="url : ${cache.urls}"><a th:href="${url}" th:text="${url}"></a></li>
+                        </ul>
+                    </td>
                 </tr>
                 <tr>
                     <th>Indexes</th>
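
Reviewer aid: a minimal sketch of how the new sources table can be read back to show the provenance of one master index. The Source class and listSources helper are hypothetical names, not part of this change; the columns match the V1__init.sql migration above, and the JDBC style mirrors CacheExporter.

import java.sql.Connection
import java.time.Instant

// Hypothetical helper, not part of this diff: list every source recorded
// for a single master index, newest last.
data class Source(
    val id: Int,
    val type: String,
    val build: Int?,
    val timestamp: Instant?,
    val name: String?,
    val description: String?,
    val url: String?
)

fun listSources(connection: Connection, masterIndexId: Int): List<Source> {
    connection.prepareStatement(
        """
        SELECT id, type, build, timestamp, name, description, url
        FROM sources
        WHERE master_index_id = ?
        ORDER BY id ASC
        """.trimIndent()
    ).use { stmt ->
        stmt.setInt(1, masterIndexId)

        stmt.executeQuery().use { rows ->
            val sources = mutableListOf<Source>()

            while (rows.next()) {
                // build is a nullable INTEGER column, so check wasNull()
                var build: Int? = rows.getInt(3)
                if (rows.wasNull()) {
                    build = null
                }

                sources += Source(
                    rows.getInt(1),
                    rows.getString(2),
                    build,
                    rows.getTimestamp(4)?.toInstant(),
                    rows.getString(5),
                    rows.getString(6),
                    rows.getString(7)
                )
            }

            return sources
        }
    }
}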
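
The behavioural consequence of splitting metadata out of master_indexes: addMasterIndex is now idempotent — re-importing the same container and format returns the existing row's id — and each import records its own sources row, so names, descriptions and URLs accumulate instead of being merged in place (the old "$oldName/$name" and description-concatenation logic removed above). A sketch of what that means for a caller; the game name, build and URLs below are made up, and CacheImporter is assumed to be on the classpath from this module:

import kotlinx.coroutines.runBlocking
import org.openrs2.cache.Store

// Illustrative only: importing the same cache from two mirrors. Both calls
// resolve to one master_indexes row; each call inserts its own sources row,
// so the web interface lists both attributions side by side.
fun importTwice(importer: CacheImporter, store: Store) = runBlocking {
    importer.import(store, "runescape", build = 550, timestamp = null,
        name = "Mirror A", description = null, url = "https://a.example/cache/")

    importer.import(store, "runescape", build = 550, timestamp = null,
        name = "Mirror B", description = null, url = "https://b.example/cache/")
}

For js5remote sources the partial unique index ON sources (master_index_id, game_id, build) WHERE type = 'js5remote', together with the SELECT at the top of addSource, makes the crawler reuse its existing source row when it reconnects for the same game and build, rather than inserting a duplicate every time the connection bounces.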
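
Finally, source_groups records which source supplied each (archive, group, version, container) tuple on top of the content-addressed groups/containers tables. A hedged sketch of one query this enables — counting how many independent sources vouch for each group of a master index. The helper name and the join through sources are mine, not part of this change:

import java.sql.Connection

// Hypothetical: for one master index, count the distinct sources that
// supplied each (archive, group), e.g. to gauge how well-corroborated
// a cache is.
fun countGroupAttestations(connection: Connection, masterIndexId: Int): Map<Pair<Int, Int>, Int> {
    connection.prepareStatement(
        """
        SELECT sg.archive_id, sg.group_id, COUNT(DISTINCT sg.source_id)
        FROM source_groups sg
        JOIN sources s ON s.id = sg.source_id
        WHERE s.master_index_id = ?
        GROUP BY sg.archive_id, sg.group_id
        """.trimIndent()
    ).use { stmt ->
        stmt.setInt(1, masterIndexId)

        stmt.executeQuery().use { rows ->
            val counts = mutableMapOf<Pair<Int, Int>, Int>()

            while (rows.next()) {
                counts[Pair(rows.getInt(1), rows.getInt(2))] = rows.getInt(3)
            }

            return counts
        }
    }
}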