Add group source tracking

There are a few collisions in the production archive. I suspect these
are due to poorly modified caches, and tracking the source(s) of each
group will make it easier to determine which cache is probably
problematic.

This change also has the benefit of removing a lot of the hacky source
name/description merging logic.
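
For example, with the sources and source_groups tables introduced below, a
query along the following lines can list every source that supplied more than
one distinct container for the same group and version, which is exactly the
situation a collision points at. This is a sketch only (written JDBC-style to
match the importer; table and column names follow the new migration):

    import java.sql.Connection

    // Sketch: list the sources behind each (archive, group, version) that maps
    // to more than one distinct container, i.e. the likely collision culprits.
    fun findCollidingSources(connection: Connection) {
        connection.prepareStatement(
            """
            SELECT sg.archive_id, sg.group_id, sg.version, s.name, s.url, sg.container_id
            FROM source_groups sg
            JOIN sources s ON s.id = sg.source_id
            WHERE (sg.archive_id, sg.group_id, sg.version) IN (
                SELECT archive_id, group_id, version
                FROM source_groups
                GROUP BY archive_id, group_id, version
                HAVING COUNT(DISTINCT container_id) > 1
            )
            ORDER BY sg.archive_id, sg.group_id, sg.version
            """.trimIndent()
        ).use { stmt ->
            stmt.executeQuery().use { rows ->
                while (rows.next()) {
                    println(
                        "archive=${rows.getInt(1)} group=${rows.getInt(2)} version=${rows.getInt(3)} " +
                            "name=${rows.getString(4)} url=${rows.getString(5)} container=${rows.getLong(6)}"
                    )
                }
            }
        }
    }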

Signed-off-by: Graham <gpe@openrs2.org>
commit b98d045cfe (pull/132/head)
parent 4856f79152
Graham, 3 years ago
 archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt            | 102
 archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt            | 340
 archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt            |   3
 archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt |   3
 archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt        |  16
 archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql        |  42
 archive/src/main/resources/org/openrs2/archive/templates/caches/index.html    |   2
 archive/src/main/resources/org/openrs2/archive/templates/caches/show.html     |  16
 8 files changed

archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt

@@ -9,6 +9,7 @@ import org.openrs2.cache.Store
 import org.openrs2.crypto.XteaKey
 import org.openrs2.db.Database
 import java.time.Instant
+import java.util.Collections
 import java.util.SortedSet
 import javax.inject.Inject
 import javax.inject.Singleton
@@ -50,13 +51,16 @@ public class CacheExporter @Inject constructor(
     public data class Cache(
         val id: Int,
-        val game: String,
+        val games: SortedSet<String>,
         val builds: SortedSet<Int>,
         val timestamp: Instant?,
-        val name: String?,
-        val description: String?,
+        val names: SortedSet<String>,
+        val descriptions: List<String>,
+        val urls: SortedSet<String>,
         val stats: Stats?
-    )
+    ) {
+        val game: String = games.single()
+    }
 
     public data class Key(
         val archive: Int,
@@ -72,15 +76,23 @@ public class CacheExporter @Inject constructor(
             connection.prepareStatement(
                 """
                 SELECT
-                    m.id, g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
-                    s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
+                    m.id,
+                    g.name,
+                    array_remove(array_agg(DISTINCT s.build ORDER BY s.build ASC), NULL),
+                    MIN(s.timestamp),
+                    array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL),
+                    ms.valid_indexes,
+                    ms.indexes,
+                    ms.valid_groups,
+                    ms.groups,
+                    ms.valid_keys,
+                    ms.keys
                 FROM master_indexes m
-                JOIN games g ON g.id = m.game_id
-                JOIN containers c ON c.id = m.container_id
-                LEFT JOIN master_index_builds b ON b.master_index_id = m.id
-                LEFT JOIN master_index_stats s ON s.master_index_id = m.id
-                GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
-                ORDER BY g.name ASC, MIN(b.build) ASC, m.timestamp ASC
+                LEFT JOIN sources s ON s.master_index_id = m.id
+                LEFT JOIN games g ON g.id = s.game_id
+                LEFT JOIN master_index_stats ms ON ms.master_index_id = m.id
+                GROUP BY m.id, g.name, ms.valid_indexes, ms.indexes, ms.valid_groups, ms.groups, ms.valid_keys, ms.keys
+                ORDER BY g.name ASC, MIN(s.build) ASC, MIN(s.timestamp) ASC
                 """.trimIndent()
             ).use { stmt ->
                 stmt.executeQuery().use { rows ->
@@ -91,7 +103,7 @@ public class CacheExporter @Inject constructor(
                         val game = rows.getString(2)
                         val builds = rows.getArray(3).array as Array<Int>
                         val timestamp = rows.getTimestamp(4)?.toInstant()
-                        val name = rows.getString(5)
+                        val names = rows.getArray(5).array as Array<String>
 
                         val validIndexes = rows.getLong(6)
                         val stats = if (!rows.wasNull()) {
@@ -105,7 +117,16 @@ public class CacheExporter @Inject constructor(
                             null
                         }
 
-                        caches += Cache(id, game, builds.toSortedSet(), timestamp, name, description = null, stats)
+                        caches += Cache(
+                            id,
+                            sortedSetOf(game),
+                            builds.toSortedSet(),
+                            timestamp,
+                            names.toSortedSet(),
+                            emptyList(),
+                            Collections.emptySortedSet(),
+                            stats
+                        )
                     }
 
                     caches
@@ -119,15 +140,24 @@ public class CacheExporter @Inject constructor(
             connection.prepareStatement(
                 """
                 SELECT
-                    g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
-                    m.description, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
+                    array_remove(array_agg(DISTINCT g.name ORDER BY g.name ASC), NULL),
+                    array_remove(array_agg(DISTINCT s.build ORDER BY s.build ASC), NULL),
+                    MIN(s.timestamp),
+                    array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL),
+                    array_remove(array_agg(s.description), NULL),
+                    array_remove(array_agg(DISTINCT s.url ORDER BY s.url ASC), NULL),
+                    ms.valid_indexes,
+                    ms.indexes,
+                    ms.valid_groups,
+                    ms.groups,
+                    ms.valid_keys,
+                    ms.keys
                 FROM master_indexes m
-                JOIN games g ON g.id = m.game_id
-                JOIN containers c ON c.id = m.container_id
-                LEFT JOIN master_index_builds b ON b.master_index_id = m.id
-                LEFT JOIN master_index_stats s ON s.master_index_id = m.id
+                LEFT JOIN sources s ON s.master_index_id = m.id
+                LEFT JOIN games g ON g.id = s.game_id
+                LEFT JOIN master_index_stats ms ON ms.master_index_id = m.id
                 WHERE m.id = ?
-                GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
+                GROUP BY m.id, ms.valid_indexes, ms.indexes, ms.valid_groups, ms.groups, ms.valid_keys, ms.keys
                 """.trimIndent()
             ).use { stmt ->
                 stmt.setInt(1, id)
@@ -137,25 +167,35 @@ public class CacheExporter @Inject constructor(
                         return@execute null
                     }
 
-                    val game = rows.getString(1)
+                    val games = rows.getArray(1).array as Array<String>
                     val builds = rows.getArray(2).array as Array<Int>
                     val timestamp = rows.getTimestamp(3)?.toInstant()
-                    val name = rows.getString(4)
-                    val description = rows.getString(5)
+                    val names = rows.getArray(4).array as Array<String>
+                    val descriptions = rows.getArray(5).array as Array<String>
+                    val urls = rows.getArray(6).array as Array<String>
 
-                    val validIndexes = rows.getLong(6)
+                    val validIndexes = rows.getLong(7)
                     val stats = if (!rows.wasNull()) {
-                        val indexes = rows.getLong(7)
-                        val validGroups = rows.getLong(8)
-                        val groups = rows.getLong(9)
-                        val validKeys = rows.getLong(10)
-                        val keys = rows.getLong(11)
+                        val indexes = rows.getLong(8)
+                        val validGroups = rows.getLong(9)
+                        val groups = rows.getLong(10)
+                        val validKeys = rows.getLong(11)
+                        val keys = rows.getLong(12)
                         Stats(validIndexes, indexes, validGroups, groups, validKeys, keys)
                     } else {
                         null
                     }
 
-                    return@execute Cache(id, game, builds.toSortedSet(), timestamp, name, description, stats)
+                    return@execute Cache(
+                        id,
+                        games.toSortedSet(),
+                        builds.toSortedSet(),
+                        timestamp,
+                        names.toSortedSet(),
+                        descriptions.toList(),
+                        urls.toSortedSet(),
+                        stats
+                    )
                 }
             }
         }

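A side note on the new Cache model above: game is now derived from the
aggregated games set via single(), which throws unless the set contains
exactly one element. The list query always builds a one-element set
(sortedSetOf(game)), so only a master index whose sources span multiple games
could trip it. A minimal stand-in sketch of that behaviour (hypothetical
CacheLike, not the real CacheExporter.Cache):

    import java.util.SortedSet

    // Stand-in for CacheExporter.Cache: game is computed eagerly from games.
    data class CacheLike(val games: SortedSet<String>) {
        val game: String = games.single() // throws if games.size != 1
    }

    fun main() {
        println(CacheLike(sortedSetOf("runescape")).game) // prints "runescape"

        try {
            CacheLike(sortedSetOf("runescape", "oldschool"))
        } catch (e: IllegalArgumentException) {
            println("more than one game: ${e.message}")
        }
    }
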
archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt

@@ -23,7 +23,6 @@ import java.sql.Connection
 import java.sql.SQLException
 import java.sql.Types
 import java.time.Instant
-import java.time.OffsetDateTime
 import java.time.ZoneOffset
 import javax.inject.Inject
 import javax.inject.Singleton
@@ -74,13 +73,25 @@ public class CacheImporter @Inject constructor(
         public val versionTruncated: Boolean
     ) : Container(compressed, uncompressed)
 
+    private enum class SourceType {
+        DISK,
+        JS5REMOTE
+    }
+
+    public data class MasterIndexResult(
+        val masterIndexId: Int,
+        val sourceId: Int,
+        val indexes: List<ByteBuf?>
+    )
+
     public suspend fun import(
         store: Store,
         game: String,
         build: Int?,
         timestamp: Instant?,
         name: String?,
-        description: String?
+        description: String?,
+        url: String?
     ) {
         database.execute { connection ->
             prepare(connection)
@@ -89,12 +100,25 @@ public class CacheImporter @Inject constructor(
 
             // import master index
             val masterIndex = createMasterIndex(store)
-            try {
-                addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
+            val masterIndexId = try {
+                addMasterIndex(connection, masterIndex)
             } finally {
                 masterIndex.release()
             }
 
+            // create source
+            val sourceId = addSource(
+                connection,
+                SourceType.DISK,
+                masterIndexId,
+                gameId,
+                build,
+                timestamp,
+                name,
+                description,
+                url
+            )
+
             // import indexes
             val indexes = arrayOfNulls<Js5Index>(Js5Archive.ARCHIVESET)
             val indexGroups = mutableListOf<Index>()
@@ -106,7 +130,7 @@ public class CacheImporter @Inject constructor(
                 }
 
                 for (index in indexGroups) {
-                    addIndex(connection, index)
+                    addIndex(connection, sourceId, index)
                 }
             } finally {
                 indexGroups.forEach(Index::release)
@@ -127,7 +151,7 @@ public class CacheImporter @Inject constructor(
                         groups += group
 
                         if (groups.size >= BATCH_SIZE) {
-                            addGroups(connection, groups)
+                            addGroups(connection, sourceId, groups)
                             groups.forEach(Group::release)
                             groups.clear()
@@ -136,7 +160,7 @@ public class CacheImporter @Inject constructor(
                 }
 
                 if (groups.isNotEmpty()) {
-                    addGroups(connection, groups)
+                    addGroups(connection, sourceId, groups)
                 }
             } finally {
                 groups.forEach(Group::release)
@@ -151,7 +175,8 @@ public class CacheImporter @Inject constructor(
         build: Int?,
         timestamp: Instant?,
         name: String?,
-        description: String?
+        description: String?,
+        url: String?
     ) {
         Js5Compression.uncompress(buf.slice()).use { uncompressed ->
             val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf, uncompressed)
@@ -160,7 +185,8 @@ public class CacheImporter @Inject constructor(
                 prepare(connection)
 
                 val gameId = getGameId(connection, game)
-                addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
+                val masterIndexId = addMasterIndex(connection, masterIndex)
+                addSource(connection, SourceType.DISK, masterIndexId, gameId, build, timestamp, name, description, url)
             }
         }
     }
@@ -172,9 +198,8 @@ public class CacheImporter @Inject constructor(
         gameId: Int,
         build: Int,
         lastId: Int?,
-        timestamp: Instant,
-        name: String,
-    ): Pair<Int, List<ByteBuf?>> {
+        timestamp: Instant
+    ): MasterIndexResult {
         return database.execute { connection ->
             prepare(connection)
@@ -191,15 +216,21 @@ public class CacheImporter @Inject constructor(
                 stmt.execute()
             }
 
-            val id = addMasterIndex(
+            val masterIndexId = addMasterIndex(
+                connection,
+                MasterIndex(masterIndex, buf, uncompressed)
+            )
+
+            val sourceId = addSource(
                 connection,
-                MasterIndex(masterIndex, buf, uncompressed),
+                SourceType.JS5REMOTE,
+                masterIndexId,
                 gameId,
                 build,
                 timestamp,
-                name,
+                name = "Original",
                 description = null,
-                overwrite = true
+                url = null
             )
 
             /*
@@ -226,7 +257,7 @@ public class CacheImporter @Inject constructor(
                 """.trimIndent()
             ).use { stmt ->
                 stmt.setObject(1, lastId, Types.INTEGER)
-                stmt.setInt(2, id)
+                stmt.setInt(2, masterIndexId)
 
                 stmt.executeQuery().use { rows ->
                     val indexes = mutableListOf<ByteBuf?>()
@@ -241,7 +272,7 @@ public class CacheImporter @Inject constructor(
                     }
 
                     indexes.filterNotNull().forEach(ByteBuf::retain)
-                    return@execute Pair(id, indexes)
+                    return@execute MasterIndexResult(masterIndexId, sourceId, indexes)
                 } finally {
                     indexes.filterNotNull().forEach(ByteBuf::release)
                 }
@@ -251,6 +282,7 @@ public class CacheImporter @Inject constructor(
     }
 
     public suspend fun importIndexAndGetMissingGroups(
+        sourceId: Int,
         archive: Int,
         index: Js5Index,
         buf: ByteBuf,
@@ -259,7 +291,7 @@ public class CacheImporter @Inject constructor(
     ): List<Int> {
         return database.execute { connection ->
             prepare(connection)
-            val id = addIndex(connection, Index(archive, index, buf, uncompressed))
+            val id = addIndex(connection, sourceId, Index(archive, index, buf, uncompressed))
 
             /*
              * In order to defend against (crc32, version) collisions, we only
@@ -304,14 +336,14 @@ public class CacheImporter @Inject constructor(
         }
     }
 
-    public suspend fun importGroups(groups: List<Group>) {
+    public suspend fun importGroups(sourceId: Int, groups: List<Group>) {
         if (groups.isEmpty()) {
             return
         }
 
         database.execute { connection ->
             prepare(connection)
-            addGroups(connection, groups)
+            addGroups(connection, sourceId, groups)
         }
     }
@@ -329,27 +361,15 @@
     private fun addMasterIndex(
         connection: Connection,
-        masterIndex: MasterIndex,
-        gameId: Int,
-        build: Int?,
-        timestamp: Instant?,
-        name: String?,
-        description: String?,
-        overwrite: Boolean
+        masterIndex: MasterIndex
     ): Int {
         val containerId = addContainer(connection, masterIndex)
-        var masterIndexId: Int? = null
-
-        var newTimestamp: Instant?
-        var newName: String?
-        var newDescription: String?
 
         connection.prepareStatement(
             """
-            SELECT id, game_id, timestamp, name, description
+            SELECT id
             FROM master_indexes
             WHERE container_id = ? AND format = ?::master_index_format
-            FOR UPDATE
             """.trimIndent()
         ).use { stmt ->
             stmt.setLong(1, containerId)
@@ -357,161 +377,129 @@ public class CacheImporter @Inject constructor(
 
             stmt.executeQuery().use { rows ->
                 if (rows.next()) {
-                    masterIndexId = rows.getInt(1)
+                    return rows.getInt(1)
                 }
-
-                if (masterIndexId != null) {
-                    val oldGameId = rows.getInt(2)
-                    val oldTimestamp: Instant? = rows.getTimestamp(3)?.toInstant()
-                    val oldName: String? = rows.getString(4)
-                    val oldDescription: String? = rows.getString(5)
-
-                    check(oldGameId == gameId)
-
-                    if (oldTimestamp != null && timestamp != null) {
-                        newTimestamp = if (oldTimestamp.isBefore(timestamp)) {
-                            oldTimestamp
-                        } else {
-                            timestamp
-                        }
-                    } else if (oldTimestamp != null) {
-                        newTimestamp = oldTimestamp
-                    } else {
-                        newTimestamp = timestamp
-                    }
-
-                    if (overwrite) {
-                        newName = name
-                    } else if (oldName != null && name != null && oldName != name) {
-                        newName = "$oldName/$name"
-                    } else if (oldName != null) {
-                        newName = oldName
-                    } else {
-                        newName = name
-                    }
-
-                    if (overwrite) {
-                        newDescription = description
-                    } else if (oldDescription != null && description != null && oldDescription != description) {
-                        newDescription = "$oldDescription\n\n$description"
-                    } else if (oldDescription != null) {
-                        newDescription = oldDescription
-                    } else {
-                        newDescription = description
-                    }
-                } else {
-                    newTimestamp = timestamp
-                    newName = name
-                    newDescription = description
-                }
             }
         }
 
-        if (masterIndexId != null) {
-            connection.prepareStatement(
-                """
-                UPDATE master_indexes
-                SET timestamp = ?, name = ?, description = ?
-                WHERE id = ?
-                """.trimIndent()
-            ).use { stmt ->
-                if (newTimestamp != null) {
-                    val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
-                    stmt.setObject(1, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
-                } else {
-                    stmt.setNull(1, Types.TIMESTAMP_WITH_TIMEZONE)
-                }
-
-                stmt.setString(2, newName)
-                stmt.setString(3, newDescription)
-                stmt.setInt(4, masterIndexId!!)
-
-                stmt.execute()
-            }
-        } else {
-            connection.prepareStatement(
-                """
-                INSERT INTO master_indexes (container_id, format, game_id, timestamp, name, description)
-                VALUES (?, ?::master_index_format, ?, ?, ?, ?)
-                RETURNING id
-                """.trimIndent()
-            ).use { stmt ->
-                stmt.setLong(1, containerId)
-                stmt.setString(2, masterIndex.index.format.name.toLowerCase())
-                stmt.setInt(3, gameId)
-
-                if (newTimestamp != null) {
-                    val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
-                    stmt.setObject(4, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
-                } else {
-                    stmt.setNull(4, Types.TIMESTAMP_WITH_TIMEZONE)
-                }
-
-                stmt.setString(5, newName)
-                stmt.setString(6, newDescription)
-
-                stmt.executeQuery().use { rows ->
-                    check(rows.next())
-                    masterIndexId = rows.getInt(1)
-                }
-            }
-
-            connection.prepareStatement(
-                """
-                INSERT INTO master_index_archives (
-                    master_index_id, archive_id, crc32, version, whirlpool, groups, total_uncompressed_length
-                )
-                VALUES (?, ?, ?, ?, ?, ?, ?)
-                """.trimIndent()
-            ).use { stmt ->
-                for ((i, entry) in masterIndex.index.entries.withIndex()) {
-                    stmt.setInt(1, masterIndexId!!)
-                    stmt.setInt(2, i)
-                    stmt.setInt(3, entry.checksum)
-
-                    if (masterIndex.index.format >= MasterIndexFormat.VERSIONED) {
-                        stmt.setInt(4, entry.version)
-                    } else {
-                        stmt.setInt(4, 0)
-                    }
-
-                    if (masterIndex.index.format >= MasterIndexFormat.DIGESTS) {
-                        stmt.setBytes(5, entry.digest ?: ByteArray(Whirlpool.DIGESTBYTES))
-                    } else {
-                        stmt.setNull(5, Types.BINARY)
-                    }
-
-                    if (masterIndex.index.format >= MasterIndexFormat.LENGTHS) {
-                        stmt.setInt(6, entry.groups)
-                        stmt.setInt(7, entry.totalUncompressedLength)
-                    } else {
-                        stmt.setNull(6, Types.INTEGER)
-                        stmt.setNull(7, Types.INTEGER)
-                    }
-
-                    stmt.addBatch()
-                }
-
-                stmt.executeBatch()
-            }
-        }
-
-        if (build != null) {
-            connection.prepareStatement(
-                """
-                INSERT INTO master_index_builds (master_index_id, build)
-                VALUES (?, ?)
-                ON CONFLICT DO NOTHING
-                """.trimIndent()
-            ).use { stmt ->
-                stmt.setInt(1, masterIndexId!!)
-                stmt.setInt(2, build)
-
-                stmt.execute()
-            }
-        }
-
-        return masterIndexId!!
+        val masterIndexId: Int
+
+        connection.prepareStatement(
+            """
+            INSERT INTO master_indexes (container_id, format)
+            VALUES (?, ?::master_index_format)
+            RETURNING id
+            """.trimIndent()
+        ).use { stmt ->
+            stmt.setLong(1, containerId)
+            stmt.setString(2, masterIndex.index.format.name.toLowerCase())
+
+            stmt.executeQuery().use { rows ->
+                check(rows.next())
+                masterIndexId = rows.getInt(1)
+            }
+        }
+
+        connection.prepareStatement(
+            """
+            INSERT INTO master_index_archives (
+                master_index_id, archive_id, crc32, version, whirlpool, groups, total_uncompressed_length
+            )
+            VALUES (?, ?, ?, ?, ?, ?, ?)
+            """.trimIndent()
+        ).use { stmt ->
+            for ((i, entry) in masterIndex.index.entries.withIndex()) {
+                stmt.setInt(1, masterIndexId)
+                stmt.setInt(2, i)
+                stmt.setInt(3, entry.checksum)
+
+                if (masterIndex.index.format >= MasterIndexFormat.VERSIONED) {
+                    stmt.setInt(4, entry.version)
+                } else {
+                    stmt.setInt(4, 0)
+                }
+
+                if (masterIndex.index.format >= MasterIndexFormat.DIGESTS) {
+                    stmt.setBytes(5, entry.digest ?: ByteArray(Whirlpool.DIGESTBYTES))
+                } else {
+                    stmt.setNull(5, Types.BINARY)
+                }
+
+                if (masterIndex.index.format >= MasterIndexFormat.LENGTHS) {
+                    stmt.setInt(6, entry.groups)
+                    stmt.setInt(7, entry.totalUncompressedLength)
+                } else {
+                    stmt.setNull(6, Types.INTEGER)
+                    stmt.setNull(7, Types.INTEGER)
+                }
+
+                stmt.addBatch()
+            }
+
+            stmt.executeBatch()
+        }
+
+        return masterIndexId
+    }
+
+    private fun addSource(
+        connection: Connection,
+        type: SourceType,
+        masterIndexId: Int,
+        gameId: Int,
+        build: Int?,
+        timestamp: Instant?,
+        name: String?,
+        description: String?,
+        url: String?
+    ): Int {
+        if (type == SourceType.JS5REMOTE && build != null) {
+            connection.prepareStatement(
+                """
+                SELECT id
+                FROM sources
+                WHERE type = 'js5remote' AND master_index_id = ? AND game_id = ? AND build = ?
+                """.trimIndent()
+            ).use { stmt ->
+                stmt.setInt(1, masterIndexId)
+                stmt.setInt(2, gameId)
+                stmt.setInt(3, build)
+
+                stmt.executeQuery().use { rows ->
+                    if (rows.next()) {
+                        return rows.getInt(1)
+                    }
+                }
+            }
+        }
+
+        connection.prepareStatement(
+            """
+            INSERT INTO sources (type, master_index_id, game_id, build, timestamp, name, description, url)
+            VALUES (?::source_type, ?, ?, ?, ?, ?, ?, ?)
+            RETURNING id
+            """.trimIndent()
+        ).use { stmt ->
+            stmt.setString(1, type.toString().toLowerCase())
+            stmt.setInt(2, masterIndexId)
+            stmt.setInt(3, gameId)
+            stmt.setObject(4, build, Types.INTEGER)
+
+            if (timestamp != null) {
+                stmt.setObject(5, timestamp.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
+            } else {
+                stmt.setNull(5, Types.TIMESTAMP_WITH_TIMEZONE)
+            }
+
+            stmt.setString(6, name)
+            stmt.setString(7, description)
+            stmt.setString(8, url)
+
+            stmt.executeQuery().use { rows ->
+                check(rows.next())
+                return rows.getInt(1)
+            }
+        }
     }
 
     private fun readGroup(store: Store, archive: Int, index: Js5Index?, group: Int): Group? {
@@ -547,7 +535,7 @@ public class CacheImporter @Inject constructor(
         }
     }
 
-    private fun addGroups(connection: Connection, groups: List<Group>): List<Long> {
+    private fun addGroups(connection: Connection, sourceId: Int, groups: List<Group>): List<Long> {
         val containerIds = addContainers(connection, groups)
 
         connection.prepareStatement(
@@ -569,11 +557,31 @@ public class CacheImporter @Inject constructor(
             stmt.executeBatch()
         }
 
+        connection.prepareStatement(
+            """
+            INSERT INTO source_groups (source_id, archive_id, group_id, version, version_truncated, container_id)
+            VALUES (?, ?, ?, ?, ?, ?)
+            ON CONFLICT DO NOTHING
+            """.trimIndent()
+        ).use { stmt ->
+            for ((i, group) in groups.withIndex()) {
+                stmt.setInt(1, sourceId)
+                stmt.setInt(2, group.archive)
+                stmt.setInt(3, group.group)
+                stmt.setInt(4, group.version)
+                stmt.setBoolean(5, group.versionTruncated)
+                stmt.setLong(6, containerIds[i])
+
+                stmt.addBatch()
+            }
+
+            stmt.executeBatch()
+        }
+
         return containerIds
     }
 
-    private fun addGroup(connection: Connection, group: Group): Long {
-        return addGroups(connection, listOf(group)).single()
+    private fun addGroup(connection: Connection, sourceId: Int, group: Group): Long {
+        return addGroups(connection, sourceId, listOf(group)).single()
    }
 
     private fun readIndex(store: Store, archive: Int): Index {
@@ -584,8 +592,8 @@ public class CacheImporter @Inject constructor(
         }
     }
 
-    private fun addIndex(connection: Connection, index: Index): Long {
-        val containerId = addGroup(connection, index)
+    private fun addIndex(connection: Connection, sourceId: Int, index: Index): Long {
+        val containerId = addGroup(connection, sourceId, index)
         val savepoint = connection.setSavepoint()
 
         connection.prepareStatement(

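Note how addSource deduplicates js5remote sources above: it looks up an
existing row on (master_index_id, game_id, build) before inserting, mirroring
the partial unique index created in the migration below, so repeated JS5
downloads of one build reuse a single source row while each disk import gets a
fresh one. A condensed, hypothetical version of that look-up-then-insert
pattern:

    import java.sql.Connection

    // Hypothetical condensed form of the js5remote branch of addSource:
    // reuse an existing source row if one matches, otherwise insert a new one.
    fun findOrAddJs5RemoteSource(connection: Connection, masterIndexId: Int, gameId: Int, build: Int): Int {
        connection.prepareStatement(
            """
            SELECT id
            FROM sources
            WHERE type = 'js5remote' AND master_index_id = ? AND game_id = ? AND build = ?
            """.trimIndent()
        ).use { stmt ->
            stmt.setInt(1, masterIndexId)
            stmt.setInt(2, gameId)
            stmt.setInt(3, build)

            stmt.executeQuery().use { rows ->
                if (rows.next()) {
                    return rows.getInt(1) // reuse the existing source
                }
            }
        }

        connection.prepareStatement(
            """
            INSERT INTO sources (type, master_index_id, game_id, build)
            VALUES ('js5remote', ?, ?, ?)
            RETURNING id
            """.trimIndent()
        ).use { stmt ->
            stmt.setInt(1, masterIndexId)
            stmt.setInt(2, gameId)
            stmt.setInt(3, build)

            stmt.executeQuery().use { rows ->
                check(rows.next())
                return rows.getInt(1)
            }
        }
    }

A concurrent duplicate would still be rejected by the partial unique index, so
the worst case is a failed transaction rather than a duplicate source row.
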
archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt

@@ -17,6 +17,7 @@ public class ImportCommand : CliktCommand(name = "import") {
     private val timestamp by option().instant()
     private val name by option()
     private val description by option()
+    private val url by option()
 
     private val game by argument()
     private val input by argument().path(
@@ -30,7 +31,7 @@ public class ImportCommand : CliktCommand(name = "import") {
         val importer = injector.getInstance(CacheImporter::class.java)
 
         Store.open(input).use { store ->
-            importer.import(store, game, build, timestamp, name, description)
+            importer.import(store, game, build, timestamp, name, description, url)
         }
     }
 }

archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt

@@ -21,6 +21,7 @@ public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index
     private val timestamp by option().instant()
     private val name by option()
     private val description by option()
+    private val url by option()
 
     private val game by argument()
     private val format by argument().enum<MasterIndexFormat>()
@@ -35,7 +36,7 @@ public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index
         val importer = injector.getInstance(CacheImporter::class.java)
 
         Unpooled.wrappedBuffer(Files.readAllBytes(input)).use { buf ->
-            importer.importMasterIndex(buf, format, game, build, timestamp, name, description)
+            importer.importMasterIndex(buf, format, game, build, timestamp, name, description, url)
         }
     }
 }

archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt

@@ -53,6 +53,7 @@ public class Js5ChannelHandler(
     private val inFlightRequests = mutableSetOf<Js5Request.Group>()
     private val pendingRequests = ArrayDeque<Js5Request.Group>()
     private var masterIndexId: Int = 0
+    private var sourceId: Int = 0
     private var masterIndex: Js5MasterIndex? = null
     private lateinit var indexes: Array<Js5Index?>
     private val groups = mutableListOf<CacheImporter.Group>()
@@ -161,7 +162,7 @@ public class Js5ChannelHandler(
 
         if (groups.size >= CacheImporter.BATCH_SIZE || complete) {
             runBlocking {
-                importer.importGroups(groups)
+                importer.importGroups(sourceId, groups)
             }
 
             releaseGroups()
@@ -182,7 +183,7 @@ public class Js5ChannelHandler(
         Js5Compression.uncompress(buf.slice()).use { uncompressed ->
             masterIndex = Js5MasterIndex.read(uncompressed.slice(), masterIndexFormat)
 
-            val (id, rawIndexes) = runBlocking {
+            val (masterIndexId, sourceId, rawIndexes) = runBlocking {
                 importer.importMasterIndexAndGetIndexes(
                     masterIndex!!,
                     buf,
@@ -190,10 +191,13 @@ public class Js5ChannelHandler(
                     gameId,
                     build,
                     lastMasterIndexId,
-                    timestamp = Instant.now(),
-                    name = "Original"
+                    timestamp = Instant.now()
                 )
             }
 
+            this.masterIndexId = masterIndexId
+            this.sourceId = sourceId
+
             try {
                 indexes = arrayOfNulls(rawIndexes.size)
@@ -207,8 +211,6 @@ public class Js5ChannelHandler(
             } finally {
                 rawIndexes.filterNotNull().forEach(ByteBuf::release)
             }
-
-            masterIndexId = id
         }
     }
 
@@ -228,7 +230,7 @@ public class Js5ChannelHandler(
         }
 
         val groups = runBlocking {
-            importer.importIndexAndGetMissingGroups(archive, index, buf, uncompressed, lastMasterIndexId)
+            importer.importIndexAndGetMissingGroups(sourceId, archive, index, buf, uncompressed, lastMasterIndexId)
         }
 
         for (group in groups) {
             request(archive, group)

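One detail in the handler change above: MasterIndexResult is a data class, so
the val (masterIndexId, sourceId, rawIndexes) destructuring binds by position
(componentN in declaration order), and the locals shadow the handler's own
fields, which is why the explicit this.masterIndexId/this.sourceId assignments
follow. A tiny stand-in illustration:

    // Stand-in types: destructuring a data class binds by position, not by name.
    data class Result(val masterIndexId: Int, val sourceId: Int, val indexes: List<ByteArray?>)

    fun main() {
        val (masterIndexId, sourceId, rawIndexes) = Result(1, 2, emptyList())
        println("masterIndexId=$masterIndexId sourceId=$sourceId indexes=${rawIndexes.size}")
    }
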
archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql

@@ -107,21 +107,11 @@ CREATE TABLE master_indexes (
     id SERIAL PRIMARY KEY NOT NULL,
     container_id BIGINT NOT NULL REFERENCES containers (id),
     format master_index_format NOT NULL,
-    game_id INTEGER NOT NULL REFERENCES games (id),
-    timestamp TIMESTAMPTZ NULL,
-    name TEXT NULL,
-    description TEXT NULL,
     UNIQUE (container_id, format)
 );
 
 ALTER TABLE games ADD COLUMN last_master_index_id INT NULL REFERENCES master_indexes (id);
 
-CREATE TABLE master_index_builds (
-    master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
-    build INTEGER NOT NULL,
-    PRIMARY KEY (master_index_id, build)
-);
-
 CREATE TABLE master_index_archives (
     master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
     archive_id uint1 NOT NULL,
@@ -133,6 +123,38 @@ CREATE TABLE master_index_archives (
     PRIMARY KEY (master_index_id, archive_id)
 );
 
+CREATE TYPE source_type AS ENUM (
+    'disk',
+    'js5remote'
+);
+
+CREATE TABLE sources (
+    id SERIAL PRIMARY KEY NOT NULL,
+    type source_type NOT NULL,
+    master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
+    game_id INTEGER NOT NULL REFERENCES games (id),
+    build INTEGER NULL,
+    timestamp TIMESTAMPTZ NULL,
+    name TEXT NULL,
+    description TEXT NULL,
+    url TEXT NULL
+);
+
+CREATE INDEX ON sources (master_index_id);
+CREATE UNIQUE INDEX ON sources (master_index_id, game_id, build) WHERE type = 'js5remote';
+
+CREATE TABLE source_groups (
+    source_id INTEGER NOT NULL REFERENCES sources (id),
+    archive_id uint1 NOT NULL,
+    group_id INTEGER NOT NULL,
+    version INTEGER NOT NULL,
+    version_truncated BOOLEAN NOT NULL,
+    container_id BIGINT NOT NULL REFERENCES containers (id),
+    PRIMARY KEY (source_id, archive_id, group_id)
+);
+
+CREATE INDEX ON source_groups (archive_id, group_id, version, version_truncated, container_id);
+
 CREATE TABLE names (
     hash INTEGER NOT NULL,
     name TEXT PRIMARY KEY NOT NULL

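With the schema above, tracing a suspicious container back to whoever supplied
it is a single join. A sketch (hypothetical helper; table and column names per
the migration):

    import java.sql.Connection

    // Sketch: list the sources that supplied a given container.
    fun sourcesForContainer(connection: Connection, containerId: Long) {
        connection.prepareStatement(
            """
            SELECT s.id, s.type, s.name, s.url, sg.archive_id, sg.group_id, sg.version
            FROM source_groups sg
            JOIN sources s ON s.id = sg.source_id
            WHERE sg.container_id = ?
            ORDER BY s.id ASC
            """.trimIndent()
        ).use { stmt ->
            stmt.setLong(1, containerId)

            stmt.executeQuery().use { rows ->
                while (rows.next()) {
                    println(
                        "source=${rows.getInt(1)} type=${rows.getString(2)} name=${rows.getString(3)} " +
                            "url=${rows.getString(4)} archive=${rows.getInt(5)} group=${rows.getInt(6)} " +
                            "version=${rows.getInt(7)}"
                    )
                }
            }
        }
    }
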
archive/src/main/resources/org/openrs2/archive/templates/caches/index.html

@@ -38,7 +38,7 @@
                 <br />
                 <span th:text="${#temporals.format(cache.timestamp, 'HH:mm:ss')}"></span>
             </td>
-            <td th:text="${cache.name}"></td>
+            <td th:text="${#strings.setJoin(cache.names, '/')}"></td>
             <td th:classappend="${cache.stats}? (${cache.stats.allIndexesValid}? 'table-success' : 'table-danger')"
                 class="text-right">
                 <span

archive/src/main/resources/org/openrs2/archive/templates/caches/show.html

@@ -14,7 +14,7 @@
     <table class="table table-striped table-bordered table-hover">
         <tr class="thead-dark">
             <th>Game</th>
-            <td th:text="${cache.game}">runescape</td>
+            <td th:text="${#strings.setJoin(cache.games, ', ')}">runescape</td>
         </tr>
         <tr class="thead-dark">
             <th>Build(s)</th>
@@ -26,11 +26,21 @@
         </tr>
         <tr class="thead-dark">
             <th>Name</th>
-            <td th:text="${cache.name}"></td>
+            <td th:text="${#strings.setJoin(cache.names, '/')}"></td>
         </tr>
         <tr class="thead-dark">
             <th>Description</th>
-            <td th:text="${cache.description}"></td>
+            <td th:text="${#strings.listJoin(cache.descriptions, ' ')}"></td>
+        </tr>
+        <tr class="thead-dark">
+            <th>URL(s)</th>
+            <td>
+                <ul>
+                    <li th:each="url : ${cache.urls}">
+                        <a th:href="${url}" th:text="${url}"></a>
+                    </li>
+                </ul>
+            </td>
         </tr>
         <tr class="thead-dark">
             <th>Indexes</th>
