Add initial support for separate scopes to the archiving service

This will allow us to import FunOrb caches without worrying about the
risk of collisions with the main set of RuneScape caches.

Signed-off-by: Graham <gpe@openrs2.org>
Graham 3 years ago
parent 2c31776c54
commit d186f5aef4
  1. 2
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheDownloader.kt
  2. 85
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt
  3. 70
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
  4. 5
      archive/src/main/kotlin/org/openrs2/archive/cache/ExportCommand.kt
  5. 13
      archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
  6. 2
      archive/src/main/kotlin/org/openrs2/archive/cache/NxtJs5ChannelHandler.kt
  7. 2
      archive/src/main/kotlin/org/openrs2/archive/cache/OsrsJs5ChannelHandler.kt
  8. 3
      archive/src/main/kotlin/org/openrs2/archive/game/Game.kt
  9. 5
      archive/src/main/kotlin/org/openrs2/archive/game/GameDatabase.kt
  10. 63
      archive/src/main/kotlin/org/openrs2/archive/map/MapRenderer.kt
  11. 37
      archive/src/main/kotlin/org/openrs2/archive/web/CachesController.kt
  12. 61
      archive/src/main/kotlin/org/openrs2/archive/web/WebServer.kt
  13. 176
      archive/src/main/resources/org/openrs2/archive/migrations/V12__scopes.sql
  14. 12
      archive/src/main/resources/org/openrs2/archive/templates/caches/index.html
  15. 10
      archive/src/main/resources/org/openrs2/archive/templates/caches/show.html

@ -43,6 +43,7 @@ public class CacheDownloader @Inject constructor(
OsrsJs5ChannelInitializer(
OsrsJs5ChannelHandler(
bootstrap,
game.scopeId,
game.id,
hostname,
PORT,
@ -66,6 +67,7 @@ public class CacheDownloader @Inject constructor(
NxtJs5ChannelInitializer(
NxtJs5ChannelHandler(
bootstrap,
game.scopeId,
game.id,
hostname,
PORT,

@ -111,6 +111,7 @@ public class CacheExporter @Inject constructor(
public data class CacheSummary(
val id: Int,
val scope: String,
val game: String,
val environment: String,
val language: String,
@ -159,6 +160,7 @@ public class CacheExporter @Inject constructor(
SELECT
c.id,
g.name AS game,
sc.name AS scope,
e.name AS environment,
l.iso_code AS language,
array_remove(array_agg(DISTINCT ROW(s.build_major, s.build_minor)::build ORDER BY ROW(s.build_major, s.build_minor)::build ASC), NULL) builds,
@ -176,11 +178,12 @@ public class CacheExporter @Inject constructor(
JOIN sources s ON s.cache_id = c.id
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
LEFT JOIN cache_stats cs ON cs.cache_id = c.id
GROUP BY c.id, g.name, e.name, l.iso_code, cs.valid_indexes, cs.indexes, cs.valid_groups, cs.groups,
cs.valid_keys, cs.keys, cs.size, cs.blocks
GROUP BY sc.name, c.id, g.name, e.name, l.iso_code, cs.valid_indexes, cs.indexes, cs.valid_groups,
cs.groups, cs.valid_keys, cs.keys, cs.size, cs.blocks
) t
ORDER BY t.game ASC, t.environment ASC, t.language ASC, t.builds[1] ASC, t.timestamp ASC
""".trimIndent()
@ -191,21 +194,22 @@ public class CacheExporter @Inject constructor(
while (rows.next()) {
val id = rows.getInt(1)
val game = rows.getString(2)
val environment = rows.getString(3)
val language = rows.getString(4)
val builds = rows.getArray(5).array as Array<*>
val timestamp = rows.getTimestamp(6)?.toInstant()
@Suppress("UNCHECKED_CAST") val sources = rows.getArray(7).array as Array<String>
val validIndexes = rows.getLong(8)
val scope = rows.getString(3)
val environment = rows.getString(4)
val language = rows.getString(5)
val builds = rows.getArray(6).array as Array<*>
val timestamp = rows.getTimestamp(7)?.toInstant()
@Suppress("UNCHECKED_CAST") val sources = rows.getArray(8).array as Array<String>
val validIndexes = rows.getLong(9)
val stats = if (!rows.wasNull()) {
val indexes = rows.getLong(9)
val validGroups = rows.getLong(10)
val groups = rows.getLong(11)
val validKeys = rows.getLong(12)
val keys = rows.getLong(13)
val size = rows.getLong(14)
val blocks = rows.getLong(15)
val indexes = rows.getLong(10)
val validGroups = rows.getLong(11)
val groups = rows.getLong(12)
val validKeys = rows.getLong(13)
val keys = rows.getLong(14)
val size = rows.getLong(15)
val blocks = rows.getLong(16)
Stats(validIndexes, indexes, validGroups, groups, validKeys, keys, size, blocks)
} else {
null
@ -213,6 +217,7 @@ public class CacheExporter @Inject constructor(
caches += CacheSummary(
id,
scope,
game,
environment,
language,
@ -229,7 +234,7 @@ public class CacheExporter @Inject constructor(
}
}
public suspend fun get(id: Int): Cache? {
public suspend fun get(scope: String, id: Int): Cache? {
return database.execute { connection ->
val masterIndex: Js5MasterIndex?
val checksumTable: ChecksumTable?
@ -250,15 +255,17 @@ public class CacheExporter @Inject constructor(
cs.size,
cs.blocks
FROM caches c
CROSS JOIN scopes s
LEFT JOIN master_indexes m ON m.id = c.id
LEFT JOIN containers mc ON mc.id = m.container_id
LEFT JOIN crc_tables t ON t.id = c.id
LEFT JOIN blobs b ON b.id = t.blob_id
LEFT JOIN cache_stats cs ON cs.cache_id = c.id
WHERE c.id = ?
LEFT JOIN cache_stats cs ON cs.scope_id = s.id AND cs.cache_id = c.id
WHERE s.name = ? AND c.id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
@ -310,13 +317,15 @@ public class CacheExporter @Inject constructor(
FROM sources s
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE s.cache_id = ?
WHERE sc.name = ? AND s.cache_id = ?
ORDER BY s.name ASC
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
@ -372,7 +381,7 @@ public class CacheExporter @Inject constructor(
}
}
public suspend fun getFileName(id: Int): String? {
public suspend fun getFileName(scope: String, id: Int): String? {
return database.execute { connection ->
// TODO(gpe): what if a cache is from multiple games?
connection.prepareStatement(
@ -386,14 +395,16 @@ public class CacheExporter @Inject constructor(
FROM sources s
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE s.cache_id = ?
WHERE sc.name = ? AND s.cache_id = ?
GROUP BY g.name, e.name, l.iso_code
LIMIT 1
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
@ -431,7 +442,7 @@ public class CacheExporter @Inject constructor(
}
}
public fun export(id: Int, storeFactory: (Boolean) -> Store) {
public fun export(scope: String, id: Int, storeFactory: (Boolean) -> Store) {
database.executeOnce { connection ->
val legacy = connection.prepareStatement(
"""
@ -451,22 +462,24 @@ public class CacheExporter @Inject constructor(
if (legacy) {
exportLegacy(connection, id, store)
} else {
export(connection, id, store)
export(connection, scope, id, store)
}
}
}
}
private fun export(connection: Connection, id: Int, store: Store) {
private fun export(connection: Connection, scope: String, id: Int, store: Store) {
connection.prepareStatement(
"""
SELECT archive_id, group_id, data, version
FROM resolved_groups
WHERE master_index_id = ?
SELECT g.archive_id, g.group_id, g.data, g.version
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
WHERE s.name = ? AND g.master_index_id = ?
""".trimIndent()
).use { stmt ->
stmt.fetchSize = BATCH_SIZE
stmt.setInt(1, id)
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
alloc.buffer(2, 2).use { versionBuf ->
@ -534,18 +547,20 @@ public class CacheExporter @Inject constructor(
}
}
public suspend fun exportKeys(id: Int): List<Key> {
public suspend fun exportKeys(scope: String, id: Int): List<Key> {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT g.archive_id, g.group_id, g.name_hash, n.name, (k.key).k0, (k.key).k1, (k.key).k2, (k.key).k3
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
JOIN keys k ON k.id = g.key_id
LEFT JOIN names n ON n.hash = g.name_hash AND n.name ~ '^l(?:[0-9]|[1-9][0-9])_(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$'
WHERE g.master_index_id = ?
WHERE s.name = ? AND g.master_index_id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
val keys = mutableListOf<Key>()

@ -116,9 +116,14 @@ public class CacheImporter @Inject constructor(
val indexes: List<ByteBuf?>
)
private data class Game(
val id: Int,
val scopeId: Int
)
public suspend fun import(
store: Store,
game: String,
gameName: String,
environment: String,
language: String,
buildMajor: Int?,
@ -131,12 +136,12 @@ public class CacheImporter @Inject constructor(
database.execute { connection ->
prepare(connection)
val gameId = getGameId(connection, game, environment, language)
val game = getGame(connection, gameName, environment, language)
if (store is DiskStore && store.legacy) {
importLegacy(connection, store, gameId, buildMajor, buildMinor, timestamp, name, description, url)
importLegacy(connection, store, game.id, buildMajor, buildMinor, timestamp, name, description, url)
} else {
importJs5(connection, store, gameId, buildMajor, buildMinor, timestamp, name, description, url)
importJs5(connection, store, game, buildMajor, buildMinor, timestamp, name, description, url)
}
}
}
@ -144,7 +149,7 @@ public class CacheImporter @Inject constructor(
private fun importJs5(
connection: Connection,
store: Store,
gameId: Int,
game: Game,
buildMajor: Int?,
buildMinor: Int?,
timestamp: Instant?,
@ -169,7 +174,7 @@ public class CacheImporter @Inject constructor(
connection,
SourceType.DISK,
masterIndexId,
gameId,
game.id,
buildMajor,
buildMinor,
timestamp,
@ -194,7 +199,7 @@ public class CacheImporter @Inject constructor(
}
for (index in indexGroups) {
addIndex(connection, sourceId, index)
addIndex(connection, game.scopeId, sourceId, index)
}
} finally {
indexGroups.forEach(Index::release)
@ -215,7 +220,7 @@ public class CacheImporter @Inject constructor(
groups += group
if (groups.size >= BATCH_SIZE) {
addGroups(connection, sourceId, groups)
addGroups(connection, game.scopeId, sourceId, groups)
groups.forEach(Group::release)
groups.clear()
@ -224,7 +229,7 @@ public class CacheImporter @Inject constructor(
}
if (groups.isNotEmpty()) {
addGroups(connection, sourceId, groups)
addGroups(connection, game.scopeId, sourceId, groups)
}
} finally {
groups.forEach(Group::release)
@ -234,7 +239,7 @@ public class CacheImporter @Inject constructor(
public suspend fun importMasterIndex(
buf: ByteBuf,
format: MasterIndexFormat,
game: String,
gameName: String,
environment: String,
language: String,
buildMajor: Int?,
@ -254,14 +259,14 @@ public class CacheImporter @Inject constructor(
database.execute { connection ->
prepare(connection)
val gameId = getGameId(connection, game, environment, language)
val game = getGame(connection, gameName, environment, language)
val masterIndexId = addMasterIndex(connection, masterIndex)
addSource(
connection,
SourceType.DISK,
masterIndexId,
gameId,
game.id,
buildMajor,
buildMinor,
timestamp,
@ -363,6 +368,7 @@ public class CacheImporter @Inject constructor(
}
public suspend fun importIndexAndGetMissingGroups(
scopeId: Int,
sourceId: Int,
archive: Int,
index: Js5Index,
@ -372,7 +378,7 @@ public class CacheImporter @Inject constructor(
): List<Int> {
return database.execute { connection ->
prepare(connection)
val id = addIndex(connection, sourceId, Index(archive, index, buf, uncompressed))
val id = addIndex(connection, scopeId, sourceId, Index(archive, index, buf, uncompressed))
/*
* In order to defend against (crc32, version) collisions, we only
@ -415,14 +421,14 @@ public class CacheImporter @Inject constructor(
}
}
public suspend fun importGroups(sourceId: Int, groups: List<Group>) {
public suspend fun importGroups(scopeId: Int, sourceId: Int, groups: List<Group>) {
if (groups.isEmpty()) {
return
}
database.execute { connection ->
prepare(connection)
addGroups(connection, sourceId, groups)
addGroups(connection, scopeId, sourceId, groups)
}
}
@ -627,22 +633,23 @@ public class CacheImporter @Inject constructor(
}
}
private fun addGroups(connection: Connection, sourceId: Int, groups: List<Group>): List<Long> {
private fun addGroups(connection: Connection, scopeId: Int, sourceId: Int, groups: List<Group>): List<Long> {
val containerIds = addContainers(connection, groups)
connection.prepareStatement(
"""
INSERT INTO groups (archive_id, group_id, version, version_truncated, container_id)
VALUES (?, ?, ?, ?, ?)
INSERT INTO groups (scope_id, archive_id, group_id, version, version_truncated, container_id)
VALUES (?, ?, ?, ?, ?, ?)
ON CONFLICT DO NOTHING
""".trimIndent()
).use { stmt ->
for ((i, group) in groups.withIndex()) {
stmt.setInt(1, group.archive)
stmt.setInt(2, group.group)
stmt.setInt(3, group.version)
stmt.setBoolean(4, group.versionTruncated)
stmt.setLong(5, containerIds[i])
stmt.setInt(1, scopeId)
stmt.setInt(2, group.archive)
stmt.setInt(3, group.group)
stmt.setInt(4, group.version)
stmt.setBoolean(5, group.versionTruncated)
stmt.setLong(6, containerIds[i])
stmt.addBatch()
}
@ -672,8 +679,8 @@ public class CacheImporter @Inject constructor(
return containerIds
}
private fun addGroup(connection: Connection, sourceId: Int, group: Group): Long {
return addGroups(connection, sourceId, listOf(group)).single()
private fun addGroup(connection: Connection, scopeId: Int, sourceId: Int, group: Group): Long {
return addGroups(connection, scopeId, sourceId, listOf(group)).single()
}
private fun readIndex(store: Store, archive: Int): Index {
@ -684,8 +691,8 @@ public class CacheImporter @Inject constructor(
}
}
private fun addIndex(connection: Connection, sourceId: Int, index: Index): Long {
val containerId = addGroup(connection, sourceId, index)
private fun addIndex(connection: Connection, scopeId: Int, sourceId: Int, index: Index): Long {
val containerId = addGroup(connection, scopeId, sourceId, index)
val savepoint = connection.setSavepoint()
connection.prepareStatement(
@ -964,10 +971,10 @@ public class CacheImporter @Inject constructor(
return ids
}
private fun getGameId(connection: Connection, name: String, environment: String, language: String): Int {
private fun getGame(connection: Connection, name: String, environment: String, language: String): Game {
connection.prepareStatement(
"""
SELECT v.id
SELECT v.id, g.scope_id
FROM game_variants v
JOIN games g ON g.id = v.game_id
JOIN environments e ON e.id = v.environment_id
@ -984,7 +991,10 @@ public class CacheImporter @Inject constructor(
throw Exception("Game not found")
}
return rows.getInt(1)
val id = rows.getInt(1)
val scopeId = rows.getInt(2)
return Game(id, scopeId)
}
}
}

@ -2,6 +2,8 @@ package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
@ -11,6 +13,7 @@ import org.openrs2.cache.DiskStore
import org.openrs2.inject.CloseableInjector
public class ExportCommand : CliktCommand(name = "export") {
private val scope by option().default("runescape")
private val id by argument().int()
private val output by argument().path(
mustExist = true,
@ -23,7 +26,7 @@ public class ExportCommand : CliktCommand(name = "export") {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val exporter = injector.getInstance(CacheExporter::class.java)
exporter.export(id) { legacy ->
exporter.export(scope, id) { legacy ->
DiskStore.create(output, legacy = legacy)
}
}

@ -25,6 +25,7 @@ import kotlin.coroutines.resumeWithException
@ChannelHandler.Sharable
public abstract class Js5ChannelHandler(
private val bootstrap: Bootstrap,
private val scopeId: Int,
private val gameId: Int,
private val hostname: String,
private val port: Int,
@ -236,7 +237,7 @@ public abstract class Js5ChannelHandler(
if (groups.size >= CacheImporter.BATCH_SIZE || complete) {
runBlocking {
importer.importGroups(sourceId, groups)
importer.importGroups(scopeId, sourceId, groups)
}
releaseGroups()
@ -315,7 +316,15 @@ public abstract class Js5ChannelHandler(
}
val groups = runBlocking {
importer.importIndexAndGetMissingGroups(sourceId, archive, index, buf, uncompressed, lastMasterIndexId)
importer.importIndexAndGetMissingGroups(
scopeId,
sourceId,
archive,
index,
buf,
uncompressed,
lastMasterIndexId
)
}
for (group in groups) {
val groupEntry = index[group]!!

@ -24,6 +24,7 @@ import kotlin.coroutines.Continuation
public class NxtJs5ChannelHandler(
bootstrap: Bootstrap,
scopeId: Int,
gameId: Int,
hostname: String,
port: Int,
@ -38,6 +39,7 @@ public class NxtJs5ChannelHandler(
private val maxMinorBuildAttempts: Int = 5
) : Js5ChannelHandler(
bootstrap,
scopeId,
gameId,
hostname,
port,

@ -17,6 +17,7 @@ import kotlin.coroutines.Continuation
public class OsrsJs5ChannelHandler(
bootstrap: Bootstrap,
scopeId: Int,
gameId: Int,
hostname: String,
port: Int,
@ -26,6 +27,7 @@ public class OsrsJs5ChannelHandler(
importer: CacheImporter,
) : Js5ChannelHandler(
bootstrap,
scopeId,
gameId,
hostname,
port,

@ -6,5 +6,6 @@ public data class Game(
public val buildMajor: Int?,
public val buildMinor: Int?,
public val lastMasterIndexId: Int?,
public val languageId: Int
public val languageId: Int,
public val scopeId: Int
)

@ -12,7 +12,7 @@ public class GameDatabase @Inject constructor(
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT v.id, v.url, v.build_major, v.build_minor, v.last_master_index_id, v.language_id
SELECT v.id, v.url, v.build_major, v.build_minor, v.last_master_index_id, v.language_id, g.scope_id
FROM game_variants v
JOIN games g ON g.id = v.game_id
JOIN environments e ON e.id = v.environment_id
@ -48,8 +48,9 @@ public class GameDatabase @Inject constructor(
}
val languageId = rows.getInt(6)
val scopeId = rows.getInt(7)
return@execute Game(id, url, buildMajor, buildMinor, lastMasterIndexId, languageId)
return@execute Game(id, url, buildMajor, buildMinor, lastMasterIndexId, languageId, scopeId)
}
}
}

@ -31,10 +31,28 @@ public class MapRenderer @Inject constructor(
val fillColor = Color(outlineColor.red, outlineColor.green, outlineColor.blue, 128)
}
public suspend fun render(masterIndexId: Int): BufferedImage {
public suspend fun render(scope: String, masterIndexId: Int): BufferedImage {
return database.execute { connection ->
val scopeId = connection.prepareStatement(
"""
SELECT id
FROM scopes
WHERE name = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
throw IllegalArgumentException("Invalid scope")
}
rows.getInt(1)
}
}
// read config index
val configIndex = readIndex(connection, masterIndexId, Js5Archive.CONFIG)
val configIndex = readIndex(connection, scopeId, masterIndexId, Js5Archive.CONFIG)
?: throw IllegalArgumentException("Config index missing")
// read FluType group
@ -43,7 +61,7 @@ public class MapRenderer @Inject constructor(
val underlayGroup = configIndex[Js5ConfigGroup.FLUTYPE]
?: throw IllegalArgumentException("FluType group missing in index")
val underlayFiles = readGroup(connection, masterIndexId, Js5Archive.CONFIG, underlayGroup)
val underlayFiles = readGroup(connection, scopeId, masterIndexId, Js5Archive.CONFIG, underlayGroup)
?: throw IllegalArgumentException("FluType group missing")
try {
for ((id, file) in underlayFiles) {
@ -59,7 +77,7 @@ public class MapRenderer @Inject constructor(
val overlayGroup = configIndex[Js5ConfigGroup.FLOTYPE]
?: throw IllegalArgumentException("FloType group missing in index")
val overlayFiles = readGroup(connection, masterIndexId, Js5Archive.CONFIG, overlayGroup)
val overlayFiles = readGroup(connection, scopeId, masterIndexId, Js5Archive.CONFIG, overlayGroup)
?: throw IllegalArgumentException("FloType group missing")
try {
for ((id, file) in overlayFiles) {
@ -71,13 +89,13 @@ public class MapRenderer @Inject constructor(
// read textures
val textures = mutableMapOf<Int, Int>()
val materialsIndex = readIndex(connection, masterIndexId, Js5Archive.MATERIALS)
val materialsIndex = readIndex(connection, scopeId, masterIndexId, Js5Archive.MATERIALS)
if (materialsIndex != null) {
val materialsGroup = materialsIndex[0]
?: throw IllegalArgumentException("Materials group missing in index")
val materialsFiles = readGroup(connection, masterIndexId, Js5Archive.MATERIALS, materialsGroup)
val materialsFiles = readGroup(connection, scopeId, masterIndexId, Js5Archive.MATERIALS, materialsGroup)
?: throw IllegalArgumentException("Materials group missing")
try {
val metadata = materialsFiles[0]
@ -123,13 +141,13 @@ public class MapRenderer @Inject constructor(
materialsFiles.values.forEach(ByteBuf::release)
}
} else {
val textureIndex = readIndex(connection, masterIndexId, Js5Archive.TEXTURES)
val textureIndex = readIndex(connection, scopeId, masterIndexId, Js5Archive.TEXTURES)
?: throw IllegalArgumentException("Textures index missing")
val textureGroup = textureIndex[0]
?: throw IllegalArgumentException("Textures group missing from index")
val textureFiles = readGroup(connection, masterIndexId, Js5Archive.TEXTURES, textureGroup)
val textureFiles = readGroup(connection, scopeId, masterIndexId, Js5Archive.TEXTURES, textureGroup)
?: throw IllegalArgumentException("Textures group missing")
try {
for ((id, file) in textureFiles) {
@ -155,11 +173,12 @@ public class MapRenderer @Inject constructor(
SELECT n.name, g.encrypted, g.empty_loc, g.key_id
FROM resolved_groups g
JOIN names n ON n.hash = g.name_hash
WHERE g.master_index_id = ? AND g.archive_id = ${Js5Archive.MAPS} AND
WHERE g.scope_id = ? AND g.master_index_id = ? AND g.archive_id = ${Js5Archive.MAPS} AND
n.name ~ '^[lm](?:[0-9]|[1-9][0-9])_(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$'
""".trimIndent()
).use { stmt ->
stmt.setInt(1, masterIndexId)
stmt.setInt(1, scopeId)
stmt.setInt(2, masterIndexId)
stmt.executeQuery().use { rows ->
while (rows.next()) {
@ -207,11 +226,12 @@ public class MapRenderer @Inject constructor(
SELECT n.name, g.data
FROM resolved_groups g
JOIN names n ON n.hash = g.name_hash
WHERE g.master_index_id = ? AND g.archive_id = ${Js5Archive.MAPS} AND
WHERE g.scope_id = ? AND g.master_index_id = ? AND g.archive_id = ${Js5Archive.MAPS} AND
n.name ~ '^m(?:[0-9]|[1-9][0-9])_(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$'
""".trimIndent()
).use { stmt ->
stmt.setInt(1, masterIndexId)
stmt.setInt(1, scopeId)
stmt.setInt(2, masterIndexId)
stmt.executeQuery().use { rows ->
while (rows.next()) {
@ -246,16 +266,17 @@ public class MapRenderer @Inject constructor(
}
}
private fun readIndex(connection: Connection, masterIndexId: Int, archiveId: Int): Js5Index? {
private fun readIndex(connection: Connection, scopeId: Int, masterIndexId: Int, archiveId: Int): Js5Index? {
connection.prepareStatement(
"""
SELECT data
FROM resolved_indexes
WHERE master_index_id = ? AND archive_id = ?
WHERE scope_id = ? AND master_index_id = ? AND archive_id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, masterIndexId)
stmt.setInt(2, archiveId)
stmt.setInt(1, scopeId)
stmt.setInt(2, masterIndexId)
stmt.setInt(3, archiveId)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
@ -275,6 +296,7 @@ public class MapRenderer @Inject constructor(
private fun readGroup(
connection: Connection,
scopeId: Int,
masterIndexId: Int,
archiveId: Int,
group: Js5Index.Group<*>
@ -283,12 +305,13 @@ public class MapRenderer @Inject constructor(
"""
SELECT data
FROM resolved_groups
WHERE master_index_id = ? AND archive_id = ? AND group_id = ?
WHERE scope_id = ? AND master_index_id = ? AND archive_id = ? AND group_id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, masterIndexId)
stmt.setInt(2, archiveId)
stmt.setInt(3, group.id)
stmt.setInt(1, scopeId)
stmt.setInt(2, masterIndexId)
stmt.setInt(3, archiveId)
stmt.setInt(4, group.id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {

@ -46,29 +46,38 @@ public class CachesController @Inject constructor(
}
public suspend fun show(call: ApplicationCall) {
val scope = call.parameters["scope"]!!
val id = call.parameters["id"]?.toIntOrNull()
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return
}
val cache = exporter.get(id)
val cache = exporter.get(scope, id)
if (cache == null) {
call.respond(HttpStatusCode.NotFound)
return
}
call.respond(ThymeleafContent("caches/show.html", mapOf("cache" to cache)))
call.respond(
ThymeleafContent(
"caches/show.html", mapOf(
"cache" to cache,
"scope" to scope,
)
)
)
}
public suspend fun exportDisk(call: ApplicationCall) {
val scope = call.parameters["scope"]!!
val id = call.parameters["id"]?.toIntOrNull()
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return
}
val name = exporter.getFileName(id)
val name = exporter.getFileName(scope, id)
if (name == null) {
call.respond(HttpStatusCode.NotFound)
return
@ -82,20 +91,21 @@ public class CachesController @Inject constructor(
)
call.respondOutputStream(contentType = ContentType.Application.Zip) {
exporter.export(id) { legacy ->
exporter.export(scope, id) { legacy ->
DiskStoreZipWriter(ZipOutputStream(this), alloc = alloc, legacy = legacy)
}
}
}
public suspend fun exportFlatFile(call: ApplicationCall) {
val scope = call.parameters["scope"]!!
val id = call.parameters["id"]?.toIntOrNull()
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return
}
val name = exporter.getFileName(id)
val name = exporter.getFileName(scope, id)
if (name == null) {
call.respond(HttpStatusCode.NotFound)
return
@ -109,20 +119,21 @@ public class CachesController @Inject constructor(
)
call.respondOutputStream(contentType = ContentType.Application.GZip) {
exporter.export(id) {
exporter.export(scope, id) {
FlatFileStoreTarWriter(TarArchiveOutputStream(GzipLevelOutputStream(this, Deflater.BEST_COMPRESSION)))
}
}
}
public suspend fun exportKeysJson(call: ApplicationCall) {
val scope = call.parameters["scope"]!!
val id = call.parameters["id"]?.toIntOrNull()
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return
}
val name = exporter.getFileName(id)
val name = exporter.getFileName(scope, id)
if (name == null) {
call.respond(HttpStatusCode.NotFound)
return
@ -135,17 +146,18 @@ public class CachesController @Inject constructor(
.toString()
)
call.respond(exporter.exportKeys(id))
call.respond(exporter.exportKeys(scope, id))
}
public suspend fun exportKeysZip(call: ApplicationCall) {
val scope = call.parameters["scope"]!!
val id = call.parameters["id"]?.toIntOrNull()
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return
}
val name = exporter.getFileName(id)
val name = exporter.getFileName(scope, id)
if (name == null) {
call.respond(HttpStatusCode.NotFound)
return
@ -165,7 +177,7 @@ public class CachesController @Inject constructor(
val timestamp = FileTime.from(Instant.EPOCH)
for (key in exporter.exportKeys(id)) {
for (key in exporter.exportKeys(scope, id)) {
if (key.mapSquare == null) {
continue
}
@ -197,13 +209,14 @@ public class CachesController @Inject constructor(
}
public suspend fun renderMap(call: ApplicationCall) {
val scope = call.parameters["scope"]!!
val id = call.parameters["id"]?.toIntOrNull()
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return
}
val name = exporter.getFileName(id)
val name = exporter.getFileName(scope, id)
if (name == null) {
call.respond(HttpStatusCode.NotFound)
return
@ -222,7 +235,7 @@ public class CachesController @Inject constructor(
* performed in parallel to prevent OOMs.
*/
renderSemaphore.withPermit {
val image = renderer.render(id)
val image = renderer.render(scope, id)
call.respondOutputStream(contentType = ContentType.Image.PNG) {
ImageIO.write(image, "PNG", this)

@ -1,12 +1,12 @@
package org.openrs2.archive.web
import com.fasterxml.jackson.databind.ObjectMapper
import io.ktor.application.ApplicationCall
import io.ktor.application.call
import io.ktor.application.install
import io.ktor.features.ContentNegotiation
import io.ktor.features.XForwardedHeaderSupport
import io.ktor.http.ContentType
import io.ktor.http.HttpStatusCode
import io.ktor.http.content.resources
import io.ktor.http.content.static
import io.ktor.jackson.JacksonConverter
@ -56,40 +56,43 @@ public class WebServer @Inject constructor(
get("/") { call.respond(ThymeleafContent("index.html", emptyMap())) }
get("/caches") { cachesController.index(call) }
get("/caches.json") { cachesController.indexJson(call) }
get("/caches/{id}") { cachesController.show(call) }
get("/caches/{id}.zip") {
val id = call.parameters["id"]
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return@get
}
call.respondRedirect(permanent = true) {
path("caches", id, "disk.zip")
}
}
get("/caches/{id}.json") {
val id = call.parameters["id"]
if (id == null) {
call.respond(HttpStatusCode.NotFound)
return@get
}
call.respondRedirect(permanent = true) {
path("caches", id, "keys.json")
}
}
get("/caches/{id}/disk.zip") { cachesController.exportDisk(call) }
get("/caches/{id}/flat-file.tar.gz") { cachesController.exportFlatFile(call) }
get("/caches/{id}/keys.json") { cachesController.exportKeysJson(call) }
get("/caches/{id}/keys.zip") { cachesController.exportKeysZip(call) }
get("/caches/{id}/map.png") { cachesController.renderMap(call) }
get("/caches/{scope}/{id}") { cachesController.show(call) }
get("/caches/{scope}/{id}/disk.zip") { cachesController.exportDisk(call) }
get("/caches/{scope}/{id}/flat-file.tar.gz") { cachesController.exportFlatFile(call) }
get("/caches/{scope}/{id}/keys.json") { cachesController.exportKeysJson(call) }
get("/caches/{scope}/{id}/keys.zip") { cachesController.exportKeysZip(call) }
get("/caches/{scope}/{id}/map.png") { cachesController.renderMap(call) }
get("/keys") { keysController.index(call) }
post("/keys") { keysController.import(call) }
get("/keys/all.json") { keysController.exportAll(call) }
get("/keys/valid.json") { keysController.exportValid(call) }
static("/static") { resources("/org/openrs2/archive/static") }
// compatibility redirects
get("/caches/{id}") { redirect(call, permanent = true, "/caches/runescape/{id}") }
get("/caches/{id}.json") { redirect(call, permanent = true, "/caches/runescape/{id}/keys.json") }
get("/caches/{id}.zip") { redirect(call, permanent = true, "/caches/runescape/{id}/disk.zip") }
get("/caches/{id}/disk.zip") { redirect(call, permanent = true, "/caches/runescape/{id}/disk.zip") }
get("/caches/{id}/flat-file.tar.gz") {
redirect(call, permanent = true, "/caches/runescape/{id}/flat-file.tar.gz")
}
get("/caches/{id}/keys.json") { redirect(call, permanent = true, "/caches/runescape/{id}/keys.json") }
get("/caches/{id}/keys.zip") { redirect(call, permanent = true, "/caches/runescape/{id}/keys.zip") }
get("/caches/{id}/map.png") { redirect(call, permanent = true, "/caches/runescape/{id}/map.png") }
}
}.start(wait = true)
}
private suspend fun redirect(call: ApplicationCall, permanent: Boolean, path: String) {
val destination = path.replace(PARAMETER) { match ->
val (name) = match.destructured
call.parameters[name] ?: throw IllegalArgumentException()
}
call.respondRedirect(destination, permanent)
}
private companion object {
private val PARAMETER = Regex("\\{([^}]*)}")
}
}

@ -0,0 +1,176 @@
-- @formatter:off

-- Scopes partition the archive into independent namespaces, so caches from
-- unrelated games (e.g. FunOrb) can be imported without risk of collisions
-- with the main set of RuneScape caches.
CREATE TABLE scopes (
    id SERIAL PRIMARY KEY NOT NULL,
    name TEXT UNIQUE NOT NULL
);

-- All pre-existing data belongs to the default 'runescape' scope (id 1).
INSERT INTO scopes (name) VALUES ('runescape');

-- Attach each game to a scope. The temporary DEFAULT 1 backfills existing
-- rows; it is dropped immediately below so future inserts must specify a
-- scope_id explicitly.
ALTER TABLE games
    ADD COLUMN scope_id INTEGER DEFAULT 1 NOT NULL REFERENCES scopes (id);

ALTER TABLE games
    ALTER COLUMN scope_id DROP DEFAULT;
-- XXX(gpe): I don't think we can easily replace this as the source_groups
-- table doesn't contain a scope_id directly - only indirectly via the sources
-- and games tables.
ALTER TABLE source_groups
    DROP CONSTRAINT source_groups_archive_id_group_id_version_version_truncate_fkey;

-- Scope the groups table too, widening the primary key so the same
-- (archive_id, group_id, version, container) coordinates may exist
-- independently in different scopes. As with games, the temporary DEFAULT 1
-- backfills existing rows and is then removed.
ALTER TABLE groups
    ADD COLUMN scope_id INTEGER DEFAULT 1 NOT NULL REFERENCES scopes (id),
    DROP CONSTRAINT groups_pkey,
    ADD PRIMARY KEY (scope_id, archive_id, group_id, version, version_truncated, container_id);

ALTER TABLE groups
    ALTER COLUMN scope_id DROP DEFAULT;
-- Scope-aware replacement for resolve_index (the old two-arg-less version is
-- dropped at the end of this migration). Returns at most one container
-- holding the index for _archive_id whose checksum and version match.
-- Index containers are stored as groups under archive 255, keyed by the
-- archive id; the extra join against indexes confirms the decoded index
-- version too. ORDER BY c.id picks the lowest container id on ties
-- (presumably the earliest imported copy — behaviour carried over from the
-- unscoped version).
CREATE FUNCTION resolve_index(_scope_id INTEGER, _archive_id uint1, _crc32 INTEGER, _version INTEGER) RETURNS SETOF containers AS $$
    SELECT c.*
    FROM groups g
    JOIN containers c ON c.id = g.container_id
    JOIN indexes i ON i.container_id = c.id
    WHERE g.scope_id = _scope_id AND g.archive_id = 255 AND g.group_id = _archive_id::INTEGER AND c.crc32 = _crc32 AND
        g.version = _version AND NOT g.version_truncated AND i.version = _version
    ORDER BY c.id ASC
    LIMIT 1;
$$ LANGUAGE SQL STABLE PARALLEL SAFE ROWS 1;
-- Scope-aware replacement for resolve_group. Returns at most one container
-- for the given group whose checksum matches and whose version matches
-- either exactly (version_truncated = FALSE) or truncated to 16 bits
-- (version_truncated = TRUE). ORDER BY version_truncated ASC prefers an
-- exact version match over a truncated one; c.id ASC then breaks ties by the
-- lowest container id, as in resolve_index.
CREATE FUNCTION resolve_group(_scope_id INTEGER, _archive_id uint1, _group_id INTEGER, _crc32 INTEGER, _version INTEGER) RETURNS SETOF containers AS $$
    SELECT c.*
    FROM groups g
    JOIN containers c ON c.id = g.container_id
    WHERE g.scope_id = _scope_id AND g.archive_id = _archive_id AND g.group_id = _group_id AND c.crc32 = _crc32 AND (
        (g.version = _version AND NOT g.version_truncated) OR
        (g.version = _version & 65535 AND g.version_truncated)
    )
    ORDER BY g.version_truncated ASC, c.id ASC
    LIMIT 1;
$$ LANGUAGE SQL STABLE PARALLEL SAFE ROWS 1;
-- Recreate the resolution views on top of the new scope-aware functions.
-- resolved_groups depends on resolved_indexes, so it must be dropped first.
DROP VIEW resolved_groups;
DROP VIEW resolved_indexes;

-- NOTE(review): master_indexes is not itself scoped, so the CROSS JOIN
-- attempts to resolve every master index in every scope; rows only survive
-- for scopes where resolve_index finds a matching container.
CREATE VIEW resolved_indexes AS
SELECT s.id AS scope_id, m.id AS master_index_id, a.archive_id, c.data, c.id AS container_id
FROM scopes s
CROSS JOIN master_indexes m
JOIN master_index_archives a ON a.master_index_id = m.id
JOIN resolve_index(s.id, a.archive_id, a.crc32, a.version) c ON TRUE;

-- A resolved cache: one synthetic row per index (archive 255, with NULL
-- name_hash/version/key_id) followed by every group each index references.
CREATE VIEW resolved_groups (scope_id, master_index_id, archive_id, group_id, name_hash, version, data, encrypted, empty_loc, key_id) AS
WITH i AS NOT MATERIALIZED (
    SELECT scope_id, master_index_id, archive_id, data, container_id
    FROM resolved_indexes
)
SELECT i.scope_id, i.master_index_id, 255::uint1, i.archive_id::INTEGER, NULL, NULL, i.data, FALSE, FALSE, NULL
FROM i
UNION ALL
SELECT i.scope_id, i.master_index_id, i.archive_id, ig.group_id, ig.name_hash, ig.version, c.data, c.encrypted, c.empty_loc, c.key_id
FROM i
JOIN index_groups ig ON ig.container_id = i.container_id
JOIN resolve_group(i.scope_id, i.archive_id, ig.group_id, ig.crc32, ig.version) c ON TRUE;
-- Recreate colliding_groups with a scope_id column: groups now only collide
-- within the same scope. A collision is a (scope, archive, group, crc32,
-- truncated version) coordinate backed by more than one distinct container.
DROP VIEW colliding_groups;

CREATE VIEW colliding_groups (scope_id, archive_id, group_id, crc32, truncated_version, versions, containers) AS
SELECT
    g.scope_id,
    g.archive_id,
    g.group_id,
    c.crc32,
    g.version & 65535 AS truncated_version,
    array_agg(DISTINCT g.version ORDER BY g.version ASC),
    array_agg(DISTINCT c.id ORDER BY c.id ASC)
FROM groups g
JOIN containers c ON c.id = g.container_id
GROUP BY g.scope_id, g.archive_id, g.group_id, c.crc32, truncated_version
HAVING COUNT(DISTINCT c.id) > 1;
-- The stats views form a dependency chain (cache_stats -> master_index_stats
-- -> index_stats), so drop them top-down before rebuilding scope-aware
-- versions bottom-up.
DROP VIEW cache_stats;
DROP MATERIALIZED VIEW master_index_stats;
DROP MATERIALIZED VIEW index_stats;

-- Per-(scope, archive, index container) statistics: how many of the index's
-- groups resolve to a container in this scope, how many encrypted groups
-- have a usable key (or are known-empty loc groups), and the resolved data's
-- total size/block count.
-- NOTE(review): the `+ 2` presumably accounts for the 2-byte version trailer
-- appended to each group on disk — confirm against the JS5 store format.
CREATE MATERIALIZED VIEW index_stats (
    scope_id,
    archive_id,
    container_id,
    valid_groups,
    groups,
    valid_keys,
    keys,
    size,
    blocks
) AS
SELECT
    s.id AS scope_id,
    g.group_id AS archive_id,
    i.container_id,
    COUNT(*) FILTER (WHERE c.id IS NOT NULL) AS valid_groups,
    COUNT(*) AS groups,
    COUNT(*) FILTER (WHERE c.encrypted AND (c.key_id IS NOT NULL OR c.empty_loc)) AS valid_keys,
    COUNT(*) FILTER (WHERE c.encrypted) AS keys,
    SUM(length(c.data) + 2) FILTER (WHERE c.id IS NOT NULL) AS size,
    SUM(group_blocks(ig.group_id, length(c.data) + 2)) FILTER (WHERE c.id IS NOT NULL) AS blocks
FROM scopes s
CROSS JOIN indexes i
JOIN groups g ON g.container_id = i.container_id AND g.archive_id = 255 AND NOT g.version_truncated AND
    g.version = i.version
JOIN index_groups ig ON ig.container_id = i.container_id
LEFT JOIN resolve_group(s.id, g.group_id::uint1, ig.group_id, ig.crc32, ig.version) c ON TRUE
GROUP BY s.id, g.group_id, i.container_id;

-- Unique index required so the materialized view can be refreshed
-- CONCURRENTLY and joined efficiently from master_index_stats.
CREATE UNIQUE INDEX ON index_stats (scope_id, archive_id, container_id);
-- Per-(scope, master index) statistics, rolling up index_stats across all of
-- a master index's archives. An archive entry with version = 0 and crc32 = 0
-- counts as valid even when no container resolves (it denotes an empty/absent
-- archive slot in the master index).
CREATE MATERIALIZED VIEW master_index_stats (
    scope_id,
    master_index_id,
    valid_indexes,
    indexes,
    valid_groups,
    groups,
    valid_keys,
    keys,
    size,
    blocks
) AS
SELECT
    sc.id,
    m.id,
    COUNT(*) FILTER (WHERE c.id IS NOT NULL OR (a.version = 0 AND a.crc32 = 0)) AS valid_indexes,
    COUNT(*) FILTER (WHERE a.master_index_id IS NOT NULL) AS indexes,
    SUM(COALESCE(s.valid_groups, 0)) AS valid_groups,
    SUM(COALESCE(s.groups, 0)) AS groups,
    SUM(COALESCE(s.valid_keys, 0)) AS valid_keys,
    SUM(COALESCE(s.keys, 0)) AS keys,
    -- Group totals from index_stats plus the size/blocks of the index
    -- containers themselves.
    SUM(COALESCE(s.size, 0)) + SUM(COALESCE(length(c.data), 0)) AS size,
    SUM(COALESCE(s.blocks, 0)) + SUM(COALESCE(group_blocks(a.archive_id, length(c.data)), 0)) AS blocks
FROM scopes sc
CROSS JOIN master_indexes m
LEFT JOIN master_index_archives a ON a.master_index_id = m.id
LEFT JOIN resolve_index(sc.id, a.archive_id, a.crc32, a.version) c ON TRUE
LEFT JOIN index_stats s ON s.scope_id = sc.id AND s.archive_id = a.archive_id AND s.container_id = c.id
GROUP BY sc.id, m.id;

-- Unique index required so the materialized view can be refreshed
-- CONCURRENTLY and joined efficiently from cache_stats.
CREATE UNIQUE INDEX ON master_index_stats (scope_id, master_index_id);
-- Unified per-(scope, cache) statistics. A cache is backed either by a JS5
-- master index (master_index_stats) or by a legacy CRC table
-- (crc_table_stats); COALESCE picks whichever exists. CRC tables are not
-- scoped, so they are only surfaced in the default 'runescape' scope.
CREATE VIEW cache_stats AS
SELECT
    s.id AS scope_id,
    c.id AS cache_id,
    COALESCE(ms.valid_indexes, cs.valid_archives) AS valid_indexes,
    COALESCE(ms.indexes, cs.archives) AS indexes,
    COALESCE(ms.valid_groups, cs.valid_files) AS valid_groups,
    COALESCE(ms.groups, cs.files) AS groups,
    COALESCE(ms.valid_keys, 0) AS valid_keys,
    COALESCE(ms.keys, 0) AS keys,
    COALESCE(ms.size, cs.size) AS size,
    COALESCE(ms.blocks, cs.blocks) AS blocks
FROM scopes s
CROSS JOIN caches c
LEFT JOIN master_index_stats ms ON ms.scope_id = s.id AND ms.master_index_id = c.id
LEFT JOIN crc_table_stats cs ON s.name = 'runescape' AND cs.crc_table_id = c.id;

-- The old unscoped resolution functions are no longer referenced by any view,
-- so they can be dropped now.
DROP FUNCTION resolve_group(_archive_id uint1, _group_id INTEGER, _crc32 INTEGER, _version INTEGER);
DROP FUNCTION resolve_index(_archive_id uint1, _crc32 INTEGER, _version INTEGER);

@ -84,25 +84,25 @@
</button>
<ul class="dropdown-menu">
<li th:if="${cache.stats != null and cache.stats.diskStoreValid}"><a
th:href="${'/caches/' + cache.id + '/disk.zip'}"
th:href="${'/caches/' + cache.scope + '/' + cache.id + '/disk.zip'}"
class="dropdown-item">Cache (.dat2/.idx)</a></li>
<li><a th:href="${'/caches/' + cache.id + '/flat-file.tar.gz'}"
<li><a th:href="${'/caches/' + cache.scope + '/' + cache.id + '/flat-file.tar.gz'}"
class="dropdown-item">Cache (Flat file)</a></li>
<li>
<hr class="dropdown-divider" />
</li>
<li><a th:href="${'/caches/' + cache.id + '/keys.json'}"
<li><a th:href="${'/caches/' + cache.scope + '/' + cache.id + '/keys.json'}"
class="dropdown-item">Keys (JSON)</a></li>
<li><a th:href="${'/caches/' + cache.id + '/keys.zip'}"
<li><a th:href="${'/caches/' + cache.scope + '/' + cache.id + '/keys.zip'}"
class="dropdown-item">Keys (Text)</a></li>
<li>
<hr class="dropdown-divider" />
</li>
<li><a th:href="${'/caches/' + cache.id + '/map.png'}"
<li><a th:href="${'/caches/' + cache.scope + '/' + cache.id + '/map.png'}"
class="dropdown-item">Map</a></li>
</ul>
</div>
<a th:href="${'/caches/' + cache.id}"
<a th:href="${'/caches/' + cache.scope + '/' + cache.id}"
class="btn btn-secondary btn-sm">More</a>
</div>
</td>

@ -52,19 +52,19 @@
<div class="btn-toolbar">
<div class="btn-group me-2">
<a th:if="${cache.stats != null and cache.stats.diskStoreValid}"
th:href="${'/caches/' + cache.id + '/disk.zip'}"
th:href="${'/caches/' + scope + '/' + cache.id + '/disk.zip'}"
class="btn btn-primary btn-sm">Cache (.dat2/.idx)</a>
<a th:href="${'/caches/' + cache.id + '/flat-file.tar.gz'}"
<a th:href="${'/caches/' + scope + '/' + cache.id + '/flat-file.tar.gz'}"
class="btn btn-primary btn-sm">Cache (Flat file)</a>
</div>
<div class="btn-group me-2">
<a th:href="${'/caches/' + cache.id + '/keys.json'}"
<a th:href="${'/caches/' + scope + '/' + cache.id + '/keys.json'}"
class="btn btn-primary btn-sm">Keys (JSON)</a>
<a th:href="${'/caches/' + cache.id + '/keys.zip'}"
<a th:href="${'/caches/' + scope + '/' + cache.id + '/keys.zip'}"
class="btn btn-primary btn-sm">Keys (Text)</a>
</div>
<div class="btn-group">
<a th:href="${'/caches/' + cache.id + '/map.png'}"
<a th:href="${'/caches/' + scope + '/' + cache.id + '/map.png'}"
class="btn btn-primary btn-sm">Map</a>
</div>
</div>

Loading…
Cancel
Save