Add group source tracking

There are a few collisions in the production archive. I suspect these
are due to poorly modified caches, and tracking the source(s) of each
group will make it easier to determine which cache is probably
problematic.

This change also has the benefit of removing a lot of the hacky source
name/description merging logic.

Signed-off-by: Graham <gpe@openrs2.org>
Graham 4 years ago
parent 4856f79152
commit b98d045cfe
  1. 102
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt
  2. 268
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
  3. 3
      archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt
  4. 3
      archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt
  5. 16
      archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
  6. 42
      archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql
  7. 2
      archive/src/main/resources/org/openrs2/archive/templates/caches/index.html
  8. 16
      archive/src/main/resources/org/openrs2/archive/templates/caches/show.html

@ -9,6 +9,7 @@ import org.openrs2.cache.Store
import org.openrs2.crypto.XteaKey
import org.openrs2.db.Database
import java.time.Instant
import java.util.Collections
import java.util.SortedSet
import javax.inject.Inject
import javax.inject.Singleton
@ -50,13 +51,16 @@ public class CacheExporter @Inject constructor(
public data class Cache(
val id: Int,
val game: String,
val games: SortedSet<String>,
val builds: SortedSet<Int>,
val timestamp: Instant?,
val name: String?,
val description: String?,
val names: SortedSet<String>,
val descriptions: List<String>,
val urls: SortedSet<String>,
val stats: Stats?
)
) {
val game: String = games.single()
}
public data class Key(
val archive: Int,
@ -72,15 +76,23 @@ public class CacheExporter @Inject constructor(
connection.prepareStatement(
"""
SELECT
m.id, g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
m.id,
g.name,
array_remove(array_agg(DISTINCT s.build ORDER BY s.build ASC), NULL),
MIN(s.timestamp),
array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL),
ms.valid_indexes,
ms.indexes,
ms.valid_groups,
ms.groups,
ms.valid_keys,
ms.keys
FROM master_indexes m
JOIN games g ON g.id = m.game_id
JOIN containers c ON c.id = m.container_id
LEFT JOIN master_index_builds b ON b.master_index_id = m.id
LEFT JOIN master_index_stats s ON s.master_index_id = m.id
GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
ORDER BY g.name ASC, MIN(b.build) ASC, m.timestamp ASC
LEFT JOIN sources s ON s.master_index_id = m.id
LEFT JOIN games g ON g.id = s.game_id
LEFT JOIN master_index_stats ms ON ms.master_index_id = m.id
GROUP BY m.id, g.name, ms.valid_indexes, ms.indexes, ms.valid_groups, ms.groups, ms.valid_keys, ms.keys
ORDER BY g.name ASC, MIN(s.build) ASC, MIN(s.timestamp) ASC
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
@ -91,7 +103,7 @@ public class CacheExporter @Inject constructor(
val game = rows.getString(2)
val builds = rows.getArray(3).array as Array<Int>
val timestamp = rows.getTimestamp(4)?.toInstant()
val name = rows.getString(5)
val names = rows.getArray(5).array as Array<String>
val validIndexes = rows.getLong(6)
val stats = if (!rows.wasNull()) {
@ -105,7 +117,16 @@ public class CacheExporter @Inject constructor(
null
}
caches += Cache(id, game, builds.toSortedSet(), timestamp, name, description = null, stats)
caches += Cache(
id,
sortedSetOf(game),
builds.toSortedSet(),
timestamp,
names.toSortedSet(),
emptyList(),
Collections.emptySortedSet(),
stats
)
}
caches
@ -119,15 +140,24 @@ public class CacheExporter @Inject constructor(
connection.prepareStatement(
"""
SELECT
g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
m.description, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
array_remove(array_agg(DISTINCT g.name ORDER BY g.name ASC), NULL),
array_remove(array_agg(DISTINCT s.build ORDER BY s.build ASC), NULL),
MIN(s.timestamp),
array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL),
array_remove(array_agg(s.description), NULL),
array_remove(array_agg(DISTINCT s.url ORDER BY s.url ASC), NULL),
ms.valid_indexes,
ms.indexes,
ms.valid_groups,
ms.groups,
ms.valid_keys,
ms.keys
FROM master_indexes m
JOIN games g ON g.id = m.game_id
JOIN containers c ON c.id = m.container_id
LEFT JOIN master_index_builds b ON b.master_index_id = m.id
LEFT JOIN master_index_stats s ON s.master_index_id = m.id
LEFT JOIN sources s ON s.master_index_id = m.id
LEFT JOIN games g ON g.id = s.game_id
LEFT JOIN master_index_stats ms ON ms.master_index_id = m.id
WHERE m.id = ?
GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
GROUP BY m.id, ms.valid_indexes, ms.indexes, ms.valid_groups, ms.groups, ms.valid_keys, ms.keys
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
@ -137,25 +167,35 @@ public class CacheExporter @Inject constructor(
return@execute null
}
val game = rows.getString(1)
val games = rows.getArray(1).array as Array<String>
val builds = rows.getArray(2).array as Array<Int>
val timestamp = rows.getTimestamp(3)?.toInstant()
val name = rows.getString(4)
val description = rows.getString(5)
val names = rows.getArray(4).array as Array<String>
val descriptions = rows.getArray(5).array as Array<String>
val urls = rows.getArray(6).array as Array<String>
val validIndexes = rows.getLong(6)
val validIndexes = rows.getLong(7)
val stats = if (!rows.wasNull()) {
val indexes = rows.getLong(7)
val validGroups = rows.getLong(8)
val groups = rows.getLong(9)
val validKeys = rows.getLong(10)
val keys = rows.getLong(11)
val indexes = rows.getLong(8)
val validGroups = rows.getLong(9)
val groups = rows.getLong(10)
val validKeys = rows.getLong(11)
val keys = rows.getLong(12)
Stats(validIndexes, indexes, validGroups, groups, validKeys, keys)
} else {
null
}
return@execute Cache(id, game, builds.toSortedSet(), timestamp, name, description, stats)
return@execute Cache(
id,
games.toSortedSet(),
builds.toSortedSet(),
timestamp,
names.toSortedSet(),
descriptions.toList(),
urls.toSortedSet(),
stats
)
}
}
}

@ -23,7 +23,6 @@ import java.sql.Connection
import java.sql.SQLException
import java.sql.Types
import java.time.Instant
import java.time.OffsetDateTime
import java.time.ZoneOffset
import javax.inject.Inject
import javax.inject.Singleton
@ -74,13 +73,25 @@ public class CacheImporter @Inject constructor(
public val versionTruncated: Boolean
) : Container(compressed, uncompressed)
// The mechanism used to obtain a cache. Persisted in the sources table's
// source_type column (the enum name is lower-cased on insert to match the
// database enum values 'disk' and 'js5remote').
private enum class SourceType {
DISK,
JS5REMOTE
}
// Result of importing a master index fetched over JS5: the master_indexes row
// id, the sources row id that subsequently imported indexes/groups should be
// attributed to, and the raw index containers already stored for this master
// index (entries are null where the index is not yet known).
public data class MasterIndexResult(
val masterIndexId: Int,
val sourceId: Int,
val indexes: List<ByteBuf?>
)
public suspend fun import(
store: Store,
game: String,
build: Int?,
timestamp: Instant?,
name: String?,
description: String?
description: String?,
url: String?
) {
database.execute { connection ->
prepare(connection)
@ -89,12 +100,25 @@ public class CacheImporter @Inject constructor(
// import master index
val masterIndex = createMasterIndex(store)
try {
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
val masterIndexId = try {
addMasterIndex(connection, masterIndex)
} finally {
masterIndex.release()
}
// create source
val sourceId = addSource(
connection,
SourceType.DISK,
masterIndexId,
gameId,
build,
timestamp,
name,
description,
url
)
// import indexes
val indexes = arrayOfNulls<Js5Index>(Js5Archive.ARCHIVESET)
val indexGroups = mutableListOf<Index>()
@ -106,7 +130,7 @@ public class CacheImporter @Inject constructor(
}
for (index in indexGroups) {
addIndex(connection, index)
addIndex(connection, sourceId, index)
}
} finally {
indexGroups.forEach(Index::release)
@ -127,7 +151,7 @@ public class CacheImporter @Inject constructor(
groups += group
if (groups.size >= BATCH_SIZE) {
addGroups(connection, groups)
addGroups(connection, sourceId, groups)
groups.forEach(Group::release)
groups.clear()
@ -136,7 +160,7 @@ public class CacheImporter @Inject constructor(
}
if (groups.isNotEmpty()) {
addGroups(connection, groups)
addGroups(connection, sourceId, groups)
}
} finally {
groups.forEach(Group::release)
@ -151,7 +175,8 @@ public class CacheImporter @Inject constructor(
build: Int?,
timestamp: Instant?,
name: String?,
description: String?
description: String?,
url: String?
) {
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
val masterIndex = MasterIndex(Js5MasterIndex.read(uncompressed.slice(), format), buf, uncompressed)
@ -160,7 +185,8 @@ public class CacheImporter @Inject constructor(
prepare(connection)
val gameId = getGameId(connection, game)
addMasterIndex(connection, masterIndex, gameId, build, timestamp, name, description, overwrite = false)
val masterIndexId = addMasterIndex(connection, masterIndex)
addSource(connection, SourceType.DISK, masterIndexId, gameId, build, timestamp, name, description, url)
}
}
}
@ -172,9 +198,8 @@ public class CacheImporter @Inject constructor(
gameId: Int,
build: Int,
lastId: Int?,
timestamp: Instant,
name: String,
): Pair<Int, List<ByteBuf?>> {
timestamp: Instant
): MasterIndexResult {
return database.execute { connection ->
prepare(connection)
@ -191,15 +216,21 @@ public class CacheImporter @Inject constructor(
stmt.execute()
}
val id = addMasterIndex(
val masterIndexId = addMasterIndex(
connection,
MasterIndex(masterIndex, buf, uncompressed),
MasterIndex(masterIndex, buf, uncompressed)
)
val sourceId = addSource(
connection,
SourceType.JS5REMOTE,
masterIndexId,
gameId,
build,
timestamp,
name,
name = "Original",
description = null,
overwrite = true
url = null
)
/*
@ -226,7 +257,7 @@ public class CacheImporter @Inject constructor(
""".trimIndent()
).use { stmt ->
stmt.setObject(1, lastId, Types.INTEGER)
stmt.setInt(2, id)
stmt.setInt(2, masterIndexId)
stmt.executeQuery().use { rows ->
val indexes = mutableListOf<ByteBuf?>()
@ -241,7 +272,7 @@ public class CacheImporter @Inject constructor(
}
indexes.filterNotNull().forEach(ByteBuf::retain)
return@execute Pair(id, indexes)
return@execute MasterIndexResult(masterIndexId, sourceId, indexes)
} finally {
indexes.filterNotNull().forEach(ByteBuf::release)
}
@ -251,6 +282,7 @@ public class CacheImporter @Inject constructor(
}
public suspend fun importIndexAndGetMissingGroups(
sourceId: Int,
archive: Int,
index: Js5Index,
buf: ByteBuf,
@ -259,7 +291,7 @@ public class CacheImporter @Inject constructor(
): List<Int> {
return database.execute { connection ->
prepare(connection)
val id = addIndex(connection, Index(archive, index, buf, uncompressed))
val id = addIndex(connection, sourceId, Index(archive, index, buf, uncompressed))
/*
* In order to defend against (crc32, version) collisions, we only
@ -304,14 +336,14 @@ public class CacheImporter @Inject constructor(
}
}
public suspend fun importGroups(groups: List<Group>) {
public suspend fun importGroups(sourceId: Int, groups: List<Group>) {
if (groups.isEmpty()) {
return
}
database.execute { connection ->
prepare(connection)
addGroups(connection, groups)
addGroups(connection, sourceId, groups)
}
}
@ -329,27 +361,15 @@ public class CacheImporter @Inject constructor(
private fun addMasterIndex(
connection: Connection,
masterIndex: MasterIndex,
gameId: Int,
build: Int?,
timestamp: Instant?,
name: String?,
description: String?,
overwrite: Boolean
masterIndex: MasterIndex
): Int {
val containerId = addContainer(connection, masterIndex)
var masterIndexId: Int? = null
var newTimestamp: Instant?
var newName: String?
var newDescription: String?
connection.prepareStatement(
"""
SELECT id, game_id, timestamp, name, description
SELECT id
FROM master_indexes
WHERE container_id = ? AND format = ?::master_index_format
FOR UPDATE
""".trimIndent()
).use { stmt ->
stmt.setLong(1, containerId)
@ -357,99 +377,22 @@ public class CacheImporter @Inject constructor(
stmt.executeQuery().use { rows ->
if (rows.next()) {
masterIndexId = rows.getInt(1)
}
if (masterIndexId != null) {
val oldGameId = rows.getInt(2)
val oldTimestamp: Instant? = rows.getTimestamp(3)?.toInstant()
val oldName: String? = rows.getString(4)
val oldDescription: String? = rows.getString(5)
check(oldGameId == gameId)
if (oldTimestamp != null && timestamp != null) {
newTimestamp = if (oldTimestamp.isBefore(timestamp)) {
oldTimestamp
} else {
timestamp
}
} else if (oldTimestamp != null) {
newTimestamp = oldTimestamp
} else {
newTimestamp = timestamp
}
if (overwrite) {
newName = name
} else if (oldName != null && name != null && oldName != name) {
newName = "$oldName/$name"
} else if (oldName != null) {
newName = oldName
} else {
newName = name
}
if (overwrite) {
newDescription = description
} else if (oldDescription != null && description != null && oldDescription != description) {
newDescription = "$oldDescription\n\n$description"
} else if (oldDescription != null) {
newDescription = oldDescription
} else {
newDescription = description
}
} else {
newTimestamp = timestamp
newName = name
newDescription = description
return rows.getInt(1)
}
}
}
if (masterIndexId != null) {
connection.prepareStatement(
"""
UPDATE master_indexes
SET timestamp = ?, name = ?, description = ?
WHERE id = ?
""".trimIndent()
).use { stmt ->
if (newTimestamp != null) {
val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
stmt.setObject(1, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
} else {
stmt.setNull(1, Types.TIMESTAMP_WITH_TIMEZONE)
}
stmt.setString(2, newName)
stmt.setString(3, newDescription)
stmt.setInt(4, masterIndexId!!)
val masterIndexId: Int
stmt.execute()
}
} else {
connection.prepareStatement(
"""
INSERT INTO master_indexes (container_id, format, game_id, timestamp, name, description)
VALUES (?, ?::master_index_format, ?, ?, ?, ?)
INSERT INTO master_indexes (container_id, format)
VALUES (?, ?::master_index_format)
RETURNING id
""".trimIndent()
).use { stmt ->
stmt.setLong(1, containerId)
stmt.setString(2, masterIndex.index.format.name.toLowerCase())
stmt.setInt(3, gameId)
if (newTimestamp != null) {
val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
stmt.setObject(4, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
} else {
stmt.setNull(4, Types.TIMESTAMP_WITH_TIMEZONE)
}
stmt.setString(5, newName)
stmt.setString(6, newDescription)
stmt.executeQuery().use { rows ->
check(rows.next())
@ -466,7 +409,7 @@ public class CacheImporter @Inject constructor(
""".trimIndent()
).use { stmt ->
for ((i, entry) in masterIndex.index.entries.withIndex()) {
stmt.setInt(1, masterIndexId!!)
stmt.setInt(1, masterIndexId)
stmt.setInt(2, i)
stmt.setInt(3, entry.checksum)
@ -495,23 +438,68 @@ public class CacheImporter @Inject constructor(
stmt.executeBatch()
}
return masterIndexId
}
if (build != null) {
private fun addSource(
connection: Connection,
type: SourceType,
masterIndexId: Int,
gameId: Int,
build: Int?,
timestamp: Instant?,
name: String?,
description: String?,
url: String?
): Int {
if (type == SourceType.JS5REMOTE && build != null) {
connection.prepareStatement(
"""
INSERT INTO master_index_builds (master_index_id, build)
VALUES (?, ?)
ON CONFLICT DO NOTHING
SELECT id
FROM sources
WHERE type = 'js5remote' AND master_index_id = ? AND game_id = ? AND build = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, masterIndexId!!)
stmt.setInt(2, build)
stmt.execute()
stmt.setInt(1, masterIndexId)
stmt.setInt(2, gameId)
stmt.setInt(3, build)
stmt.executeQuery().use { rows ->
if (rows.next()) {
return rows.getInt(1)
}
}
}
}
return masterIndexId!!
connection.prepareStatement(
"""
INSERT INTO sources (type, master_index_id, game_id, build, timestamp, name, description, url)
VALUES (?::source_type, ?, ?, ?, ?, ?, ?, ?)
RETURNING id
""".trimIndent()
).use { stmt ->
stmt.setString(1, type.toString().toLowerCase())
stmt.setInt(2, masterIndexId)
stmt.setInt(3, gameId)
stmt.setObject(4, build, Types.INTEGER)
if (timestamp != null) {
stmt.setObject(5, timestamp.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
} else {
stmt.setNull(5, Types.TIMESTAMP_WITH_TIMEZONE)
}
stmt.setString(6, name)
stmt.setString(7, description)
stmt.setString(8, url)
stmt.executeQuery().use { rows ->
check(rows.next())
return rows.getInt(1)
}
}
}
private fun readGroup(store: Store, archive: Int, index: Js5Index?, group: Int): Group? {
@ -547,7 +535,7 @@ public class CacheImporter @Inject constructor(
}
}
private fun addGroups(connection: Connection, groups: List<Group>): List<Long> {
private fun addGroups(connection: Connection, sourceId: Int, groups: List<Group>): List<Long> {
val containerIds = addContainers(connection, groups)
connection.prepareStatement(
@ -569,11 +557,31 @@ public class CacheImporter @Inject constructor(
stmt.executeBatch()
}
connection.prepareStatement(
"""
INSERT INTO source_groups (source_id, archive_id, group_id, version, version_truncated, container_id)
VALUES (?, ?, ?, ?, ?, ?)
ON CONFLICT DO NOTHING
""".trimIndent()
).use { stmt ->
for ((i, group) in groups.withIndex()) {
stmt.setInt(1, sourceId)
stmt.setInt(2, group.archive)
stmt.setInt(3, group.group)
stmt.setInt(4, group.version)
stmt.setBoolean(5, group.versionTruncated)
stmt.setLong(6, containerIds[i])
stmt.addBatch()
}
stmt.executeBatch()
}
return containerIds
}
private fun addGroup(connection: Connection, group: Group): Long {
return addGroups(connection, listOf(group)).single()
// Convenience wrapper around addGroups() for a single group; returns the
// group's container id.
private fun addGroup(connection: Connection, sourceId: Int, group: Group): Long {
return addGroups(connection, sourceId, listOf(group)).single()
}
private fun readIndex(store: Store, archive: Int): Index {
@ -584,8 +592,8 @@ public class CacheImporter @Inject constructor(
}
}
private fun addIndex(connection: Connection, index: Index): Long {
val containerId = addGroup(connection, index)
private fun addIndex(connection: Connection, sourceId: Int, index: Index): Long {
val containerId = addGroup(connection, sourceId, index)
val savepoint = connection.setSavepoint()
connection.prepareStatement(

@ -17,6 +17,7 @@ public class ImportCommand : CliktCommand(name = "import") {
private val timestamp by option().instant()
private val name by option()
private val description by option()
private val url by option()
private val game by argument()
private val input by argument().path(
@ -30,7 +31,7 @@ public class ImportCommand : CliktCommand(name = "import") {
val importer = injector.getInstance(CacheImporter::class.java)
Store.open(input).use { store ->
importer.import(store, game, build, timestamp, name, description)
importer.import(store, game, build, timestamp, name, description, url)
}
}
}

@ -21,6 +21,7 @@ public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index
private val timestamp by option().instant()
private val name by option()
private val description by option()
private val url by option()
private val game by argument()
private val format by argument().enum<MasterIndexFormat>()
@ -35,7 +36,7 @@ public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index
val importer = injector.getInstance(CacheImporter::class.java)
Unpooled.wrappedBuffer(Files.readAllBytes(input)).use { buf ->
importer.importMasterIndex(buf, format, game, build, timestamp, name, description)
importer.importMasterIndex(buf, format, game, build, timestamp, name, description, url)
}
}
}

@ -53,6 +53,7 @@ public class Js5ChannelHandler(
private val inFlightRequests = mutableSetOf<Js5Request.Group>()
private val pendingRequests = ArrayDeque<Js5Request.Group>()
private var masterIndexId: Int = 0
private var sourceId: Int = 0
private var masterIndex: Js5MasterIndex? = null
private lateinit var indexes: Array<Js5Index?>
private val groups = mutableListOf<CacheImporter.Group>()
@ -161,7 +162,7 @@ public class Js5ChannelHandler(
if (groups.size >= CacheImporter.BATCH_SIZE || complete) {
runBlocking {
importer.importGroups(groups)
importer.importGroups(sourceId, groups)
}
releaseGroups()
@ -182,7 +183,7 @@ public class Js5ChannelHandler(
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
masterIndex = Js5MasterIndex.read(uncompressed.slice(), masterIndexFormat)
val (id, rawIndexes) = runBlocking {
val (masterIndexId, sourceId, rawIndexes) = runBlocking {
importer.importMasterIndexAndGetIndexes(
masterIndex!!,
buf,
@ -190,10 +191,13 @@ public class Js5ChannelHandler(
gameId,
build,
lastMasterIndexId,
timestamp = Instant.now(),
name = "Original"
timestamp = Instant.now()
)
}
this.masterIndexId = masterIndexId
this.sourceId = sourceId
try {
indexes = arrayOfNulls(rawIndexes.size)
@ -207,8 +211,6 @@ public class Js5ChannelHandler(
} finally {
rawIndexes.filterNotNull().forEach(ByteBuf::release)
}
masterIndexId = id
}
}
@ -228,7 +230,7 @@ public class Js5ChannelHandler(
}
val groups = runBlocking {
importer.importIndexAndGetMissingGroups(archive, index, buf, uncompressed, lastMasterIndexId)
importer.importIndexAndGetMissingGroups(sourceId, archive, index, buf, uncompressed, lastMasterIndexId)
}
for (group in groups) {
request(archive, group)

@ -107,21 +107,11 @@ CREATE TABLE master_indexes (
id SERIAL PRIMARY KEY NOT NULL,
container_id BIGINT NOT NULL REFERENCES containers (id),
format master_index_format NOT NULL,
game_id INTEGER NOT NULL REFERENCES games (id),
timestamp TIMESTAMPTZ NULL,
name TEXT NULL,
description TEXT NULL,
UNIQUE (container_id, format)
);
ALTER TABLE games ADD COLUMN last_master_index_id INT NULL REFERENCES master_indexes (id);
CREATE TABLE master_index_builds (
master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
build INTEGER NOT NULL,
PRIMARY KEY (master_index_id, build)
);
CREATE TABLE master_index_archives (
master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
archive_id uint1 NOT NULL,
@ -133,6 +123,38 @@ CREATE TABLE master_index_archives (
PRIMARY KEY (master_index_id, archive_id)
);
-- How a cache was obtained: imported from an on-disk dump, or downloaded
-- directly from a JS5 server.
CREATE TYPE source_type AS ENUM (
'disk',
'js5remote'
);
-- Tracks where each master index came from. A master index may have many
-- sources; build/timestamp/name/description/url are optional metadata that
-- describe the individual source rather than the cache as a whole.
CREATE TABLE sources (
id SERIAL PRIMARY KEY NOT NULL,
type source_type NOT NULL,
master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
game_id INTEGER NOT NULL REFERENCES games (id),
build INTEGER NULL,
timestamp TIMESTAMPTZ NULL,
name TEXT NULL,
description TEXT NULL,
url TEXT NULL
);
CREATE INDEX ON sources (master_index_id);
-- Partial unique index: at most one js5remote source per
-- (master_index_id, game_id, build), which addSource() relies on to
-- deduplicate repeated JS5 downloads of the same build.
CREATE UNIQUE INDEX ON sources (master_index_id, game_id, build) WHERE type = 'js5remote';
-- Records which source(s) each group version was imported from, so that the
-- problematic cache can be identified when (crc32, version) collisions occur.
CREATE TABLE source_groups (
source_id INTEGER NOT NULL REFERENCES sources (id),
archive_id uint1 NOT NULL,
group_id INTEGER NOT NULL,
version INTEGER NOT NULL,
version_truncated BOOLEAN NOT NULL,
container_id BIGINT NOT NULL REFERENCES containers (id),
PRIMARY KEY (source_id, archive_id, group_id)
);
-- Supports looking up the sources that supplied a particular group version.
CREATE INDEX ON source_groups (archive_id, group_id, version, version_truncated, container_id);
CREATE TABLE names (
hash INTEGER NOT NULL,
name TEXT PRIMARY KEY NOT NULL

@ -38,7 +38,7 @@
<br />
<span th:text="${#temporals.format(cache.timestamp, 'HH:mm:ss')}"></span>
</td>
<td th:text="${cache.name}"></td>
<td th:text="${#strings.setJoin(cache.names, '/')}"></td>
<td th:classappend="${cache.stats}? (${cache.stats.allIndexesValid}? 'table-success' : 'table-danger')"
class="text-right">
<span

@ -14,7 +14,7 @@
<table class="table table-striped table-bordered table-hover">
<tr class="thead-dark">
<th>Game</th>
<td th:text="${cache.game}">runescape</td>
<td th:text="${#strings.setJoin(cache.games, ', ')}">runescape</td>
</tr>
<tr class="thead-dark">
<th>Build(s)</th>
@ -26,11 +26,21 @@
</tr>
<tr class="thead-dark">
<th>Name</th>
<td th:text="${cache.name}"></td>
<td th:text="${#strings.setJoin(cache.names, '/')}"></td>
</tr>
<tr class="thead-dark">
<th>Description</th>
<td th:text="${cache.description}"></td>
<td th:text="${#strings.listJoin(cache.descriptions, ' ')}"></td>
</tr>
<tr class="thead-dark">
    <th>URL(s)</th>
    <td>
        <ul>
            <li th:each="url : ${cache.urls}">
                <!-- th:text is required: a bare ${url} in the element body is
                     not evaluated by Thymeleaf and would render literally. -->
                <a th:href="${url}" th:text="${url}"></a>
            </li>
        </ul>
    </td>
</tr>
<tr class="thead-dark">
<th>Indexes</th>

Loading…
Cancel
Save