Add support for storing multiple build numbers per master index

Signed-off-by: Graham <gpe@openrs2.org>
Graham committed 4 years ago
parent 9a672153f9
commit 509d88b18f
  1. archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt (33 lines changed)
  2. archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt (163 lines changed)
  3. archive/src/main/resources/org/openrs2/archive/migrations/V1__init.sql (7 lines changed)
  4. archive/src/main/resources/org/openrs2/archive/templates/caches/index.html (9 lines changed)
  5. archive/src/main/resources/org/openrs2/archive/templates/caches/show.html (4 lines changed)
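
In summary, this commit drops the single nullable master_indexes.build column in favour of a master_index_builds junction table, so one master index can be recorded against every build number it was observed under. On the Kotlin side, CacheExporter.Cache gains builds: SortedSet<Int> in place of build: Int?, and the templates render the whole set. A minimal sketch of the new shape, trimmed to a few fields (formatBuilds is illustrative only and not part of the commit):

    import java.util.SortedSet

    // Sketch of the updated CacheExporter.Cache: a master index may now carry
    // several build numbers instead of at most one.
    data class Cache(
        val id: Int,
        val game: String,
        val builds: SortedSet<Int>, // previously `val build: Int?`
        val name: String?
    )

    // Illustrative only; the real templates format the set with Thymeleaf.
    fun formatBuilds(cache: Cache): String = cache.builds.joinToString(", ")

    // Usage: formatBuilds(Cache(1, "runescape", sortedSetOf(550, 552), null)) == "550, 552"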

@@ -9,6 +9,7 @@ import org.openrs2.cache.Store
import org.openrs2.crypto.XteaKey
import org.openrs2.db.Database
import java.time.Instant
import java.util.SortedSet
import javax.inject.Inject
import javax.inject.Singleton
@@ -50,7 +51,7 @@ public class CacheExporter @Inject constructor(
public data class Cache(
val id: Int,
val game: String,
val build: Int?,
val builds: SortedSet<Int>,
val timestamp: Instant?,
val name: String?,
val description: String?,
@@ -71,13 +72,15 @@ public class CacheExporter @Inject constructor(
connection.prepareStatement(
"""
SELECT
m.id, g.name, m.build, m.timestamp, m.name,
m.id, g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
FROM master_indexes m
JOIN games g ON g.id = m.game_id
JOIN containers c ON c.id = m.container_id
LEFT JOIN master_index_builds b ON b.master_index_id = m.id
LEFT JOIN master_index_stats s ON s.master_index_id = m.id
ORDER BY g.name ASC, m.build ASC, m.timestamp ASC
GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
ORDER BY g.name ASC, MIN(b.build) ASC, m.timestamp ASC
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
@@ -86,12 +89,7 @@ public class CacheExporter @Inject constructor(
while (rows.next()) {
val id = rows.getInt(1)
val game = rows.getString(2)
var build: Int? = rows.getInt(3)
if (rows.wasNull()) {
build = null
}
val builds = rows.getArray(3).array as Array<Int>
val timestamp = rows.getTimestamp(4)?.toInstant()
val name = rows.getString(5)
@@ -107,7 +105,7 @@ public class CacheExporter @Inject constructor(
null
}
caches += Cache(id, game, build, timestamp, name, description = null, stats)
caches += Cache(id, game, builds.toSortedSet(), timestamp, name, description = null, stats)
}
caches
@@ -121,13 +119,15 @@ public class CacheExporter @Inject constructor(
connection.prepareStatement(
"""
SELECT
g.name, m.build, m.timestamp, m.name, m.description,
s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
g.name, array_remove(array_agg(b.build ORDER BY b.build ASC), NULL), m.timestamp, m.name,
m.description, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
FROM master_indexes m
JOIN games g ON g.id = m.game_id
JOIN containers c ON c.id = m.container_id
LEFT JOIN master_index_builds b ON b.master_index_id = m.id
LEFT JOIN master_index_stats s ON s.master_index_id = m.id
WHERE m.id = ?
GROUP BY m.id, g.name, s.valid_indexes, s.indexes, s.valid_groups, s.groups, s.valid_keys, s.keys
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
@@ -138,12 +138,7 @@ public class CacheExporter @Inject constructor(
}
val game = rows.getString(1)
var build: Int? = rows.getInt(2)
if (rows.wasNull()) {
build = null
}
val builds = rows.getArray(2).array as Array<Int>
val timestamp = rows.getTimestamp(3)?.toInstant()
val name = rows.getString(4)
val description = rows.getString(5)
@@ -160,7 +155,7 @@ public class CacheExporter @Inject constructor(
null
}
return@execute Cache(id, game, build, timestamp, name, description, stats)
return@execute Cache(id, game, builds.toSortedSet(), timestamp, name, description, stats)
}
}
}
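
The exporter queries now aggregate builds in SQL: array_agg(b.build ORDER BY b.build ASC) collects every matching master_index_builds row, array_remove(..., NULL) strips the single NULL that array_agg produces when the LEFT JOIN matches nothing, and the GROUP BY collapses the joined rows back to one row per master index. A minimal standalone sketch of reading that array column over JDBC, mirroring the pattern above (the query is abbreviated to the relevant columns):

    import java.sql.Connection
    import java.util.SortedSet

    // Reads the aggregated build numbers for each master index into a SortedSet,
    // following the same getArray/cast pattern as CacheExporter.
    fun readBuilds(connection: Connection): List<SortedSet<Int>> {
        connection.prepareStatement(
            """
            SELECT array_remove(array_agg(b.build ORDER BY b.build ASC), NULL)
            FROM master_indexes m
            LEFT JOIN master_index_builds b ON b.master_index_id = m.id
            GROUP BY m.id
            """.trimIndent()
        ).use { stmt ->
            stmt.executeQuery().use { rows ->
                val result = mutableListOf<SortedSet<Int>>()
                while (rows.next()) {
                    // The PostgreSQL driver returns an int4[] column as Integer[];
                    // an empty array (no recorded builds) becomes an empty set.
                    val builds = rows.getArray(1).array as Array<Int>
                    result += builds.toSortedSet()
                }
                return result
            }
        }
    }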

@@ -27,7 +27,6 @@ import java.time.OffsetDateTime
import java.time.ZoneOffset
import javax.inject.Inject
import javax.inject.Singleton
import kotlin.math.min
@Singleton
public class CacheImporter @Inject constructor(
@@ -341,14 +340,13 @@ public class CacheImporter @Inject constructor(
val containerId = addContainer(connection, masterIndex)
var masterIndexId: Int? = null
var newBuild: Int?
var newTimestamp: Instant?
var newName: String?
var newDescription: String?
connection.prepareStatement(
"""
SELECT id, game_id, build, timestamp, name, description
SELECT id, game_id, timestamp, name, description
FROM master_indexes
WHERE container_id = ? AND format = ?::master_index_format
FOR UPDATE
@@ -365,25 +363,12 @@ public class CacheImporter @Inject constructor(
if (masterIndexId != null) {
val oldGameId = rows.getInt(2)
var oldBuild: Int? = rows.getInt(3)
if (rows.wasNull()) {
oldBuild = null
}
val oldTimestamp: Instant? = rows.getTimestamp(4)?.toInstant()
val oldName: String? = rows.getString(5)
val oldDescription: String? = rows.getString(6)
val oldTimestamp: Instant? = rows.getTimestamp(3)?.toInstant()
val oldName: String? = rows.getString(4)
val oldDescription: String? = rows.getString(5)
check(oldGameId == gameId)
if (oldBuild != null && build != null) {
newBuild = min(oldBuild, build)
} else if (oldBuild != null) {
newBuild = oldBuild
} else {
newBuild = build
}
if (oldTimestamp != null && timestamp != null) {
newTimestamp = if (oldTimestamp.isBefore(timestamp)) {
oldTimestamp
@@ -416,7 +401,6 @@ public class CacheImporter @Inject constructor(
newDescription = description
}
} else {
newBuild = build
newTimestamp = timestamp
newName = name
newDescription = description
@@ -428,94 +412,103 @@ public class CacheImporter @Inject constructor(
connection.prepareStatement(
"""
UPDATE master_indexes
SET build = ?, timestamp = ?, name = ?, description = ?
SET timestamp = ?, name = ?, description = ?
WHERE id = ?
""".trimIndent()
).use { stmt ->
stmt.setObject(1, newBuild, Types.INTEGER)
if (newTimestamp != null) {
val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
stmt.setObject(2, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
stmt.setObject(1, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
} else {
stmt.setNull(2, Types.TIMESTAMP_WITH_TIMEZONE)
stmt.setNull(1, Types.TIMESTAMP_WITH_TIMEZONE)
}
stmt.setString(3, newName)
stmt.setString(4, newDescription)
stmt.setInt(5, masterIndexId!!)
stmt.setString(2, newName)
stmt.setString(3, newDescription)
stmt.setInt(4, masterIndexId!!)
stmt.execute()
return@addMasterIndex masterIndexId!!
}
}
} else {
connection.prepareStatement(
"""
INSERT INTO master_indexes (container_id, format, game_id, timestamp, name, description)
VALUES (?, ?::master_index_format, ?, ?, ?, ?)
RETURNING id
""".trimIndent()
).use { stmt ->
stmt.setLong(1, containerId)
stmt.setString(2, masterIndex.index.format.name.toLowerCase())
stmt.setInt(3, gameId)
connection.prepareStatement(
"""
INSERT INTO master_indexes (container_id, format, game_id, build, timestamp, name, description)
VALUES (?, ?::master_index_format, ?, ?, ?, ?, ?)
RETURNING id
""".trimIndent()
).use { stmt ->
stmt.setLong(1, containerId)
stmt.setString(2, masterIndex.index.format.name.toLowerCase())
stmt.setInt(3, gameId)
stmt.setObject(4, newBuild, Types.INTEGER)
if (newTimestamp != null) {
val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
stmt.setObject(5, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
} else {
stmt.setNull(5, Types.TIMESTAMP_WITH_TIMEZONE)
}
if (newTimestamp != null) {
val offsetDateTime = OffsetDateTime.ofInstant(newTimestamp, ZoneOffset.UTC)
stmt.setObject(4, offsetDateTime, Types.TIMESTAMP_WITH_TIMEZONE)
} else {
stmt.setNull(4, Types.TIMESTAMP_WITH_TIMEZONE)
}
stmt.setString(6, newName)
stmt.setString(7, newDescription)
stmt.setString(5, newName)
stmt.setString(6, newDescription)
stmt.executeQuery().use { rows ->
check(rows.next())
masterIndexId = rows.getInt(1)
stmt.executeQuery().use { rows ->
check(rows.next())
masterIndexId = rows.getInt(1)
}
}
}
connection.prepareStatement(
"""
INSERT INTO master_index_archives (
master_index_id, archive_id, crc32, version, whirlpool, groups, total_uncompressed_length
)
VALUES (?, ?, ?, ?, ?, ?, ?)
""".trimIndent()
).use { stmt ->
for ((i, entry) in masterIndex.index.entries.withIndex()) {
stmt.setInt(1, masterIndexId!!)
stmt.setInt(2, i)
stmt.setInt(3, entry.checksum)
connection.prepareStatement(
"""
INSERT INTO master_index_archives (
master_index_id, archive_id, crc32, version, whirlpool, groups, total_uncompressed_length
)
VALUES (?, ?, ?, ?, ?, ?, ?)
""".trimIndent()
).use { stmt ->
for ((i, entry) in masterIndex.index.entries.withIndex()) {
stmt.setInt(1, masterIndexId!!)
stmt.setInt(2, i)
stmt.setInt(3, entry.checksum)
if (masterIndex.index.format >= MasterIndexFormat.VERSIONED) {
stmt.setInt(4, entry.version)
} else {
stmt.setInt(4, 0)
}
if (masterIndex.index.format >= MasterIndexFormat.VERSIONED) {
stmt.setInt(4, entry.version)
} else {
stmt.setInt(4, 0)
}
if (masterIndex.index.format >= MasterIndexFormat.DIGESTS) {
stmt.setBytes(5, entry.digest ?: ByteArray(Whirlpool.DIGESTBYTES))
} else {
stmt.setNull(5, Types.BINARY)
}
if (masterIndex.index.format >= MasterIndexFormat.DIGESTS) {
stmt.setBytes(5, entry.digest ?: ByteArray(Whirlpool.DIGESTBYTES))
} else {
stmt.setNull(5, Types.BINARY)
}
if (masterIndex.index.format >= MasterIndexFormat.LENGTHS) {
stmt.setInt(6, entry.groups)
stmt.setInt(7, entry.totalUncompressedLength)
} else {
stmt.setNull(6, Types.INTEGER)
stmt.setNull(7, Types.INTEGER)
if (masterIndex.index.format >= MasterIndexFormat.LENGTHS) {
stmt.setInt(6, entry.groups)
stmt.setInt(7, entry.totalUncompressedLength)
} else {
stmt.setNull(6, Types.INTEGER)
stmt.setNull(7, Types.INTEGER)
}
stmt.addBatch()
}
stmt.addBatch()
stmt.executeBatch()
}
}
stmt.executeBatch()
if (build != null) {
connection.prepareStatement(
"""
INSERT INTO master_index_builds (master_index_id, build)
VALUES (?, ?)
ON CONFLICT DO NOTHING
""".trimIndent()
).use { stmt ->
stmt.setInt(1, masterIndexId!!)
stmt.setInt(2, build)
stmt.execute()
}
}
return masterIndexId!!
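
With the build column gone from master_indexes, the importer no longer merges an incoming build into a single newBuild value; instead, whenever a build number is known it is attached to the master index through master_index_builds, and ON CONFLICT DO NOTHING makes the insert idempotent, so importing the same master index under several builds accumulates one row per distinct build. A minimal sketch of that pattern as a standalone helper (the function name is illustrative, not part of the commit):

    import java.sql.Connection

    // Hypothetical helper showing the idempotent insert used above: re-running it
    // with an already-recorded build is a no-op thanks to the composite primary key
    // (master_index_id, build) and ON CONFLICT DO NOTHING.
    fun addMasterIndexBuild(connection: Connection, masterIndexId: Int, build: Int) {
        connection.prepareStatement(
            """
            INSERT INTO master_index_builds (master_index_id, build)
            VALUES (?, ?)
            ON CONFLICT DO NOTHING
            """.trimIndent()
        ).use { stmt ->
            stmt.setInt(1, masterIndexId)
            stmt.setInt(2, build)
            stmt.execute()
        }
    }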

@@ -107,7 +107,6 @@ CREATE TABLE master_indexes (
container_id BIGINT NOT NULL REFERENCES containers (id),
format master_index_format NOT NULL,
game_id INTEGER NOT NULL REFERENCES games (id),
build INTEGER NULL,
timestamp TIMESTAMPTZ NULL,
name TEXT NULL,
description TEXT NULL,
@@ -116,6 +115,12 @@ CREATE TABLE master_indexes (
ALTER TABLE games ADD COLUMN last_master_index_id INT NULL REFERENCES master_indexes (id);
CREATE TABLE master_index_builds (
master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
build INTEGER NOT NULL,
PRIMARY KEY (master_index_id, build)
);
CREATE TABLE master_index_archives (
master_index_id INTEGER NOT NULL REFERENCES master_indexes (id),
archive_id uint1 NOT NULL,
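
The new master_index_builds table is a plain junction table keyed on (master_index_id, build), which is what allows several builds per master index without duplicates. A small illustrative lookup against it (not part of the commit):

    import java.sql.Connection

    // Lists the build numbers recorded for a single master index, lowest first.
    fun listBuilds(connection: Connection, masterIndexId: Int): List<Int> {
        connection.prepareStatement(
            """
            SELECT build
            FROM master_index_builds
            WHERE master_index_id = ?
            ORDER BY build ASC
            """.trimIndent()
        ).use { stmt ->
            stmt.setInt(1, masterIndexId)
            stmt.executeQuery().use { rows ->
                val builds = mutableListOf<Int>()
                while (rows.next()) {
                    builds += rows.getInt(1)
                }
                return builds
            }
        }
    }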

@@ -13,7 +13,7 @@
<thead class="thead-dark">
<tr>
<th>Game</th>
<th>Build</th>
<th>Build(s)</th>
<th>Timestamp</th>
<th>Name</th>
<th>Indexes</th>
@@ -26,7 +26,12 @@
<!--/*@thymesVar id="caches" type="java.util.List<org.openrs2.archive.cache.CacheExporter.Cache>"*/-->
<tr th:each="cache : ${caches}">
<td th:text="${cache.game}">runescape</td>
<td th:text="${cache.build}" class="text-right">550</td>
<td class="text-right">
<span th:each="build, it : ${cache.builds}" th:remove="tag">
<span th:text="${build}">550</span>
<br th:remove="${it.last}? 'all' : 'none'" />
</span>
</td>
<td>
<span th:text="${#temporals.format(cache.timestamp, 'yyyy-MM-dd')}"></span>
<br />

@@ -16,8 +16,8 @@
<td th:text="${cache.game}">runescape</td>
</tr>
<tr class="thead-dark">
<th>Build</th>
<td th:text="${cache.build}">550</td>
<th>Build(s)</th>
<td th:text="${#strings.setJoin(cache.builds, ', ')}">550</td>
</tr>
<tr class="thead-dark">
<th>Timestamp</th>
