Serve flat file caches as .tar.gz files instead of .zip files

Streaming .tar.gz files requires less memory, as we don't need to
remember metadata about each file for the ZIP end of central directory
record written at the end of the archive.

Signed-off-by: Graham <gpe@openrs2.org>
bzip2
Graham 3 years ago
parent 3508a01c02
commit 36f5efa1ad
  1. 1
      archive/build.gradle.kts
  2. 11
      archive/src/main/kotlin/org/openrs2/archive/web/CachesController.kt
  3. 2
      archive/src/main/kotlin/org/openrs2/archive/web/WebServer.kt
  4. 2
      archive/src/main/resources/org/openrs2/archive/templates/caches/index.html
  5. 2
      archive/src/main/resources/org/openrs2/archive/templates/caches/show.html
  6. 1
      cache/build.gradle.kts
  7. 35
      cache/src/main/kotlin/org/openrs2/cache/FlatFileStoreTarWriter.kt
  8. 16
      cache/src/test/kotlin/org/openrs2/cache/FlatFileStoreTarWriterTest.kt
  9. BIN
      cache/src/test/resources/org/openrs2/cache/flat-file-store-tar/cache.tar
  10. BIN
      cache/src/test/resources/org/openrs2/cache/flat-file-store-zip/cache.zip

@ -14,6 +14,7 @@ dependencies {
implementation(projects.buffer) implementation(projects.buffer)
implementation(projects.cache550) implementation(projects.cache550)
implementation(projects.cli) implementation(projects.cli)
implementation(projects.compress)
implementation(projects.db) implementation(projects.db)
implementation(projects.http) implementation(projects.http)
implementation(projects.inject) implementation(projects.inject)

@ -12,10 +12,12 @@ import io.ktor.thymeleaf.ThymeleafContent
import io.netty.buffer.ByteBufAllocator import io.netty.buffer.ByteBufAllocator
import kotlinx.coroutines.sync.Semaphore import kotlinx.coroutines.sync.Semaphore
import kotlinx.coroutines.sync.withPermit import kotlinx.coroutines.sync.withPermit
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream
import org.openrs2.archive.cache.CacheExporter import org.openrs2.archive.cache.CacheExporter
import org.openrs2.archive.map.MapRenderer import org.openrs2.archive.map.MapRenderer
import org.openrs2.cache.DiskStoreZipWriter import org.openrs2.cache.DiskStoreZipWriter
import org.openrs2.cache.FlatFileStoreZipWriter import org.openrs2.cache.FlatFileStoreTarWriter
import org.openrs2.compress.gzip.GzipLevelOutputStream
import java.nio.file.attribute.FileTime import java.nio.file.attribute.FileTime
import java.time.Instant import java.time.Instant
import java.util.zip.Deflater import java.util.zip.Deflater
@ -85,12 +87,13 @@ public class CachesController @Inject constructor(
call.response.header( call.response.header(
HttpHeaders.ContentDisposition, HttpHeaders.ContentDisposition,
ContentDisposition.Attachment ContentDisposition.Attachment
.withParameter(ContentDisposition.Parameters.FileName, "cache.zip") .withParameter(ContentDisposition.Parameters.FileName, "cache.tar.gz")
.toString() .toString()
) )
call.respondOutputStream(contentType = ContentType.Application.Zip) { call.respondOutputStream(contentType = ContentType.Application.GZip) {
FlatFileStoreZipWriter(ZipOutputStream(this)).use { store -> val output = TarArchiveOutputStream(GzipLevelOutputStream(this, Deflater.BEST_COMPRESSION))
FlatFileStoreTarWriter(output).use { store ->
exporter.export(id, store) exporter.export(id, store)
} }
} }

@ -78,7 +78,7 @@ public class WebServer @Inject constructor(
} }
} }
get("/caches/{id}/disk.zip") { cachesController.exportDisk(call) } get("/caches/{id}/disk.zip") { cachesController.exportDisk(call) }
get("/caches/{id}/flat-file.zip") { cachesController.exportFlatFile(call) } get("/caches/{id}/flat-file.tar.gz") { cachesController.exportFlatFile(call) }
get("/caches/{id}/keys.json") { cachesController.exportKeysJson(call) } get("/caches/{id}/keys.json") { cachesController.exportKeysJson(call) }
get("/caches/{id}/keys.zip") { cachesController.exportKeysZip(call) } get("/caches/{id}/keys.zip") { cachesController.exportKeysZip(call) }
get("/caches/{id}/map.png") { cachesController.renderMap(call) } get("/caches/{id}/map.png") { cachesController.renderMap(call) }

@ -82,7 +82,7 @@
<li th:if="${cache.stats != null and cache.stats.diskStoreValid}"><a <li th:if="${cache.stats != null and cache.stats.diskStoreValid}"><a
th:href="${'/caches/' + cache.id + '/disk.zip'}" th:href="${'/caches/' + cache.id + '/disk.zip'}"
class="dropdown-item">Cache (.dat2/.idx)</a></li> class="dropdown-item">Cache (.dat2/.idx)</a></li>
<li><a th:href="${'/caches/' + cache.id + '/flat-file.zip'}" <li><a th:href="${'/caches/' + cache.id + '/flat-file.tar.gz'}"
class="dropdown-item">Cache (Flat file)</a></li> class="dropdown-item">Cache (Flat file)</a></li>
<li> <li>
<hr class="dropdown-divider" /> <hr class="dropdown-divider" />

@ -54,7 +54,7 @@
<a th:if="${cache.stats != null and cache.stats.diskStoreValid}" <a th:if="${cache.stats != null and cache.stats.diskStoreValid}"
th:href="${'/caches/' + cache.id + '/disk.zip'}" th:href="${'/caches/' + cache.id + '/disk.zip'}"
class="btn btn-primary btn-sm">Cache (.dat2/.idx)</a> class="btn btn-primary btn-sm">Cache (.dat2/.idx)</a>
<a th:href="${'/caches/' + cache.id + '/flat-file.zip'}" <a th:href="${'/caches/' + cache.id + '/flat-file.tar.gz'}"
class="btn btn-primary btn-sm">Cache (Flat file)</a> class="btn btn-primary btn-sm">Cache (Flat file)</a>
</div> </div>
<div class="btn-group me-2"> <div class="btn-group me-2">

@ -5,6 +5,7 @@ plugins {
dependencies { dependencies {
api(projects.crypto) api(projects.crypto)
api(libs.commons.compress)
api(libs.fastutil) api(libs.fastutil)
api(libs.guice) api(libs.guice)
api(libs.netty.buffer) api(libs.netty.buffer)

@ -1,15 +1,14 @@
package org.openrs2.cache package org.openrs2.cache
import io.netty.buffer.ByteBuf import io.netty.buffer.ByteBuf
import java.nio.file.attribute.FileTime import org.apache.commons.compress.archivers.tar.TarArchiveEntry
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream
import java.time.Instant import java.time.Instant
import java.util.zip.Deflater import java.util.Date
import java.util.zip.ZipEntry
import java.util.zip.ZipOutputStream
/** /**
* A specialised [Store] implementation that writes a cache in the * A specialised [Store] implementation that writes a cache in the
* [FlatFileStore] format to a [ZipOutputStream]. * [FlatFileStore] format to a [TarArchiveOutputStream].
* *
* The cache is not buffered to disk. * The cache is not buffered to disk.
* *
@ -18,23 +17,17 @@ import java.util.zip.ZipOutputStream
* *
* It is only intended for use by the cache archiving service's web interface. * It is only intended for use by the cache archiving service's web interface.
*/ */
public class FlatFileStoreZipWriter( public class FlatFileStoreTarWriter(
private val out: ZipOutputStream, private val out: TarArchiveOutputStream,
private val prefix: String = "cache/", private val prefix: String = "cache/",
level: Int = Deflater.BEST_COMPRESSION,
timestamp: Instant = Instant.EPOCH timestamp: Instant = Instant.EPOCH
) : Store { ) : Store {
private val timestamp = FileTime.from(timestamp) private val timestamp = Date.from(timestamp)
init { private fun createTarEntry(name: String, size: Int): TarArchiveEntry {
out.setLevel(level) val entry = TarArchiveEntry(prefix + name)
} entry.modTime = timestamp
entry.size = size.toLong()
private fun createZipEntry(name: String): ZipEntry {
val entry = ZipEntry(prefix + name)
entry.creationTime = timestamp
entry.lastAccessTime = timestamp
entry.lastModifiedTime = timestamp
return entry return entry
} }
@ -57,7 +50,8 @@ public class FlatFileStoreZipWriter(
override fun create(archive: Int) { override fun create(archive: Int) {
require(archive in 0..Store.MAX_ARCHIVE) require(archive in 0..Store.MAX_ARCHIVE)
out.putNextEntry(createZipEntry("$archive/")) out.putArchiveEntry(createTarEntry("$archive/", size = 0))
out.closeArchiveEntry()
} }
override fun read(archive: Int, group: Int): ByteBuf { override fun read(archive: Int, group: Int): ByteBuf {
@ -69,8 +63,9 @@ public class FlatFileStoreZipWriter(
require(group >= 0) require(group >= 0)
require(buf.readableBytes() <= Store.MAX_GROUP_SIZE) require(buf.readableBytes() <= Store.MAX_GROUP_SIZE)
out.putNextEntry(createZipEntry("$archive/$group.dat")) out.putArchiveEntry(createTarEntry("$archive/$group.dat", buf.readableBytes()))
buf.readBytes(out, buf.readableBytes()) buf.readBytes(out, buf.readableBytes())
out.closeArchiveEntry()
} }
override fun remove(archive: Int) { override fun remove(archive: Int) {

@ -3,21 +3,21 @@ package org.openrs2.cache
import com.google.common.jimfs.Configuration import com.google.common.jimfs.Configuration
import com.google.common.jimfs.Jimfs import com.google.common.jimfs.Jimfs
import io.netty.buffer.Unpooled import io.netty.buffer.Unpooled
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream
import org.openrs2.buffer.copiedBuffer import org.openrs2.buffer.copiedBuffer
import org.openrs2.buffer.use import org.openrs2.buffer.use
import org.openrs2.util.io.recursiveEquals import org.openrs2.util.io.recursiveEquals
import java.io.OutputStream import java.io.OutputStream
import java.nio.file.Files import java.nio.file.Files
import java.nio.file.Path import java.nio.file.Path
import java.util.zip.ZipOutputStream
import kotlin.test.Test import kotlin.test.Test
import kotlin.test.assertFailsWith import kotlin.test.assertFailsWith
import kotlin.test.assertTrue import kotlin.test.assertTrue
public class FlatFileStoreZipWriterTest { public class FlatFileStoreTarWriterTest {
@Test @Test
fun testBounds() { fun testBounds() {
FlatFileStoreZipWriter(ZipOutputStream(OutputStream.nullOutputStream())).use { store -> FlatFileStoreTarWriter(TarArchiveOutputStream(OutputStream.nullOutputStream())).use { store ->
// create // create
assertFailsWith<IllegalArgumentException> { assertFailsWith<IllegalArgumentException> {
store.create(-1) store.create(-1)
@ -58,7 +58,7 @@ public class FlatFileStoreZipWriterTest {
@Test @Test
fun testUnsupported() { fun testUnsupported() {
FlatFileStoreZipWriter(ZipOutputStream(OutputStream.nullOutputStream())).use { store -> FlatFileStoreTarWriter(TarArchiveOutputStream(OutputStream.nullOutputStream())).use { store ->
assertFailsWith<UnsupportedOperationException> { assertFailsWith<UnsupportedOperationException> {
store.exists(0) store.exists(0)
} }
@ -92,11 +92,11 @@ public class FlatFileStoreZipWriterTest {
@Test @Test
fun testWrite() { fun testWrite() {
Jimfs.newFileSystem(Configuration.forCurrentPlatform()).use { fs -> Jimfs.newFileSystem(Configuration.forCurrentPlatform()).use { fs ->
val actual = fs.rootDirectories.first().resolve("zip") val actual = fs.rootDirectories.first().resolve("tar")
Files.createDirectories(actual) Files.createDirectories(actual)
Files.newOutputStream(actual.resolve("cache.zip")).use { out -> Files.newOutputStream(actual.resolve("cache.tar")).use { out ->
FlatFileStoreZipWriter(ZipOutputStream(out)).use { store -> FlatFileStoreTarWriter(TarArchiveOutputStream(out)).use { store ->
store.create(0) store.create(0)
copiedBuffer("OpenRS2").use { buf -> copiedBuffer("OpenRS2").use { buf ->
@ -119,7 +119,7 @@ public class FlatFileStoreZipWriterTest {
private companion object { private companion object {
private val ROOT = Path.of( private val ROOT = Path.of(
FlatFileStoreZipWriterTest::class.java.getResource("flat-file-store-zip").toURI() FlatFileStoreTarWriterTest::class.java.getResource("flat-file-store-tar").toURI()
) )
} }
} }
Loading…
Cancel
Save