forked from openrs2/openrs2

It supports reading and writing a cache backed by a Store, as well as standalone .js5 files. I'm not quite as happy with this as I am with the lower-level API yet, and there are still a few remaining TODOs.

Signed-off-by: Graham <gpe@openrs2.org>
parent 6f13a0a737
commit ad0cdb6056
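A minimal usage sketch of the new high-level API, based on the entry points in the diff below (Cache.open, write and read; Cache implements Flushable and Closeable). The cache path and the archive/group/file IDs here are hypothetical:

import io.netty.buffer.Unpooled
import org.openrs2.buffer.use
import java.nio.file.Paths

fun main() {
    // use {} flushes pending changes and closes the underlying Store via close()
    Cache.open(Paths.get("cache")).use { cache ->
        val buf = Unpooled.wrappedBuffer(byteArrayOf(1, 2, 3))
        try {
            cache.write(2, 10, 0, buf) // write() copies the buffer, so the caller keeps ownership
        } finally {
            buf.release()
        }

        // read() returns a retained slice that the caller must release
        cache.read(2, 10, 0).use { data ->
            println(data.readableBytes())
        }
    }
}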
Archive.kt
@@ -0,0 +1,347 @@
package org.openrs2.cache

import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import it.unimi.dsi.fastutil.ints.Int2ObjectAVLTreeMap
import it.unimi.dsi.fastutil.ints.Int2ObjectSortedMap
import it.unimi.dsi.fastutil.ints.Int2ObjectSortedMaps
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.crypto.XteaKey
import org.openrs2.crypto.whirlpool
import org.openrs2.util.krHashCode
import java.io.FileNotFoundException
import java.io.Flushable

public abstract class Archive internal constructor(
    protected val alloc: ByteBufAllocator,
    protected val index: Js5Index,
    protected val archive: Int,
    internal val unpackedCache: UnpackedCache
) : Flushable {
    private var dirty = false

    internal inner class Unpacked(
        private val entry: Js5Index.MutableGroup,
        val key: XteaKey,
        private var files: Int2ObjectSortedMap<ByteBuf>
    ) {
        private var dirty = false

        private fun ensureWritable() {
            if (files.size == 1 && files is Int2ObjectSortedMaps.Singleton) {
                files = Int2ObjectAVLTreeMap(files)
            }
        }

        fun read(file: Int): ByteBuf {
            val fileEntry = entry[file] ?: throw FileNotFoundException()
            return files[fileEntry.id]!!.retainedSlice()
        }

        fun readNamed(fileNameHash: Int): ByteBuf {
            val fileEntry = entry.getNamed(fileNameHash) ?: throw FileNotFoundException()
            return files[fileEntry.id]!!.retainedSlice()
        }

        fun write(file: Int, buf: ByteBuf) {
            ensureWritable()

            val fileEntry = entry.createOrGet(file)
            files.put(fileEntry.id, buf.copy().asReadOnly())?.release()
            dirty = true
        }

        fun writeNamed(fileNameHash: Int, buf: ByteBuf) {
            ensureWritable()

            val fileEntry = entry.createOrGetNamed(fileNameHash)
            files.put(fileEntry.id, buf.copy().asReadOnly())?.release()
            dirty = true
        }

        fun remove(file: Int) {
            ensureWritable()

            val fileEntry = entry.remove(file) ?: return
            files.remove(fileEntry.id)?.release()
            dirty = true
        }

        fun removeNamed(fileNameHash: Int) {
            ensureWritable()

            val fileEntry = entry.removeNamed(fileNameHash) ?: return
            files.remove(fileEntry.id)?.release()
            dirty = true
        }
        fun flush() {
            if (!dirty) {
                return
            }

            Group.pack(files).use { buf ->
                if (index.hasLengths) {
                    entry.uncompressedLength = buf.readableBytes()
                }

                if (index.hasUncompressedChecksums) {
                    entry.uncompressedChecksum = buf.crc32()
                }

                Js5Compression.compressBest(buf, key = key).use { compressed ->
                    entry.checksum = compressed.crc32()

                    if (index.hasLengths) {
                        entry.length = compressed.readableBytes()
                    }

                    if (index.hasDigests) {
                        entry.digest = compressed.whirlpool()
                    }

                    // The version trailer belongs on the compressed container that is
                    // written to the store, and it is appended after the checksum and
                    // digest are computed, so both exclude the trailer (matching
                    // verifyCompressed, which strips the trailer before verifying).
                    appendVersion(compressed, ++entry.version)
                    writePacked(entry.id, compressed)
                }
            }

            dirty = false
        }

        fun release() {
            files.values.forEach(ByteBuf::release)
        }
    }

    // TODO(gpe): rename/move, reindex, rekey, method to go from name->id

    public fun exists(group: Int): Boolean {
        require(group >= 0)
        return index.contains(group)
    }

    public fun existsNamed(groupNameHash: Int): Boolean {
        return index.containsNamed(groupNameHash)
    }

    public fun exists(group: String): Boolean {
        return existsNamed(group.krHashCode())
    }

    public fun exists(group: Int, file: Int): Boolean {
        require(group >= 0 && file >= 0)

        val entry = index[group] ?: return false
        return entry.contains(file)
    }

    public fun existsNamed(groupNameHash: Int, fileNameHash: Int): Boolean {
        val entry = index.getNamed(groupNameHash) ?: return false
        return entry.containsNamed(fileNameHash)
    }

    public fun exists(group: String, file: String): Boolean {
        return existsNamed(group.krHashCode(), file.krHashCode())
    }

    public fun list(): Iterator<Js5Index.Group<*>> {
        return index.iterator()
    }

    public fun list(group: Int): Iterator<Js5Index.File> {
        require(group >= 0)

        val entry = index[group] ?: throw FileNotFoundException()
        return entry.iterator()
    }

    public fun listNamed(groupNameHash: Int): Iterator<Js5Index.File> {
        val entry = index.getNamed(groupNameHash) ?: throw FileNotFoundException()
        return entry.iterator()
    }

    public fun list(group: String): Iterator<Js5Index.File> {
        return listNamed(group.krHashCode())
    }

    public fun read(group: Int, file: Int, key: XteaKey = XteaKey.ZERO): ByteBuf {
        require(group >= 0 && file >= 0)

        val entry = index[group] ?: throw FileNotFoundException()
        val unpacked = getUnpacked(entry, key)
        return unpacked.read(file)
    }

    public fun readNamed(groupNameHash: Int, fileNameHash: Int, key: XteaKey = XteaKey.ZERO): ByteBuf {
        val entry = index.getNamed(groupNameHash) ?: throw FileNotFoundException()
        val unpacked = getUnpacked(entry, key)
        return unpacked.readNamed(fileNameHash)
    }

    public fun read(group: String, file: String, key: XteaKey = XteaKey.ZERO): ByteBuf {
        return readNamed(group.krHashCode(), file.krHashCode(), key)
    }

    public fun write(group: Int, file: Int, buf: ByteBuf, key: XteaKey = XteaKey.ZERO) {
        require(group >= 0 && file >= 0)

        val entry = index.createOrGet(group)
        val unpacked = createOrGetUnpacked(entry, key, isOverwriting(entry, file))
        unpacked.write(file, buf)

        dirty = true
    }

    public fun writeNamed(groupNameHash: Int, fileNameHash: Int, buf: ByteBuf, key: XteaKey = XteaKey.ZERO) {
        val entry = index.createOrGetNamed(groupNameHash)
        val unpacked = createOrGetUnpacked(entry, key, isOverwritingNamed(entry, fileNameHash))
        unpacked.writeNamed(fileNameHash, buf)

        dirty = true
        index.hasNames = true
    }

    public fun write(group: String, file: String, buf: ByteBuf, key: XteaKey = XteaKey.ZERO) {
        return writeNamed(group.krHashCode(), file.krHashCode(), buf, key)
    }

    public fun remove(group: Int) {
        require(group >= 0)

        val entry = index.remove(group) ?: return
        unpackedCache.remove(archive, entry.id)
        removePacked(entry.id)

        dirty = true
    }

    public fun removeNamed(groupNameHash: Int) {
        val entry = index.removeNamed(groupNameHash) ?: return
        unpackedCache.remove(archive, entry.id)
        removePacked(entry.id)

        dirty = true
    }

    public fun remove(group: String) {
        return removeNamed(group.krHashCode())
    }

    public fun remove(group: Int, file: Int, key: XteaKey = XteaKey.ZERO) {
        require(group >= 0 && file >= 0)

        val entry = index[group] ?: return

        if (isOverwriting(entry, file)) {
            remove(group)
            return
        }

        val unpacked = getUnpacked(entry, key)
        unpacked.remove(file)

        dirty = true
    }

    public fun removeNamed(groupNameHash: Int, fileNameHash: Int, key: XteaKey = XteaKey.ZERO) {
        val entry = index.getNamed(groupNameHash) ?: return

        if (isOverwritingNamed(entry, fileNameHash)) {
            removeNamed(groupNameHash)
            return
        }

        val unpacked = getUnpacked(entry, key)
        unpacked.removeNamed(fileNameHash)

        dirty = true
    }

    public fun remove(group: String, file: String, key: XteaKey = XteaKey.ZERO) {
        return removeNamed(group.krHashCode(), file.krHashCode(), key)
    }

    public override fun flush() {
        if (!dirty) {
            return
        }

        index.version++

        alloc.buffer().use { buf ->
            index.write(buf)

            Js5Compression.compressBest(buf).use { compressed ->
                writePackedIndex(compressed)
            }
        }

        dirty = false
    }

    protected abstract fun packedExists(group: Int): Boolean
    protected abstract fun readPacked(group: Int): ByteBuf
    protected abstract fun writePacked(group: Int, buf: ByteBuf)
    protected abstract fun writePackedIndex(buf: ByteBuf)
    protected abstract fun removePacked(group: Int)
    protected abstract fun appendVersion(buf: ByteBuf, version: Int)
    protected abstract fun verifyCompressed(buf: ByteBuf, entry: Js5Index.MutableGroup)
    protected abstract fun verifyUncompressed(buf: ByteBuf, entry: Js5Index.MutableGroup)

    private fun isOverwriting(entry: Js5Index.MutableGroup, file: Int): Boolean {
        val fileEntry = entry.singleOrNull() ?: return false
        return fileEntry.id == file
    }

    private fun isOverwritingNamed(entry: Js5Index.MutableGroup, fileNameHash: Int): Boolean {
        val fileEntry = entry.singleOrNull() ?: return false
        return fileEntry.nameHash == fileNameHash
    }

    private fun createOrGetUnpacked(entry: Js5Index.MutableGroup, key: XteaKey, overwrite: Boolean): Unpacked {
        return if (entry.size == 0 || overwrite) {
            val unpacked = Unpacked(entry, key, Int2ObjectAVLTreeMap())
            unpackedCache.put(archive, entry.id, unpacked)
            unpacked
        } else {
            getUnpacked(entry, key)
        }
    }

    private fun getUnpacked(entry: Js5Index.MutableGroup, key: XteaKey): Unpacked {
        var unpacked = unpackedCache.get(archive, entry.id)
        if (unpacked != null) {
            /*
             * If we've already unpacked the group, we check the programmer
             * is using the correct key to ensure the code always works,
             * regardless of group cache size/invalidation behaviour.
             */
            require(unpacked.key == key) {
                "Invalid key for archive $archive group ${entry.id} (expected ${unpacked!!.key}, actual $key)"
            }
            return unpacked
        }

        if (!packedExists(entry.id)) {
            throw StoreCorruptException("Archive $archive group ${entry.id} is missing")
        }

        val files = readPacked(entry.id).use { compressed ->
            // TODO(gpe): check for trailing data?
            verifyCompressed(compressed, entry)

            Js5Compression.uncompress(compressed, key).use { buf ->
                verifyUncompressed(buf, entry)

                Group.unpack(buf, entry)
            }
        }

        files.replaceAll { _, buf -> buf.asReadOnly() }

        unpacked = Unpacked(entry, key, files)
        unpackedCache.put(archive, entry.id, unpacked)
        return unpacked
    }
}
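One consequence of getUnpacked above: the XTEA key is pinned for as long as a group sits in the unpacked cache, so reading the same group with a different key fails fast instead of depending on eviction timing. A sketch of what that means for callers (the IDs are hypothetical, and XteaKey's four-int constructor is an assumption):

val key = XteaKey(1, 2, 3, 4) // hypothetical key
cache.read(5, 100, 0, key).release() // unpacks the group with `key`
cache.read(5, 100, 0)                // throws IllegalArgumentException while the group is
                                     // still cached under `key` rather than XteaKey.ZERO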
Cache.kt
@@ -0,0 +1,246 @@
package org.openrs2.cache

import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import org.openrs2.buffer.use
import org.openrs2.crypto.XteaKey
import org.openrs2.util.krHashCode
import java.io.Closeable
import java.io.FileNotFoundException
import java.io.Flushable
import java.nio.file.Path

/**
 * A high-level interface for reading and writing files to and from a
 * collection of JS5 archives.
 */
public class Cache private constructor(
    private val store: Store,
    private val alloc: ByteBufAllocator,
    unpackedCacheSize: Int
) : Flushable, Closeable {
    private val archives = arrayOfNulls<CacheArchive>(MAX_ARCHIVE + 1)
    private val unpackedCache = UnpackedCache(unpackedCacheSize)

    private fun init() {
        for (archive in store.list(Js5Archive.ARCHIVESET)) {
            val index = store.read(Js5Archive.ARCHIVESET, archive).use { compressed ->
                Js5Compression.uncompress(compressed).use { buf ->
                    Js5Index.read(buf)
                }
            }

            archives[archive] = CacheArchive(alloc, index, archive, unpackedCache, store)
        }
    }

    private fun createOrGetArchive(id: Int): Archive {
        var archive = archives[id]
        if (archive != null) {
            return archive
        }

        // TODO(gpe): protocol/flags should be configurable somehow
        val index = Js5Index(Js5Protocol.VERSIONED)
        archive = CacheArchive(alloc, index, id, unpackedCache, store)
        archives[id] = archive
        return archive
    }

    // TODO(gpe): rename/move, reindex, rekey, method to go from name->id

    public fun create(archive: Int) {
        checkArchive(archive)
        createOrGetArchive(archive)
    }

    public fun exists(archive: Int): Boolean {
        checkArchive(archive)
        return archives[archive] != null
    }

    public fun exists(archive: Int, group: Int): Boolean {
        checkArchive(archive)
        return archives[archive]?.exists(group) ?: false
    }

    public fun existsNamed(archive: Int, groupNameHash: Int): Boolean {
        checkArchive(archive)
        return archives[archive]?.existsNamed(groupNameHash) ?: false
    }

    public fun exists(archive: Int, group: String): Boolean {
        return existsNamed(archive, group.krHashCode())
    }

    public fun exists(archive: Int, group: Int, file: Int): Boolean {
        checkArchive(archive)
        return archives[archive]?.exists(group, file) ?: false
    }

    public fun existsNamed(archive: Int, groupNameHash: Int, fileNameHash: Int): Boolean {
        checkArchive(archive)
        return archives[archive]?.existsNamed(groupNameHash, fileNameHash) ?: false
    }

    public fun exists(archive: Int, group: String, file: String): Boolean {
        return existsNamed(archive, group.krHashCode(), file.krHashCode())
    }

    public fun list(): Iterator<Int> {
        return archives.withIndex()
            .filter { it.value != null }
            .map { it.index }
            .iterator()
    }

    public fun list(archive: Int): Iterator<Js5Index.Group<*>> {
        checkArchive(archive)
        return archives[archive]?.list() ?: throw FileNotFoundException()
    }

    public fun list(archive: Int, group: Int): Iterator<Js5Index.File> {
        checkArchive(archive)
        return archives[archive]?.list(group) ?: throw FileNotFoundException()
    }

    public fun listNamed(archive: Int, groupNameHash: Int): Iterator<Js5Index.File> {
        checkArchive(archive)
        return archives[archive]?.listNamed(groupNameHash) ?: throw FileNotFoundException()
    }

    public fun list(archive: Int, group: String): Iterator<Js5Index.File> {
        return listNamed(archive, group.krHashCode())
    }

    public fun read(archive: Int, group: Int, file: Int, key: XteaKey = XteaKey.ZERO): ByteBuf {
        checkArchive(archive)
        return archives[archive]?.read(group, file, key) ?: throw FileNotFoundException()
    }

    public fun readNamed(archive: Int, groupNameHash: Int, fileNameHash: Int, key: XteaKey = XteaKey.ZERO): ByteBuf {
        checkArchive(archive)
        return archives[archive]?.readNamed(groupNameHash, fileNameHash, key) ?: throw FileNotFoundException()
    }

    public fun read(archive: Int, group: String, file: String, key: XteaKey = XteaKey.ZERO): ByteBuf {
        return readNamed(archive, group.krHashCode(), file.krHashCode(), key)
    }

    public fun write(archive: Int, group: Int, file: Int, buf: ByteBuf, key: XteaKey = XteaKey.ZERO) {
        checkArchive(archive)
        createOrGetArchive(archive).write(group, file, buf, key)
    }

    public fun writeNamed(
        archive: Int,
        groupNameHash: Int,
        fileNameHash: Int,
        buf: ByteBuf,
        key: XteaKey = XteaKey.ZERO
    ) {
        checkArchive(archive)
        createOrGetArchive(archive).writeNamed(groupNameHash, fileNameHash, buf, key)
    }

    public fun write(archive: Int, group: String, file: String, buf: ByteBuf, key: XteaKey = XteaKey.ZERO) {
        writeNamed(archive, group.krHashCode(), file.krHashCode(), buf, key)
    }

    public fun remove(archive: Int) {
        checkArchive(archive)

        if (archives[archive] == null) {
            return
        }

        archives[archive] = null

        unpackedCache.remove(archive)

        store.remove(archive)
        store.remove(Js5Archive.ARCHIVESET, archive)
    }

    public fun remove(archive: Int, group: Int) {
        checkArchive(archive)
        archives[archive]?.remove(group)
    }

    public fun removeNamed(archive: Int, groupNameHash: Int) {
        checkArchive(archive)
        archives[archive]?.removeNamed(groupNameHash)
    }

    public fun remove(archive: Int, group: String) {
        return removeNamed(archive, group.krHashCode())
    }

    public fun remove(archive: Int, group: Int, file: Int, key: XteaKey = XteaKey.ZERO) {
        checkArchive(archive)
        archives[archive]?.remove(group, file, key)
    }

    public fun removeNamed(archive: Int, groupNameHash: Int, fileNameHash: Int, key: XteaKey = XteaKey.ZERO) {
        checkArchive(archive)
        archives[archive]?.removeNamed(groupNameHash, fileNameHash, key)
    }

    public fun remove(archive: Int, group: String, file: String, key: XteaKey = XteaKey.ZERO) {
        return removeNamed(archive, group.krHashCode(), file.krHashCode(), key)
    }

    /**
     * Writes pending changes back to the underlying [Store].
     */
    override fun flush() {
        unpackedCache.flush()

        for (archive in archives) {
            archive?.flush()
        }
    }

    /**
     * Writes pending changes back to the underlying [Store] and clears the
     * internal group cache.
     */
    public fun clear() {
        unpackedCache.clear()

        for (archive in archives) {
            archive?.flush()
        }
    }

    override fun close() {
        clear()
        store.close()
    }

    public companion object {
        public const val MAX_ARCHIVE: Int = 254

        public fun open(
            root: Path,
            alloc: ByteBufAllocator = ByteBufAllocator.DEFAULT,
            unpackedCacheSize: Int = UnpackedCache.DEFAULT_CAPACITY
        ): Cache {
            return open(Store.open(root, alloc), alloc, unpackedCacheSize)
        }

        public fun open(
            store: Store,
            alloc: ByteBufAllocator = ByteBufAllocator.DEFAULT,
            unpackedCacheSize: Int = UnpackedCache.DEFAULT_CAPACITY
        ): Cache {
            val cache = Cache(store, alloc, unpackedCacheSize)
            cache.init()
            return cache
        }

        private fun checkArchive(archive: Int) {
            require(archive in 0..MAX_ARCHIVE)
        }
    }
}
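The String overloads above never store names: they hash each name with krHashCode and delegate to the *Named variants, exactly as write(String, ...) does via writeNamed. For example (the names are hypothetical):

// These two calls are equivalent:
cache.exists(2, "title", "logo")
cache.existsNamed(2, "title".krHashCode(), "logo".krHashCode())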
CacheArchive.kt
@@ -0,0 +1,99 @@
package org.openrs2.cache

import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufUtil
import org.openrs2.buffer.crc32
import org.openrs2.crypto.whirlpool

public class CacheArchive internal constructor(
    alloc: ByteBufAllocator,
    index: Js5Index,
    archive: Int,
    unpackedCache: UnpackedCache,
    private val store: Store
) : Archive(alloc, index, archive, unpackedCache) {
    override fun packedExists(group: Int): Boolean {
        return store.exists(archive, group)
    }

    override fun readPacked(group: Int): ByteBuf {
        return store.read(archive, group)
    }

    override fun writePacked(group: Int, buf: ByteBuf) {
        store.write(archive, group, buf)
    }

    override fun writePackedIndex(buf: ByteBuf) {
        store.write(Js5Archive.ARCHIVESET, archive, buf)
    }

    override fun removePacked(group: Int) {
        store.remove(archive, group)
    }

    override fun appendVersion(buf: ByteBuf, version: Int) {
        buf.writeShort(version)
    }

    override fun verifyCompressed(buf: ByteBuf, entry: Js5Index.MutableGroup) {
        val version = VersionTrailer.strip(buf)
        val truncatedVersion = entry.version and 0xFFFF
        if (version != truncatedVersion) {
            throw StoreCorruptException(
                "Archive $archive group ${entry.id} is out of date " +
                    "(expected version $truncatedVersion, actual version $version)"
            )
        }

        val checksum = buf.crc32()
        if (checksum != entry.checksum) {
            throw StoreCorruptException(
                "Archive $archive group ${entry.id} is corrupt " +
                    "(expected checksum ${entry.checksum}, actual checksum $checksum)"
            )
        }

        val length = buf.readableBytes()
        if (index.hasLengths && length != entry.length) {
            throw StoreCorruptException(
                "Archive $archive group ${entry.id} is corrupt " +
                    "(expected length ${entry.length}, actual length $length)"
            )
        }

        if (index.hasDigests) {
            val digest = buf.whirlpool()
            if (!digest.contentEquals(entry.digest!!)) {
                throw StoreCorruptException(
                    "Archive $archive group ${entry.id} is corrupt " +
                        "(expected digest ${ByteBufUtil.hexDump(entry.digest)}, " +
                        "actual digest ${ByteBufUtil.hexDump(digest)})"
                )
            }
        }
    }

    override fun verifyUncompressed(buf: ByteBuf, entry: Js5Index.MutableGroup) {
        val length = buf.readableBytes()
        if (index.hasLengths && length != entry.uncompressedLength) {
            throw StoreCorruptException(
                "Archive $archive group ${entry.id} is corrupt " +
                    "(expected uncompressed length ${entry.uncompressedLength}, " +
                    "actual length $length)"
            )
        }

        if (index.hasUncompressedChecksums) {
            val uncompressedChecksum = buf.crc32()
            if (uncompressedChecksum != entry.uncompressedChecksum) {
                throw StoreCorruptException(
                    "Archive $archive group ${entry.id} is corrupt " +
                        "(expected uncompressed checksum ${entry.uncompressedChecksum}, " +
                        "actual uncompressed checksum $uncompressedChecksum)"
                )
            }
        }
    }
}
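Because appendVersion stores the version with writeShort, only the low 16 bits survive in the trailer, which is why verifyCompressed compares against entry.version and 0xFFFF after VersionTrailer.strip has removed the trailer (and why the CRC-32 is computed over the stripped buffer). A worked example of the truncation:

val version = 0x12345
val trailer = version and 0xFFFF // 0x2345: what writeShort actually stores
// a stored trailer of 0x2345 matches any version congruent to 0x2345 mod 2^16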
Js5Pack.kt
@@ -0,0 +1,170 @@
package org.openrs2.cache

import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import it.unimi.dsi.fastutil.ints.Int2ObjectAVLTreeMap
import it.unimi.dsi.fastutil.ints.Int2ObjectSortedMap
import org.openrs2.buffer.use
import java.io.Closeable
import java.io.DataInputStream
import java.io.FileNotFoundException
import java.io.IOException
import java.io.InputStream
import java.io.OutputStream
import java.nio.file.Files
import java.nio.file.Path

/**
 * A high-level interface for reading and writing files to and from a
 * single JS5 archive encoded in `.js5` format.
 */
public class Js5Pack private constructor(
    alloc: ByteBufAllocator,
    index: Js5Index,
    unpackedCacheSize: Int,
    private var packedIndex: ByteBuf,
    private val packed: Int2ObjectSortedMap<ByteBuf>
) : Archive(alloc, index, 0, UnpackedCache(unpackedCacheSize)), Closeable {
    override fun packedExists(group: Int): Boolean {
        return packed.containsKey(group)
    }

    override fun readPacked(group: Int): ByteBuf {
        return packed[group]?.retainedSlice() ?: throw FileNotFoundException()
    }

    override fun writePacked(group: Int, buf: ByteBuf) {
        packed.put(group, buf.retain().asReadOnly())?.release()
    }

    override fun writePackedIndex(buf: ByteBuf) {
        packedIndex.release()
        packedIndex = buf.retain().asReadOnly()
    }

    override fun removePacked(group: Int) {
        packed.remove(group)?.release()
    }

    override fun appendVersion(buf: ByteBuf, version: Int) {
        // empty
    }

    override fun verifyCompressed(buf: ByteBuf, entry: Js5Index.MutableGroup) {
        // empty
    }

    override fun verifyUncompressed(buf: ByteBuf, entry: Js5Index.MutableGroup) {
        // empty
    }

    public fun write(path: Path) {
        Files.newOutputStream(path).use { output ->
            write(output)
        }
    }

    public fun write(output: OutputStream) {
        flush()

        packedIndex.getBytes(packedIndex.readerIndex(), output, packedIndex.readableBytes())

        for (compressed in packed.values) {
            compressed.getBytes(compressed.readerIndex(), output, compressed.readableBytes())
        }
    }

    override fun flush() {
        unpackedCache.flush()
        super.flush()
    }

    public fun clear() {
        unpackedCache.clear()
        super.flush()
    }

    override fun close() {
        clear()
        packedIndex.release()
        packed.values.forEach(ByteBuf::release)
    }

    public companion object {
        public fun create(
            alloc: ByteBufAllocator = ByteBufAllocator.DEFAULT,
            unpackedCacheSize: Int = UnpackedCache.DEFAULT_CAPACITY
        ): Js5Pack {
            // TODO(gpe): protocol/flags should be configurable somehow
            val index = Js5Index(Js5Protocol.VERSIONED)

            alloc.buffer().use { uncompressed ->
                index.write(uncompressed)

                Js5Compression.compressBest(uncompressed).use { compressed ->
                    return Js5Pack(alloc, index, unpackedCacheSize, compressed.retain(), Int2ObjectAVLTreeMap())
                }
            }
        }

        public fun read(
            path: Path,
            alloc: ByteBufAllocator = ByteBufAllocator.DEFAULT,
            unpackedCacheSize: Int = UnpackedCache.DEFAULT_CAPACITY
        ): Js5Pack {
            return Files.newInputStream(path).use { input ->
                read(input, alloc, unpackedCacheSize)
            }
        }

        public fun read(
            input: InputStream,
            alloc: ByteBufAllocator = ByteBufAllocator.DEFAULT,
            unpackedCacheSize: Int = UnpackedCache.DEFAULT_CAPACITY
        ): Js5Pack {
            val dataInput = DataInputStream(input)

            readCompressed(dataInput, alloc).use { compressed ->
                val index = Js5Compression.uncompress(compressed.slice()).use { uncompressed ->
                    Js5Index.read(uncompressed)
                }

                val packed = Int2ObjectAVLTreeMap<ByteBuf>()
                try {
                    for (group in index) {
                        packed[group.id] = readCompressed(dataInput, alloc).asReadOnly()
                    }

                    packed.values.forEach(ByteBuf::retain)
                    return Js5Pack(alloc, index, unpackedCacheSize, compressed.retain(), packed)
                } finally {
                    packed.values.forEach(ByteBuf::release)
                }
            }
        }

        private fun readCompressed(input: DataInputStream, alloc: ByteBufAllocator): ByteBuf {
            val typeId = input.readUnsignedByte()
            val type = Js5CompressionType.fromOrdinal(typeId)
                ?: throw IOException("Invalid compression type: $typeId")

            val len = input.readInt()
            if (len < 0) {
                throw IOException("Length is negative: $len")
            }

            val lenWithUncompressedLen = if (type == Js5CompressionType.UNCOMPRESSED) {
                len
            } else {
                len + 4
            }

            alloc.buffer(lenWithUncompressedLen + 5, lenWithUncompressedLen + 5).use { buf ->
                buf.writeByte(typeId)
                buf.writeInt(len)
                buf.writeBytes(input, lenWithUncompressedLen)
                return buf.retain()
            }
        }
    }
}
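readCompressed above keeps each container verbatim, including its header: a one-byte compression type and a four-byte compressed length, followed, for compressed types only, by a four-byte uncompressed length before the payload. That is where the len + 4 body bytes and the + 5 capacity allowance come from. A sketch of the framing this assumes:

// .js5 container layout as parsed by readCompressed:
//   byte  0     compression type ordinal (0 = UNCOMPRESSED)
//   bytes 1-4   compressed length `len` (big-endian; must be non-negative)
//   bytes 5-8   uncompressed length (present only when type != UNCOMPRESSED)
//   then        `len` bytes of payload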
UnpackedCache.kt
@@ -0,0 +1,75 @@
package org.openrs2.cache

import it.unimi.dsi.fastutil.longs.Long2ObjectLinkedOpenHashMap
import it.unimi.dsi.fastutil.longs.LongArrayList

internal class UnpackedCache(
    private val capacity: Int
) {
    private val cache = Long2ObjectLinkedOpenHashMap<Archive.Unpacked>()

    init {
        require(capacity >= 1)
    }

    fun get(archive: Int, group: Int): Archive.Unpacked? {
        return cache.getAndMoveToLast(key(archive, group))
    }

    fun put(archive: Int, group: Int, unpacked: Archive.Unpacked) {
        while (cache.size >= capacity) {
            val lru = cache.removeFirst()
            lru.flush()
            lru.release()
        }

        cache.putAndMoveToLast(key(archive, group), unpacked)?.release()
    }
    fun remove(archive: Int) {
        val start = key(archive, 0)
        val end = key(archive + 1, 0)

        val keys = LongArrayList()

        // The map is kept in LRU (access) order, not key order, and its
        // iterator(from) requires `from` to be present, so scan every key and
        // collect the ones that fall in this archive's range.
        val it = cache.keys.iterator()
        while (it.hasNext()) {
            val key = it.nextLong()
            if (key in start until end) {
                keys += key
            }
        }

        for (i in 0 until keys.size) {
            cache.remove(keys.getLong(i)).release()
        }
    }

    fun remove(archive: Int, group: Int) {
        cache.remove(key(archive, group))?.release()
    }

    fun flush() {
        for (unpacked in cache.values) {
            unpacked.flush()
        }
    }

    fun clear() {
        for (unpacked in cache.values) {
            unpacked.flush()
            unpacked.release()
        }

        cache.clear()
    }

    private fun key(archive: Int, group: Int): Long {
        return (archive.toLong() shl 32) or group.toLong()
    }

    companion object {
        const val DEFAULT_CAPACITY: Int = 1024
    }
}
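UnpackedCache packs each (archive, group) pair into a single long key with the archive in the high 32 bits, so all of an archive's groups fall in the half-open range [key(archive, 0), key(archive + 1, 0)) that remove(archive) scans for. For example:

val key = (2L shl 32) or 10L // archive 2, group 10 -> 0x0000_0002_0000_000A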