diff --git a/archive/build.gradle.kts b/archive/build.gradle.kts index a8ab782566..87157b59c5 100644 --- a/archive/build.gradle.kts +++ b/archive/build.gradle.kts @@ -12,6 +12,7 @@ dependencies { api(libs.bundles.guice) api(libs.clikt) + implementation(projects.asm) implementation(projects.buffer) implementation(projects.cache550) implementation(projects.cli) @@ -30,15 +31,18 @@ dependencies { implementation(libs.bundles.ktor) implementation(libs.bundles.thymeleaf) implementation(libs.byteUnits) + implementation(libs.cabParser) implementation(libs.flyway) implementation(libs.guava) implementation(libs.hikaricp) implementation(libs.jackson.jsr310) implementation(libs.jdom) + implementation(libs.jelf) implementation(libs.jquery) implementation(libs.jsoup) implementation(libs.kotlin.coroutines.core) implementation(libs.netty.handler) + implementation(libs.pecoff4j) implementation(libs.postgres) } diff --git a/archive/src/main/kotlin/org/openrs2/archive/ArchiveCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/ArchiveCommand.kt index 48f9d3c9d7..7a5fbfd630 100644 --- a/archive/src/main/kotlin/org/openrs2/archive/ArchiveCommand.kt +++ b/archive/src/main/kotlin/org/openrs2/archive/ArchiveCommand.kt @@ -3,6 +3,7 @@ package org.openrs2.archive import com.github.ajalt.clikt.core.NoOpCliktCommand import com.github.ajalt.clikt.core.subcommands import org.openrs2.archive.cache.CacheCommand +import org.openrs2.archive.client.ClientCommand import org.openrs2.archive.key.KeyCommand import org.openrs2.archive.name.NameCommand import org.openrs2.archive.web.WebCommand @@ -13,6 +14,7 @@ public class ArchiveCommand : NoOpCliktCommand(name = "archive") { init { subcommands( CacheCommand(), + ClientCommand(), KeyCommand(), NameCommand(), WebCommand() diff --git a/archive/src/main/kotlin/org/openrs2/archive/ArchiveModule.kt b/archive/src/main/kotlin/org/openrs2/archive/ArchiveModule.kt index 00058882d9..fb4f15fce8 100644 --- a/archive/src/main/kotlin/org/openrs2/archive/ArchiveModule.kt +++ b/archive/src/main/kotlin/org/openrs2/archive/ArchiveModule.kt @@ -10,6 +10,7 @@ import org.openrs2.archive.key.KeyDownloader import org.openrs2.archive.key.RuneLiteKeyDownloader import org.openrs2.archive.name.NameDownloader import org.openrs2.archive.name.RuneStarNameDownloader +import org.openrs2.asm.AsmModule import org.openrs2.buffer.BufferModule import org.openrs2.cache.CacheModule import org.openrs2.db.Database @@ -21,6 +22,7 @@ import javax.sql.DataSource public object ArchiveModule : AbstractModule() { override fun configure() { + install(AsmModule) install(BufferModule) install(CacheModule) install(HttpModule) diff --git a/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt b/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt index eaea710ad3..83a402ddc0 100644 --- a/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt +++ b/archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt @@ -24,6 +24,8 @@ import org.openrs2.cache.StoreCorruptException import org.openrs2.cache.VersionList import org.openrs2.cache.VersionTrailer import org.openrs2.crypto.Whirlpool +import org.openrs2.crypto.sha1 +import org.openrs2.crypto.whirlpool import org.openrs2.db.Database import org.postgresql.util.PSQLState import java.io.IOException @@ -84,7 +86,8 @@ public class CacheImporter @Inject constructor( ) : DefaultByteBufHolder(buf) { public val bytes: ByteArray = ByteBufUtil.getBytes(buf, buf.readerIndex(), buf.readableBytes(), false) public val crc32: Int = buf.crc32() - 
public val whirlpool: ByteArray = Whirlpool.whirlpool(bytes) + public val sha1: ByteArray = buf.sha1() + public val whirlpool: ByteArray = buf.whirlpool() } public class ChecksumTableBlob( @@ -854,6 +857,7 @@ public class CacheImporter @Inject constructor( CREATE TEMPORARY TABLE tmp_blobs ( index INTEGER NOT NULL, crc32 INTEGER NOT NULL, + sha1 BYTEA NOT NULL, whirlpool BYTEA NOT NULL, data BYTEA NOT NULL ) ON COMMIT DROP @@ -992,11 +996,11 @@ public class CacheImporter @Inject constructor( return ids as List } - private fun addBlob(connection: Connection, blob: Blob): Long { + public fun addBlob(connection: Connection, blob: Blob): Long { return addBlobs(connection, listOf(blob)).single() } - private fun addBlobs(connection: Connection, blobs: List): List { + public fun addBlobs(connection: Connection, blobs: List): List { connection.prepareStatement( """ TRUNCATE TABLE tmp_blobs @@ -1007,15 +1011,16 @@ public class CacheImporter @Inject constructor( connection.prepareStatement( """ - INSERT INTO tmp_blobs (index, crc32, whirlpool, data) - VALUES (?, ?, ?, ?) + INSERT INTO tmp_blobs (index, crc32, sha1, whirlpool, data) + VALUES (?, ?, ?, ?, ?) """.trimIndent() ).use { stmt -> for ((i, blob) in blobs.withIndex()) { stmt.setInt(1, i) stmt.setInt(2, blob.crc32) - stmt.setBytes(3, blob.whirlpool) - stmt.setBytes(4, blob.bytes) + stmt.setBytes(3, blob.sha1) + stmt.setBytes(4, blob.whirlpool) + stmt.setBytes(5, blob.bytes) stmt.addBatch() } @@ -1025,8 +1030,8 @@ public class CacheImporter @Inject constructor( connection.prepareStatement( """ - INSERT INTO blobs (crc32, whirlpool, data) - SELECT t.crc32, t.whirlpool, t.data + INSERT INTO blobs (crc32, sha1, whirlpool, data) + SELECT t.crc32, t.sha1, t.whirlpool, t.data FROM tmp_blobs t LEFT JOIN blobs b ON b.whirlpool = t.whirlpool WHERE b.whirlpool IS NULL diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/Architecture.kt b/archive/src/main/kotlin/org/openrs2/archive/client/Architecture.kt new file mode 100644 index 0000000000..fb4b4021e0 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/Architecture.kt @@ -0,0 +1,11 @@ +package org.openrs2.archive.client + +public enum class Architecture { + INDEPENDENT, + UNIVERSAL, + X86, + AMD64, + POWERPC, + SPARC, + SPARCV9 +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/Artifact.kt b/archive/src/main/kotlin/org/openrs2/archive/client/Artifact.kt new file mode 100644 index 0000000000..034167fc70 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/Artifact.kt @@ -0,0 +1,35 @@ +package org.openrs2.archive.client + +import io.netty.buffer.ByteBuf +import io.netty.buffer.ByteBufUtil +import org.openrs2.archive.cache.CacheExporter +import org.openrs2.archive.cache.CacheImporter +import java.time.Instant + +public class Artifact( + data: ByteBuf, + public val game: String, + public val environment: String, + public val build: CacheExporter.Build?, + public val timestamp: Instant?, + public val type: ArtifactType, + public val format: ArtifactFormat, + public val os: OperatingSystem, + public val arch: Architecture, + public val jvm: Jvm, + public val links: List +) : CacheImporter.Blob(data) + +public data class ArtifactLink( + val type: ArtifactType, + val format: ArtifactFormat, + val os: OperatingSystem, + val arch: Architecture, + val jvm: Jvm, + val crc32: Int?, + val sha1: ByteArray, + val size: Int? 
+) { + public val sha1Hex: String + get() = ByteBufUtil.hexDump(sha1) +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ArtifactFormat.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ArtifactFormat.kt new file mode 100644 index 0000000000..d97e1e9838 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ArtifactFormat.kt @@ -0,0 +1,46 @@ +package org.openrs2.archive.client + +import io.ktor.http.ContentType + +public enum class ArtifactFormat { + CAB, + JAR, + NATIVE, + PACK200, + PACKCLASS; + + public fun getPrefix(os: OperatingSystem): String { + return when (this) { + NATIVE -> os.getPrefix() + else -> "" + } + } + + public fun getExtension(os: OperatingSystem): String { + return when (this) { + CAB -> "cab" + JAR -> "jar" + NATIVE -> os.getExtension() + PACK200 -> "pack200" + PACKCLASS -> "js5" + } + } + + public fun getContentType(os: OperatingSystem): ContentType { + return when (this) { + CAB -> CAB_MIME_TYPE + JAR -> JAR_MIME_TYPE + NATIVE -> os.getContentType() + PACK200, PACKCLASS -> ContentType.Application.OctetStream + } + } + + public fun isJar(): Boolean { + return this != NATIVE + } + + private companion object { + private val CAB_MIME_TYPE = ContentType("application", "vnd.ms-cab-compressed") + private val JAR_MIME_TYPE = ContentType("application", "java-archive") + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ArtifactType.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ArtifactType.kt new file mode 100644 index 0000000000..21180ee3ac --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ArtifactType.kt @@ -0,0 +1,16 @@ +package org.openrs2.archive.client + +public enum class ArtifactType { + BROWSERCONTROL, + CLIENT, + CLIENT_GL, + GLUEGEN_RT, + JAGGL, + JAGGL_DRI, + JAGMISC, + JOGL, + JOGL_AWT, + LOADER, + LOADER_GL, + UNPACKCLASS +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ClientCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ClientCommand.kt new file mode 100644 index 0000000000..a83b2ff7f8 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ClientCommand.kt @@ -0,0 +1,14 @@ +package org.openrs2.archive.client + +import com.github.ajalt.clikt.core.NoOpCliktCommand +import com.github.ajalt.clikt.core.subcommands + +public class ClientCommand : NoOpCliktCommand(name = "client") { + init { + subcommands( + ExportCommand(), + ImportCommand(), + RefreshCommand() + ) + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ClientExporter.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ClientExporter.kt new file mode 100644 index 0000000000..35baf911a5 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ClientExporter.kt @@ -0,0 +1,427 @@ +package org.openrs2.archive.client + +import io.netty.buffer.ByteBuf +import io.netty.buffer.ByteBufUtil +import io.netty.buffer.DefaultByteBufHolder +import io.netty.buffer.Unpooled +import jakarta.inject.Inject +import jakarta.inject.Singleton +import org.openrs2.archive.cache.CacheExporter +import org.openrs2.db.Database +import java.time.Instant +import java.time.ZoneOffset +import java.time.format.DateTimeFormatter + +@Singleton +public class ClientExporter @Inject constructor( + private val database: Database +) { + public data class ArtifactSummary( + public val id: Long, + public val game: String, + public val environment: String, + public val build: CacheExporter.Build?, + public val timestamp: Instant?, + public val type: ArtifactType, + public 
val format: ArtifactFormat, + public val os: OperatingSystem, + public val arch: Architecture, + public val jvm: Jvm, + public val size: Int + ) { + public val name: String + get() { + val builder = StringBuilder() + builder.append(format.getPrefix(os)) + + when (type) { + ArtifactType.CLIENT -> builder.append(game) + ArtifactType.CLIENT_GL -> builder.append("${game}_gl") + ArtifactType.GLUEGEN_RT -> builder.append("gluegen-rt") + else -> builder.append(type.name.lowercase()) + } + + if (jvm == Jvm.MICROSOFT) { + builder.append("ms") + } + + if (os != OperatingSystem.INDEPENDENT) { + builder.append('-') + builder.append(os.name.lowercase()) + } + + if (arch != Architecture.INDEPENDENT) { + builder.append('-') + builder.append(arch.name.lowercase()) + } + + if (build != null) { + builder.append("-b") + builder.append(build) + } + + if (timestamp != null) { + builder.append('-') + builder.append( + timestamp + .atOffset(ZoneOffset.UTC) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss")) + ) + } + + builder.append("-openrs2#") + builder.append(id) + + builder.append('.') + builder.append(format.getExtension(os)) + + return builder.toString() + } + } + + public data class ArtifactLinkExport( + public val id: Long?, + public val build: CacheExporter.Build?, + public val timestamp: Instant?, + public val link: ArtifactLink + ) + + public class Artifact( + public val summary: ArtifactSummary, + public val crc32: Int, + public val sha1: ByteArray, + public val links: List + ) { + public val sha1Hex: String + get() = ByteBufUtil.hexDump(sha1) + } + + public class ArtifactExport( + public val summary: ArtifactSummary, + buf: ByteBuf + ) : DefaultByteBufHolder(buf) + + public suspend fun list(): List { + return database.execute { connection -> + connection.prepareStatement( + """ + SELECT + a.blob_id, + g.name, + e.name, + a.build_major, + a.build_minor, + a.timestamp, + a.type, + a.format, + a.os, + a.arch, + a.jvm, + length(b.data) AS size + FROM artifacts a + JOIN blobs b ON b.id = a.blob_id + JOIN games g ON g.id = a.game_id + JOIN environments e ON e.id = a.environment_id + ORDER BY a.build_major ASC, a.timestamp ASC, a.type ASC, a.format ASC, a.os ASC, a.arch ASC, a.jvm ASC + """.trimIndent() + ).use { stmt -> + stmt.executeQuery().use { rows -> + val artifacts = mutableListOf() + + while (rows.next()) { + val id = rows.getLong(1) + val game = rows.getString(2) + val environment = rows.getString(3) + + var buildMajor: Int? = rows.getInt(4) + if (rows.wasNull()) { + buildMajor = null + } + + var buildMinor: Int? = rows.getInt(5) + if (rows.wasNull()) { + buildMinor = null + } + + val build = if (buildMajor != null) { + CacheExporter.Build(buildMajor, buildMinor) + } else { + null + } + + val timestamp = rows.getTimestamp(6)?.toInstant() + val type = ArtifactType.valueOf(rows.getString(7).uppercase()) + val format = ArtifactFormat.valueOf(rows.getString(8).uppercase()) + val os = OperatingSystem.valueOf(rows.getString(9).uppercase()) + val arch = Architecture.valueOf(rows.getString(10).uppercase()) + val jvm = Jvm.valueOf(rows.getString(11).uppercase()) + val size = rows.getInt(12) + + artifacts += ArtifactSummary( + id, + game, + environment, + build, + timestamp, + type, + format, + os, + arch, + jvm, + size + ) + } + + return@execute artifacts + } + } + } + } + + public suspend fun get(id: Long): Artifact? 
{ + return database.execute { connection -> + val links = mutableListOf() + + connection.prepareStatement( + """ + SELECT + a.blob_id, + a.build_major, + a.build_minor, + a.timestamp, + l.type, + l.format, + l.os, + l.arch, + l.jvm, + COALESCE(l.crc32, b.crc32), + l.sha1, + COALESCE(l.size, length(b.data)) + FROM artifact_links l + LEFT JOIN blobs b ON b.sha1 = l.sha1 + LEFT JOIN artifacts a ON a.blob_id = b.id + WHERE l.blob_id = ? + ORDER BY l.type, l.format, l.os, l.arch, l.jvm + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, id) + + stmt.executeQuery().use { rows -> + while (rows.next()) { + var linkId: Long? = rows.getLong(1) + if (rows.wasNull()) { + linkId = null + } + + var buildMajor: Int? = rows.getInt(2) + if (rows.wasNull()) { + buildMajor = null + } + + var buildMinor: Int? = rows.getInt(3) + if (rows.wasNull()) { + buildMinor = null + } + + val build = if (buildMajor != null) { + CacheExporter.Build(buildMajor, buildMinor) + } else { + null + } + + val timestamp = rows.getTimestamp(4)?.toInstant() + val type = ArtifactType.valueOf(rows.getString(5).uppercase()) + val format = ArtifactFormat.valueOf(rows.getString(6).uppercase()) + val os = OperatingSystem.valueOf(rows.getString(7).uppercase()) + val arch = Architecture.valueOf(rows.getString(8).uppercase()) + val jvm = Jvm.valueOf(rows.getString(9).uppercase()) + + var crc32: Int? = rows.getInt(10) + if (rows.wasNull()) { + crc32 = null + } + + val sha1 = rows.getBytes(11) + + var size: Int? = rows.getInt(12) + if (rows.wasNull()) { + size = null + } + + links += ArtifactLinkExport( + linkId, + build, + timestamp, + ArtifactLink( + type, + format, + os, + arch, + jvm, + crc32, + sha1, + size + ) + ) + } + } + } + + connection.prepareStatement( + """ + SELECT + g.name, + e.name, + a.build_major, + a.build_minor, + a.timestamp, + a.type, + a.format, + a.os, + a.arch, + a.jvm, + length(b.data) AS size, + b.crc32, + b.sha1 + FROM artifacts a + JOIN games g ON g.id = a.game_id + JOIN environments e ON e.id = a.environment_id + JOIN blobs b ON b.id = a.blob_id + WHERE a.blob_id = ? + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, id) + + stmt.executeQuery().use { rows -> + if (!rows.next()) { + return@execute null + } + + val game = rows.getString(1) + val environment = rows.getString(2) + + var buildMajor: Int? = rows.getInt(3) + if (rows.wasNull()) { + buildMajor = null + } + + var buildMinor: Int? = rows.getInt(4) + if (rows.wasNull()) { + buildMinor = null + } + + val build = if (buildMajor != null) { + CacheExporter.Build(buildMajor!!, buildMinor) + } else { + null + } + + val timestamp = rows.getTimestamp(5)?.toInstant() + val type = ArtifactType.valueOf(rows.getString(6).uppercase()) + val format = ArtifactFormat.valueOf(rows.getString(7).uppercase()) + val os = OperatingSystem.valueOf(rows.getString(8).uppercase()) + val arch = Architecture.valueOf(rows.getString(9).uppercase()) + val jvm = Jvm.valueOf(rows.getString(10).uppercase()) + val size = rows.getInt(11) + val crc32 = rows.getInt(12) + val sha1 = rows.getBytes(13) + + return@execute Artifact( + ArtifactSummary( + id, + game, + environment, + build, + timestamp, + type, + format, + os, + arch, + jvm, + size + ), crc32, sha1, links + ) + } + } + } + } + + public suspend fun export(id: Long): ArtifactExport? 
{ + return database.execute { connection -> + connection.prepareStatement( + """ + SELECT + g.name, + e.name, + a.build_major, + a.build_minor, + a.timestamp, + a.type, + a.format, + a.os, + a.arch, + a.jvm, + b.data + FROM artifacts a + JOIN games g ON g.id = a.game_id + JOIN environments e ON e.id = a.environment_id + JOIN blobs b ON b.id = a.blob_id + WHERE a.blob_id = ? + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, id) + + stmt.executeQuery().use { rows -> + if (!rows.next()) { + return@execute null + } + + val game = rows.getString(1) + val environment = rows.getString(2) + + var buildMajor: Int? = rows.getInt(3) + if (rows.wasNull()) { + buildMajor = null + } + + var buildMinor: Int? = rows.getInt(4) + if (rows.wasNull()) { + buildMinor = null + } + + val build = if (buildMajor != null) { + CacheExporter.Build(buildMajor, buildMinor) + } else { + null + } + + val timestamp = rows.getTimestamp(5)?.toInstant() + val type = ArtifactType.valueOf(rows.getString(6).uppercase()) + val format = ArtifactFormat.valueOf(rows.getString(7).uppercase()) + val os = OperatingSystem.valueOf(rows.getString(8).uppercase()) + val arch = Architecture.valueOf(rows.getString(9).uppercase()) + val jvm = Jvm.valueOf(rows.getString(10).uppercase()) + + val buf = Unpooled.wrappedBuffer(rows.getBytes(11)) + val size = buf.readableBytes() + + return@execute ArtifactExport( + ArtifactSummary( + id, + game, + environment, + build, + timestamp, + type, + format, + os, + arch, + jvm, + size + ), buf + ) + } + } + } + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ClientImporter.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ClientImporter.kt new file mode 100644 index 0000000000..5d151e015d --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ClientImporter.kt @@ -0,0 +1,740 @@ +package org.openrs2.archive.client + +import com.github.michaelbull.logging.InlineLogger +import com.kichik.pecoff4j.PE +import com.kichik.pecoff4j.constant.MachineType +import com.kichik.pecoff4j.io.PEParser +import dorkbox.cabParser.CabParser +import dorkbox.cabParser.CabStreamSaver +import dorkbox.cabParser.structure.CabFileEntry +import io.netty.buffer.ByteBuf +import io.netty.buffer.ByteBufAllocator +import io.netty.buffer.ByteBufInputStream +import io.netty.buffer.ByteBufOutputStream +import io.netty.buffer.Unpooled +import io.netty.util.ByteProcessor +import jakarta.inject.Inject +import jakarta.inject.Singleton +import net.fornwall.jelf.ElfFile +import net.fornwall.jelf.ElfSymbol +import org.objectweb.asm.tree.ClassNode +import org.objectweb.asm.tree.LdcInsnNode +import org.objectweb.asm.tree.MethodInsnNode +import org.objectweb.asm.tree.TypeInsnNode +import org.openrs2.archive.cache.CacheExporter +import org.openrs2.archive.cache.CacheImporter +import org.openrs2.asm.InsnMatcher +import org.openrs2.asm.classpath.Library +import org.openrs2.asm.hasCode +import org.openrs2.asm.intConstant +import org.openrs2.asm.io.CabLibraryReader +import org.openrs2.asm.io.JarLibraryReader +import org.openrs2.asm.io.LibraryReader +import org.openrs2.asm.io.Pack200LibraryReader +import org.openrs2.asm.io.PackClassLibraryReader +import org.openrs2.buffer.use +import org.openrs2.compress.gzip.Gzip +import org.openrs2.db.Database +import org.openrs2.util.io.entries +import java.io.ByteArrayInputStream +import java.io.ByteArrayOutputStream +import java.io.InputStream +import java.io.OutputStream +import java.nio.file.Files +import java.nio.file.Path +import java.sql.Connection +import 
java.sql.Types +import java.time.Instant +import java.time.LocalDate +import java.time.Month +import java.time.ZoneOffset +import java.util.jar.JarInputStream +import java.util.jar.JarOutputStream +import java.util.jar.Pack200 + +@Singleton +public class ClientImporter @Inject constructor( + private val database: Database, + private val alloc: ByteBufAllocator, + private val packClassLibraryReader: PackClassLibraryReader, + private val importer: CacheImporter +) { + public suspend fun import(paths: Iterable) { + alloc.buffer().use { buf -> + for (path in paths) { + buf.clear() + + Files.newInputStream(path).use { input -> + ByteBufOutputStream(buf).use { output -> + input.copyTo(output) + } + } + + logger.info { "Importing $path" } + import(parse(buf)) + } + } + } + + public suspend fun import(artifact: Artifact) { + database.execute { connection -> + importer.prepare(connection) + import(connection, artifact) + } + } + + private fun import(connection: Connection, artifact: Artifact) { + val id = importer.addBlob(connection, artifact) + + val gameId = connection.prepareStatement( + """ + SELECT id + FROM games + WHERE name = ? + """.trimIndent() + ).use { stmt -> + stmt.setString(1, artifact.game) + + stmt.executeQuery().use { rows -> + if (!rows.next()) { + throw IllegalArgumentException() + } + + rows.getInt(1) + } + } + + val environmentId = connection.prepareStatement( + """ + SELECT id + FROM environments + WHERE name = ? + """.trimIndent() + ).use { stmt -> + stmt.setString(1, artifact.environment) + + stmt.executeQuery().use { rows -> + if (!rows.next()) { + throw IllegalArgumentException() + } + + rows.getInt(1) + } + } + + connection.prepareStatement( + """ + INSERT INTO artifacts (blob_id, game_id, environment_id, build_major, build_minor, timestamp, type, format, os, arch, jvm) + VALUES (?, ?, ?, ?, ?, ?, ?::artifact_type, ?::artifact_format, ?::os, ?::arch, ?::jvm) + ON CONFLICT (blob_id) DO UPDATE SET + game_id = EXCLUDED.game_id, + environment_id = EXCLUDED.environment_id, + build_major = EXCLUDED.build_major, + build_minor = EXCLUDED.build_minor, + timestamp = EXCLUDED.timestamp, + type = EXCLUDED.type, + format = EXCLUDED.format, + os = EXCLUDED.os, + arch = EXCLUDED.arch, + jvm = EXCLUDED.jvm + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, id) + stmt.setInt(2, gameId) + stmt.setInt(3, environmentId) + stmt.setObject(4, artifact.build?.major, Types.INTEGER) + stmt.setObject(5, artifact.build?.minor, Types.INTEGER) + stmt.setObject(6, artifact.timestamp?.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE) + stmt.setString(7, artifact.type.name.lowercase()) + stmt.setString(8, artifact.format.name.lowercase()) + stmt.setString(9, artifact.os.name.lowercase()) + stmt.setString(10, artifact.arch.name.lowercase()) + stmt.setString(11, artifact.jvm.name.lowercase()) + + stmt.execute() + } + + connection.prepareStatement( + """ + DELETE FROM artifact_links + WHERE blob_id = ? + """.trimIndent() + ).use { stmt -> + stmt.setLong(1, id) + stmt.execute() + } + + connection.prepareStatement( + """ + INSERT INTO artifact_links (blob_id, type, format, os, arch, jvm, sha1, crc32, size) + VALUES (?, ?::artifact_type, ?::artifact_format, ?::os, ?::arch, ?::jvm, ?, ?, ?) 
+ """.trimIndent() + ).use { stmt -> + for (link in artifact.links) { + stmt.setLong(1, id) + stmt.setString(2, link.type.name.lowercase()) + stmt.setString(3, link.format.name.lowercase()) + stmt.setString(4, link.os.name.lowercase()) + stmt.setString(5, link.arch.name.lowercase()) + stmt.setString(6, link.jvm.name.lowercase()) + stmt.setBytes(7, link.sha1) + stmt.setObject(8, link.crc32, Types.INTEGER) + stmt.setObject(9, link.size, Types.INTEGER) + + stmt.addBatch() + } + + stmt.executeBatch() + } + } + + public suspend fun refresh() { + database.execute { connection -> + importer.prepare(connection) + + var lastId: Long? = null + val blobs = mutableListOf() + + while (true) { + blobs.clear() + + connection.prepareStatement( + """ + SELECT a.blob_id, b.data + FROM artifacts a + JOIN blobs b ON b.id = a.blob_id + WHERE ? IS NULL OR a.blob_id > ? + ORDER BY a.blob_id ASC + LIMIT 1024 + """.trimIndent() + ).use { stmt -> + stmt.setObject(1, lastId, Types.BIGINT) + stmt.setObject(2, lastId, Types.BIGINT) + + stmt.executeQuery().use { rows -> + while (rows.next()) { + lastId = rows.getLong(1) + blobs += rows.getBytes(2) + } + } + } + + if (blobs.isEmpty()) { + return@execute + } + + for (blob in blobs) { + Unpooled.wrappedBuffer(blob).use { buf -> + import(connection, parse(buf)) + } + } + } + } + } + + private fun parse(buf: ByteBuf): Artifact { + return if (buf.hasPrefix(JAR)) { + parseJar(buf) + } else if (buf.hasPrefix(PACK200)) { + parsePack200(buf) + } else if (buf.hasPrefix(CAB)) { + parseCab(buf) + } else if ( + buf.hasPrefix(PACKCLASS_UNCOMPRESSED) || + buf.hasPrefix(PACKCLASS_BZIP2) || + buf.hasPrefix(PACKCLASS_GZIP) + ) { + parseLibrary(buf, packClassLibraryReader, ArtifactFormat.PACKCLASS) + } else if (buf.hasPrefix(ELF)) { + parseElf(buf) + } else if (buf.hasPrefix(PE)) { + parsePe(buf) + } else { + throw IllegalArgumentException() + } + } + + private fun parseElf(buf: ByteBuf): Artifact { + val elf = ElfFile.from(ByteBufInputStream(buf.slice())) + + val arch = when (elf.e_machine.toInt()) { + ElfFile.ARCH_i386 -> Architecture.X86 + ElfFile.ARCH_X86_64 -> Architecture.AMD64 + ElfFile.ARCH_SPARC -> Architecture.SPARC + ARCH_SPARCV9 -> Architecture.SPARCV9 + else -> throw IllegalArgumentException() + } + + val comment = String(elf.firstSectionByName(".comment").data) + val os = if (comment.contains(SOLARIS_COMMENT)) { + OperatingSystem.SOLARIS + } else { + OperatingSystem.LINUX + } + + val symbols = elf.dynamicSymbolTableSection ?: throw IllegalArgumentException() + val type = getArtifactType(symbols.symbols.asSequence().mapNotNull(ElfSymbol::getName)) + + return Artifact( + buf.retain(), + "shared", + "live", + null, + null, + type, + ArtifactFormat.NATIVE, + os, + arch, + Jvm.SUN, + emptyList() + ) + } + + private fun getArtifactType(symbols: Sequence): ArtifactType { + for (symbol in symbols) { + var name = symbol + if (name.startsWith('_')) { + name = name.substring(1) + } + if (name.startsWith("Java_")) { // RNI methods don't have a Java_ prefix + name = name.substring("Java_".length) + } + + if (name.startsWith("jaggl_X11_dri_")) { + return ArtifactType.JAGGL_DRI + } else if (name.startsWith("jaggl_opengl_")) { + return ArtifactType.JAGGL + } else if (name.startsWith("com_sun_opengl_impl_GLImpl_")) { + return ArtifactType.JOGL + } else if (name.startsWith("com_sun_opengl_impl_JAWT_")) { + return ArtifactType.JOGL_AWT + } else if (name.startsWith("com_sun_gluegen_runtime_")) { + return ArtifactType.GLUEGEN_RT + } else if (name.startsWith("jagex3_jagmisc_jagmisc_")) { + return 
ArtifactType.JAGMISC + } else if (name.startsWith("nativeadvert_browsercontrol_")) { + return ArtifactType.BROWSERCONTROL + } + } + + throw IllegalArgumentException() + } + + private fun parsePe(buf: ByteBuf): Artifact { + val pe = PEParser.parse(ByteBufInputStream(buf.slice())) + + val arch = when (pe.coffHeader.machine) { + MachineType.IMAGE_FILE_MACHINE_I386 -> Architecture.X86 + MachineType.IMAGE_FILE_MACHINE_AMD64 -> Architecture.AMD64 + else -> throw IllegalArgumentException() + } + + val symbols = parsePeExportNames(buf, pe).toSet() + + val type = getArtifactType(symbols.asSequence()) + val jvm = if (symbols.contains("RNIGetCompatibleVersion")) { + Jvm.MICROSOFT + } else { + Jvm.SUN + } + + return Artifact( + buf.retain(), + "shared", + "live", + null, + Instant.ofEpochSecond(pe.coffHeader.timeDateStamp.toLong()), + type, + ArtifactFormat.NATIVE, + OperatingSystem.WINDOWS, + arch, + jvm, + emptyList() + ) + } + + private fun parsePeExportNames(buf: ByteBuf, pe: PE): Sequence { + return sequence { + val exportTable = pe.imageData.exportTable + val namePointerTable = + pe.sectionTable.rvaConverter.convertVirtualAddressToRawDataPointer(exportTable.namePointerRVA.toInt()) + + for (i in 0 until exportTable.numberOfNamePointers.toInt()) { + val namePointer = buf.readerIndex() + buf.getIntLE(buf.readerIndex() + namePointerTable + 4 * i) + + val end = buf.forEachByte(namePointer, buf.writerIndex() - namePointer, ByteProcessor.FIND_NUL) + require(end != -1) { + "Unterminated string" + } + + yield(buf.toString(namePointer, end - namePointer, Charsets.US_ASCII)) + } + } + } + + private fun parseJar(buf: ByteBuf): Artifact { + val timestamp = getJarTimestamp(ByteBufInputStream(buf.slice())) + return parseLibrary(buf, JarLibraryReader, ArtifactFormat.JAR, timestamp) + } + + private fun parsePack200(buf: ByteBuf): Artifact { + val timestamp = ByteArrayOutputStream().use { tempOutput -> + Gzip.createHeaderlessInputStream(ByteBufInputStream(buf.slice())).use { gzipInput -> + JarOutputStream(tempOutput).use { jarOutput -> + Pack200.newUnpacker().unpack(gzipInput, jarOutput) + } + } + + getJarTimestamp(ByteArrayInputStream(tempOutput.toByteArray())) + } + + return parseLibrary(buf, Pack200LibraryReader, ArtifactFormat.PACK200, timestamp) + } + + private fun parseCab(buf: ByteBuf): Artifact { + val timestamp = getCabTimestamp(ByteBufInputStream(buf.slice())) + return parseLibrary(buf, CabLibraryReader, ArtifactFormat.CAB, timestamp) + } + + private fun getJarTimestamp(input: InputStream): Instant? { + var timestamp: Instant? = null + + JarInputStream(input).use { jar -> + for (entry in jar.entries) { + val t = entry.lastModifiedTime?.toInstant() + if (timestamp == null || (t != null && t < timestamp)) { + timestamp = t + } + } + } + + return timestamp + } + + private fun getCabTimestamp(input: InputStream): Instant? { + var timestamp: Instant? = null + + CabParser(input, object : CabStreamSaver { + override fun closeOutputStream(outputStream: OutputStream, entry: CabFileEntry) { + // entry + } + + override fun openOutputStream(entry: CabFileEntry): OutputStream { + val t = entry.date.toInstant() + if (timestamp == null || t < timestamp) { + timestamp = t + } + + return OutputStream.nullOutputStream() + } + + override fun saveReservedAreaData(data: ByteArray?, dataLength: Int): Boolean { + return false + } + }).extractStream() + + return timestamp + } + + private fun parseLibrary( + buf: ByteBuf, + reader: LibraryReader, + format: ArtifactFormat, + timestamp: Instant? 
= null
+    ): Artifact {
+        val library = Library.read("client", ByteBufInputStream(buf.slice()), reader)
+
+        val game: String
+        val build: CacheExporter.Build?
+        val type: ArtifactType
+        val links: List<ArtifactLink>
+
+        val mudclient = library["mudclient"]
+        val client = library["client"]
+        val loader = library["loader"]
+
+        if (mudclient != null) {
+            game = "classic"
+            build = null // TODO(gpe): classic support
+            type = ArtifactType.CLIENT
+            links = emptyList()
+        } else if (client != null) {
+            game = "runescape"
+            build = parseClientBuild(client)
+            type = if (build != null && build.major < COMBINED_BUILD && isClientGl(library)) {
+                ArtifactType.CLIENT_GL
+            } else {
+                ArtifactType.CLIENT
+            }
+            links = emptyList()
+        } else if (loader != null) {
+            if (isLoaderClassic(loader)) {
+                game = "classic"
+                build = null // TODO(gpe): classic support
+                type = ArtifactType.LOADER
+                links = emptyList() // TODO(gpe): classic support
+            } else {
+                game = "runescape"
+                build = parseLoaderBuild(library)
+                type = if (timestamp != null && timestamp < COMBINED_TIMESTAMP && isLoaderGl(library)) {
+                    ArtifactType.LOADER_GL
+                } else {
+                    ArtifactType.LOADER
+                }
+                links = parseLinks(library)
+            }
+        } else if (library.contains("mapview")) {
+            game = "mapview"
+            build = null
+            type = ArtifactType.CLIENT
+            links = emptyList()
+        } else if (library.contains("jaggl/opengl")) {
+            game = "shared"
+            type = ArtifactType.JAGGL
+            build = null
+            links = emptyList()
+        } else if (library.contains("com/sun/opengl/impl/GLImpl")) {
+            game = "shared"
+            type = ArtifactType.JOGL
+            build = null
+            links = emptyList()
+        } else if (library.contains("unpackclass")) {
+            game = "shared"
+            type = ArtifactType.UNPACKCLASS
+            build = null
+            links = emptyList()
+        } else {
+            throw IllegalArgumentException()
+        }
+
+        return Artifact(
+            buf.retain(),
+            game,
+            "live",
+            build,
+            timestamp,
+            type,
+            format,
+            OperatingSystem.INDEPENDENT,
+            Architecture.INDEPENDENT,
+            Jvm.INDEPENDENT,
+            links
+        )
+    }
+
+    private fun isClientGl(library: Library): Boolean {
+        for (clazz in library) {
+            for (method in clazz.methods) {
+                if (!method.hasCode) {
+                    continue
+                }
+
+                for (insn in method.instructions) {
+                    if (insn is MethodInsnNode && insn.name == "glBegin") {
+                        return true
+                    }
+                }
+            }
+        }
+
+        return false
+    }
+
+    private fun isLoaderClassic(clazz: ClassNode): Boolean {
+        for (method in clazz.methods) {
+            if (!method.hasCode) {
+                continue
+            }
+
+            for (insn in method.instructions) {
+                if (insn is LdcInsnNode && insn.cst == "mudclient") {
+                    return true
+                }
+            }
+        }
+
+        return false
+    }
+
+    private fun isLoaderGl(library: Library): Boolean {
+        for (clazz in library) {
+            for (method in clazz.methods) {
+                if (!method.hasCode || method.name != "<clinit>") {
+                    continue
+                }
+
+                for (insn in method.instructions) {
+                    if (insn !is LdcInsnNode) {
+                        continue
+                    }
+
+                    if (insn.cst == "jaggl.dll" || insn.cst == "jogl.dll") {
+                        return true
+                    }
+                }
+            }
+        }
+
+        return false
+    }
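// Editor's sketch (not part of the diff): how the OLD_ENGINE_VERSION_MATCHER pattern used by
// parseClientBuild below is intended to recover the build number from old-engine clients, which
// build the loading-screen text "RS2 user client - release #<build>" with a StringBuilder.
// The bytecode here is hand-written for the example; InsnMatcher, match() and intConstant are the
// org.openrs2.asm helpers already imported above, used exactly as in the surrounding code.
import org.objectweb.asm.Opcodes
import org.objectweb.asm.tree.InsnNode
import org.objectweb.asm.tree.IntInsnNode
import org.objectweb.asm.tree.LdcInsnNode
import org.objectweb.asm.tree.MethodInsnNode
import org.objectweb.asm.tree.MethodNode
import org.objectweb.asm.tree.TypeInsnNode
import org.openrs2.asm.InsnMatcher
import org.openrs2.asm.intConstant

fun main() {
    // Equivalent of: new StringBuilder("RS2 user client - release #").append(550)
    val method = MethodNode(Opcodes.ACC_PUBLIC or Opcodes.ACC_STATIC, "main", "([Ljava/lang/String;)V", null, null)
    with(method.instructions) {
        add(TypeInsnNode(Opcodes.NEW, "java/lang/StringBuilder"))
        add(InsnNode(Opcodes.DUP))
        add(LdcInsnNode("RS2 user client - release #"))
        add(MethodInsnNode(Opcodes.INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "(Ljava/lang/String;)V", false))
        add(IntInsnNode(Opcodes.SIPUSH, 550))
        add(MethodInsnNode(Opcodes.INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false))
        add(InsnNode(Opcodes.RETURN))
    }

    val matcher = InsnMatcher.compile("LDC INVOKESPECIAL (ICONST | BIPUSH | SIPUSH | LDC)")
    for (match in matcher.match(method)) {
        val ldc = match[0] as LdcInsnNode
        if (ldc.cst == "RS2 user client - release #") {
            println("build=${match[2].intConstant}") // expected to print build=550
        }
    }
}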
+
+    private fun parseClientBuild(clazz: ClassNode): CacheExporter.Build? {
+        for (method in clazz.methods) {
+            if (!method.hasCode || method.name != "main") {
+                continue
+            }
+
+            for (match in OLD_ENGINE_VERSION_MATCHER.match(method)) {
+                val ldc = match[0] as LdcInsnNode
+                if (ldc.cst != OLD_ENGINE_VERSION_STRING) {
+                    continue
+                }
+
+                val version = match[2].intConstant
+                if (version != null) {
+                    return CacheExporter.Build(version, null)
+                }
+            }
+
+            for (match in NEW_ENGINE_VERSION_MATCHER.match(method)) {
+                val new = match[0] as TypeInsnNode
+                if (new.desc != "client") {
+                    continue
+                }
+
+                val candidates = mutableListOf<Int>()
+
+                for (insn in match) {
+                    val candidate = insn.intConstant
+                    if (candidate != null && candidate in NEW_ENGINE_BUILDS) {
+                        candidates += candidate
+                    }
+                }
+
+                candidates -= NEW_ENGINE_RESOLUTIONS
+
+                val version = candidates.singleOrNull()
+                if (version != null) {
+                    return CacheExporter.Build(version, null)
+                }
+            }
+        }
+
+        return null
+    }
+
+    private fun parseLoaderBuild(library: Library): CacheExporter.Build? {
+        val clazz = library["sign/signlink"] ?: return null
+
+        for (field in clazz.fields) {
+            val value = field.value
+            if (field.name == "clientversion" && field.desc == "I" && value is Int) {
+                return CacheExporter.Build(value, null)
+            }
+        }
+
+        return null
+    }
+
+    private fun parseLinks(library: Library): List<ArtifactLink> {
+        val sig = library["sig"]
+        if (sig != null) {
+            var size: Int? = null
+            var sha1: ByteArray? = null
+
+            for (field in sig.fields) {
+                val value = field.value
+                if (field.name == "len" && field.desc == "I" && value is Int) {
+                    size = value
+                }
+            }
+
+            for (method in sig.methods) {
+                if (!method.hasCode || method.name != "<clinit>") {
+                    continue
+                }
+
+                for (match in SHA1_MATCHER.match(method)) {
+                    val len = match[0].intConstant
+                    if (len != SHA1_BYTES) {
+                        continue
+                    }
+
+                    sha1 = ByteArray(SHA1_BYTES)
+                    for (i in 2 until match.size step 4) {
+                        val k = match[i + 1].intConstant!!
+                        val v = match[i + 2].intConstant!!
+ sha1[k] = v.toByte() + } + } + } + + require(size != null && sha1 != null) + + return listOf( + ArtifactLink( + ArtifactType.CLIENT, + ArtifactFormat.JAR, + OperatingSystem.INDEPENDENT, + Architecture.INDEPENDENT, + Jvm.INDEPENDENT, + crc32 = null, + sha1, + size + ) + ) + } + + // TODO(gpe): new engine support + return emptyList() + } + + private fun ByteBuf.hasPrefix(bytes: ByteArray): Boolean { + Unpooled.wrappedBuffer(bytes).use { prefix -> + val len = prefix.readableBytes() + if (readableBytes() < len) { + return false + } + + return slice(readerIndex(), len) == prefix + } + } + + private companion object { + private val logger = InlineLogger() + + private val CAB = byteArrayOf('M'.code.toByte(), 'S'.code.toByte(), 'C'.code.toByte(), 'F'.code.toByte()) + private val ELF = byteArrayOf(0x7F, 'E'.code.toByte(), 'L'.code.toByte(), 'F'.code.toByte()) + private val JAR = byteArrayOf('P'.code.toByte(), 'K'.code.toByte(), 0x03, 0x04) + private val MACHO32BE = byteArrayOf(0xFE.toByte(), 0xED.toByte(), 0xFA.toByte(), 0xCE.toByte()) + private val MACHO32LE = byteArrayOf(0xCE.toByte(), 0xFA.toByte(), 0xED.toByte(), 0xFE.toByte()) + private val MACHO64BE = byteArrayOf(0xFE.toByte(), 0xED.toByte(), 0xFA.toByte(), 0xCF.toByte()) + private val MACHO64LE = byteArrayOf(0xCF.toByte(), 0xFA.toByte(), 0xED.toByte(), 0xFE.toByte()) + private val MACHO_UNIVERSAL = byteArrayOf(0xCA.toByte(), 0xFE.toByte(), 0xBA.toByte(), 0xBE.toByte()) + private val PACK200 = byteArrayOf(0x08) + private val PACKCLASS_UNCOMPRESSED = byteArrayOf(0x00) + private val PACKCLASS_BZIP2 = byteArrayOf(0x01) + private val PACKCLASS_GZIP = byteArrayOf(0x02) + private val PE = byteArrayOf('M'.code.toByte(), 'Z'.code.toByte()) + + private const val OLD_ENGINE_VERSION_STRING = "RS2 user client - release #" + private val OLD_ENGINE_VERSION_MATCHER = + InsnMatcher.compile("LDC INVOKESPECIAL (ICONST | BIPUSH | SIPUSH | LDC)") + + private val NEW_ENGINE_VERSION_MATCHER = InsnMatcher.compile("NEW .*? 
RETURN") + private val NEW_ENGINE_RESOLUTIONS = listOf(765, 503, 1024, 768) + private val NEW_ENGINE_BUILDS = 402..916 + + private const val COMBINED_BUILD = 555 + private val COMBINED_TIMESTAMP = LocalDate.of(2009, Month.SEPTEMBER, 2) + .atStartOfDay(ZoneOffset.UTC) + .toInstant() + + private const val ARCH_SPARCV9 = 43 + private const val SOLARIS_COMMENT = "Solaris Link Editors:" + + private const val SHA1_BYTES = 20 + private val SHA1_MATCHER = + InsnMatcher.compile("BIPUSH NEWARRAY (DUP (ICONST | BIPUSH) (ICONST | BIPUSH | SIPUSH) IASTORE)+") + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ExportCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ExportCommand.kt new file mode 100644 index 0000000000..5f9ed460c2 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ExportCommand.kt @@ -0,0 +1,30 @@ +package org.openrs2.archive.client + +import com.github.ajalt.clikt.core.CliktCommand +import com.github.ajalt.clikt.parameters.arguments.argument +import com.github.ajalt.clikt.parameters.types.defaultStdout +import com.github.ajalt.clikt.parameters.types.long +import com.github.ajalt.clikt.parameters.types.outputStream +import com.google.inject.Guice +import kotlinx.coroutines.runBlocking +import org.openrs2.archive.ArchiveModule +import org.openrs2.inject.CloseableInjector +import java.io.FileNotFoundException + +public class ExportCommand : CliktCommand(name = "export") { + private val id by argument().long() + private val output by argument().outputStream().defaultStdout() + + override fun run(): Unit = runBlocking { + CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector -> + val exporter = injector.getInstance(ClientExporter::class.java) + val artifact = exporter.export(id) ?: throw FileNotFoundException() + try { + val buf = artifact.content() + buf.readBytes(output, buf.readableBytes()) + } finally { + artifact.release() + } + } + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/ImportCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/client/ImportCommand.kt new file mode 100644 index 0000000000..64a8b62ab1 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/ImportCommand.kt @@ -0,0 +1,25 @@ +package org.openrs2.archive.client + +import com.github.ajalt.clikt.core.CliktCommand +import com.github.ajalt.clikt.parameters.arguments.argument +import com.github.ajalt.clikt.parameters.arguments.multiple +import com.github.ajalt.clikt.parameters.types.path +import com.google.inject.Guice +import kotlinx.coroutines.runBlocking +import org.openrs2.archive.ArchiveModule +import org.openrs2.inject.CloseableInjector + +public class ImportCommand : CliktCommand(name = "import") { + private val input by argument().path( + mustExist = true, + canBeDir = false, + mustBeReadable = true, + ).multiple() + + override fun run(): Unit = runBlocking { + CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector -> + val importer = injector.getInstance(ClientImporter::class.java) + importer.import(input) + } + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/Jvm.kt b/archive/src/main/kotlin/org/openrs2/archive/client/Jvm.kt new file mode 100644 index 0000000000..e8005e3a38 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/Jvm.kt @@ -0,0 +1,7 @@ +package org.openrs2.archive.client + +public enum class Jvm { + INDEPENDENT, + SUN, + MICROSOFT +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/OperatingSystem.kt 
b/archive/src/main/kotlin/org/openrs2/archive/client/OperatingSystem.kt new file mode 100644 index 0000000000..3063b7e281 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/OperatingSystem.kt @@ -0,0 +1,43 @@ +package org.openrs2.archive.client + +import io.ktor.http.ContentType + +public enum class OperatingSystem { + INDEPENDENT, + WINDOWS, + MACOS, + LINUX, + SOLARIS; + + public fun getPrefix(): String { + return when (this) { + INDEPENDENT -> throw IllegalArgumentException() + WINDOWS -> "" + else -> "lib" + } + } + + public fun getExtension(): String { + return when (this) { + INDEPENDENT -> throw IllegalArgumentException() + WINDOWS -> "dll" + MACOS -> "dylib" + LINUX, SOLARIS -> "so" + } + } + + public fun getContentType(): ContentType { + return when (this) { + INDEPENDENT -> throw IllegalArgumentException() + WINDOWS -> PE + MACOS -> MACHO + LINUX, SOLARIS -> ELF_SHARED + } + } + + private companion object { + private val ELF_SHARED = ContentType("application", "x-sharedlib") + private val MACHO = ContentType("application", "x-mach-binary") + private val PE = ContentType("application", "vnd.microsoft.portable-executable") + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/client/RefreshCommand.kt b/archive/src/main/kotlin/org/openrs2/archive/client/RefreshCommand.kt new file mode 100644 index 0000000000..3030213439 --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/client/RefreshCommand.kt @@ -0,0 +1,16 @@ +package org.openrs2.archive.client + +import com.github.ajalt.clikt.core.CliktCommand +import com.google.inject.Guice +import kotlinx.coroutines.runBlocking +import org.openrs2.archive.ArchiveModule +import org.openrs2.inject.CloseableInjector + +public class RefreshCommand : CliktCommand(name = "refresh") { + override fun run(): Unit = runBlocking { + CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector -> + val importer = injector.getInstance(ClientImporter::class.java) + importer.refresh() + } + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/web/ClientsController.kt b/archive/src/main/kotlin/org/openrs2/archive/web/ClientsController.kt new file mode 100644 index 0000000000..de7578d77e --- /dev/null +++ b/archive/src/main/kotlin/org/openrs2/archive/web/ClientsController.kt @@ -0,0 +1,82 @@ +package org.openrs2.archive.web + +import io.ktor.http.ContentDisposition +import io.ktor.http.HttpHeaders +import io.ktor.http.HttpStatusCode +import io.ktor.server.application.ApplicationCall +import io.ktor.server.response.header +import io.ktor.server.response.respond +import io.ktor.server.response.respondOutputStream +import io.ktor.server.thymeleaf.ThymeleafContent +import jakarta.inject.Inject +import jakarta.inject.Singleton +import org.openrs2.archive.client.ClientExporter + +@Singleton +public class ClientsController @Inject constructor( + private val exporter: ClientExporter +) { + public suspend fun index(call: ApplicationCall) { + val artifacts = exporter.list() + + call.respond( + ThymeleafContent( + "clients/index.html", mapOf( + "artifacts" to artifacts + ) + ) + ) + } + + public suspend fun show(call: ApplicationCall) { + val id = call.parameters["id"]?.toLongOrNull() + if (id == null) { + call.respond(HttpStatusCode.NotFound) + return + } + + val artifact = exporter.get(id) + if (artifact == null) { + call.respond(HttpStatusCode.NotFound) + return + } + + call.respond( + ThymeleafContent( + "clients/show.html", mapOf( + "artifact" to artifact + ) + ) + ) + } + + public suspend fun export(call: 
ApplicationCall) { + val id = call.parameters["id"]?.toLongOrNull() + if (id == null) { + call.respond(HttpStatusCode.NotFound) + return + } + + val artifact = exporter.export(id) + if (artifact == null) { + call.respond(HttpStatusCode.NotFound) + return + } + + call.response.header( + HttpHeaders.ContentLength, + artifact.summary.size.toString() + ) + + call.response.header( + HttpHeaders.ContentDisposition, + ContentDisposition.Attachment + .withParameter(ContentDisposition.Parameters.FileName, artifact.summary.name) + .toString() + ) + + call.respondOutputStream(artifact.summary.format.getContentType(artifact.summary.os)) { + artifact.content().readBytes(this, artifact.summary.size) + } + } +} diff --git a/archive/src/main/kotlin/org/openrs2/archive/web/WebServer.kt b/archive/src/main/kotlin/org/openrs2/archive/web/WebServer.kt index 6cc9dea8c6..b90c4de9f3 100644 --- a/archive/src/main/kotlin/org/openrs2/archive/web/WebServer.kt +++ b/archive/src/main/kotlin/org/openrs2/archive/web/WebServer.kt @@ -36,6 +36,7 @@ import org.thymeleaf.templateresolver.ClassLoaderTemplateResolver @Singleton public class WebServer @Inject constructor( private val cachesController: CachesController, + private val clientsController: ClientsController, private val keysController: KeysController, @Json private val mapper: ObjectMapper ) { @@ -84,6 +85,9 @@ public class WebServer @Inject constructor( get("/caches/{scope}/{id}/keys.json") { cachesController.exportKeysJson(call) } get("/caches/{scope}/{id}/keys.zip") { cachesController.exportKeysZip(call) } get("/caches/{scope}/{id}/map.png") { cachesController.renderMap(call) } + get("/clients") { clientsController.index(call) } + get("/clients/{id}.dat") { clientsController.export(call) } + get("/clients/{id}") { clientsController.show(call) } get("/keys") { keysController.index(call) } post("/keys") { keysController.import(call) } get("/keys/all.json") { keysController.exportAll(call) } diff --git a/archive/src/main/resources/org/openrs2/archive/migrations/V22__clients.sql b/archive/src/main/resources/org/openrs2/archive/migrations/V22__clients.sql new file mode 100644 index 0000000000..0491175793 --- /dev/null +++ b/archive/src/main/resources/org/openrs2/archive/migrations/V22__clients.sql @@ -0,0 +1,95 @@ +-- @formatter:off + +CREATE EXTENSION IF NOT EXISTS pgcrypto; + +ALTER TABLE blobs ADD COLUMN sha1 BYTEA NULL; + +UPDATE blobs SET sha1 = digest(data, 'sha1'); + +ALTER TABLE blobs ALTER COLUMN sha1 SET NOT NULL; + +-- not UNIQUE as SHA-1 collisions are possible +CREATE INDEX ON blobs USING HASH (sha1); + +INSERT INTO scopes (name) VALUES ('shared'); +INSERT INTO games (name, scope_id) VALUES ('shared', (SELECT id FROM scopes WHERE name = 'shared')); + +INSERT INTO scopes (name) VALUES ('classic'); +INSERT INTO games (name, scope_id) VALUES ('classic', (SELECT id FROM scopes WHERE name = 'classic')); + +INSERT INTO scopes (name) VALUES ('mapview'); +INSERT INTO games (name, scope_id) VALUES ('mapview', (SELECT id FROM scopes WHERE name = 'mapview')); + +CREATE TYPE artifact_type AS ENUM ( + 'browsercontrol', + 'client', + 'client_gl', + 'gluegen_rt', + 'jaggl', + 'jaggl_dri', + 'jagmisc', + 'jogl', + 'jogl_awt', + 'loader', + 'loader_gl', + 'unpackclass' +); + +CREATE TYPE artifact_format AS ENUM ( + 'cab', + 'jar', + 'native', + 'pack200', + 'packclass' +); + +CREATE TYPE os AS ENUM ( + 'independent', + 'windows', + 'macos', + 'linux', + 'solaris' +); + +CREATE TYPE arch AS ENUM ( + 'independent', + 'universal', + 'x86', + 'amd64', + 'powerpc', + 'sparc', 
+ 'sparcv9' +); + +CREATE TYPE jvm AS ENUM ( + 'independent', + 'sun', + 'microsoft' +); + +CREATE TABLE artifacts ( + blob_id BIGINT PRIMARY KEY NOT NULL REFERENCES blobs (id), + game_id INTEGER NOT NULL REFERENCES games (id), + environment_id INTEGER NOT NULL REFERENCES environments (id), + build_major INTEGER NULL, + build_minor INTEGER NULL, + timestamp TIMESTAMPTZ NULL, + type artifact_type NOT NULL, + format artifact_format NOT NULL, + os os NOT NULL, + arch arch NOT NULL, + jvm jvm NOT NULL +); + +CREATE TABLE artifact_links ( + blob_id BIGINT NOT NULL REFERENCES artifacts (blob_id), + type artifact_type NOT NULL, + format artifact_format NOT NULL, + os os NOT NULL, + arch arch NOT NULL, + jvm jvm NOT NULL, + sha1 BYTEA NOT NULL, + crc32 INTEGER NULL, + size INTEGER NULL, + PRIMARY KEY (blob_id, type, format, os, arch, jvm) +); diff --git a/archive/src/main/resources/org/openrs2/archive/templates/clients/index.html b/archive/src/main/resources/org/openrs2/archive/templates/clients/index.html new file mode 100644 index 0000000000..ec467decca --- /dev/null +++ b/archive/src/main/resources/org/openrs2/archive/templates/clients/index.html @@ -0,0 +1,63 @@ + + + + Clients - OpenRS2 Archive + + + + + + + +
[clients/index.html (63 added lines): Thymeleaf "Clients" index page. Renders a table of all
 artifacts with columns Game, Env, Build, Timestamp, Type, Format, OS, Arch, JVM, Size and Links;
 each row (e.g. runescape / live / 550 / client_gl / pack200 / independent / independent /
 independent / 494 KiB) offers Download and More links. Markup not preserved in this extract.]
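// Editor's sketch (not part of the diff): the file name behind the templates' Download links is
// produced by ClientExporter.ArtifactSummary.name above. The values below are made up for the
// example, and the exact build text depends on CacheExporter.Build.toString().
import java.time.Instant
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.archive.client.Architecture
import org.openrs2.archive.client.ArtifactFormat
import org.openrs2.archive.client.ArtifactType
import org.openrs2.archive.client.ClientExporter
import org.openrs2.archive.client.Jvm
import org.openrs2.archive.client.OperatingSystem

fun main() {
    val summary = ClientExporter.ArtifactSummary(
        id = 123L,
        game = "runescape",
        environment = "live",
        build = CacheExporter.Build(550, null),
        timestamp = Instant.parse("2009-07-14T11:22:33Z"),
        type = ArtifactType.CLIENT_GL,
        format = ArtifactFormat.PACK200,
        os = OperatingSystem.INDEPENDENT,
        arch = Architecture.INDEPENDENT,
        jvm = Jvm.INDEPENDENT,
        size = 505_856
    )

    // Something like: runescape_gl-b550-2009-07-14-11-22-33-openrs2#123.pack200
    println(summary.name)
}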
diff --git a/archive/src/main/resources/org/openrs2/archive/templates/clients/show.html b/archive/src/main/resources/org/openrs2/archive/templates/clients/show.html new file mode 100644 index 0000000000..b49efa4744 --- /dev/null +++ b/archive/src/main/resources/org/openrs2/archive/templates/clients/show.html @@ -0,0 +1,129 @@
[clients/show.html (129 added lines): Thymeleaf "Client" detail page. Shows a summary table with
 rows for Game, Environment, Build, Timestamp, Type, Format, OS, Architecture, JVM, Size, Checksum
 and SHA-1 plus a Download link, followed by a "Links" table of related artifacts with columns
 Build, Timestamp, Type, Format, OS, Arch, JVM, Checksum, SHA-1, Size and Links (e.g. 550 /
 client_gl / pack200 / independent / independent / independent / 494 KiB with Download and More
 links). Markup not preserved in this extract.]
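// Editor's sketch (not part of the diff): fetching an artifact through the routes registered in
// WebServer above ("/clients", "/clients/{id}" and "/clients/{id}.dat"), which render the two
// templates and the raw download. The base URL and artifact id are placeholders for a locally
// running archive instance.
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.nio.file.Path

fun main() {
    val client = HttpClient.newHttpClient()
    val base = "http://localhost:8080" // placeholder

    // HTML index of all artifacts (rendered from clients/index.html above)
    val index = client.send(
        HttpRequest.newBuilder(URI.create("$base/clients")).build(),
        HttpResponse.BodyHandlers.ofString()
    )
    println(index.statusCode())

    // Raw artifact download; Content-Disposition carries the generated file name
    val download = client.send(
        HttpRequest.newBuilder(URI.create("$base/clients/123.dat")).build(),
        HttpResponse.BodyHandlers.ofFile(Path.of("artifact.dat"))
    )
    println(download.headers().firstValue("Content-Disposition").orElse(""))
}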
+ + diff --git a/archive/src/main/resources/org/openrs2/archive/templates/layout.html b/archive/src/main/resources/org/openrs2/archive/templates/layout.html index 57f78ad7d8..4ff0df3e16 100644 --- a/archive/src/main/resources/org/openrs2/archive/templates/layout.html +++ b/archive/src/main/resources/org/openrs2/archive/templates/layout.html @@ -24,6 +24,9 @@ + diff --git a/asm/src/main/kotlin/org/openrs2/asm/classpath/Library.kt b/asm/src/main/kotlin/org/openrs2/asm/classpath/Library.kt index 1d91ba7931..4142885cd4 100644 --- a/asm/src/main/kotlin/org/openrs2/asm/classpath/Library.kt +++ b/asm/src/main/kotlin/org/openrs2/asm/classpath/Library.kt @@ -5,6 +5,7 @@ import org.objectweb.asm.tree.ClassNode import org.openrs2.asm.io.LibraryReader import org.openrs2.asm.io.LibraryWriter import org.openrs2.util.io.useAtomicOutputStream +import java.io.InputStream import java.nio.file.Files import java.nio.file.Path import java.util.SortedMap @@ -59,9 +60,13 @@ public class Library(public val name: String) : Iterable { public fun read(name: String, path: Path, reader: LibraryReader): Library { logger.info { "Reading library $path" } - val classes = Files.newInputStream(path).use { input -> - reader.read(input) + Files.newInputStream(path).use { input -> + return read(name, input, reader) } + } + + public fun read(name: String, input: InputStream, reader: LibraryReader): Library { + val classes = reader.read(input) val library = Library(name) for (clazz in classes) { diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index a08fcae03d..30f1474edd 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -47,6 +47,7 @@ jackson-kotlin = { module = "com.fasterxml.jackson.module:jackson-module-kotlin" jackson-yaml = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml", version.ref = "jackson" } javaParser = { module = "com.github.javaparser:javaparser-symbol-solver-core", version = "3.25.5" } jdom = { module = "org.jdom:jdom2", version = "2.0.6.1" } +jelf = { module = "net.fornwall:jelf", version = "0.9.0" } jgrapht = { module = "org.jgrapht:jgrapht-core", version = "1.5.2" } jimfs = { module = "com.google.jimfs:jimfs", version.ref = "jimfs" } jquery = { module = "org.webjars:jquery", version = "3.7.1" } @@ -75,6 +76,7 @@ netty-codec-http = { module = "io.netty:netty-codec-http", version.ref = "netty" netty-handler = { module = "io.netty:netty-handler", version.ref = "netty" } netty-transport = { module = "io.netty:netty-transport", version.ref = "netty" } openrs2-natives = { module = "org.openrs2:openrs2-natives-all", version = "3.2.0" } +pecoff4j = { module = "com.kichik.pecoff4j:pecoff4j", version = "0.4.0" } postgres = { module = "org.postgresql:postgresql", version = "42.6.0" } result-core = { module = "com.michael-bull.kotlin-result:kotlin-result", version = "1.1.18" } result-coroutines = { module = "com.michael-bull.kotlin-result:kotlin-result-coroutines", version = "1.1.18" }
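// Editor's sketch (not part of the diff): the container sniffing that ClientImporter.parse
// performs above, reduced to a standalone helper. The magic numbers mirror the companion-object
// constants (MSCF, 0x7F ELF, PK\x03\x04, MZ, the headerless-gzip Pack200 marker and the three
// packclass compression bytes); the helper and function names here are illustrative only.
import java.nio.file.Files
import java.nio.file.Path

private fun ByteArray.hasPrefix(prefix: ByteArray): Boolean {
    if (size < prefix.size) return false
    return prefix.indices.all { this[it] == prefix[it] }
}

fun sniffFormat(path: Path): String {
    // Only the first few bytes are needed to pick a parser
    val header = Files.newInputStream(path).use { it.readNBytes(4) }
    return when {
        header.hasPrefix(byteArrayOf('P'.code.toByte(), 'K'.code.toByte(), 0x03, 0x04)) -> "jar"
        header.hasPrefix(byteArrayOf('M'.code.toByte(), 'S'.code.toByte(), 'C'.code.toByte(), 'F'.code.toByte())) -> "cab"
        header.hasPrefix(byteArrayOf(0x7F, 'E'.code.toByte(), 'L'.code.toByte(), 'F'.code.toByte())) -> "elf shared object"
        header.hasPrefix(byteArrayOf('M'.code.toByte(), 'Z'.code.toByte())) -> "pe/dll"
        header.hasPrefix(byteArrayOf(0x08)) -> "pack200 (headerless gzip)"
        header.hasPrefix(byteArrayOf(0x00)) || header.hasPrefix(byteArrayOf(0x01)) || header.hasPrefix(byteArrayOf(0x02)) -> "packclass (js5)"
        else -> "unknown"
    }
}

fun main() {
    println(sniffFormat(Path.of("loader.jar"))) // placeholder path
}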