Compare commits

..

No commits in common. 'master' and 'master' have entirely different histories.

  1. 31
      .editorconfig
  2. 34
      .github/workflows/build.yaml
  3. 1
      .idea/.gitignore
  4. 28
      .idea/codeInsightSettings.xml
  5. 14
      .idea/codeStyles/Project.xml
  6. 6
      .idea/fileTemplates/internal/Kotlin Class.kt
  7. 6
      .idea/fileTemplates/internal/Kotlin Enum.kt
  8. 6
      .idea/fileTemplates/internal/Kotlin Interface.kt
  9. 11
      .idea/runConfigurations/AstDeobfuscator.xml
  10. 15
      .idea/runConfigurations/Bundler.xml
  11. 10
      .idea/runConfigurations/BytecodeDeobfuscator.xml
  12. 10
      .idea/runConfigurations/Decompiler.xml
  13. 12
      .idea/runConfigurations/Deobfuscator.xml
  14. 10
      .idea/runConfigurations/GameServer.xml
  15. 10
      .idea/runConfigurations/GenerateBuffer.xml
  16. 10
      .idea/runConfigurations/Patcher.xml
  17. 16
      .idea/runConfigurations/XteaPluginTest.xml
  18. 2
      .idea/runConfigurations/client.xml
  19. 12
      .idea/runConfigurations/dev_openrs2.xml
  20. 2
      .idea/scopes/exclude_nonfree.xml
  21. 2
      .mailmap
  22. 80
      CONTRIBUTING.md
  23. 37
      DCO
  24. 30
      Jenkinsfile
  25. 2
      LICENSE
  26. 188
      README.md
  27. 96
      all/build.gradle.kts
  28. 39
      all/src/main/java/dev/openrs2/Launcher.kt
  29. 29
      all/src/main/kotlin/org/openrs2/Command.kt
  30. 64
      archive/build.gradle.kts
  31. 23
      archive/src/main/kotlin/org/openrs2/archive/ArchiveCommand.kt
  32. 3
      archive/src/main/kotlin/org/openrs2/archive/ArchiveConfig.kt
  33. 27
      archive/src/main/kotlin/org/openrs2/archive/ArchiveConfigProvider.kt
  34. 55
      archive/src/main/kotlin/org/openrs2/archive/ArchiveModule.kt
  35. 28
      archive/src/main/kotlin/org/openrs2/archive/DataSourceProvider.kt
  36. 15
      archive/src/main/kotlin/org/openrs2/archive/DatabaseProvider.kt
  37. 19
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheCommand.kt
  38. 158
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheDownloader.kt
  39. 806
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt
  40. 1490
      archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt
  41. 16
      archive/src/main/kotlin/org/openrs2/archive/cache/CrossPollinateCommand.kt
  42. 223
      archive/src/main/kotlin/org/openrs2/archive/cache/CrossPollinator.kt
  43. 25
      archive/src/main/kotlin/org/openrs2/archive/cache/DownloadCommand.kt
  44. 34
      archive/src/main/kotlin/org/openrs2/archive/cache/ExportCommand.kt
  45. 53
      archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt
  46. 116
      archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt
  47. 376
      archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt
  48. 158
      archive/src/main/kotlin/org/openrs2/archive/cache/NxtJs5ChannelHandler.kt
  49. 22
      archive/src/main/kotlin/org/openrs2/archive/cache/NxtJs5ChannelInitializer.kt
  50. 76
      archive/src/main/kotlin/org/openrs2/archive/cache/OsrsJs5ChannelHandler.kt
  51. 22
      archive/src/main/kotlin/org/openrs2/archive/cache/OsrsJs5ChannelInitializer.kt
  52. 16
      archive/src/main/kotlin/org/openrs2/archive/cache/RefreshViewsCommand.kt
  53. 149
      archive/src/main/kotlin/org/openrs2/archive/cache/finder/CacheFinderExtractor.kt
  54. 25
      archive/src/main/kotlin/org/openrs2/archive/cache/finder/ExtractCommand.kt
  55. 8
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/ClientOutOfDateCodec.kt
  56. 10
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/InitJs5RemoteConnection.kt
  57. 27
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/InitJs5RemoteConnectionCodec.kt
  58. 8
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5OkCodec.kt
  59. 14
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5Request.kt
  60. 37
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5RequestEncoder.kt
  61. 11
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5Response.kt
  62. 121
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5ResponseDecoder.kt
  63. 8
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/LoginResponse.kt
  64. 32
      archive/src/main/kotlin/org/openrs2/archive/cache/nxt/MusicStreamClient.kt
  65. 11
      archive/src/main/kotlin/org/openrs2/archive/client/Architecture.kt
  66. 35
      archive/src/main/kotlin/org/openrs2/archive/client/Artifact.kt
  67. 46
      archive/src/main/kotlin/org/openrs2/archive/client/ArtifactFormat.kt
  68. 16
      archive/src/main/kotlin/org/openrs2/archive/client/ArtifactType.kt
  69. 14
      archive/src/main/kotlin/org/openrs2/archive/client/ClientCommand.kt
  70. 455
      archive/src/main/kotlin/org/openrs2/archive/client/ClientExporter.kt
  71. 997
      archive/src/main/kotlin/org/openrs2/archive/client/ClientImporter.kt
  72. 30
      archive/src/main/kotlin/org/openrs2/archive/client/ExportCommand.kt
  73. 32
      archive/src/main/kotlin/org/openrs2/archive/client/ImportCommand.kt
  74. 7
      archive/src/main/kotlin/org/openrs2/archive/client/Jvm.kt
  75. 116
      archive/src/main/kotlin/org/openrs2/archive/client/MachO.kt
  76. 43
      archive/src/main/kotlin/org/openrs2/archive/client/OperatingSystem.kt
  77. 16
      archive/src/main/kotlin/org/openrs2/archive/client/RefreshCommand.kt
  78. 11
      archive/src/main/kotlin/org/openrs2/archive/game/Game.kt
  79. 58
      archive/src/main/kotlin/org/openrs2/archive/game/GameDatabase.kt
  80. 70
      archive/src/main/kotlin/org/openrs2/archive/jav/JavConfig.kt
  81. 65
      archive/src/main/kotlin/org/openrs2/archive/key/BinaryKeyReader.kt
  82. 16
      archive/src/main/kotlin/org/openrs2/archive/key/BruteForceCommand.kt
  83. 16
      archive/src/main/kotlin/org/openrs2/archive/key/DownloadCommand.kt
  84. 16
      archive/src/main/kotlin/org/openrs2/archive/key/EntCommand.kt
  85. 57
      archive/src/main/kotlin/org/openrs2/archive/key/HdosKeyDownloader.kt
  86. 13
      archive/src/main/kotlin/org/openrs2/archive/key/HexKeyReader.kt
  87. 23
      archive/src/main/kotlin/org/openrs2/archive/key/ImportCommand.kt
  88. 34
      archive/src/main/kotlin/org/openrs2/archive/key/JsonKeyDownloader.kt
  89. 39
      archive/src/main/kotlin/org/openrs2/archive/key/JsonKeyReader.kt
  90. 403
      archive/src/main/kotlin/org/openrs2/archive/key/KeyBruteForcer.kt
  91. 15
      archive/src/main/kotlin/org/openrs2/archive/key/KeyCommand.kt
  92. 10
      archive/src/main/kotlin/org/openrs2/archive/key/KeyDownloader.kt
  93. 159
      archive/src/main/kotlin/org/openrs2/archive/key/KeyExporter.kt
  94. 155
      archive/src/main/kotlin/org/openrs2/archive/key/KeyImporter.kt
  95. 8
      archive/src/main/kotlin/org/openrs2/archive/key/KeyReader.kt
  96. 10
      archive/src/main/kotlin/org/openrs2/archive/key/KeySource.kt
  97. 54
      archive/src/main/kotlin/org/openrs2/archive/key/RuneLiteKeyDownloader.kt
  98. 17
      archive/src/main/kotlin/org/openrs2/archive/key/TextKeyReader.kt
  99. 107
      archive/src/main/kotlin/org/openrs2/archive/map/Colors.kt
  100. 57
      archive/src/main/kotlin/org/openrs2/archive/map/FloType.kt
  101. Some files were not shown because too many files have changed in this diff Show More

@ -7,34 +7,17 @@ trim_trailing_whitespace = true
max_line_length = 120
indent_style = tab
# Prevent IDEA from translating *.{kt,kts} -> *.{kt, kts}. This confuses ktlint.
# @formatter:off
[*.{kt,kts}]
# @formatter:on
[*.{kt, kts}]
indent_style = space
indent_size = 4
# see https://github.com/pinterest/ktlint/issues/527
# noinspection EditorConfigKeyCorrectness
ktlint_standard_argument-list-wrapping = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_indent = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_parameter-list-wrapping = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_trailing-comma-on-call-site = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_trailing-comma-on-declaration-site = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_wrapping = disabled
disabled_rules = import-ordering
[*.md]
max_line_length = 80
[*.sql]
[*.xml]
indent_style = space
indent_size = 4
indent_size = 2
# @formatter:off
[*.{json,xml,yaml,yml}]
# @formatter:on
[Jenkinsfile]
indent_style = space
indent_size = 2
indent_size = 4

@ -1,34 +0,0 @@
---
on: push
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/cache@v3
with:
path: ~/.ssh/known_hosts
key: ssh-known-hosts
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
distribution: temurin
java-version: 11
- uses: gradle/wrapper-validation-action@v1
- uses: gradle/gradle-build-action@v2
with:
arguments: build
- if: github.ref == 'refs/heads/master'
run: |
install -dm0700 ~/.ssh
touch ~/.ssh/id_ed25519
chmod 0600 ~/.ssh/id_ed25519
echo "${SSH_KEY}" > ~/.ssh/id_ed25519
env:
SSH_KEY: ${{ secrets.SSH_KEY }}
- if: github.ref == 'refs/heads/master'
uses: gradle/gradle-build-action@v2
with:
arguments: publish
env:
ORG_GRADLE_PROJECT_openrs2Username: ${{ secrets.REPO_USERNAME }}
ORG_GRADLE_PROJECT_openrs2Password: ${{ secrets.REPO_PASSWORD }}

1
.idea/.gitignore vendored

@ -1,6 +1,5 @@
/*
!.gitignore
!codeInsightSettings.xml
!codeStyles
!fileTemplates
!inspectionProfiles

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaProjectCodeInsightSettings">
<excluded-names>
<name>com.google.inject.BindingAnnotation</name>
<name>com.google.inject.Inject</name>
<name>com.google.inject.Named</name>
<name>com.google.inject.Provider</name>
<name>com.google.inject.ScopeAnnotation</name>
<name>com.google.inject.Singleton</name>
<name>java.nio.file.Paths.get</name>
<name>org.junit.jupiter.api.AfterEach</name>
<name>org.junit.jupiter.api.Assertions.assertEquals</name>
<name>org.junit.jupiter.api.Assertions.assertFalse</name>
<name>org.junit.jupiter.api.Assertions.assertNotEquals</name>
<name>org.junit.jupiter.api.Assertions.assertNotNull</name>
<name>org.junit.jupiter.api.Assertions.assertNotSame</name>
<name>org.junit.jupiter.api.Assertions.assertNull</name>
<name>org.junit.jupiter.api.Assertions.assertSame</name>
<name>org.junit.jupiter.api.Assertions.assertThrows</name>
<name>org.junit.jupiter.api.Assertions.assertTrue</name>
<name>org.junit.jupiter.api.Assertions.fail</name>
<name>org.junit.jupiter.api.BeforeEach</name>
<name>org.junit.jupiter.api.Disabled</name>
<name>org.junit.jupiter.api.Test</name>
</excluded-names>
</component>
</project>

@ -7,10 +7,6 @@
</value>
</option>
<option name="FORMATTER_TAGS_ENABLED" value="true" />
<HTMLCodeStyleSettings>
<option name="HTML_SPACE_INSIDE_EMPTY_TAG" value="true" />
<option name="HTML_DO_NOT_INDENT_CHILDREN_OF" value="" />
</HTMLCodeStyleSettings>
<JavaCodeStyleSettings>
<option name="CLASS_NAMES_IN_JAVADOC" value="3" />
<option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="9999" />
@ -47,13 +43,6 @@
<option name="XML_ALIGN_ATTRIBUTES" value="false" />
<option name="XML_SPACE_INSIDE_EMPTY_TAG" value="true" />
</XML>
<codeStyleSettings language="HTML">
<indentOptions>
<option name="CONTINUATION_INDENT_SIZE" value="4" />
<option name="USE_TAB_CHARACTER" value="true" />
<option name="SMART_TABS" value="true" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="JAVA">
<option name="KEEP_FIRST_COLUMN_COMMENT" value="false" />
<option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
@ -81,9 +70,6 @@
<option name="SMART_TABS" value="true" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="Markdown">
<option name="RIGHT_MARGIN" value="80" />
</codeStyleSettings>
<codeStyleSettings language="XML">
<indentOptions>
<option name="INDENT_SIZE" value="2" />

@ -1,6 +0,0 @@
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME}
#end
#parse("File Header.java")
public class ${NAME} {
}

@ -1,6 +0,0 @@
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME}
#end
#parse("File Header.java")
public enum class ${NAME} {
}

@ -1,6 +0,0 @@
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME}
#end
#parse("File Header.java")
public interface ${NAME} {
}

@ -1,8 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="AstDeobfuscator" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.deob.ast.DeobfuscateAstCommandKt" />
<configuration default="false" name="AstDeobfuscator" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2.deob-ast.main" />
<shortenClasspath name="NONE" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.deob.ast.AstDeobfuscatorKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@ -0,0 +1,15 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Bundler" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2.bundler.main" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.bundler.BundlerKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@ -1,10 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="BytecodeDeobfuscator" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.deob.bytecode.DeobfuscateBytecodeCommandKt" />
<module name="openrs2.deob-bytecode.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@ -1,9 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Decompiler" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.decompiler.DecompileCommandKt" />
<configuration default="false" name="Decompiler" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2.decompiler.main" />
<shortenClasspath name="NONE" />
<option name="VM_PARAMETERS" value="-Xmx3G" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.decompiler.DecompilerKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@ -1,9 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Deobfuscator" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.deob.DeobfuscateCommandKt" />
<configuration default="false" name="Deobfuscator" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2.deob.main" />
<shortenClasspath name="NONE" />
<option name="VM_PARAMETERS" value="-Xmx3G" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.deob.DeobfuscatorKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@ -1,9 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="GameServer" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.game.GameCommandKt" />
<configuration default="false" name="GameServer" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2.game.main" />
<shortenClasspath name="NONE" />
<option name="VM_PARAMETERS" value="-Dio.netty.leakDetection.level=PARANOID" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.game.GameServerKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@ -1,10 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="GenerateBuffer" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.buffer.generator.GenerateBufferCommandKt" />
<module name="openrs2.buffer-generator.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@ -1,10 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Patcher" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.patcher.PatchCommandKt" />
<module name="openrs2.patcher.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@ -1,16 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="XteaPluginTest" type="Application" factoryName="Application" nameIsGenerated="true">
<option name="MAIN_CLASS_NAME" value="org.openrs2.xtea.XteaPluginTest" />
<module name="openrs2.xtea-plugin.test" />
<option name="VM_PARAMETERS" value="-ea" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="org.openrs2.xtea.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@ -2,7 +2,7 @@
<configuration default="false" name="client" type="Application" factoryName="Application" nameIsGenerated="true">
<option name="MAIN_CLASS_NAME" value="client" />
<module name="openrs2.nonfree.client.main" />
<option name="PROGRAM_PARAMETERS" value="1 live en game0" />
<option name="PROGRAM_PARAMETERS" value="1 live english game0" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@ -1,19 +1,11 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="org.openrs2" type="JUnit" factoryName="JUnit">
<useClassPathOnly />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="org.openrs2.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<option name="PACKAGE_NAME" value="org.openrs2" />
<configuration default="false" name="dev.openrs2" type="JUnit" factoryName="JUnit">
<option name="PACKAGE_NAME" value="dev.openrs2" />
<option name="MAIN_CLASS_NAME" value="" />
<option name="METHOD_NAME" value="" />
<option name="TEST_OBJECT" value="package" />
<option name="VM_PARAMETERS" value="-ea -Dio.netty.leakDetection.level=PARANOID" />
<option name="PARAMETERS" value="" />
<option name="WORKING_DIRECTORY" value="" />
<option name="TEST_SEARCH_SCOPE">
<value defaultName="wholeProject" />
</option>

@ -1,3 +1,3 @@
<component name="DependencyValidationManager">
<scope name="exclude-nonfree" pattern="!file[openrs2.nonfree*]:*//*" />
<scope name="exclude-nonfree" pattern="!file[openrs2-client]:*/&amp;&amp;!file[openrs2-gl]:*/&amp;&amp;!file[openrs2-gl-dri]:*/&amp;&amp;!file[openrs2-loader]:*/&amp;&amp;!file[openrs2-nonfree]:*/&amp;&amp;!file[openrs2-signlink]:*/&amp;&amp;!file[openrs2-unpack]:*/&amp;&amp;!file[openrs2-unpacker]:*/" />
</component>

@ -1,2 +0,0 @@
Graham <gpe@openrs2.org> <gpe@openrs2.dev>
Scu11 <scu11@openrs2.org> <scu11@openrs2.dev>

@ -1,80 +0,0 @@
# Contributing to OpenRS2
## Introduction
OpenRS2 is still in the early stages of development. The current focus is on
building underlying infrastructure, such as the deobfuscator, rather than game
content. This approach will make it much quicker to build game content in the
long run, but it does mean OpenRS2 won't be particularly useful in the short
term.
If you're interested in contributing new features, you should discuss your plans
in our [Discord][discord] server first. I have rough plans in my head for the
future development direction. Communicating beforehand will avoid the need for
significant changes to be made at the code review stage and make it less likely
for your contribution to be dropped entirely.
## Code style
All source code must be formatted with [IntelliJ IDEA][idea]'s built-in
formatter before each commit. The 'Optimize imports' option should also be
selected. Do not select 'Rearrange entries'.
OpenRS2's code style settings are held in `.idea/codeStyles/Project.xml` in the
repository, and IDEA should use them automatically after importing the Gradle
project.
Kotlin code must pass all of [ktlint][ktlint]'s tests.
Always use `//` for single-line comments and `/*` for multi-line comments.
## Commit messages
Commit messages should follow the ['seven rules'][commitmsg] described in
'How to Write a Git Commit Message', with the exception that the summary line
can be up to 72 characters in length (as OpenRS2 does not use email-based
patches).
You should use tools like [interactive rebase][rewriting-history] to ensure the
commit history is tidy.
## Developer Certificate of Origin
OpenRS2 uses version 1.1 of the [Developer Certificate of Origin][dco] (DCO) to
certify that contributors agree to license their code under OpenRS2's license
(see the License section below). To confirm that a contribution meets the
requirements of the DCO, a `Signed-off-by:` line must be added to the Git commit
message by passing `--signoff` to the `git commit` invocation.
If you intend to make a large number of contributions, run the following
commands from the repository root to add `Signed-off-by:` line to all your
commit messages by default:
```
echo -e "\n\nSigned-off-by: $(git config user.name) <$(git config user.email)>" > .git/commit-template
git config commit.template .git/commit-template
```
The full text of the DCO is available in the `DCO` file.
OpenRS2 does not distribute any of Jagex's intellectual property in this
repository, and care should be taken to avoid inadvertently including any in
contributions.
## Versioning
OpenRS2 uses [Semantic Versioning][semver].
## Gitea
OpenRS2 only uses GitHub as a mirror. Issues and pull requests should be
submitted to [OpenRS2's self-hosted Gitea instance][gitea].
[commitmsg]: https://chris.beams.io/posts/git-commit/#seven-rules
[dco]: https://developercertificate.org/
[discord]: https://chat.openrs2.org/
[gitea]: https://git.openrs2.org/openrs2/openrs2
[idea]: https://www.jetbrains.com/idea/
[ktlint]: https://github.com/pinterest/ktlint#readme
[rewriting-history]: https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History
[semver]: https://semver.org/

37
DCO

@ -1,37 +0,0 @@
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

30
Jenkinsfile vendored

@ -0,0 +1,30 @@
pipeline {
agent any
stages {
stage('Build') {
steps {
withCredentials([usernamePassword(
credentialsId: 'archiva',
usernameVariable: 'ORG_GRADLE_PROJECT_openrs2RepoUsername',
passwordVariable: 'ORG_GRADLE_PROJECT_openrs2RepoPassword'
)]) {
withGradle {
sh './gradlew --no-daemon clean build publish'
}
}
}
}
}
post {
always {
junit '**/build/test-results/test/*.xml'
jacoco(
execPattern: '**/build/jacoco/test.exec',
classPattern: '**/build/classes/*/main',
sourcePattern: '**/src/main'
)
}
}
}

@ -1,4 +1,4 @@
Copyright (c) 2019-2023 OpenRS2 Authors
Copyright (c) 2019-2020 OpenRS2 Authors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above

@ -1,16 +1,16 @@
# OpenRS2
[![GitHub Actions][actions-badge]][actions] [![Discord][discord-badge]][discord] [![ISC license][isc-badge]][isc]
[![Build status badge](https://build.openrs2.dev/buildStatus/icon?job=openrs2&build=lastCompleted)](https://build.openrs2.dev/job/openrs2/)
## Introduction
OpenRS2 is an open-source multiplayer game server and suite of associated tools.
It is compatible with build 550 of the RuneScape client, which was released in
late 2009.
OpenRS2 is an open-source multiplayer game server and suite of associated
tools. It is compatible with build 550 of the RuneScape client, which was
released in late 2009.
## Prerequisites
OpenRS2 requires version 11 or later of the [Java Development Kit][jdk].
OpenRS2 requires version 8 or later of the [Java Development Kit][jdk].
The JDK is required even if a pre-built copy of OpenRS2 is used, as it depends
on JDK-only tools, such as `jarsigner`, at runtime.
@ -20,69 +20,159 @@ on JDK-only tools, such as `jarsigner`, at runtime.
OpenRS2 requires the original RuneScape client code, data and location file
encryption keys, which we cannot legally distribute.
These files must be manually placed in the `nonfree` directory (directly beneath
the root of the repository), in the following structure:
These files must be manually placed in the `nonfree` directory (directly
beneath the root of the repository), in the following structure:
```
nonfree
├── lib
│ ├── jaggl.pack200
│ ├── loader_gl.jar
│ ├── loader.jar
│ ├── runescape_gl.pack200
│ ├── runescape.jar
│ └── unpackclass.pack
└── share
├── cache
│ ├── 0
│ │ ├── 0.dat
│ │ └── ...
│ ├── ...
│ └── 255
│ ├── ...
│ └── 28.dat
└── keys.json
└── code
   ├── game_unpacker.dat
   ├── jaggl.pack200
   ├── loader_gl.jar
   ├── loader.jar
   ├── runescape_gl.pack200
   └── runescape.jar
```
The CRC-32 checksums and SHA-256 digests of the correct files are:
The SHA-256 checksums of the correct files are:
| CRC-32 checksum | SHA-256 digest | File |
|----------------:|--------------------------------------------------------------------|------------------------|
| `-1418094567` | `d39578f4a88a376bcb2571f05da1939a14a80d8c4ed89a4eb172d9e525795fe2` | `jaggl.pack200` |
| `-2129469231` | `31182683ba04dc0ad45859161c13f66424b10deb0b2df10aa58b48bba57402db` | `loader_gl.jar` |
| `-1516355035` | `ccdfaa86be07452ddd69f869ade86ea900dbb916fd853db16602edf2eb54211b` | `loader.jar` |
| `-132784534` | `4a5032ea8079d2154617ae1f21dfcc46a10e023c8ba23a4827d5e25e75c73045` | `runescape_gl.pack200` |
| `1692522675` | `0ab28a95e7c5993860ff439ebb331c0df02ad40aa1f544777ed91b46d30d3d24` | `runescape.jar` |
| `-1911426584` | `7c090e07f8d754d09804ff6e9733ef3ba227893b6b639436db90977b39122590` | `unpackclass.pack` |
```
7c090e07f8d754d09804ff6e9733ef3ba227893b6b639436db90977b39122590 nonfree/code/game_unpacker.dat
d39578f4a88a376bcb2571f05da1939a14a80d8c4ed89a4eb172d9e525795fe2 nonfree/code/jaggl.pack200
31182683ba04dc0ad45859161c13f66424b10deb0b2df10aa58b48bba57402db nonfree/code/loader_gl.jar
ccdfaa86be07452ddd69f869ade86ea900dbb916fd853db16602edf2eb54211b nonfree/code/loader.jar
4a5032ea8079d2154617ae1f21dfcc46a10e023c8ba23a4827d5e25e75c73045 nonfree/code/runescape_gl.pack200
0ab28a95e7c5993860ff439ebb331c0df02ad40aa1f544777ed91b46d30d3d24 nonfree/code/runescape.jar
```
The `.gitignore` file includes the `nonfree` directory to prevent any non-free
material from being accidentally included in the repository.
The `nonfree` directory is included in the `.gitignore` file to prevent any
non-free material from being accidentally included in the repository.
## Building
Run `./gradlew` to download the dependencies, build the code, run the unit tests
and package it.
## Links
## Contributing
OpenRS2 is still in the early stages of development. The current focus is on
building underlying infrastructure, such as the deobfuscator, rather than
game content. This approach will make it much quicker to build game content in
the long run, but it does mean OpenRS2 won't be particularly useful in the short
term.
If you're interested in contributing new features, you should discuss your
plans in our [Discord][discord] server first. I have rough plans in my head for
the future development direction. Communicating beforehand will avoid the need
for significant changes to be made at the code review stage and make it less
likely for your contribution to be dropped entirely.
### Code style
All source code must be formatted with [IntelliJ IDEA][idea]'s built-in
formatter before each commit. The 'Optimize imports' option should also be
selected. Do not select 'Rearrange entries'.
OpenRS2's code style settings are held in `.idea/codeStyles/Project.xml` in the
repository, and IDEA should use them automatically after importing the Gradle
project.
Kotlin code must pass all of [ktlint][ktlint]'s tests.
### Commit messages
Commit messages should follow the ['seven rules'][commitmsg] described in
'How to Write a Git Commit Message', with the exception that the summary line
can be up to 72 characters in length (as OpenRS2 does not use email-based
patches).
You should use tools like [interactive rebase][rewriting-history] to ensure the
commit history is tidy.
### Developer Certificate of Origin
OpenRS2 uses version 1.1 of the [Developer Certificate of Origin][dco] (DCO) to
certify that contributors agree to license their code under OpenRS2's license
(see the License section below). To confirm that a contribution meets the
requirements of the DCO, a `Signed-off-by:` line must be added to the Git
commit message by passing `--signoff` to the `git commit` invocation.
If you intend to make a large number of contributions, run the following
commands from the repository root to add `Signed-off-by:` line to all your
commit messages by default:
```
echo -e "\n\nSigned-off-by: $(git config user.name) <$(git config user.email)>" > .git/commit-template
git config commit.template .git/commit-template
```
The full text of the DCO is reproduced below:
```
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
```
OpenRS2 does not distribute any of Jagex's intellectual property, and care
should be taken to avoid inadvertently including any in contributions.
### Versioning
OpenRS2 uses [Semantic Versioning][semver].
## Community
* [Discord][discord]
* [Issue tracker][issue-tracker]
* [KDoc][kdoc]
* [Website][www]
## License
OpenRS2 is available under the terms of the [ISC license][isc], which is similar
to the 2-clause BSD license. The full copyright notice and terms are available
in the `LICENSE` file.
OpenRS2 is available under the terms of the [ISC license][isc], which is
similar to the 2-clause BSD license. The full copyright notice and terms are
available in the `LICENSE` file.
[actions-badge]: https://github.com/openrs2/openrs2/actions/workflows/build.yaml/badge.svg?branch=master
[actions]: https://github.com/openrs2/openrs2/actions
[discord-badge]: https://img.shields.io/discord/684495254145335298
[discord]: https://chat.openrs2.org/
[isc-badge]: https://img.shields.io/badge/license-ISC-informational
[commitmsg]: https://chris.beams.io/posts/git-commit/#seven-rules
[dco]: https://developercertificate.org/
[discord]: https://chat.openrs2.dev/
[idea]: https://www.jetbrains.com/idea/
[isc]: https://opensource.org/licenses/ISC
[issue-tracker]: https://git.openrs2.org/openrs2/openrs2/issues
[jdk]: https://jdk.java.net/
[kdoc]: https://docs.openrs2.org/
[www]: https://www.openrs2.org/
[ktlint]: https://github.com/pinterest/ktlint#readme
[rewriting-history]: https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History
[semver]: https://semver.org/
[www]: https://www.openrs2.dev/

@ -1,54 +1,29 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
import com.github.jk1.license.render.TextReportRenderer
import java.nio.file.Files
// NOTE(review): this span is unreconciled diff residue — it interleaves two
// revisions of the build script. Both the version-catalog plugin aliases and
// the old string `id(...)` plugin lines are present, both `mainClass.set(...)`
// and the removed `mainClassName` are present, and two generations of the
// dependency list are merged. One revision must be chosen before this file
// can be evaluated; it is not valid Gradle Kotlin DSL as-is.
plugins {
`maven-publish`
application
alias(libs.plugins.dependencyLicenseReport)
alias(libs.plugins.shadow)
id("com.github.jk1.dependency-license-report")
id("com.github.johnrengelman.shadow")
kotlin("jvm")
}
application {
applicationName = "openrs2"
mainClass.set("org.openrs2.CommandKt")
mainClassName = "dev.openrs2.LauncherKt"
}
dependencies {
implementation(projects.archive)
implementation(projects.bufferGenerator)
implementation(projects.cacheCli)
implementation(projects.compressCli)
implementation(projects.crc32)
implementation(projects.deob)
implementation(projects.game)
implementation(projects.log)
implementation(projects.patcher)
implementation(project(":bundler"))
implementation(project(":decompiler"))
implementation(project(":deob"))
implementation(project(":deob-ast"))
implementation(project(":game"))
}
tasks.shadowJar {
archiveFileName.set("openrs2.jar")
minimize {
exclude(dependency("ch.qos.logback:logback-classic"))
exclude(dependency("com.github.jnr:jnr-ffi"))
exclude(dependency("org.flywaydb:flyway-core"))
exclude(dependency("org.jetbrains.kotlin:kotlin-reflect"))
}
}
// NOTE(review): the `generateAuthors` task below is missing its closing brace
// in this hunk — the old `tasks.withType<ShadowJar>` block from the other
// revision begins before `tasks.register` is closed.
tasks.register("generateAuthors") {
inputs.dir("$rootDir/.git")
outputs.file(layout.buildDirectory.file("AUTHORS"))
doLast {
Files.newOutputStream(layout.buildDirectory.file("AUTHORS").get().asFile.toPath()).use { out ->
exec {
commandLine("git", "shortlog", "-esn", "HEAD")
standardOutput = out
}.assertNormalExitValue()
}
}
tasks.withType<ShadowJar> {
minimize()
}
licenseReport {
@ -58,57 +33,40 @@ licenseReport {
// NOTE(review): unreconciled diff residue continues here — two `configure`
// blocks over the dist tasks (one keyed on `distTasks`, one on
// `shadowDistTasks`), two `named("shadow")` distribution blocks, and both
// `layout.buildDirectory`-based and deprecated `$buildDir`/`rootProject`
// path styles are present. Choose one revision before evaluating.
val distTasks = listOf(
"distTar",
"distZip",
"installDist"
)
configure(tasks.filter { it.name in distTasks }) {
enabled = false
}
val shadowDistTasks = listOf(
"installDist",
"installShadowDist",
"shadowDistTar",
"shadowDistZip"
)
configure(tasks.filter { it.name in shadowDistTasks }) {
dependsOn("generateAuthors", "generateLicenseReport")
configure(tasks.filter { it.name in distTasks }) {
dependsOn("generateLicenseReport")
}
distributions {
named("shadow") {
distributionBaseName.set("openrs2")
all {
contents {
from(layout.buildDirectory.file("AUTHORS"))
from("$rootDir/CONTRIBUTING.md")
from("$rootDir/DCO")
from("$rootDir/LICENSE")
from("$rootDir/README.md")
from("$rootDir/etc/archive.example.yaml") {
rename { "archive.yaml" }
into("etc")
from("${rootProject.projectDir}/COPYING")
from("${rootProject.projectDir}/README.md")
from("${rootProject.projectDir}/docs") {
include("*.md")
into("docs")
}
from("$rootDir/etc/config.example.yaml") {
rename { "config.yaml" }
into("etc")
}
from("$rootDir/share") {
exclude(".*", "*~")
into("share")
}
from(layout.buildDirectory.file("reports/dependency-license/THIRD-PARTY-NOTICES.txt")) {
from("$buildDir/reports/dependency-license/THIRD-PARTY-NOTICES.txt") {
rename { "third-party-licenses.txt" }
into("share/doc")
into("docs")
}
}
}
named("shadow") {
distributionBaseName.set("openrs2-shadow")
}
}
publishing {
publications.create<MavenPublication>("maven") {
artifactId = "openrs2"
setArtifacts(listOf(tasks.named("shadowDistZip").get()))
artifact(tasks.named("shadowDistZip").get())
pom {
packaging = "zip"

@ -0,0 +1,39 @@
package dev.openrs2
import kotlin.system.exitProcess
import dev.openrs2.bundler.main as bundlerMain
import dev.openrs2.decompiler.main as decompilerMain
import dev.openrs2.deob.ast.main as astDeobfuscatorMain
import dev.openrs2.deob.main as deobfuscatorMain
import dev.openrs2.game.main as gameMain
/**
 * Entry point for the all-in-one `openrs2` launcher.
 *
 * The first argument selects a subcommand; with no arguments the `game`
 * subcommand runs. Unknown commands print usage to stderr and exit with
 * status 1.
 *
 * NOTE(review): the original sliced the remaining arguments into a
 * `commandArgs` array that was never used — every subcommand `main` is
 * invoked with no arguments — so the dead slice has been removed. If
 * subcommands grow argument support, reintroduce the slice and forward it.
 */
fun main(args: Array<String>) {
    // Default to the game server when no command is given.
    val command = if (args.isEmpty()) "game" else args[0]

    when (command) {
        "bundle" -> bundlerMain()
        "decompile" -> decompilerMain()
        "deob" -> deobfuscatorMain()
        "deob-ast" -> astDeobfuscatorMain()
        "game" -> gameMain()
        else -> {
            System.err.println("Usage: openrs2 [<command> [<args>]]")
            System.err.println()
            System.err.println("Commands:")
            System.err.println(" bundle")
            System.err.println(" decompile")
            System.err.println(" deob")
            System.err.println(" deob-ast")
            System.err.println(" game")
            exitProcess(1)
        }
    }
}

@ -1,29 +0,0 @@
package org.openrs2
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
import org.openrs2.archive.ArchiveCommand
import org.openrs2.buffer.generator.GenerateBufferCommand
import org.openrs2.cache.cli.CacheCommand
import org.openrs2.compress.cli.CompressCommand
import org.openrs2.crc32.Crc32Command
import org.openrs2.deob.DeobfuscateCommand
import org.openrs2.game.GameCommand
import org.openrs2.patcher.PatchCommand
/** Entry point: runs the root [Command] CLI with the process arguments. */
public fun main(args: Array<String>): Unit = Command().main(args)
/**
 * Root CLI command for the `openrs2` binary. It performs no work of its own
 * (NoOp); it only dispatches to the registered subcommands.
 */
public class Command : NoOpCliktCommand(name = "openrs2") {
init {
subcommands(
ArchiveCommand(),
CacheCommand(),
CompressCommand(),
Crc32Command(),
DeobfuscateCommand(),
GameCommand(),
GenerateBufferCommand(),
PatchCommand()
)
}
}

@ -1,64 +0,0 @@
plugins {
`maven-publish`
application
kotlin("jvm")
}
application {
mainClass.set("org.openrs2.archive.ArchiveCommandKt")
}
dependencies {
api(libs.bundles.guice)
api(libs.clikt)
implementation(projects.asm)
implementation(projects.buffer)
implementation(projects.cache550)
implementation(projects.cli)
implementation(projects.compress)
implementation(projects.db)
implementation(projects.http)
implementation(projects.inject)
implementation(projects.json)
implementation(projects.log)
implementation(projects.net)
implementation(projects.protocol)
implementation(projects.util)
implementation(projects.yaml)
implementation(libs.bootstrap)
implementation(libs.bootstrapTable)
implementation(libs.bundles.ktor)
implementation(libs.bundles.thymeleaf)
implementation(libs.byteUnits)
implementation(libs.cabParser)
implementation(libs.flyway)
implementation(libs.guava)
implementation(libs.hikaricp)
implementation(libs.jackson.jsr310)
implementation(libs.jdom)
implementation(libs.jelf)
implementation(libs.jquery)
implementation(libs.jsoup)
implementation(libs.kotlin.coroutines.core)
implementation(libs.netty.handler)
implementation(libs.pecoff4j)
implementation(libs.postgres)
}
publishing {
publications.create<MavenPublication>("maven") {
from(components["java"])
pom {
packaging = "jar"
name.set("OpenRS2 Archive")
description.set(
"""
Service for archiving clients, caches and XTEA keys in an
efficient deduplicated format.
""".trimIndent()
)
}
}
}

@ -1,23 +0,0 @@
package org.openrs2.archive
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
import org.openrs2.archive.cache.CacheCommand
import org.openrs2.archive.client.ClientCommand
import org.openrs2.archive.key.KeyCommand
import org.openrs2.archive.name.NameCommand
import org.openrs2.archive.web.WebCommand
/** Entry point for running the archive service CLI on its own. */
public fun main(args: Array<String>): Unit = ArchiveCommand().main(args)
/**
 * Parent CLI command for the archive service. Performs no work itself; it
 * groups the cache, client, key, name and web subcommands.
 */
public class ArchiveCommand : NoOpCliktCommand(name = "archive") {
init {
subcommands(
CacheCommand(),
ClientCommand(),
KeyCommand(),
NameCommand(),
WebCommand()
)
}
}

@ -1,3 +0,0 @@
package org.openrs2.archive
/**
 * Archive service configuration, deserialised from `etc/archive.yaml`.
 * [databaseUrl] is the PostgreSQL connection URL handed to the data source.
 */
public data class ArchiveConfig(val databaseUrl: String)

@ -1,27 +0,0 @@
package org.openrs2.archive
import com.fasterxml.jackson.databind.ObjectMapper
import jakarta.inject.Inject
import jakarta.inject.Provider
import org.openrs2.yaml.Yaml
import java.nio.file.Files
import java.nio.file.Path
/**
 * Loads [ArchiveConfig] from `etc/archive.yaml`, seeding the file from the
 * bundled `etc/archive.example.yaml` on first run.
 */
public class ArchiveConfigProvider @Inject constructor(
    @Yaml private val mapper: ObjectMapper
) : Provider<ArchiveConfig> {
    override fun get(): ArchiveConfig {
        if (Files.notExists(CONFIG_PATH)) {
            try {
                Files.copy(EXAMPLE_CONFIG_PATH, CONFIG_PATH)
            } catch (ex: java.nio.file.FileAlreadyExistsException) {
                // Another process seeded the config between the existence
                // check and the copy (TOCTOU race) — safe to read it below.
            }
        }

        return Files.newBufferedReader(CONFIG_PATH).use { reader ->
            mapper.readValue(reader, ArchiveConfig::class.java)
        }
    }

    private companion object {
        private val CONFIG_PATH = Path.of("etc/archive.yaml")
        private val EXAMPLE_CONFIG_PATH = Path.of("etc/archive.example.yaml")
    }
}

@ -1,55 +0,0 @@
package org.openrs2.archive
import com.fasterxml.jackson.databind.Module
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.google.inject.AbstractModule
import com.google.inject.Scopes
import com.google.inject.multibindings.Multibinder
import org.openrs2.archive.key.HdosKeyDownloader
import org.openrs2.archive.key.KeyDownloader
import org.openrs2.archive.key.RuneLiteKeyDownloader
import org.openrs2.archive.name.NameDownloader
import org.openrs2.archive.name.RuneStarNameDownloader
import org.openrs2.asm.AsmModule
import org.openrs2.buffer.BufferModule
import org.openrs2.cache.CacheModule
import org.openrs2.db.Database
import org.openrs2.http.HttpModule
import org.openrs2.json.JsonModule
import org.openrs2.net.NetworkModule
import org.openrs2.yaml.YamlModule
import javax.sql.DataSource
/**
 * Guice module wiring for the archive service: installs the shared OpenRS2
 * modules, binds the config/data-source/database providers as singletons and
 * registers downloader implementations via multibinders so additional
 * key/name sources can be contributed without touching this module.
 */
public object ArchiveModule : AbstractModule() {
override fun configure() {
install(AsmModule)
install(BufferModule)
install(CacheModule)
install(HttpModule)
install(JsonModule)
install(NetworkModule)
install(YamlModule)
// Providers are singletons: one config read, one connection pool, one
// Database wrapper for the whole injector.
bind(ArchiveConfig::class.java)
.toProvider(ArchiveConfigProvider::class.java)
.`in`(Scopes.SINGLETON)
bind(DataSource::class.java)
.toProvider(DataSourceProvider::class.java)
.`in`(Scopes.SINGLETON)
bind(Database::class.java)
.toProvider(DatabaseProvider::class.java)
.`in`(Scopes.SINGLETON)
// Contribute JavaTimeModule to the set of Jackson modules.
Multibinder.newSetBinder(binder(), Module::class.java)
.addBinding().to(JavaTimeModule::class.java)
val keyBinder = Multibinder.newSetBinder(binder(), KeyDownloader::class.java)
keyBinder.addBinding().to(HdosKeyDownloader::class.java)
keyBinder.addBinding().to(RuneLiteKeyDownloader::class.java)
val nameBinder = Multibinder.newSetBinder(binder(), NameDownloader::class.java)
nameBinder.addBinding().to(RuneStarNameDownloader::class.java)
}
}

@ -1,28 +0,0 @@
package org.openrs2.archive
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import jakarta.inject.Inject
import jakarta.inject.Provider
import org.flywaydb.core.Flyway
import org.postgresql.ds.PGSimpleDataSource
import javax.sql.DataSource
/**
 * Provides the pooled [DataSource] for the archive database.
 *
 * Flyway migrations run eagerly before the pool is created, so consumers
 * always see an up-to-date schema.
 */
public class DataSourceProvider @Inject constructor(
    private val config: ArchiveConfig
) : Provider<DataSource> {
    override fun get(): DataSource {
        val dataSource = PGSimpleDataSource()
        dataSource.setUrl(config.databaseUrl)

        Flyway.configure()
            .dataSource(dataSource)
            .locations("classpath:/org/openrs2/archive/migrations")
            .load()
            .migrate()

        // Renamed from `config`, which shadowed the injected ArchiveConfig
        // property above.
        val hikariConfig = HikariConfig()
        hikariConfig.dataSource = dataSource
        return HikariDataSource(hikariConfig)
    }
}

@ -1,15 +0,0 @@
package org.openrs2.archive
import jakarta.inject.Inject
import jakarta.inject.Provider
import org.openrs2.db.Database
import org.openrs2.db.PostgresDeadlockDetector
import javax.sql.DataSource
/**
 * Wraps the injected [DataSource] in a [Database], configured with the
 * PostgreSQL deadlock detector.
 */
public class DatabaseProvider @Inject constructor(
    private val dataSource: DataSource
) : Provider<Database> {
    override fun get(): Database =
        Database(dataSource, deadlockDetector = PostgresDeadlockDetector)
}

@ -1,19 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
import org.openrs2.archive.cache.finder.ExtractCommand
/**
 * Parent CLI command grouping the cache-related subcommands of the archive
 * service (cross-pollination, download, extraction, import, export and view
 * refresh). Performs no work itself.
 */
public class CacheCommand : NoOpCliktCommand(name = "cache") {
init {
subcommands(
CrossPollinateCommand(),
DownloadCommand(),
ExtractCommand(),
ImportCommand(),
ImportMasterIndexCommand(),
ExportCommand(),
RefreshViewsCommand()
)
}
}

@ -1,158 +0,0 @@
package org.openrs2.archive.cache
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.archive.cache.nxt.MusicStreamClient
import org.openrs2.archive.game.GameDatabase
import org.openrs2.archive.jav.JavConfig
import org.openrs2.archive.world.World
import org.openrs2.archive.world.WorldList
import org.openrs2.buffer.ByteBufBodyHandler
import org.openrs2.buffer.use
import org.openrs2.net.BootstrapFactory
import org.openrs2.net.awaitSuspend
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine
@Singleton
public class CacheDownloader @Inject constructor(
private val client: HttpClient,
private val byteBufBodyHandler: ByteBufBodyHandler,
private val bootstrapFactory: BootstrapFactory,
private val gameDatabase: GameDatabase,
private val importer: CacheImporter
) {
/**
 * Downloads the cache for the given game/environment/language over the
 * JS5 protocol and feeds it to [CacheImporter].
 *
 * The game's jav_config is fetched first to discover the hostname and
 * current build; the connection itself runs on a Netty event loop and
 * this function suspends (via [suspendCoroutine]) until the channel
 * handler resumes the continuation. The event loop group is always shut
 * down, even on failure.
 *
 * @throws Exception if the game is unknown, the jav_config is missing
 * required fields, or the current build is not set.
 */
public suspend fun download(gameName: String, environment: String, language: String) {
val game = gameDatabase.getGame(gameName, environment, language) ?: throw Exception("Game not found")
val url = game.url ?: throw Exception("URL not set")
val config = JavConfig.download(client, url)
val group = bootstrapFactory.createEventLoopGroup()
try {
suspendCoroutine { continuation ->
val bootstrap = bootstrapFactory.createBootstrap(group)
val hostname: String
val initializer = when (gameName) {
"oldschool" -> {
var buildMajor = game.buildMajor
// Beta environments pick a beta world from the world list;
// live environments use the host of the codebase URL.
hostname = if (environment == "beta") {
findOsrsWorld(config, World::isBeta) ?: throw Exception("Failed to find beta world")
} else {
val codebase = config.config[CODEBASE] ?: throw Exception("Codebase missing")
URI(codebase).host ?: throw Exception("Hostname missing")
}
// jav_config param 25, when present, overrides the stored build.
val serverVersion = config.params[OSRS_SERVER_VERSION]
if (serverVersion != null) {
buildMajor = serverVersion.toInt()
}
OsrsJs5ChannelInitializer(
OsrsJs5ChannelHandler(
bootstrap,
game.scopeId,
game.id,
hostname,
PORT,
buildMajor ?: throw Exception("Current major build not set"),
game.lastMasterIndexId,
continuation,
importer
)
)
}
"runescape" -> {
var buildMajor = game.buildMajor
var buildMinor = game.buildMinor
val serverVersion = config.config[NXT_SERVER_VERSION]
if (serverVersion != null) {
val n = serverVersion.toInt()
/*
 * Only reset buildMinor if buildMajor changes, so
 * we don't have to keep retrying minor versions.
 */
if (buildMajor != n) {
buildMajor = n
buildMinor = 1
}
}
// The JS5 token is the single 32-character param value; zero or
// several candidates is an error.
val tokens = config.params.values.filter { TOKEN_REGEX.matches(it) }
val token = tokens.singleOrNull() ?: throw Exception("Multiple candidate tokens: $tokens")
hostname = if (environment == "beta") {
NXT_BETA_HOSTNAME
} else {
NXT_LIVE_HOSTNAME
}
val musicStreamClient = MusicStreamClient(client, byteBufBodyHandler, "http://$hostname")
NxtJs5ChannelInitializer(
NxtJs5ChannelHandler(
bootstrap,
game.scopeId,
game.id,
hostname,
PORT,
buildMajor ?: throw Exception("Current major build not set"),
buildMinor ?: throw Exception("Current minor build not set"),
game.lastMasterIndexId,
continuation,
importer,
token,
game.languageId,
musicStreamClient
)
)
}
else -> throw UnsupportedOperationException()
}
// Connection failures resume the continuation exceptionally here;
// successful downloads are resumed by the channel handler.
bootstrap.handler(initializer)
.connect(hostname, PORT)
.addListener { future ->
if (!future.isSuccess) {
continuation.resumeWithException(future.cause())
}
}
}
} finally {
group.shutdownGracefully().awaitSuspend()
}
}
// Fetches the OSRS world list (jav_config param 17) and returns the
// hostname of a random world matching the predicate, or null if none do.
private fun findOsrsWorld(config: JavConfig, predicate: (World) -> Boolean): String? {
val url = config.params[OSRS_WORLD_LIST_URL] ?: throw Exception("World list URL missing")
val list = client.send(HttpRequest.newBuilder(URI(url)).build(), byteBufBodyHandler).body().use { buf ->
WorldList.read(buf)
}
return list.worlds
.filter(predicate)
.map(World::hostname)
.shuffled()
.firstOrNull()
}
private companion object {
// jav_config keys; the OSRS params are numeric indices.
private const val CODEBASE = "codebase"
private const val OSRS_WORLD_LIST_URL = "17"
private const val OSRS_SERVER_VERSION = "25"
private const val NXT_SERVER_VERSION = "server_version"
private const val NXT_LIVE_HOSTNAME = "content.runescape.com"
private const val NXT_BETA_HOSTNAME = "content.beta.runescape.com"
private const val PORT = 443
// Matches the 32-character NXT JS5 token among the param values.
private val TOKEN_REGEX = Regex("[A-Za-z0-9*-]{32}")
}
}

@ -1,806 +0,0 @@
package org.openrs2.archive.cache
import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.annotation.JsonUnwrapped
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.buffer.use
import org.openrs2.cache.ChecksumTable
import org.openrs2.cache.DiskStore
import org.openrs2.cache.Js5Archive
import org.openrs2.cache.Js5Compression
import org.openrs2.cache.Js5MasterIndex
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.cache.Store
import org.openrs2.crypto.SymmetricKey
import org.openrs2.db.Database
import org.postgresql.util.PGobject
import java.sql.Connection
import java.time.Instant
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.SortedSet
@Singleton
public class CacheExporter @Inject constructor(
private val database: Database,
private val alloc: ByteBufAllocator
) {
public data class Stats(
val validIndexes: Long,
val indexes: Long,
val validGroups: Long,
val groups: Long,
val validKeys: Long,
val keys: Long,
val size: Long,
val blocks: Long
) {
@JsonIgnore
public val allIndexesValid: Boolean = indexes == validIndexes && indexes != 0L
@JsonIgnore
public val validIndexesFraction: Double = if (indexes == 0L) {
1.0
} else {
validIndexes.toDouble() / indexes
}
@JsonIgnore
public val allGroupsValid: Boolean = groups == validGroups
@JsonIgnore
public val validGroupsFraction: Double = if (groups == 0L) {
1.0
} else {
validGroups.toDouble() / groups
}
@JsonIgnore
public val allKeysValid: Boolean = keys == validKeys
@JsonIgnore
public val validKeysFraction: Double = if (keys == 0L) {
1.0
} else {
validKeys.toDouble() / keys
}
/*
* The max block ID is conveniently also the max number of blocks, as
* zero is reserved.
*/
public val diskStoreValid: Boolean = blocks <= DiskStore.MAX_BLOCK
}
public data class Archive(
val resolved: Boolean,
val stats: ArchiveStats?
)
public data class ArchiveStats(
val validGroups: Long,
val groups: Long,
val validKeys: Long,
val keys: Long,
val size: Long,
val blocks: Long
) {
public val allGroupsValid: Boolean = groups == validGroups
public val validGroupsFraction: Double = if (groups == 0L) {
1.0
} else {
validGroups.toDouble() / groups
}
public val allKeysValid: Boolean = keys == validKeys
public val validKeysFraction: Double = if (keys == 0L) {
1.0
} else {
validKeys.toDouble() / keys
}
}
public data class IndexStats(
val validFiles: Long,
val files: Long,
val size: Long,
val blocks: Long
) {
public val allFilesValid: Boolean = files == validFiles
public val validFilesFraction: Double = if (files == 0L) {
1.0
} else {
validFiles.toDouble() / files
}
}
public data class Build(val major: Int, val minor: Int?) : Comparable<Build> {
override fun compareTo(other: Build): Int {
return compareValuesBy(this, other, Build::major, Build::minor)
}
override fun toString(): String {
return if (minor != null) {
"$major.$minor"
} else {
major.toString()
}
}
internal companion object {
internal fun fromPgObject(o: PGobject): Build? {
val value = o.value!!
require(value.length >= 2)
val parts = value.substring(1, value.length - 1).split(",")
require(parts.size == 2)
val major = parts[0]
val minor = parts[1]
if (major.isEmpty()) {
return null
}
return Build(major.toInt(), if (minor.isEmpty()) null else minor.toInt())
}
}
}
public data class CacheSummary(
val id: Int,
val scope: String,
val game: String,
val environment: String,
val language: String,
val builds: SortedSet<Build>,
val timestamp: Instant?,
val sources: SortedSet<String>,
@JsonUnwrapped
val stats: Stats?
)
public data class Cache(
val id: Int,
val sources: List<Source>,
val updates: List<String>,
val stats: Stats?,
val archives: List<Archive>,
val indexes: List<IndexStats>?,
val masterIndex: Js5MasterIndex?,
val checksumTable: ChecksumTable?
)
public data class Source(
val game: String,
val environment: String,
val language: String,
val build: Build?,
val timestamp: Instant?,
val name: String?,
val description: String?,
val url: String?
)
public data class Key(
val archive: Int,
val group: Int,
val nameHash: Int?,
val name: String?,
@JsonProperty("mapsquare") val mapSquare: Int?,
val key: SymmetricKey
)
public suspend fun totalSize(): Long {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT SUM(size)
FROM cache_stats
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
if (rows.next()) {
rows.getLong(1)
} else {
0
}
}
}
}
}
public suspend fun list(): List<CacheSummary> {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT *
FROM (
SELECT
c.id,
g.name AS game,
sc.name AS scope,
e.name AS environment,
l.iso_code AS language,
array_remove(array_agg(DISTINCT ROW(s.build_major, s.build_minor)::build ORDER BY ROW(s.build_major, s.build_minor)::build ASC), NULL) builds,
MIN(s.timestamp) AS timestamp,
array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL) sources,
cs.valid_indexes,
cs.indexes,
cs.valid_groups,
cs.groups,
cs.valid_keys,
cs.keys,
cs.size,
cs.blocks
FROM caches c
JOIN sources s ON s.cache_id = c.id
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
LEFT JOIN cache_stats cs ON cs.scope_id = sc.id AND cs.cache_id = c.id
WHERE NOT c.hidden
GROUP BY sc.name, c.id, g.name, e.name, l.iso_code, cs.valid_indexes, cs.indexes, cs.valid_groups,
cs.groups, cs.valid_keys, cs.keys, cs.size, cs.blocks
) t
ORDER BY t.game ASC, t.environment ASC, t.language ASC, t.builds[1] ASC, t.timestamp ASC
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
val caches = mutableListOf<CacheSummary>()
while (rows.next()) {
val id = rows.getInt(1)
val game = rows.getString(2)
val scope = rows.getString(3)
val environment = rows.getString(4)
val language = rows.getString(5)
val builds = rows.getArray(6).array as Array<*>
val timestamp = rows.getTimestamp(7)?.toInstant()
@Suppress("UNCHECKED_CAST")
val sources = rows.getArray(8).array as Array<String>
val validIndexes = rows.getLong(9)
val stats = if (!rows.wasNull()) {
val indexes = rows.getLong(10)
val validGroups = rows.getLong(11)
val groups = rows.getLong(12)
val validKeys = rows.getLong(13)
val keys = rows.getLong(14)
val size = rows.getLong(15)
val blocks = rows.getLong(16)
Stats(validIndexes, indexes, validGroups, groups, validKeys, keys, size, blocks)
} else {
null
}
caches += CacheSummary(
id,
scope,
game,
environment,
language,
builds.mapNotNull { o -> Build.fromPgObject(o as PGobject) }.toSortedSet(),
timestamp,
sources.toSortedSet(),
stats
)
}
caches
}
}
}
}
public suspend fun get(scope: String, id: Int): Cache? {
return database.execute { connection ->
val masterIndex: Js5MasterIndex?
val checksumTable: ChecksumTable?
val stats: Stats?
connection.prepareStatement(
"""
SELECT
m.format,
mc.data,
b.data,
cs.valid_indexes,
cs.indexes,
cs.valid_groups,
cs.groups,
cs.valid_keys,
cs.keys,
cs.size,
cs.blocks
FROM caches c
CROSS JOIN scopes s
LEFT JOIN master_indexes m ON m.id = c.id
LEFT JOIN containers mc ON mc.id = m.container_id
LEFT JOIN crc_tables t ON t.id = c.id
LEFT JOIN blobs b ON b.id = t.blob_id
LEFT JOIN cache_stats cs ON cs.scope_id = s.id AND cs.cache_id = c.id
WHERE s.name = ? AND c.id = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val formatString = rows.getString(1)
masterIndex = if (formatString != null) {
Unpooled.wrappedBuffer(rows.getBytes(2)).use { compressed ->
Js5Compression.uncompress(compressed).use { uncompressed ->
val format = MasterIndexFormat.valueOf(formatString.uppercase())
Js5MasterIndex.readUnverified(uncompressed, format)
}
}
} else {
null
}
val blob = rows.getBytes(3)
checksumTable = if (blob != null) {
Unpooled.wrappedBuffer(blob).use { buf ->
ChecksumTable.read(buf)
}
} else {
null
}
val validIndexes = rows.getLong(4)
stats = if (rows.wasNull()) {
null
} else {
val indexes = rows.getLong(5)
val validGroups = rows.getLong(6)
val groups = rows.getLong(7)
val validKeys = rows.getLong(8)
val keys = rows.getLong(9)
val size = rows.getLong(10)
val blocks = rows.getLong(11)
Stats(validIndexes, indexes, validGroups, groups, validKeys, keys, size, blocks)
}
}
}
val sources = mutableListOf<Source>()
connection.prepareStatement(
"""
SELECT g.name, e.name, l.iso_code, s.build_major, s.build_minor, s.timestamp, s.name, s.description, s.url
FROM sources s
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE sc.name = ? AND s.cache_id = ?
ORDER BY s.name ASC
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val game = rows.getString(1)
val environment = rows.getString(2)
val language = rows.getString(3)
var buildMajor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(5)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(6)?.toInstant()
val name = rows.getString(7)
val description = rows.getString(8)
val url = rows.getString(9)
sources += Source(game, environment, language, build, timestamp, name, description, url)
}
}
}
val updates = mutableListOf<String>()
connection.prepareStatement(
"""
SELECT url
FROM updates
WHERE cache_id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
updates += rows.getString(1)
}
}
}
val archives = mutableListOf<Archive>()
connection.prepareStatement(
"""
SELECT a.archive_id, c.id IS NOT NULL, s.valid_groups, s.groups, s.valid_keys, s.keys, s.size, s.blocks
FROM master_index_archives a
LEFT JOIN resolve_index((SELECT id FROM scopes WHERE name = ?), a.archive_id, a.crc32, a.version) c ON TRUE
LEFT JOIN index_stats s ON s.container_id = c.id
WHERE a.master_index_id = ?
UNION ALL
SELECT a.archive_id, b.id IS NOT NULL, NULL, NULL, NULL, NULL, length(b.data), group_blocks(a.archive_id, length(b.data))
FROM crc_table_archives a
LEFT JOIN resolve_archive(a.archive_id, a.crc32) b ON TRUE
WHERE a.crc_table_id = ?
ORDER BY archive_id ASC
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.setInt(3, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val resolved = rows.getBoolean(2)
val size = rows.getLong(7)
val archiveStats = if (!rows.wasNull()) {
val validGroups = rows.getLong(3)
val groups = rows.getLong(4)
val validKeys = rows.getLong(5)
val keys = rows.getLong(6)
val blocks = rows.getLong(8)
ArchiveStats(validGroups, groups, validKeys, keys, size, blocks)
} else {
null
}
archives += Archive(resolved, archiveStats)
}
}
}
val indexes = if (checksumTable != null && archives[5].resolved) {
connection.prepareStatement(
"""
SELECT s.valid_files, s.files, s.size, s.blocks
FROM crc_table_archives a
JOIN resolve_archive(a.archive_id, a.crc32) b ON TRUE
JOIN version_list_stats s ON s.blob_id = b.id
WHERE a.crc_table_id = ? AND a.archive_id = 5
ORDER BY s.index_id ASC
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
val indexes = mutableListOf<IndexStats>()
while (rows.next()) {
val validFiles = rows.getLong(1)
val files = rows.getLong(2)
val size = rows.getLong(3)
val blocks = rows.getLong(4)
indexes += IndexStats(validFiles, files, size, blocks)
}
indexes
}
}
} else {
null
}
Cache(id, sources, updates, stats, archives, indexes, masterIndex, checksumTable)
}
}
public suspend fun getFileName(scope: String, id: Int): String? {
return database.execute { connection ->
// TODO(gpe): what if a cache is from multiple games?
connection.prepareStatement(
"""
SELECT
g.name AS game,
e.name AS environment,
l.iso_code AS language,
array_remove(array_agg(DISTINCT ROW(s.build_major, s.build_minor)::build ORDER BY ROW(s.build_major, s.build_minor)::build ASC), NULL) builds,
MIN(s.timestamp) AS timestamp
FROM sources s
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE sc.name = ? AND s.cache_id = ?
GROUP BY g.name, e.name, l.iso_code
LIMIT 1
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val game = rows.getString(1)
val environment = rows.getString(2)
val language = rows.getString(3)
val name = StringBuilder("$game-$environment-$language")
val builds = rows.getArray(4).array as Array<*>
for (build in builds.mapNotNull { o -> Build.fromPgObject(o as PGobject) }.toSortedSet()) {
name.append("-b")
name.append(build)
}
val timestamp = rows.getTimestamp(5)
if (!rows.wasNull()) {
name.append('-')
name.append(
timestamp.toInstant()
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss"))
)
}
name.append("-openrs2#")
name.append(id)
name.toString()
}
}
}
}
public suspend fun exportGroup(scope: String, id: Int, archive: Int, group: Int): ByteBuf? {
return database.execute { connection ->
if (archive == Store.ARCHIVESET && group == Store.ARCHIVESET) {
connection.prepareStatement(
"""
SELECT c.data
FROM master_indexes m
JOIN containers c ON c.id = m.container_id
WHERE m.id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
if (rows.next()) {
val data = rows.getBytes(1)
return@execute Unpooled.wrappedBuffer(data)
}
}
}
}
connection.prepareStatement(
"""
SELECT g.data
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
WHERE s.name = ? AND g.master_index_id = ? AND g.archive_id = ? AND g.group_id = ?
UNION ALL
SELECT f.data
FROM resolved_files f
WHERE f.crc_table_id = ? AND f.index_id = ? AND f.file_id = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.setInt(3, archive)
stmt.setInt(4, group)
stmt.setInt(5, id)
stmt.setInt(6, archive)
stmt.setInt(7, group)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val data = rows.getBytes(1)
return@execute Unpooled.wrappedBuffer(data)
}
}
}
}
public fun export(scope: String, id: Int, storeFactory: (Boolean) -> Store) {
database.executeOnce { connection ->
val legacy = connection.prepareStatement(
"""
SELECT id
FROM crc_tables
WHERE id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
rows.next()
}
}
storeFactory(legacy).use { store ->
if (legacy) {
exportLegacy(connection, id, store)
} else {
export(connection, scope, id, store)
}
}
}
}
private fun export(connection: Connection, scope: String, id: Int, store: Store) {
connection.prepareStatement(
"""
SELECT g.archive_id, g.group_id, g.data, g.version
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
WHERE s.name = ? AND g.master_index_id = ?
""".trimIndent()
).use { stmt ->
stmt.fetchSize = BATCH_SIZE
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
alloc.buffer(2, 2).use { versionBuf ->
store.create(Js5Archive.ARCHIVESET)
while (rows.next()) {
val archive = rows.getInt(1)
val group = rows.getInt(2)
val bytes = rows.getBytes(3)
val version = rows.getInt(4)
val versionNull = rows.wasNull()
versionBuf.clear()
if (!versionNull) {
versionBuf.writeShort(version)
}
Unpooled.wrappedBuffer(Unpooled.wrappedBuffer(bytes), versionBuf.retain()).use { buf ->
store.write(archive, group, buf)
// ensure the .idx file exists even if it is empty
if (archive == Js5Archive.ARCHIVESET) {
store.create(group)
}
}
}
}
}
}
}
private fun exportLegacy(connection: Connection, id: Int, store: Store) {
connection.prepareStatement(
"""
SELECT index_id, file_id, data, version
FROM resolved_files
WHERE crc_table_id = ?
""".trimIndent()
).use { stmt ->
stmt.fetchSize = BATCH_SIZE
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
alloc.buffer(2, 2).use { versionBuf ->
store.create(0)
while (rows.next()) {
val index = rows.getInt(1)
val file = rows.getInt(2)
val bytes = rows.getBytes(3)
val version = rows.getInt(4)
val versionNull = rows.wasNull()
versionBuf.clear()
if (!versionNull) {
versionBuf.writeShort(version)
}
Unpooled.wrappedBuffer(Unpooled.wrappedBuffer(bytes), versionBuf.retain()).use { buf ->
store.write(index, file, buf)
}
}
}
}
}
}
/**
 * Returns the XTEA keys for all groups of the cache identified by master
 * index [id] within [scope].
 *
 * The name join is restricted by a SQL regex to names of the form lX_Z with
 * X in 0..99 and Z in 0..255 — i.e. map-square loc groups — so [Key.name]
 * and the derived [Key.mapSquare] are only populated for those groups;
 * other groups still get a key but with a null name/mapSquare.
 */
public suspend fun exportKeys(scope: String, id: Int): List<Key> {
    return database.execute { connection ->
        connection.prepareStatement(
            """
            SELECT g.archive_id, g.group_id, g.name_hash, n.name, (k.key).k0, (k.key).k1, (k.key).k2, (k.key).k3
            FROM resolved_groups g
            JOIN scopes s ON s.id = g.scope_id
            JOIN keys k ON k.id = g.key_id
            LEFT JOIN names n ON n.hash = g.name_hash AND n.name ~ '^l(?:[0-9]|[1-9][0-9])_(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$'
            WHERE s.name = ? AND g.master_index_id = ?
            """.trimIndent()
        ).use { stmt ->
            stmt.setString(1, scope)
            stmt.setInt(2, id)

            stmt.executeQuery().use { rows ->
                val keys = mutableListOf<Key>()

                while (rows.next()) {
                    val archive = rows.getInt(1)
                    val group = rows.getInt(2)

                    // getInt() returns 0 for SQL NULL; wasNull() distinguishes
                    // a real 0 hash from an absent one.
                    var nameHash: Int? = rows.getInt(3)
                    if (rows.wasNull()) {
                        nameHash = null
                    }

                    val name = rows.getString(4)
                    val k0 = rows.getInt(5)
                    val k1 = rows.getInt(6)
                    val k2 = rows.getInt(7)
                    val k3 = rows.getInt(8)
                    val mapSquare = getMapSquare(name)

                    keys += Key(archive, group, nameHash, name, mapSquare, SymmetricKey(k0, k1, k2, k3))
                }

                keys
            }
        }
    }
}
private companion object {
    /** JDBC fetch size used when streaming group/file rows. */
    private const val BATCH_SIZE = 256

    /** Matches map-square loc group names of the form lX_Z. */
    private val LOC_NAME_REGEX = Regex("l(\\d+)_(\\d+)")

    /**
     * Derives a packed map-square id ((x << 8) | z) from a loc group name of
     * the form lX_Z, or null when [name] is absent or does not match.
     */
    private fun getMapSquare(name: String?): Int? {
        val match = name?.let(LOC_NAME_REGEX::matchEntire) ?: return null
        val (x, z) = match.destructured
        return (x.toInt() shl 8) or z.toInt()
    }
}
}

File diff suppressed because it is too large Load Diff

@ -1,16 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * CLI entry point that runs a single cross-pollination pass (see
 * [CrossPollinator]) and then tears down the injector.
 */
public class CrossPollinateCommand : CliktCommand(name = "cross-pollinate") {
    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            it.getInstance(CrossPollinator::class.java).crossPollinate()
        }
    }
}

@ -1,223 +0,0 @@
package org.openrs2.archive.cache
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufInputStream
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.cache.Js5Compression
import org.openrs2.cache.Js5CompressionType
import org.openrs2.db.Database
import java.sql.Connection
import java.util.zip.GZIPInputStream
/**
 * Copies identical assets between the legacy (version list) and JS5 halves of
 * the archive when one side is missing data the other side has.
 *
 * Certain archives (see [OLD_TO_NEW_ENGINE]) carry the same payload in both
 * engines, differing only in container framing: legacy files are raw GZIP
 * streams, JS5 groups are GZIP wrapped in a JS5 container header. This class
 * converts each way and imports whichever side was missing.
 */
@Singleton
public class CrossPollinator @Inject constructor(
    private val database: Database,
    private val alloc: ByteBufAllocator,
    private val importer: CacheImporter
) {
    /** Runs one pass over every legacy index / JS5 archive pair. */
    public suspend fun crossPollinate() {
        database.execute { connection ->
            for ((index, archive) in OLD_TO_NEW_ENGINE) {
                crossPollinate(connection, index, archive)
            }
        }
    }

    /**
     * Cross-pollinates legacy [index] with JS5 [archive]: finds entries that
     * exist (by id/version/crc) on both sides but whose blob is resolved on
     * only one side, converts the available blob to the other framing and
     * imports it under a CROSS_POLLINATION source.
     */
    private fun crossPollinate(connection: Connection, index: Int, archive: Int) {
        val scopeId: Int

        // Cross-pollination only applies to the main 'runescape' scope.
        connection.prepareStatement(
            """
            SELECT id
            FROM scopes
            WHERE name = 'runescape'
            """.trimIndent()
        ).use { stmt ->
            stmt.executeQuery().use { rows ->
                check(rows.next())
                scopeId = rows.getInt(1)
            }
        }

        val groups = mutableListOf<CacheImporter.Group>()
        val files = mutableListOf<CacheImporter.File>()

        try {
            // Pair legacy version-list entries with JS5 index entries for the
            // same id where old.version = new.version + 1 (the version list
            // stores versions offset by one relative to the JS5 index —
            // NOTE(review): presumed convention, confirm against importer),
            // keeping only rows where exactly one side's data is resolved.
            connection.prepareStatement(
                """
                SELECT
                    new.group_id AS id,
                    old.version AS old_version,
                    old.crc32 AS old_crc32,
                    b.data AS old_data,
                    new.version AS new_version,
                    new.crc32 AS new_crc32,
                    c.data AS new_data
                FROM (
                    SELECT DISTINCT vf.index_id, vf.file_id, vf.version, vf.crc32
                    FROM version_list_files vf
                    WHERE vf.blob_id IN (
                        SELECT v.blob_id
                        FROM version_lists v
                        JOIN resolved_archives a ON a.blob_id = v.blob_id AND a.archive_id = 5
                    ) AND vf.index_id = ?
                ) old
                JOIN (
                    SELECT DISTINCT ig.group_id, ig.version, ig.crc32
                    FROM index_groups ig
                    WHERE ig.container_id IN (
                        SELECT i.container_id
                        FROM resolved_indexes i
                        WHERE i.scope_id = ? AND i.archive_id = ?
                    )
                ) new ON old.file_id = new.group_id AND old.version = new.version + 1
                LEFT JOIN resolve_file(old.index_id, old.file_id, old.version, old.crc32) b ON TRUE
                LEFT JOIN resolve_group(?, ?::uint1, new.group_id, new.crc32, new.version) c ON TRUE
                WHERE (b.data IS NULL AND c.data IS NOT NULL) OR (b.data IS NOT NULL AND c.data IS NULL)
                """.trimIndent()
            ).use { stmt ->
                stmt.setInt(1, index)
                stmt.setInt(2, scopeId)
                stmt.setInt(3, archive)
                stmt.setInt(4, scopeId)
                stmt.setInt(5, archive)

                stmt.executeQuery().use { rows ->
                    while (rows.next()) {
                        val id = rows.getInt(1)
                        val oldVersion = rows.getInt(2)
                        val oldChecksum = rows.getInt(3)
                        val newVersion = rows.getInt(5)
                        val newChecksum = rows.getInt(6)

                        // Legacy data present, JS5 group missing: wrap the raw
                        // GZIP file in a JS5 container and queue it for import.
                        val oldData = rows.getBytes(4)
                        if (oldData != null) {
                            Unpooled.wrappedBuffer(oldData).use { oldBuf ->
                                fileToGroup(oldBuf, newChecksum).use { newBuf ->
                                    if (newBuf != null) {
                                        val uncompressed = Js5Compression.uncompressUnlessEncrypted(newBuf.slice())
                                        groups += CacheImporter.Group(
                                            archive,
                                            id,
                                            newBuf.retain(),
                                            uncompressed,
                                            newVersion,
                                            false
                                        )
                                    }
                                }
                            }
                        }

                        // JS5 group present, legacy file missing: strip the
                        // container header and queue the raw file for import.
                        val newData = rows.getBytes(7)
                        if (newData != null) {
                            Unpooled.wrappedBuffer(newData).use { newBuf ->
                                val oldBuf = groupToFile(newBuf, oldChecksum)
                                if (oldBuf != null) {
                                    files += CacheImporter.File(index, id, oldBuf, oldVersion)
                                }
                            }
                        }
                    }
                }
            }

            if (groups.isEmpty() && files.isEmpty()) {
                return
            }

            importer.prepare(connection)

            // All cross-pollinated data is attributed to a synthetic source
            // with no game/cache metadata of its own.
            val sourceId = importer.addSource(
                connection,
                type = CacheImporter.SourceType.CROSS_POLLINATION,
                cacheId = null,
                gameId = null,
                buildMajor = null,
                buildMinor = null,
                timestamp = null,
                name = null,
                description = null,
                url = null,
            )

            if (groups.isNotEmpty()) {
                importer.addGroups(connection, scopeId, sourceId, groups)
            }

            if (files.isNotEmpty()) {
                importer.addFiles(connection, sourceId, files)
            }
        } finally {
            // Buffers were retained when queued; release them regardless of
            // whether the import succeeded.
            groups.forEach(CacheImporter.Group::release)
            files.forEach(CacheImporter.File::release)
        }
    }

    /**
     * Decompresses [buf] (a GZIP stream) and returns the uncompressed length,
     * needed for the JS5 container header. Does not consume [buf]'s reader
     * index (callers pass a slice()).
     */
    private fun getUncompressedLength(buf: ByteBuf): Int {
        GZIPInputStream(ByteBufInputStream(buf)).use { input ->
            var len = 0
            val temp = ByteArray(4096)

            while (true) {
                val n = input.read(temp)
                if (n == -1) {
                    break
                }
                len += n
            }

            return len
        }
    }

    /**
     * Wraps a raw GZIP legacy file in a JS5 container (type byte, compressed
     * length, uncompressed length). Returns the container only if its CRC-32
     * matches [expectedChecksum], otherwise null.
     */
    private fun fileToGroup(input: ByteBuf, expectedChecksum: Int): ByteBuf? {
        val len = input.readableBytes()
        val lenWithHeader = len + JS5_COMPRESSION_HEADER_LEN
        val uncompressedLen = getUncompressedLength(input.slice())

        alloc.buffer(lenWithHeader, lenWithHeader).use { output ->
            output.writeByte(Js5CompressionType.GZIP.ordinal)
            output.writeInt(len)
            output.writeInt(uncompressedLen)
            output.writeBytes(input)

            return if (output.crc32() == expectedChecksum) {
                output.retain()
            } else {
                null
            }
        }
    }

    /**
     * Strips the JS5 container header from a GZIP group, yielding a legacy
     * raw GZIP file. Non-GZIP containers cannot be represented in the legacy
     * format, so they and CRC mismatches both return null.
     */
    private fun groupToFile(input: ByteBuf, expectedChecksum: Int): ByteBuf? {
        val type = Js5CompressionType.fromOrdinal(input.readUnsignedByte().toInt())
        if (type != Js5CompressionType.GZIP) {
            return null
        }

        // Skip the remaining header bytes (compressed + uncompressed lengths).
        input.skipBytes(JS5_COMPRESSION_HEADER_LEN - 1)

        return if (input.crc32() == expectedChecksum) {
            input.retainedSlice()
        } else {
            null
        }
    }

    private companion object {
        // Legacy index id -> JS5 archive id for archives whose payloads are
        // byte-identical across engines.
        private val OLD_TO_NEW_ENGINE = mapOf(
            1 to 7, // MODELS
            3 to 6, // MIDI_SONGS
            4 to 5, // MAPS
        )

        // 1 byte compression type + 4 bytes compressed len + 4 bytes uncompressed len.
        private const val JS5_COMPRESSION_HEADER_LEN = 9
    }
}

@ -1,25 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.default
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * CLI entry point that downloads a cache directly from a game's JS5 servers.
 * Defaults to the live oldschool English-language environment.
 */
public class DownloadCommand : CliktCommand(name = "download") {
    private val environment by option().default("live")
    private val language by option().default("en")

    private val game by argument().default("oldschool")

    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            it.getInstance(CacheDownloader::class.java).download(game, environment, language)
        }
    }
}

@ -1,34 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.cache.DiskStore
import org.openrs2.inject.CloseableInjector
/**
 * CLI entry point that exports master index [id] from the archive into a
 * disk-backed cache at [output], using the legacy or JS5 store layout as
 * appropriate for the cache.
 */
public class ExportCommand : CliktCommand(name = "export") {
    private val scope by option().default("runescape")

    private val id by argument().int()
    private val output by argument().path(
        mustExist = true,
        canBeFile = false,
        mustBeReadable = true,
        mustBeWritable = true
    )

    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            val exporter = it.getInstance(CacheExporter::class.java)
            // The exporter tells us whether the cache is legacy before the
            // store is created, so the correct on-disk format is used.
            exporter.export(scope, id) { legacy ->
                DiskStore.create(output, legacy = legacy)
            }
        }
    }
}

@ -1,53 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.cache.Store
import org.openrs2.cli.instant
import org.openrs2.inject.CloseableInjector
/**
 * CLI entry point that imports an on-disk cache at [input] into the archive,
 * tagged with the given game/build/provenance metadata.
 */
public class ImportCommand : CliktCommand(name = "import") {
    private val buildMajor by option().int()
    private val buildMinor by option().int()
    private val timestamp by option().instant()
    private val name by option()
    private val description by option()
    private val url by option()
    private val environment by option().default("live")
    private val language by option().default("en")

    private val game by argument()
    private val input by argument().path(
        mustExist = true,
        canBeFile = false,
        mustBeReadable = true
    )

    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            val importer = it.getInstance(CacheImporter::class.java)

            // Store.open autodetects the cache format; use {} guarantees the
            // store is closed even if the import fails.
            Store.open(input).use { store ->
                importer.import(
                    store,
                    game,
                    environment,
                    language,
                    buildMajor,
                    buildMinor,
                    timestamp,
                    name,
                    description,
                    url
                )
            }
        }
    }
}

@ -1,116 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.flag
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.enum
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.buffer.use
import org.openrs2.cache.Js5CompressionType
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.cli.instant
import org.openrs2.inject.CloseableInjector
import java.io.IOException
import java.nio.file.Files
import kotlin.math.min
/**
 * CLI entry point that imports a standalone master index file into the
 * archive. With --decode-js5-response the input is treated as a raw JS5
 * response capture and unframed first (see [decodeJs5Response]).
 */
public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index") {
    private val buildMajor by option().int()
    private val buildMinor by option().int()
    private val timestamp by option().instant()
    private val name by option()
    private val description by option()
    private val url by option()
    private val environment by option().default("live")
    private val language by option().default("en")
    private val decodeJs5Response by option().flag()

    private val game by argument()
    private val format by argument().enum<MasterIndexFormat>()
    private val input by argument().path(
        mustExist = true,
        canBeDir = false,
        mustBeReadable = true
    )

    override fun run(): Unit = runBlocking {
        CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
            val importer = injector.getInstance(CacheImporter::class.java)

            Unpooled.wrappedBuffer(Files.readAllBytes(input)).use { buf ->
                // Either unframe the JS5 capture or retain() the buffer as-is;
                // both branches hand a buffer to the following use {} which
                // releases it.
                if (decodeJs5Response) {
                    decodeJs5Response(buf)
                } else {
                    buf.retain()
                }.use { decodedBuf ->
                    importer.importMasterIndex(
                        decodedBuf,
                        format,
                        game,
                        environment,
                        language,
                        buildMajor,
                        buildMinor,
                        timestamp,
                        name,
                        description,
                        url
                    )
                }
            }
        }
    }

    /**
     * Strips JS5 transport framing from a captured response, returning the
     * bare container: response header removed, 0xFF block trailers between
     * blocks removed.
     *
     * @throws IOException if the length is negative, the capture is truncated,
     * or a block trailer byte is not 0xFF.
     */
    private fun decodeJs5Response(input: ByteBuf): ByteBuf {
        input.skipBytes(3) // archive and group

        val compression = input.readUnsignedByte().toInt()
        val len = input.readInt()
        if (len < 0) {
            throw IOException("Length is negative: $len")
        }

        // Uncompressed containers have a 5 byte header (type + length);
        // compressed ones carry an extra 4-byte uncompressed length.
        val lenWithHeader = if (compression == Js5CompressionType.UNCOMPRESSED.ordinal) {
            len + 5
        } else {
            len + 9
        }

        input.alloc().buffer(lenWithHeader, lenWithHeader).use { output ->
            output.writeByte(compression)
            output.writeInt(len)

            // First block carries the response header, so less payload fits;
            // subsequent blocks lose only the 1-byte 0xFF trailer.
            var blockLen = 504

            while (true) {
                val n = min(blockLen, output.writableBytes())

                if (input.readableBytes() < n) {
                    throw IOException("Input truncated (expecting $n bytes, got ${input.readableBytes()})")
                }

                output.writeBytes(input, n)

                if (!output.isWritable) {
                    break
                } else if (!input.isReadable) {
                    throw IOException("Input truncated (expecting block trailer)")
                }

                if (input.readUnsignedByte().toInt() != 0xFF) {
                    throw IOException("Invalid block trailer")
                }

                blockLen = 511
            }

            return output.retain()
        }
    }
}

@ -1,376 +0,0 @@
package org.openrs2.archive.cache
import com.github.michaelbull.logging.InlineLogger
import io.netty.bootstrap.Bootstrap
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelException
import io.netty.channel.ChannelHandler
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPipeline
import io.netty.channel.SimpleChannelInboundHandler
import kotlinx.coroutines.runBlocking
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.cache.Js5Archive
import org.openrs2.cache.Js5Compression
import org.openrs2.cache.Js5Index
import org.openrs2.cache.Js5MasterIndex
import org.openrs2.cache.MasterIndexFormat
import java.io.IOException
import java.nio.channels.ClosedChannelException
import java.time.Instant
import kotlin.coroutines.Continuation
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
/**
 * Base Netty handler that downloads a complete cache over the JS5 protocol
 * and streams it into [CacheImporter]. Subclasses supply the game-specific
 * wire format via the abstract create*/configurePipeline methods.
 *
 * Lifecycle: connect -> request master index -> request missing indexes ->
 * request missing groups, keeping at most [maxInFlightRequests] outstanding.
 * On an out-of-date build the version is bumped (up to [maxBuildAttempts])
 * and the connection retried; on an unexpected close, in-flight requests are
 * re-queued and the connection retried up to [maxReconnectionAttempts].
 * [continuation] is resumed exactly once, on completion or fatal error.
 *
 * NOTE(review): annotated @Sharable, but the handler holds per-download
 * mutable state; it appears intended for reuse across sequential reconnects
 * of one download, not concurrent channels — confirm before sharing.
 */
@ChannelHandler.Sharable
public abstract class Js5ChannelHandler(
    private val bootstrap: Bootstrap,
    private val scopeId: Int,
    private val gameId: Int,
    private val hostname: String,
    private val port: Int,
    protected var buildMajor: Int,
    protected var buildMinor: Int?,
    private val lastMasterIndexId: Int?,
    private val continuation: Continuation<Unit>,
    private val importer: CacheImporter,
    private val masterIndexFormat: MasterIndexFormat,
    private val maxInFlightRequests: Int,
    private val maxBuildAttempts: Int = 10,
    private val maxReconnectionAttempts: Int = 1
) : SimpleChannelInboundHandler<Any>(Object::class.java) {
    // A request written to the channel but not yet answered.
    protected data class InFlightRequest(val prefetch: Boolean, val archive: Int, val group: Int)

    // A request queued locally until the in-flight window has room; the
    // expected version/checksum let it be rebuilt after a reconnect.
    protected data class PendingRequest(
        val prefetch: Boolean,
        val archive: Int,
        val group: Int,
        val version: Int,
        val checksum: Int
    )

    private enum class State {
        CONNECTING,
        CLIENT_OUT_OF_DATE,
        CONNECTED,
        RESUMING_CONTINUATION
    }

    private var state = State.CONNECTING
    private var buildAttempts = 0
    private var reconnectionAttempts = 0
    private val inFlightRequests = mutableSetOf<InFlightRequest>()
    private val pendingRequests = ArrayDeque<PendingRequest>()
    private var masterIndexId: Int = 0
    private var sourceId: Int = 0
    private var masterIndex: Js5MasterIndex? = null
    // Indexed by archive id; null until the archive's index has been fetched
    // (or permanently for empty archives skipped in processMasterIndex).
    private lateinit var indexes: Array<Js5Index?>
    // Groups accumulated for the next batched import.
    private val groups = mutableListOf<CacheImporter.Group>()

    /** Handshake message sent immediately after the channel becomes active. */
    protected abstract fun createInitMessage(): Any

    /** Wire message requesting a single group. */
    protected abstract fun createRequestMessage(prefetch: Boolean, archive: Int, group: Int): Any

    /** Optional message sent once the server acknowledges the handshake. */
    protected abstract fun createConnectedMessage(): Any?

    /** Swaps handshake codecs for JS5 codecs after a successful handshake. */
    protected abstract fun configurePipeline(pipeline: ChannelPipeline)

    /** Bumps the client build after a CLIENT_OUT_OF_DATE response. */
    protected abstract fun incrementVersion()

    override fun channelActive(ctx: ChannelHandlerContext) {
        assert(state == State.CONNECTING)

        ctx.writeAndFlush(createInitMessage(), ctx.voidPromise())
        ctx.read()
    }

    override fun channelReadComplete(ctx: ChannelHandlerContext) {
        /*
         * Wait for us to receive the OK message before we send JS5 requests,
         * as the RS3 JS5 server ignores any JS5 requests sent before the OK
         * message is received.
         */
        if (state != State.CONNECTED) {
            return
        }

        // Top up the in-flight window from the pending queue.
        var flush = false

        while (inFlightRequests.size < maxInFlightRequests) {
            val request = pendingRequests.removeFirstOrNull() ?: break
            inFlightRequests += InFlightRequest(request.prefetch, request.archive, request.group)

            logger.info { "Requesting archive ${request.archive} group ${request.group}" }

            ctx.write(createRequestMessage(request.prefetch, request.archive, request.group), ctx.voidPromise())
            flush = true
        }

        if (flush) {
            ctx.flush()
        }

        // Only keep reading while responses are still expected.
        if (inFlightRequests.isNotEmpty()) {
            ctx.read()
        }
    }

    override fun channelInactive(ctx: ChannelHandlerContext) {
        if (state == State.CLIENT_OUT_OF_DATE) {
            // Expected close after the server rejected our build: retry the
            // handshake with the (already incremented) version.
            state = State.CONNECTING
            bootstrap.connect(hostname, port)
        } else if (state != State.RESUMING_CONTINUATION) {
            // NOTE(review): a close while isComplete() is anomalous — normally
            // completion resumes the continuation and sets
            // RESUMING_CONTINUATION before the channel closes.
            if (isComplete()) {
                throw Exception("Connection closed unexpectedly")
            } else if (++reconnectionAttempts > maxReconnectionAttempts) {
                throw Exception("Connection closed unexpectedly after maximum number of reconnection attempts")
            }

            // move in-flight requests back to the pending queue
            for (request in inFlightRequests) {
                val prefetch = request.prefetch
                val archive = request.archive
                val group = request.group

                pendingRequests += if (archive == Js5Archive.ARCHIVESET && group == Js5Archive.ARCHIVESET) {
                    // Master index request: no expected version/checksum.
                    PendingRequest(prefetch, archive, group, 0, 0)
                } else if (archive == Js5Archive.ARCHIVESET) {
                    val entry = masterIndex!!.entries[group]
                    val version = entry.version
                    val checksum = entry.checksum
                    PendingRequest(prefetch, archive, group, version, checksum)
                } else {
                    val entry = indexes[archive]!![group]!!
                    val version = entry.version
                    val checksum = entry.checksum
                    PendingRequest(prefetch, archive, group, version, checksum)
                }
            }

            inFlightRequests.clear()

            // re-connect
            state = State.CONNECTING
            bootstrap.connect(hostname, port)
        }
    }

    @Suppress("OVERRIDE_DEPRECATION")
    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        releaseGroups()

        if (state == State.RESUMING_CONTINUATION) {
            logger.warn(cause) { "Swallowing exception as continuation has already resumed" }
        } else if (cause !is ChannelException && cause !is IOException) {
            /*
             * We skip continuation resumption if there's an I/O error or
             * timeout - this allows channelInactive() to attempt to reconnect
             * if we haven't used too many reconnection attempts.
             */
            state = State.RESUMING_CONTINUATION
            continuation.resumeWithException(cause)
        }

        if (cause !is ClosedChannelException) {
            ctx.close()
        }
    }

    /** Completes the handshake and kicks off the master index request. */
    protected fun handleOk(ctx: ChannelHandlerContext) {
        assert(state == State.CONNECTING)

        configurePipeline(ctx.pipeline())

        val msg = createConnectedMessage()
        if (msg != null) {
            ctx.write(msg, ctx.voidPromise())
        }

        state = State.CONNECTED

        // Only request the master index on a fresh download; after a
        // reconnect the pending queue already holds outstanding requests.
        if (masterIndex == null && pendingRequests.isEmpty()) {
            request(ctx, Js5Archive.ARCHIVESET, Js5Archive.ARCHIVESET, 0, 0)
        }
    }

    /** Bumps the build and reconnects after the server rejected our version. */
    protected fun handleClientOutOfDate(ctx: ChannelHandlerContext) {
        assert(state == State.CONNECTING)

        if (++buildAttempts > maxBuildAttempts) {
            throw Exception("Failed to identify current version")
        }

        state = State.CLIENT_OUT_OF_DATE
        incrementVersion()
        ctx.close()
    }

    /**
     * Matches a response to its in-flight request and processes it; a
     * response with no matching request is a protocol error.
     */
    protected fun handleResponse(
        ctx: ChannelHandlerContext,
        prefetch: Boolean,
        archive: Int,
        group: Int,
        data: ByteBuf
    ) {
        val request = InFlightRequest(prefetch, archive, group)

        val removed = inFlightRequests.remove(request)
        if (!removed) {
            val type = if (prefetch) {
                "prefetch"
            } else {
                "urgent"
            }

            throw Exception("Received response for $type request (archive $archive group $group) not in-flight")
        }

        processResponse(ctx, archive, group, data)
    }

    /**
     * Dispatches a response by kind (master index / index / group), flushes
     * group batches to the importer, and on completion records the master
     * index and resumes [continuation].
     */
    protected fun processResponse(ctx: ChannelHandlerContext, archive: Int, group: Int, data: ByteBuf) {
        if (archive == Js5Archive.ARCHIVESET && group == Js5Archive.ARCHIVESET) {
            processMasterIndex(ctx, data)
        } else if (archive == Js5Archive.ARCHIVESET) {
            processIndex(ctx, group, data)
        } else {
            processGroup(archive, group, data)
        }

        val complete = isComplete()

        if (groups.size >= CacheImporter.BATCH_SIZE || complete) {
            runBlocking {
                importer.importGroups(scopeId, sourceId, groups)
            }

            releaseGroups()
        }

        if (complete) {
            runBlocking {
                importer.setLastMasterIndexId(gameId, masterIndexId)
            }

            state = State.RESUMING_CONTINUATION
            continuation.resume(Unit)
            ctx.close()
        } else {
            /*
             * Reset the number of reconnection attempts as we are making
             * progress.
             */
            reconnectionAttempts = 0
        }
    }

    protected open fun isComplete(): Boolean {
        return pendingRequests.isEmpty() && inFlightRequests.isEmpty()
    }

    /**
     * Imports the master index, then for each non-empty archive either
     * processes the index the importer already had or requests it from the
     * server.
     */
    private fun processMasterIndex(ctx: ChannelHandlerContext, buf: ByteBuf) {
        Js5Compression.uncompress(buf.slice()).use { uncompressed ->
            masterIndex = Js5MasterIndex.readUnverified(uncompressed.slice(), masterIndexFormat)

            val (masterIndexId, sourceId, rawIndexes) = runBlocking {
                importer.importMasterIndexAndGetIndexes(
                    masterIndex!!,
                    buf,
                    uncompressed,
                    gameId,
                    scopeId,
                    buildMajor,
                    buildMinor,
                    lastMasterIndexId,
                    timestamp = Instant.now()
                )
            }

            this.masterIndexId = masterIndexId
            this.sourceId = sourceId

            try {
                indexes = arrayOfNulls(rawIndexes.size)

                for ((archive, index) in rawIndexes.withIndex()) {
                    val entry = masterIndex!!.entries[archive]

                    // version == 0 && checksum == 0 denotes an empty archive.
                    if (entry.version == 0 && entry.checksum == 0) {
                        continue
                    }

                    if (index != null) {
                        processIndex(ctx, archive, index)
                    } else {
                        request(ctx, Js5Archive.ARCHIVESET, archive, entry.version, entry.checksum)
                    }
                }
            } finally {
                rawIndexes.filterNotNull().forEach(ByteBuf::release)
            }
        }
    }

    /**
     * Verifies an archive index against the master index, imports it, and
     * requests every group the importer does not already have.
     *
     * @throws Exception if the checksum or version do not match.
     */
    private fun processIndex(ctx: ChannelHandlerContext, archive: Int, buf: ByteBuf) {
        val checksum = buf.crc32()
        val entry = masterIndex!!.entries[archive]

        if (checksum != entry.checksum) {
            throw Exception("Index $archive checksum invalid (expected ${entry.checksum}, actual $checksum)")
        }

        Js5Compression.uncompress(buf.slice()).use { uncompressed ->
            val index = Js5Index.read(uncompressed.slice())
            indexes[archive] = index

            if (index.version != entry.version) {
                throw Exception("Index $archive version invalid (expected ${entry.version}, actual ${index.version})")
            }

            val groups = runBlocking {
                importer.importIndexAndGetMissingGroups(
                    scopeId,
                    sourceId,
                    archive,
                    index,
                    buf,
                    uncompressed,
                    lastMasterIndexId
                )
            }

            for (group in groups) {
                val groupEntry = index[group]!!
                request(ctx, archive, group, groupEntry.version, groupEntry.checksum)
            }
        }
    }

    /**
     * Verifies a group's checksum against its index entry and queues it for
     * the next batched import.
     *
     * @throws Exception if the checksum does not match.
     */
    private fun processGroup(archive: Int, group: Int, buf: ByteBuf) {
        val checksum = buf.crc32()
        val entry = indexes[archive]!![group]!!

        if (checksum != entry.checksum) {
            val expected = entry.checksum
            throw Exception("Archive $archive group $group checksum invalid (expected $expected, actual $checksum)")
        }

        val uncompressed = Js5Compression.uncompressUnlessEncrypted(buf.slice())

        groups += CacheImporter.Group(
            archive,
            group,
            buf.retain(),
            uncompressed,
            entry.version,
            versionTruncated = false
        )
    }

    /** Queues a request; subclasses may divert some requests elsewhere. */
    protected open fun request(ctx: ChannelHandlerContext, archive: Int, group: Int, version: Int, checksum: Int) {
        pendingRequests += PendingRequest(false, archive, group, version, checksum)
    }

    /** Releases and clears the current group batch's buffers. */
    private fun releaseGroups() {
        groups.forEach(CacheImporter.Group::release)
        groups.clear()
    }

    private companion object {
        private val logger = InlineLogger()
    }
}

@ -1,158 +0,0 @@
package org.openrs2.archive.cache
import com.github.michaelbull.logging.InlineLogger
import io.netty.bootstrap.Bootstrap
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPipeline
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.asCoroutineDispatcher
import kotlinx.coroutines.cancel
import kotlinx.coroutines.launch
import org.openrs2.archive.cache.nxt.InitJs5RemoteConnection
import org.openrs2.archive.cache.nxt.Js5Request
import org.openrs2.archive.cache.nxt.Js5RequestEncoder
import org.openrs2.archive.cache.nxt.Js5Response
import org.openrs2.archive.cache.nxt.Js5ResponseDecoder
import org.openrs2.archive.cache.nxt.LoginResponse
import org.openrs2.archive.cache.nxt.MusicStreamClient
import org.openrs2.buffer.use
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
import org.openrs2.protocol.js5.downstream.XorDecoder
import kotlin.coroutines.Continuation
/**
 * JS5 handler for the NXT (RS3) client. Uses major.minor build numbers and a
 * login token, and diverts music archive requests to a separate HTTP music
 * stream service rather than the JS5 connection.
 *
 * Music requests are issued as coroutines on the channel's event loop, so
 * [inFlightRequests]/[pendingRequests] are only touched from that single
 * thread — presumably why no explicit synchronisation is needed (confirm).
 */
public class NxtJs5ChannelHandler(
    bootstrap: Bootstrap,
    scopeId: Int,
    gameId: Int,
    hostname: String,
    port: Int,
    buildMajor: Int,
    buildMinor: Int,
    lastMasterIndexId: Int?,
    continuation: Continuation<Unit>,
    importer: CacheImporter,
    private val token: String,
    private val languageId: Int,
    private val musicStreamClient: MusicStreamClient,
    private val maxMinorBuildAttempts: Int = 5
) : Js5ChannelHandler(
    bootstrap,
    scopeId,
    gameId,
    hostname,
    port,
    buildMajor,
    buildMinor,
    lastMasterIndexId,
    continuation,
    importer,
    MasterIndexFormat.LENGTHS,
    maxInFlightRequests = 500
) {
    // A music group fetched via the HTTP music stream instead of JS5.
    private data class MusicRequest(val archive: Int, val group: Int, val version: Int, val checksum: Int)

    // In-flight/pending state for music requests only; JS5 requests are
    // tracked by the base class.
    private var inFlightRequests = 0
    private val pendingRequests = ArrayDeque<MusicRequest>()
    // Scope bound to the channel's event loop; created on activate, cancelled
    // on inactive.
    private var scope: CoroutineScope? = null
    private var minorBuildAttempts = 0

    override fun createInitMessage(): Any {
        return InitJs5RemoteConnection(buildMajor, buildMinor!!, token, languageId)
    }

    override fun createRequestMessage(prefetch: Boolean, archive: Int, group: Int): Any {
        return Js5Request.Group(prefetch, archive, group, buildMajor)
    }

    override fun createConnectedMessage(): Any? {
        return Js5Request.Connected(buildMajor)
    }

    override fun configurePipeline(pipeline: ChannelPipeline) {
        // Swap the login codecs for the raw JS5 codecs post-handshake.
        pipeline.addBefore("handler", null, Js5RequestEncoder)
        pipeline.addBefore("handler", null, XorDecoder())
        pipeline.addBefore("handler", null, Js5ResponseDecoder())

        pipeline.remove(Rs2Encoder::class.java)
        pipeline.remove(Rs2Decoder::class.java)
    }

    override fun incrementVersion() {
        // Probe minor versions first; after maxMinorBuildAttempts move to the
        // next major version and restart the minor search at 1.
        buildMinor = buildMinor!! + 1

        if (++minorBuildAttempts >= maxMinorBuildAttempts) {
            buildMajor++
            buildMinor = 1
        }
    }

    override fun channelActive(ctx: ChannelHandlerContext) {
        super.channelActive(ctx)

        scope = CoroutineScope(ctx.channel().eventLoop().asCoroutineDispatcher())
    }

    override fun channelInactive(ctx: ChannelHandlerContext) {
        super.channelInactive(ctx)

        // Cancels any outstanding music stream downloads.
        scope!!.cancel()
    }

    override fun channelRead0(ctx: ChannelHandlerContext, msg: Any) {
        when (msg) {
            is LoginResponse.Js5Ok -> handleOk(ctx)
            is LoginResponse.ClientOutOfDate -> handleClientOutOfDate(ctx)
            is LoginResponse -> throw Exception("Invalid response: $msg")
            is Js5Response -> handleResponse(ctx, msg.prefetch, msg.archive, msg.group, msg.data)
            else -> throw Exception("Unknown message type: ${msg.javaClass.name}")
        }
    }

    override fun channelReadComplete(ctx: ChannelHandlerContext) {
        super.channelReadComplete(ctx)

        // Keep up to 6 concurrent music stream downloads going.
        while (inFlightRequests < 6) {
            val request = pendingRequests.removeFirstOrNull() ?: break
            inFlightRequests++

            logger.info { "Requesting archive ${request.archive} group ${request.group}" }

            scope!!.launch {
                val archive = request.archive
                val group = request.group
                val version = request.version
                val checksum = request.checksum

                musicStreamClient.request(archive, group, version, checksum, buildMajor).use { buf ->
                    inFlightRequests--

                    processResponse(ctx, archive, group, buf)

                    /*
                     * Inject a fake channelReadComplete event to ensure we
                     * don't time out and to send any new music requests.
                     */
                    ctx.channel().pipeline().fireChannelReadComplete()
                }
            }
        }
    }

    override fun isComplete(): Boolean {
        // Music downloads must also drain before the download is complete.
        return super.isComplete() && pendingRequests.isEmpty() && inFlightRequests == 0
    }

    override fun request(ctx: ChannelHandlerContext, archive: Int, group: Int, version: Int, checksum: Int) {
        if (archive == MUSIC_ARCHIVE) {
            pendingRequests += MusicRequest(archive, group, version, checksum)
        } else {
            super.request(ctx, archive, group, version, checksum)
        }
    }

    private companion object {
        private val logger = InlineLogger()
        private const val MUSIC_ARCHIVE = 40
    }
}

@ -1,22 +0,0 @@
package org.openrs2.archive.cache
import io.netty.channel.Channel
import io.netty.channel.ChannelInitializer
import io.netty.handler.timeout.ReadTimeoutHandler
import org.openrs2.archive.cache.nxt.ClientOutOfDateCodec
import org.openrs2.archive.cache.nxt.InitJs5RemoteConnectionCodec
import org.openrs2.archive.cache.nxt.Js5OkCodec
import org.openrs2.protocol.Protocol
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
/**
 * Sets up a fresh NXT JS5 channel: a 30 second read timeout, the login
 * handshake codecs, and finally [handler] (named so it can re-wire the
 * pipeline around itself once the handshake completes).
 */
public class NxtJs5ChannelInitializer(private val handler: NxtJs5ChannelHandler) : ChannelInitializer<Channel>() {
    override fun initChannel(ch: Channel) {
        val pipeline = ch.pipeline()
        pipeline.addLast(
            ReadTimeoutHandler(30),
            Rs2Encoder(Protocol(InitJs5RemoteConnectionCodec)),
            Rs2Decoder(Protocol(Js5OkCodec, ClientOutOfDateCodec))
        )
        pipeline.addLast("handler", handler)
    }
}

@ -1,76 +0,0 @@
package org.openrs2.archive.cache
import io.netty.bootstrap.Bootstrap
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPipeline
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
import org.openrs2.protocol.js5.downstream.Js5LoginResponse
import org.openrs2.protocol.js5.downstream.Js5Response
import org.openrs2.protocol.js5.downstream.Js5ResponseDecoder
import org.openrs2.protocol.js5.downstream.XorDecoder
import org.openrs2.protocol.js5.upstream.Js5Request
import org.openrs2.protocol.js5.upstream.Js5RequestEncoder
import org.openrs2.protocol.login.upstream.LoginRequest
import kotlin.coroutines.Continuation
/**
 * JS5 handler for the Old School RuneScape client. OSRS uses a single build
 * number (no minor version) and has no post-handshake message, so most of
 * the base class hooks are trivial.
 */
public class OsrsJs5ChannelHandler(
    bootstrap: Bootstrap,
    scopeId: Int,
    gameId: Int,
    hostname: String,
    port: Int,
    build: Int,
    lastMasterIndexId: Int?,
    continuation: Continuation<Unit>,
    importer: CacheImporter
) : Js5ChannelHandler(
    bootstrap,
    scopeId,
    gameId,
    hostname,
    port,
    build,
    null,
    lastMasterIndexId,
    continuation,
    importer,
    MasterIndexFormat.VERSIONED,
    maxInFlightRequests = 200
) {
    override fun createInitMessage(): Any = LoginRequest.InitJs5RemoteConnection(buildMajor)

    override fun createRequestMessage(prefetch: Boolean, archive: Int, group: Int): Any =
        Js5Request.Group(prefetch, archive, group)

    // OSRS sends nothing after the handshake is acknowledged.
    override fun createConnectedMessage(): Any? = null

    override fun configurePipeline(pipeline: ChannelPipeline) {
        // Swap the login codecs for the raw JS5 codecs post-handshake.
        pipeline.addBefore("handler", null, Js5RequestEncoder)
        pipeline.addBefore("handler", null, XorDecoder())
        pipeline.addBefore("handler", null, Js5ResponseDecoder())

        pipeline.remove(Rs2Encoder::class.java)
        pipeline.remove(Rs2Decoder::class.java)
    }

    // OSRS only has a major build number to probe.
    override fun incrementVersion() {
        buildMajor++
    }

    override fun channelRead0(ctx: ChannelHandlerContext, msg: Any) {
        when (msg) {
            is Js5LoginResponse.Ok -> handleOk(ctx)
            is Js5LoginResponse.ClientOutOfDate -> handleClientOutOfDate(ctx)
            is Js5LoginResponse -> throw Exception("Invalid response: $msg")
            is Js5Response -> handleResponse(ctx, msg.prefetch, msg.archive, msg.group, msg.data)
            else -> throw Exception("Unknown message type: ${msg.javaClass.name}")
        }
    }
}

@ -1,22 +0,0 @@
package org.openrs2.archive.cache
import io.netty.channel.Channel
import io.netty.channel.ChannelInitializer
import io.netty.handler.timeout.ReadTimeoutHandler
import org.openrs2.protocol.Protocol
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
import org.openrs2.protocol.js5.downstream.Js5ClientOutOfDateCodec
import org.openrs2.protocol.js5.downstream.Js5OkCodec
import org.openrs2.protocol.login.upstream.InitJs5RemoteConnectionCodec
/**
 * Sets up a fresh OSRS JS5 channel: a 30 second read timeout, the login
 * handshake codecs, and finally [handler] (named so it can re-wire the
 * pipeline around itself once the handshake completes).
 */
public class OsrsJs5ChannelInitializer(private val handler: OsrsJs5ChannelHandler) : ChannelInitializer<Channel>() {
    override fun initChannel(ch: Channel) {
        val pipeline = ch.pipeline()
        pipeline.addLast(
            ReadTimeoutHandler(30),
            Rs2Encoder(Protocol(InitJs5RemoteConnectionCodec())),
            Rs2Decoder(Protocol(Js5OkCodec(), Js5ClientOutOfDateCodec()))
        )
        pipeline.addLast("handler", handler)
    }
}

@ -1,16 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/** CLI entry point that refreshes the archive database's materialized views. */
public class RefreshViewsCommand : CliktCommand(name = "refresh-views") {
    override fun run(): Unit = runBlocking {
        val injector = CloseableInjector(Guice.createInjector(ArchiveModule))
        injector.use {
            it.getInstance(CacheImporter::class.java).refreshViews()
        }
    }
}

@ -1,149 +0,0 @@
package org.openrs2.archive.cache.finder
import com.github.michaelbull.logging.InlineLogger
import com.google.common.io.ByteStreams
import com.google.common.io.LittleEndianDataInputStream
import org.openrs2.util.charset.Cp1252Charset
import java.io.Closeable
import java.io.EOFException
import java.io.IOException
import java.io.InputStream
import java.io.PushbackInputStream
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.attribute.BasicFileAttributeView
import java.nio.file.attribute.FileTime
import java.time.Instant
/**
 * Extracts cache files from a RuneScape cache finder dump.
 *
 * The dump is a little-endian stream of WIN32_FIND_DATA-like records
 * (attributes, three FILETIME timestamps, 64-bit size, MAX_PATH name),
 * optionally preceded by a 0xFE-prefixed signature and per-directory path
 * records in the "new" format. Directories in the dump are separated by a
 * single 0xFF byte.
 */
public class CacheFinderExtractor(
    input: InputStream
) : Closeable {
    private val pushbackInput = PushbackInputStream(input)
    private val input = LittleEndianDataInputStream(pushbackInput)

    /**
     * Reads a little-endian Windows FILETIME (100 ns intervals since
     * 1601-01-01 UTC) and converts it to a [FileTime].
     */
    private fun readTimestamp(): FileTime {
        val lo = input.readInt().toLong() and 0xFFFFFFFF
        val hi = input.readInt().toLong() and 0xFFFFFFFF
        val filetime = (hi shl 32) or lo
        val seconds = (filetime / 10_000_000) - FILETIME_TO_UNIX_EPOCH
        // The sub-second component is the division remainder in 100 ns units.
        // (Previously the low 32 bits of the FILETIME were passed as the
        // nanosecond adjustment, skewing timestamps by up to several seconds.)
        val nanos = (filetime % 10_000_000) * 100
        return FileTime.from(Instant.ofEpochSecond(seconds, nanos))
    }

    /**
     * Reads a fixed MAX_PATH-byte buffer and decodes it as a NUL-terminated
     * CP-1252 string.
     */
    private fun readName(): String {
        val bytes = ByteArray(MAX_PATH)
        input.readFully(bytes)

        var len = bytes.size
        for ((i, b) in bytes.withIndex()) {
            if (b.toInt() == 0) {
                len = i
                break
            }
        }

        return String(bytes, 0, len, Cp1252Charset)
    }

    /** Returns the next byte without consuming it, or -1 at end of stream. */
    private fun peekUnsignedByte(): Int {
        val n = pushbackInput.read()
        // Don't unread EOF: unread(-1) would push back a spurious 0xFF byte.
        if (n != -1) {
            pushbackInput.unread(n)
        }
        return n
    }

    /**
     * Extracts every file in the dump into per-directory subdirectories of
     * [destination] (cache0, cache1, ... or cacheN_<suffix> in the new
     * format), restoring each file's create/access/modify times.
     *
     * @throws IOException if the new-format signature is invalid.
     */
    public fun extract(destination: Path) {
        // New-format dumps begin with the 0xFE-prefixed signature "\xFERSC".
        val newVersion = peekUnsignedByte() == 0xFE
        if (newVersion) {
            val signature = input.readInt()
            if (signature != 0x435352FE) {
                throw IOException("Invalid signature")
            }
        }

        var readDirectoryPath = true
        var number = 0
        var directorySuffix: String? = null

        while (true) {
            // New-format dumps carry the source directory's full Windows path
            // before its entries; keep its last component as a suffix for the
            // destination directory name.
            if (newVersion && readDirectoryPath) {
                val len = try {
                    input.readInt()
                } catch (ex: EOFException) {
                    break
                }

                val bytes = ByteArray(len)
                input.readFully(bytes)

                val path = String(bytes, Cp1252Charset)
                logger.info { "Extracting $path" }

                readDirectoryPath = false
                directorySuffix = path.substring(path.lastIndexOf('\\') + 1)
                    .replace(INVALID_CHARS, "_")
                continue
            }

            // A single 0xFF byte separates directories.
            if (peekUnsignedByte() == 0xFF) {
                input.skipBytes(1)
                readDirectoryPath = true
                number++
                continue
            }

            val attributes = try {
                input.readInt()
            } catch (ex: EOFException) {
                break
            }

            val btime = readTimestamp()
            val atime = readTimestamp()
            val mtime = readTimestamp()

            val sizeHi = input.readInt().toLong() and 0xFFFFFFFF
            val sizeLo = input.readInt().toLong() and 0xFFFFFFFF
            val size = (sizeHi shl 32) or sizeLo

            input.skipBytes(8) // reserved

            val name = readName()

            input.skipBytes(14) // alternate name
            input.skipBytes(2) // padding

            val dir = if (directorySuffix != null) {
                destination.resolve("cache${number}_$directorySuffix")
            } else {
                destination.resolve("cache$number")
            }
            Files.createDirectories(dir)

            // Directory entries carry no payload; only plain files do.
            if ((attributes and FILE_ATTRIBUTE_DIRECTORY) == 0) {
                val file = dir.resolve(name)

                Files.newOutputStream(file).use { output ->
                    ByteStreams.copy(ByteStreams.limit(input, size), output)
                }

                val view = Files.getFileAttributeView(file, BasicFileAttributeView::class.java)
                view.setTimes(mtime, atime, btime)
            }
        }
    }

    override fun close() {
        input.close()
    }

    private companion object {
        // Seconds between 1601-01-01 (FILETIME epoch) and 1970-01-01 (Unix epoch).
        private const val FILETIME_TO_UNIX_EPOCH: Long = 11644473600
        private const val MAX_PATH = 260
        private const val FILE_ATTRIBUTE_DIRECTORY = 0x10
        private val INVALID_CHARS = Regex("[^A-Za-z0-9-]")
        private val logger = InlineLogger()
    }
}

@ -1,25 +0,0 @@
package org.openrs2.archive.cache.finder
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.default
import com.github.ajalt.clikt.parameters.types.inputStream
import com.github.ajalt.clikt.parameters.types.path
import java.nio.file.Path
/**
 * CLI command that extracts a cache finder dump (first argument) into an
 * output directory (second argument, defaulting to the working directory).
 */
public class ExtractCommand : CliktCommand(name = "extract") {
    private val input by argument().inputStream()
    private val output by argument().path(
        mustExist = false,
        canBeFile = false,
        canBeDir = true,
        mustBeReadable = true,
        mustBeWritable = true
    ).default(Path.of("."))

    override fun run() {
        CacheFinderExtractor(input).use { it.extract(output) }
    }
}

@ -1,8 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.EmptyPacketCodec
/** Zero-length NXT JS5 login response indicating the client build is out of date. */
public object ClientOutOfDateCodec : EmptyPacketCodec<LoginResponse.ClientOutOfDate>(
    packet = LoginResponse.ClientOutOfDate,
    opcode = 6
)

@ -1,10 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.Packet
/**
 * Upstream packet initiating an NXT JS5 remote connection.
 *
 * Carries the client's major/minor build numbers, a token and a language id
 * (encoded by [InitJs5RemoteConnectionCodec] as int, int, string, byte).
 */
public data class InitJs5RemoteConnection(
public val buildMajor: Int,
public val buildMinor: Int,
public val token: String,
public val language: Int
) : Packet

@ -1,27 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import org.openrs2.buffer.readString
import org.openrs2.buffer.writeString
import org.openrs2.crypto.StreamCipher
import org.openrs2.protocol.VariableBytePacketCodec
/**
 * Codec for the NXT JS5 handshake packet (opcode 15): two 32-bit build
 * numbers, a token string and a single language byte.
 */
public object InitJs5RemoteConnectionCodec : VariableBytePacketCodec<InitJs5RemoteConnection>(
    type = InitJs5RemoteConnection::class.java,
    opcode = 15
) {
    override fun decode(input: ByteBuf, cipher: StreamCipher): InitJs5RemoteConnection {
        // Kotlin evaluates arguments left to right, preserving the read order.
        return InitJs5RemoteConnection(
            input.readInt(),
            input.readInt(),
            input.readString(),
            input.readUnsignedByte().toInt()
        )
    }

    override fun encode(input: InitJs5RemoteConnection, output: ByteBuf, cipher: StreamCipher) {
        with(output) {
            writeInt(input.buildMajor)
            writeInt(input.buildMinor)
            writeString(input.token)
            writeByte(input.language)
        }
    }
}

@ -1,8 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.EmptyPacketCodec
/** Zero-length NXT JS5 login response indicating the handshake succeeded. */
public object Js5OkCodec : EmptyPacketCodec<LoginResponse.Js5Ok>(
    packet = LoginResponse.Js5Ok,
    opcode = 0
)

@ -1,14 +0,0 @@
package org.openrs2.archive.cache.nxt
/**
 * Upstream NXT JS5 request messages, encoded by [Js5RequestEncoder].
 */
public sealed class Js5Request {
// Request for a single group within an archive; prefetch requests use a
// different opcode than urgent ones.
public data class Group(
public val prefetch: Boolean,
public val archive: Int,
public val group: Int,
public val build: Int
) : Js5Request()
// Post-handshake message announcing the client's build to the server.
public data class Connected(
public val build: Int
) : Js5Request()
}

@ -1,37 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandler
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.MessageToByteEncoder
/**
 * Encodes [Js5Request] messages into the NXT JS5 wire format. Both message
 * types fit in exactly 10 bytes.
 */
@ChannelHandler.Sharable
public object Js5RequestEncoder : MessageToByteEncoder<Js5Request>(Js5Request::class.java) {
    override fun encode(ctx: ChannelHandlerContext, msg: Js5Request, out: ByteBuf) {
        when (msg) {
            is Js5Request.Group -> with(out) {
                // Opcode 32 is a prefetch request, 33 an urgent one.
                writeByte(if (msg.prefetch) 32 else 33)
                writeByte(msg.archive)
                writeInt(msg.group)
                writeShort(msg.build)
                writeShort(0)
            }

            is Js5Request.Connected -> with(out) {
                writeByte(6)
                writeMedium(5)
                writeShort(0)
                writeShort(msg.build)
                writeShort(0)
            }
        }
    }

    override fun allocateBuffer(ctx: ChannelHandlerContext, msg: Js5Request, preferDirect: Boolean): ByteBuf {
        // Fixed-size 10 byte messages, so cap the buffer accordingly.
        val alloc = ctx.alloc()
        return when {
            preferDirect -> alloc.ioBuffer(10, 10)
            else -> alloc.heapBuffer(10, 10)
        }
    }
}

@ -1,11 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import io.netty.buffer.DefaultByteBufHolder
/**
 * A fully reassembled NXT JS5 response for one group. Extends
 * [DefaultByteBufHolder] so [data] participates in reference counting.
 */
public data class Js5Response(
public val prefetch: Boolean,
public val archive: Int,
public val group: Int,
public val data: ByteBuf
) : DefaultByteBufHolder(data)

@ -1,121 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.ByteToMessageDecoder
import io.netty.handler.codec.DecoderException
import kotlin.math.min
/**
 * Reassembles NXT JS5 responses, which the server interleaves and splits
 * into fixed-size blocks, into complete [Js5Response] messages.
 *
 * A partially received group is kept in [buffers] keyed by its request, so
 * several groups may be in flight at once.
 */
public class Js5ResponseDecoder : ByteToMessageDecoder() {
private data class Request(val prefetch: Boolean, val archive: Int, val group: Int)
private enum class State {
READ_HEADER,
READ_LEN,
READ_DATA
}
private var state = State.READ_HEADER
private val buffers = mutableMapOf<Request, ByteBuf>()
private var request: Request? = null
override fun decode(ctx: ChannelHandlerContext, input: ByteBuf, out: MutableList<Any>) {
// Each block starts with a 5 byte header: archive byte + group int.
if (state == State.READ_HEADER) {
if (input.readableBytes() < 5) {
return
}
val prefetch: Boolean
val archive = input.readUnsignedByte().toInt()
var group = input.readInt()
// The top bit of the group id flags a prefetch response.
if (group and 0x80000000.toInt() != 0) {
prefetch = true
group = group and 0x7FFFFFFF
} else {
prefetch = false
}
request = Request(prefetch, archive, group)
// Only the first block of a group carries the type/length header.
state = if (buffers.containsKey(request)) {
State.READ_DATA
} else {
State.READ_LEN
}
}
if (state == State.READ_LEN) {
if (input.readableBytes() < 5) {
return
}
val type = input.readUnsignedByte().toInt()
val len = input.readInt()
if (len < 0) {
throw DecoderException("Length is negative: $len")
}
// Type 0 containers have no trailing version; others carry 4 more bytes.
val totalLen = if (type == 0) {
len + 5
} else {
len + 9
}
// Int overflow check: len + 5/9 wrapped around.
if (totalLen < 0) {
throw DecoderException("Total length exceeds maximum ByteBuf size")
}
val data = ctx.alloc().buffer(totalLen, totalLen)
data.writeByte(type)
data.writeInt(len)
buffers[request!!] = data
state = State.READ_DATA
}
if (state == State.READ_DATA) {
val data = buffers[request!!]!!
// Blocks are 102400 bytes on the wire including headers: the first block
// loses 10 bytes to the block + type/length headers, later ones only 5.
// NOTE(review): inferred from the arithmetic; confirm against protocol docs.
var blockLen = if (data.writerIndex() == 5) {
102400 - 10
} else {
102400 - 5
}
blockLen = min(blockLen, data.writableBytes())
if (input.readableBytes() < blockLen) {
return
}
data.writeBytes(input, blockLen)
// Once the buffer is full, the group is complete; emit and forget it.
if (!data.isWritable) {
out += Js5Response(request!!.prefetch, request!!.archive, request!!.group, data)
buffers.remove(request!!)
request = null
}
state = State.READ_HEADER
}
}
override fun channelInactive(ctx: ChannelHandlerContext) {
super.channelInactive(ctx)
reset()
}
override fun handlerRemoved0(ctx: ChannelHandlerContext?) {
reset()
}
// Releases any partially accumulated buffers to avoid leaking ByteBufs.
private fun reset() {
buffers.values.forEach(ByteBuf::release)
buffers.clear()
state = State.READ_HEADER
}
}

@ -1,8 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.Packet
/**
 * Downstream NXT JS5 handshake responses: success ([Js5Ok]) or a request to
 * upgrade the client ([ClientOutOfDate]).
 */
public sealed class LoginResponse : Packet {
public object Js5Ok : LoginResponse()
public object ClientOutOfDate : LoginResponse()
}

@ -1,32 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import kotlinx.coroutines.future.await
import org.openrs2.buffer.ByteBufBodyHandler
import org.openrs2.buffer.use
import org.openrs2.http.checkStatusCode
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.time.Duration
/**
 * Fetches music groups over the NXT HTTP music-stream endpoint rather than
 * the JS5 TCP connection.
 */
public class MusicStreamClient(
    private val client: HttpClient,
    private val byteBufBodyHandler: ByteBufBodyHandler,
    private val origin: String
) {
    /**
     * Requests a single group and returns its payload. The caller assumes
     * ownership of the returned buffer.
     */
    public suspend fun request(archive: Int, group: Int, version: Int, checksum: Int, build: Int): ByteBuf {
        val uri = URI("$origin/ms?m=0&a=$archive&k=$build&g=$group&c=$checksum&v=$version")

        val httpRequest = HttpRequest.newBuilder(uri)
            .GET()
            .timeout(Duration.ofSeconds(30))
            .build()

        val response = client.sendAsync(httpRequest, byteBufBodyHandler).await()

        response.body().use { body ->
            // Check the status only after wrapping the body in use {} so it
            // is always released, even on error.
            response.checkStatusCode()
            return body.retain()
        }
    }
}

@ -1,11 +0,0 @@
package org.openrs2.archive.client
/**
 * CPU architectures a client artifact may target. INDEPENDENT marks
 * architecture-neutral artifacts (e.g. Java bytecode); UNIVERSAL marks
 * multi-architecture binaries.
 */
public enum class Architecture {
INDEPENDENT,
UNIVERSAL,
X86,
AMD64,
POWERPC,
SPARC,
SPARCV9
}

@ -1,35 +0,0 @@
package org.openrs2.archive.client
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufUtil
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.archive.cache.CacheImporter
import java.time.Instant
/**
 * A client artifact's raw bytes plus classification metadata, imported as a
 * blob via [CacheImporter.Blob].
 */
public class Artifact(
data: ByteBuf,
public val game: String,
public val environment: String,
public val build: CacheExporter.Build?,
public val timestamp: Instant?,
public val type: ArtifactType,
public val format: ArtifactFormat,
public val os: OperatingSystem,
public val arch: Architecture,
public val jvm: Jvm,
public val links: List<ArtifactLink>
) : CacheImporter.Blob(data)
/**
 * A reference from one artifact to another it loads or verifies, identified
 * primarily by SHA-1.
 *
 * NOTE(review): [sha1] is a ByteArray inside a data class, so the generated
 * equals/hashCode use reference identity for it, not content equality —
 * confirm no caller relies on structural equality of links.
 */
public data class ArtifactLink(
val type: ArtifactType,
val format: ArtifactFormat,
val os: OperatingSystem,
val arch: Architecture,
val jvm: Jvm,
val crc32: Int?,
val sha1: ByteArray,
val size: Int?
) {
// Hex rendering of the SHA-1 digest for display/templating.
public val sha1Hex: String
get() = ByteBufUtil.hexDump(sha1)
}

@ -1,46 +0,0 @@
package org.openrs2.archive.client
import io.ktor.http.ContentType
/**
 * Container formats a client artifact may be stored in, with helpers to
 * derive OS-dependent file-name prefixes, extensions and MIME types.
 */
public enum class ArtifactFormat {
    CAB,
    JAR,
    NATIVE,
    PACK200,
    PACKCLASS;

    /** File-name prefix; only native libraries get an OS-specific prefix. */
    public fun getPrefix(os: OperatingSystem): String =
        if (this == NATIVE) os.getPrefix() else ""

    /** File-name extension; native libraries use the OS's own extension. */
    public fun getExtension(os: OperatingSystem): String = when (this) {
        CAB -> "cab"
        JAR -> "jar"
        NATIVE -> os.getExtension()
        PACK200 -> "pack200"
        PACKCLASS -> "js5"
    }

    /** Content type served when exporting an artifact in this format. */
    public fun getContentType(os: OperatingSystem): ContentType = when (this) {
        CAB -> CAB_MIME_TYPE
        JAR -> JAR_MIME_TYPE
        NATIVE -> os.getContentType()
        PACK200, PACKCLASS -> ContentType.Application.OctetStream
    }

    /** True for every format that ultimately contains Java classes. */
    public fun isJar(): Boolean = this != NATIVE

    private companion object {
        private val CAB_MIME_TYPE = ContentType("application", "vnd.ms-cab-compressed")
        private val JAR_MIME_TYPE = ContentType("application", "java-archive")
    }
}

@ -1,16 +0,0 @@
package org.openrs2.archive.client
/**
 * Kinds of client artifacts tracked by the archive: the client and loader
 * applets, their GL variants, native libraries (jaggl, jagmisc,
 * browsercontrol), the JOGL/GlueGen stack and the unpacker.
 */
public enum class ArtifactType {
BROWSERCONTROL,
CLIENT,
CLIENT_GL,
GLUEGEN_RT,
JAGGL,
JAGGL_DRI,
JAGMISC,
JOGL,
JOGL_AWT,
LOADER,
LOADER_GL,
UNPACKCLASS
}

@ -1,14 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
/** Parent CLI command grouping the client export/import/refresh subcommands. */
public class ClientCommand : NoOpCliktCommand(name = "client") {
    init {
        subcommands(
            ExportCommand(),
            ImportCommand(),
            RefreshCommand(),
        )
    }
}

@ -1,455 +0,0 @@
package org.openrs2.archive.client
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufUtil
import io.netty.buffer.DefaultByteBufHolder
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.db.Database
import java.time.Instant
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
/**
 * Read-side queries for client artifacts: listing all artifacts, fetching a
 * single artifact's metadata (sources and links), and exporting its raw
 * bytes from the blobs table.
 */
@Singleton
public class ClientExporter @Inject constructor(
private val database: Database
) {
// Metadata row describing one artifact; id is the blob_id primary key.
public data class ArtifactSummary(
public val id: Long,
public val game: String,
public val environment: String,
public val build: CacheExporter.Build?,
public val timestamp: Instant?,
public val type: ArtifactType,
public val format: ArtifactFormat,
public val os: OperatingSystem,
public val arch: Architecture,
public val jvm: Jvm,
public val size: Int
) {
// Synthesized download file name, e.g.
// "runescape-windows-x86-b530-2009-01-01-00-00-00-openrs2#123.jar":
// format prefix, type/game, jvm/os/arch qualifiers, build, timestamp,
// an "-openrs2#<id>" disambiguator and the format's extension.
public val name: String
get() {
val builder = StringBuilder()
builder.append(format.getPrefix(os))
when (type) {
ArtifactType.CLIENT -> builder.append(game)
ArtifactType.CLIENT_GL -> builder.append("${game}_gl")
ArtifactType.GLUEGEN_RT -> builder.append("gluegen-rt")
else -> builder.append(type.name.lowercase())
}
if (jvm == Jvm.MICROSOFT) {
builder.append("ms")
}
if (os != OperatingSystem.INDEPENDENT) {
builder.append('-')
builder.append(os.name.lowercase())
}
if (arch != Architecture.INDEPENDENT) {
builder.append('-')
builder.append(arch.name.lowercase())
}
if (build != null) {
builder.append("-b")
builder.append(build)
}
if (timestamp != null) {
builder.append('-')
builder.append(
timestamp
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss"))
)
}
builder.append("-openrs2#")
builder.append(id)
builder.append('.')
builder.append(format.getExtension(os))
return builder.toString()
}
}
// Provenance of an imported artifact (who supplied it and from where).
public data class ArtifactSource(
public val name: String?,
public val description: String?,
public val url: String?
)
// A link resolved against the blobs table; id/build/timestamp are null when
// the linked artifact has not been archived yet.
public data class ArtifactLinkExport(
public val id: Long?,
public val build: CacheExporter.Build?,
public val timestamp: Instant?,
public val link: ArtifactLink
)
// Full metadata for a single artifact as shown on its detail page.
public class Artifact(
public val summary: ArtifactSummary,
public val crc32: Int,
public val sha1: ByteArray,
public val sources: List<ArtifactSource>,
public val links: List<ArtifactLinkExport>
) {
public val sha1Hex: String
get() = ByteBufUtil.hexDump(sha1)
}
// Summary plus the artifact's raw bytes, for download responses.
public class ArtifactExport(
public val summary: ArtifactSummary,
buf: ByteBuf
) : DefaultByteBufHolder(buf)
/** Lists every artifact, ordered by build, timestamp and classification. */
public suspend fun list(): List<ArtifactSummary> {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT
a.blob_id,
g.name,
e.name,
a.build_major,
a.build_minor,
a.timestamp,
a.type,
a.format,
a.os,
a.arch,
a.jvm,
length(b.data) AS size
FROM artifacts a
JOIN blobs b ON b.id = a.blob_id
JOIN games g ON g.id = a.game_id
JOIN environments e ON e.id = a.environment_id
ORDER BY a.build_major ASC, a.timestamp ASC, a.type ASC, a.format ASC, a.os ASC, a.arch ASC, a.jvm ASC
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
val artifacts = mutableListOf<ArtifactSummary>()
while (rows.next()) {
val id = rows.getLong(1)
val game = rows.getString(2)
val environment = rows.getString(3)
// getInt() returns 0 for SQL NULL, so wasNull() distinguishes a
// real 0 from a missing build number.
var buildMajor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(5)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(6)?.toInstant()
// Enum columns are stored lowercase; map back to Kotlin enums.
val type = ArtifactType.valueOf(rows.getString(7).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(8).uppercase())
val os = OperatingSystem.valueOf(rows.getString(9).uppercase())
val arch = Architecture.valueOf(rows.getString(10).uppercase())
val jvm = Jvm.valueOf(rows.getString(11).uppercase())
val size = rows.getInt(12)
artifacts += ArtifactSummary(
id,
game,
environment,
build,
timestamp,
type,
format,
os,
arch,
jvm,
size
)
}
return@execute artifacts
}
}
}
}
/**
 * Fetches one artifact's full metadata (summary, checksums, sources and
 * resolved links) by blob id, or null if it does not exist.
 */
public suspend fun get(id: Long): Artifact? {
return database.execute { connection ->
val sources = mutableListOf<ArtifactSource>()
val links = mutableListOf<ArtifactLinkExport>()
connection.prepareStatement(
"""
SELECT DISTINCT name, description, url
FROM artifact_sources
WHERE blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val name = rows.getString(1)
val description = rows.getString(2)
val url = rows.getString(3)
sources += ArtifactSource(name, description, url)
}
}
}
// LEFT JOINs resolve each link's sha1 against archived blobs, so the
// id/build/timestamp columns are NULL for links we do not hold.
connection.prepareStatement(
"""
SELECT
a.blob_id,
a.build_major,
a.build_minor,
a.timestamp,
l.type,
l.format,
l.os,
l.arch,
l.jvm,
COALESCE(l.crc32, b.crc32),
l.sha1,
COALESCE(l.size, length(b.data))
FROM artifact_links l
LEFT JOIN blobs b ON b.sha1 = l.sha1
LEFT JOIN artifacts a ON a.blob_id = b.id
WHERE l.blob_id = ?
ORDER BY l.type, l.format, l.os, l.arch, l.jvm
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
var linkId: Long? = rows.getLong(1)
if (rows.wasNull()) {
linkId = null
}
var buildMajor: Int? = rows.getInt(2)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(4)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(5).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(6).uppercase())
val os = OperatingSystem.valueOf(rows.getString(7).uppercase())
val arch = Architecture.valueOf(rows.getString(8).uppercase())
val jvm = Jvm.valueOf(rows.getString(9).uppercase())
var crc32: Int? = rows.getInt(10)
if (rows.wasNull()) {
crc32 = null
}
val sha1 = rows.getBytes(11)
var size: Int? = rows.getInt(12)
if (rows.wasNull()) {
size = null
}
links += ArtifactLinkExport(
linkId,
build,
timestamp,
ArtifactLink(
type,
format,
os,
arch,
jvm,
crc32,
sha1,
size
)
)
}
}
}
connection.prepareStatement(
"""
SELECT
g.name,
e.name,
a.build_major,
a.build_minor,
a.timestamp,
a.type,
a.format,
a.os,
a.arch,
a.jvm,
length(b.data) AS size,
b.crc32,
b.sha1
FROM artifacts a
JOIN games g ON g.id = a.game_id
JOIN environments e ON e.id = a.environment_id
JOIN blobs b ON b.id = a.blob_id
WHERE a.blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val game = rows.getString(1)
val environment = rows.getString(2)
var buildMajor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
// NOTE(review): the !! is redundant after the null check but harmless;
// kept byte-identical here.
CacheExporter.Build(buildMajor!!, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(5)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(6).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(7).uppercase())
val os = OperatingSystem.valueOf(rows.getString(8).uppercase())
val arch = Architecture.valueOf(rows.getString(9).uppercase())
val jvm = Jvm.valueOf(rows.getString(10).uppercase())
val size = rows.getInt(11)
val crc32 = rows.getInt(12)
val sha1 = rows.getBytes(13)
return@execute Artifact(
ArtifactSummary(
id,
game,
environment,
build,
timestamp,
type,
format,
os,
arch,
jvm,
size
), crc32, sha1, sources, links
)
}
}
}
}
/**
 * Fetches an artifact's summary together with its raw bytes for download,
 * or null if it does not exist. The caller owns the returned buffer.
 */
public suspend fun export(id: Long): ArtifactExport? {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT
g.name,
e.name,
a.build_major,
a.build_minor,
a.timestamp,
a.type,
a.format,
a.os,
a.arch,
a.jvm,
b.data
FROM artifacts a
JOIN games g ON g.id = a.game_id
JOIN environments e ON e.id = a.environment_id
JOIN blobs b ON b.id = a.blob_id
WHERE a.blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val game = rows.getString(1)
val environment = rows.getString(2)
var buildMajor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(5)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(6).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(7).uppercase())
val os = OperatingSystem.valueOf(rows.getString(8).uppercase())
val arch = Architecture.valueOf(rows.getString(9).uppercase())
val jvm = Jvm.valueOf(rows.getString(10).uppercase())
val buf = Unpooled.wrappedBuffer(rows.getBytes(11))
val size = buf.readableBytes()
return@execute ArtifactExport(
ArtifactSummary(
id,
game,
environment,
build,
timestamp,
type,
format,
os,
arch,
jvm,
size
), buf
)
}
}
}
}
}

@ -1,997 +0,0 @@
package org.openrs2.archive.client
import com.github.michaelbull.logging.InlineLogger
import com.kichik.pecoff4j.PE
import com.kichik.pecoff4j.constant.MachineType
import com.kichik.pecoff4j.io.PEParser
import dorkbox.cabParser.CabParser
import dorkbox.cabParser.CabStreamSaver
import dorkbox.cabParser.structure.CabFileEntry
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufInputStream
import io.netty.buffer.ByteBufOutputStream
import io.netty.buffer.Unpooled
import io.netty.util.ByteProcessor
import jakarta.inject.Inject
import jakarta.inject.Singleton
import net.fornwall.jelf.ElfFile
import net.fornwall.jelf.ElfSymbol
import org.objectweb.asm.Opcodes
import org.objectweb.asm.tree.AbstractInsnNode
import org.objectweb.asm.tree.ClassNode
import org.objectweb.asm.tree.JumpInsnNode
import org.objectweb.asm.tree.LdcInsnNode
import org.objectweb.asm.tree.MethodInsnNode
import org.objectweb.asm.tree.TypeInsnNode
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.archive.cache.CacheImporter
import org.openrs2.asm.InsnMatcher
import org.openrs2.asm.classpath.Library
import org.openrs2.asm.getArgumentExpressions
import org.openrs2.asm.hasCode
import org.openrs2.asm.intConstant
import org.openrs2.asm.io.CabLibraryReader
import org.openrs2.asm.io.JarLibraryReader
import org.openrs2.asm.io.LibraryReader
import org.openrs2.asm.io.Pack200LibraryReader
import org.openrs2.asm.io.PackClassLibraryReader
import org.openrs2.asm.nextReal
import org.openrs2.buffer.use
import org.openrs2.compress.gzip.Gzip
import org.openrs2.db.Database
import org.openrs2.util.io.entries
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.OutputStream
import java.nio.file.Files
import java.nio.file.Path
import java.sql.Connection
import java.sql.Types
import java.time.Instant
import java.time.LocalDate
import java.time.Month
import java.time.ZoneOffset
import java.util.jar.JarInputStream
import java.util.jar.JarOutputStream
import java.util.jar.Pack200
import kotlin.io.path.getLastModifiedTime
@Singleton
public class ClientImporter @Inject constructor(
private val database: Database,
private val alloc: ByteBufAllocator,
private val packClassLibraryReader: PackClassLibraryReader,
private val importer: CacheImporter
) {
/**
 * Imports each file in [paths] as a client artifact, attaching the given
 * source metadata. When [skipErrors] is true, files that fail to parse or
 * import are logged and skipped instead of aborting the batch.
 */
public suspend fun import(
paths: Iterable<Path>,
name: String?,
description: String?,
url: String?,
skipErrors: Boolean
) {
// One reusable buffer for all files; clear() resets it between iterations.
alloc.buffer().use { buf ->
for (path in paths) {
buf.clear()
Files.newInputStream(path).use { input ->
ByteBufOutputStream(buf).use { output ->
input.copyTo(output)
}
}
logger.info { "Importing $path" }
try {
import(
parse(buf),
name,
description,
url,
path.fileName.toString(),
path.getLastModifiedTime().toInstant()
)
} catch (t: Throwable) {
if (skipErrors) {
logger.warn(t) { "Failed to import $path" }
continue
}
throw t
}
}
}
}
/**
 * Imports a single parsed [artifact] in one transaction and records where it
 * came from ([name]/[description]/[url]) plus the original [fileName] and
 * file [timestamp] in artifact_sources.
 */
public suspend fun import(
artifact: Artifact,
name: String?,
description: String?,
url: String?,
fileName: String,
timestamp: Instant
) {
database.execute { connection ->
importer.prepare(connection)
val id = import(connection, artifact)
connection.prepareStatement(
"""
INSERT INTO artifact_sources (blob_id, name, description, url, file_name, timestamp)
VALUES (?, ?, ?, ?, ?, ?)
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.setString(2, name)
stmt.setString(3, description)
stmt.setString(4, url)
stmt.setString(5, fileName)
// Store as TIMESTAMPTZ; normalise to UTC explicitly.
stmt.setObject(6, timestamp.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
stmt.execute()
}
}
}
/**
 * Upserts [artifact] within an existing transaction: stores the blob, looks
 * up the game/environment ids, upserts the artifacts row keyed by blob_id
 * and replaces the artifact's outgoing links. Returns the blob id.
 *
 * @throws IllegalArgumentException if the game or environment name is unknown.
 */
private fun import(connection: Connection, artifact: Artifact): Long {
val id = importer.addBlob(connection, artifact)
val gameId = connection.prepareStatement(
"""
SELECT id
FROM games
WHERE name = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, artifact.game)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
throw IllegalArgumentException()
}
rows.getInt(1)
}
}
val environmentId = connection.prepareStatement(
"""
SELECT id
FROM environments
WHERE name = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, artifact.environment)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
throw IllegalArgumentException()
}
rows.getInt(1)
}
}
// Upsert keyed on blob_id so re-importing the same bytes refreshes the
// classification instead of failing.
connection.prepareStatement(
"""
INSERT INTO artifacts (blob_id, game_id, environment_id, build_major, build_minor, timestamp, type, format, os, arch, jvm)
VALUES (?, ?, ?, ?, ?, ?, ?::artifact_type, ?::artifact_format, ?::os, ?::arch, ?::jvm)
ON CONFLICT (blob_id) DO UPDATE SET
game_id = EXCLUDED.game_id,
environment_id = EXCLUDED.environment_id,
build_major = EXCLUDED.build_major,
build_minor = EXCLUDED.build_minor,
timestamp = EXCLUDED.timestamp,
type = EXCLUDED.type,
format = EXCLUDED.format,
os = EXCLUDED.os,
arch = EXCLUDED.arch,
jvm = EXCLUDED.jvm
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.setInt(2, gameId)
stmt.setInt(3, environmentId)
// setObject with INTEGER passes NULL when the build is unknown.
stmt.setObject(4, artifact.build?.major, Types.INTEGER)
stmt.setObject(5, artifact.build?.minor, Types.INTEGER)
stmt.setObject(6, artifact.timestamp?.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
stmt.setString(7, artifact.type.name.lowercase())
stmt.setString(8, artifact.format.name.lowercase())
stmt.setString(9, artifact.os.name.lowercase())
stmt.setString(10, artifact.arch.name.lowercase())
stmt.setString(11, artifact.jvm.name.lowercase())
stmt.execute()
}
// Replace (delete + batch insert) the artifact's outgoing links.
connection.prepareStatement(
"""
DELETE FROM artifact_links
WHERE blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.execute()
}
connection.prepareStatement(
"""
INSERT INTO artifact_links (blob_id, type, format, os, arch, jvm, sha1, crc32, size)
VALUES (?, ?::artifact_type, ?::artifact_format, ?::os, ?::arch, ?::jvm, ?, ?, ?)
""".trimIndent()
).use { stmt ->
for (link in artifact.links) {
stmt.setLong(1, id)
stmt.setString(2, link.type.name.lowercase())
stmt.setString(3, link.format.name.lowercase())
stmt.setString(4, link.os.name.lowercase())
stmt.setString(5, link.arch.name.lowercase())
stmt.setString(6, link.jvm.name.lowercase())
stmt.setBytes(7, link.sha1)
stmt.setObject(8, link.crc32, Types.INTEGER)
stmt.setObject(9, link.size, Types.INTEGER)
stmt.addBatch()
}
stmt.executeBatch()
}
return id
}
/**
 * Re-parses and re-imports every stored artifact, paging through the
 * artifacts table 1024 blobs at a time (keyset pagination on blob_id) so
 * classification changes are applied to existing rows.
 */
public suspend fun refresh() {
data class Blob(val id: Long, val bytes: ByteArray)
database.execute { connection ->
importer.prepare(connection)
// Keyset cursor: null on the first page, then the last id seen.
var lastId: Long? = null
val blobs = mutableListOf<Blob>()
while (true) {
blobs.clear()
connection.prepareStatement(
"""
SELECT a.blob_id, b.data
FROM artifacts a
JOIN blobs b ON b.id = a.blob_id
WHERE ? IS NULL OR a.blob_id > ?
ORDER BY a.blob_id ASC
LIMIT 1024
""".trimIndent()
).use { stmt ->
stmt.setObject(1, lastId, Types.BIGINT)
stmt.setObject(2, lastId, Types.BIGINT)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val id = rows.getLong(1)
lastId = id
blobs += Blob(id, rows.getBytes(2))
}
}
}
if (blobs.isEmpty()) {
return@execute
}
for (blob in blobs) {
logger.info { "Refreshing artifact ${blob.id}" }
Unpooled.wrappedBuffer(blob.bytes).use { buf ->
import(connection, parse(buf))
}
}
}
}
}
/**
 * Detects an artifact's container format from its magic bytes and delegates
 * to the matching parser.
 *
 * @throws IllegalArgumentException if no known magic number matches.
 */
private fun parse(buf: ByteBuf): Artifact = when {
    buf.hasPrefix(JAR) -> parseJar(buf)
    buf.hasPrefix(PACK200) -> parsePack200(buf)
    buf.hasPrefix(CAB) -> parseCab(buf)

    buf.hasPrefix(PACKCLASS_UNCOMPRESSED) ||
        buf.hasPrefix(PACKCLASS_BZIP2) ||
        buf.hasPrefix(PACKCLASS_GZIP) -> parseLibrary(buf, packClassLibraryReader, ArtifactFormat.PACKCLASS)

    buf.hasPrefix(ELF) -> parseElf(buf)
    buf.hasPrefix(PE) -> parsePe(buf)

    buf.hasPrefix(MACHO32BE) ||
        buf.hasPrefix(MACHO32LE) ||
        buf.hasPrefix(MACHO64BE) ||
        buf.hasPrefix(MACHO64LE) ||
        buf.hasPrefix(MACHO_UNIVERSAL) -> parseMachO(buf)

    else -> throw IllegalArgumentException()
}
/**
 * Parses an ELF shared library: maps e_machine to an [Architecture],
 * guesses the OS from the .comment section and classifies the library by
 * its exported dynamic symbols.
 */
private fun parseElf(buf: ByteBuf): Artifact {
val elf = ElfFile.from(ByteBufInputStream(buf.slice()))
val arch = when (elf.e_machine.toInt()) {
ElfFile.ARCH_i386 -> Architecture.X86
ElfFile.ARCH_X86_64 -> Architecture.AMD64
ElfFile.ARCH_SPARC -> Architecture.SPARC
// ARCH_SPARCV9 is a constant defined elsewhere in this file.
ARCH_SPARCV9 -> Architecture.SPARCV9
else -> throw IllegalArgumentException()
}
// Compilers embed a toolchain string in .comment; SOLARIS_COMMENT (defined
// elsewhere in this file) distinguishes Solaris builds from Linux ones.
val comment = String(elf.firstSectionByName(".comment").data)
val os = if (comment.contains(SOLARIS_COMMENT)) {
OperatingSystem.SOLARIS
} else {
OperatingSystem.LINUX
}
val symbols = elf.dynamicSymbolTableSection ?: throw IllegalArgumentException()
val type = getArtifactType(symbols.symbols.asSequence().mapNotNull(ElfSymbol::getName))
return Artifact(
buf.retain(),
"shared",
"live",
null,
null,
type,
ArtifactFormat.NATIVE,
os,
arch,
Jvm.SUN,
emptyList()
)
}
/**
 * Classifies a native library by scanning its exported symbol names for a
 * known JNI/RNI package prefix.
 *
 * @throws IllegalArgumentException if no symbol matches a known prefix.
 */
private fun getArtifactType(symbols: Sequence<String>): ArtifactType {
    for (symbol in symbols) {
        // Strip a leading underscore, then the JNI "Java_" prefix.
        // RNI methods don't have a Java_ prefix.
        val name = symbol.removePrefix("_").removePrefix("Java_")

        when {
            name.startsWith("jaggl_X11_dri_") -> return ArtifactType.JAGGL_DRI
            name.startsWith("jaggl_opengl_") -> return ArtifactType.JAGGL
            name.startsWith("com_sun_opengl_impl_GLImpl_") -> return ArtifactType.JOGL
            name.startsWith("com_sun_opengl_impl_JAWT_") -> return ArtifactType.JOGL_AWT
            name.startsWith("com_sun_gluegen_runtime_") -> return ArtifactType.GLUEGEN_RT
            name.startsWith("jagex3_jagmisc_jagmisc_") -> return ArtifactType.JAGMISC
            name.startsWith("nativeadvert_browsercontrol_") -> return ArtifactType.BROWSERCONTROL
        }
    }

    throw IllegalArgumentException()
}
private fun parsePe(buf: ByteBuf): Artifact {
val pe = PEParser.parse(ByteBufInputStream(buf.slice()))
val arch = when (pe.coffHeader.machine) {
MachineType.IMAGE_FILE_MACHINE_I386 -> Architecture.X86
MachineType.IMAGE_FILE_MACHINE_AMD64 -> Architecture.AMD64
else -> throw IllegalArgumentException()
}
val symbols = parsePeExportNames(buf, pe).toSet()
val type = getArtifactType(symbols.asSequence())
val jvm = if (symbols.contains("RNIGetCompatibleVersion")) {
Jvm.MICROSOFT
} else {
Jvm.SUN
}
return Artifact(
buf.retain(),
"shared",
"live",
null,
Instant.ofEpochSecond(pe.coffHeader.timeDateStamp.toLong()),
type,
ArtifactFormat.NATIVE,
OperatingSystem.WINDOWS,
arch,
jvm,
emptyList()
)
}
    /**
     * Walks the PE export name pointer table, yielding each exported symbol
     * name as an ASCII string.
     */
    private fun parsePeExportNames(buf: ByteBuf, pe: PE): Sequence<String> {
        return sequence {
            val exportTable = pe.imageData.exportTable
            // Convert the name pointer table's RVA into an offset within the
            // raw file data.
            val namePointerTable =
                pe.sectionTable.rvaConverter.convertVirtualAddressToRawDataPointer(exportTable.namePointerRVA.toInt())
            for (i in 0 until exportTable.numberOfNamePointers.toInt()) {
                // Each entry is a 4-byte little-endian RVA pointing at a
                // NUL-terminated name. readerIndex() is added as the buffer's
                // base offset — presumably 0 in practice; confirm at call sites.
                val namePointerRva = buf.readerIndex() + buf.getIntLE(buf.readerIndex() + namePointerTable + 4 * i)
                val namePointer = pe.sectionTable.rvaConverter.convertVirtualAddressToRawDataPointer(namePointerRva)
                // Locate the terminating NUL without copying the buffer.
                val end = buf.forEachByte(namePointer, buf.writerIndex() - namePointer, ByteProcessor.FIND_NUL)
                require(end != -1) {
                    "Unterminated string"
                }
                yield(buf.toString(namePointer, end - namePointer, Charsets.US_ASCII))
            }
        }
    }
private fun parseMachO(buf: ByteBuf): Artifact {
val (arch, symbols) = MachO.parse(buf.slice())
val type = getArtifactType(symbols.asSequence())
return Artifact(
buf.retain(),
"shared",
"live",
null,
null,
type,
ArtifactFormat.NATIVE,
OperatingSystem.MACOS,
arch,
Jvm.SUN,
emptyList()
)
}
private fun parseJar(buf: ByteBuf): Artifact {
val timestamp = getJarTimestamp(ByteBufInputStream(buf.slice()))
return parseLibrary(buf, JarLibraryReader, ArtifactFormat.JAR, timestamp)
}
    /**
     * Parses a Pack200-compressed library. The archive is unpacked into a
     * temporary in-memory JAR purely to recover the earliest entry timestamp;
     * the original compressed buffer is what gets stored.
     */
    private fun parsePack200(buf: ByteBuf): Artifact {
        val timestamp = ByteArrayOutputStream().use { tempOutput ->
            // Headerless gzip stream — presumably the stored format strips
            // the standard gzip header; see Gzip.createHeaderlessInputStream.
            Gzip.createHeaderlessInputStream(ByteBufInputStream(buf.slice())).use { gzipInput ->
                JarOutputStream(tempOutput).use { jarOutput ->
                    Pack200.newUnpacker().unpack(gzipInput, jarOutput)
                }
            }
            getJarTimestamp(ByteArrayInputStream(tempOutput.toByteArray()))
        }
        return parseLibrary(buf, Pack200LibraryReader, ArtifactFormat.PACK200, timestamp)
    }
private fun parseCab(buf: ByteBuf): Artifact {
val timestamp = getCabTimestamp(ByteBufInputStream(buf.slice()))
return parseLibrary(buf, CabLibraryReader, ArtifactFormat.CAB, timestamp)
}
private fun getJarTimestamp(input: InputStream): Instant? {
var timestamp: Instant? = null
JarInputStream(input).use { jar ->
for (entry in jar.entries) {
val t = entry.lastModifiedTime?.toInstant()
if (timestamp == null || (t != null && t < timestamp)) {
timestamp = t
}
}
}
return timestamp
}
    /**
     * Returns the earliest file date of any entry in the CAB archive, or
     * null if the archive contains no entries. Entry contents are discarded.
     */
    private fun getCabTimestamp(input: InputStream): Instant? {
        var timestamp: Instant? = null
        CabParser(input, object : CabStreamSaver {
            override fun closeOutputStream(outputStream: OutputStream, entry: CabFileEntry) {
                // entry
            }
            override fun openOutputStream(entry: CabFileEntry): OutputStream {
                // Track the minimum date seen across all entries.
                val t = entry.date.toInstant()
                if (timestamp == null || t < timestamp) {
                    timestamp = t
                }
                // Discard the entry's contents — only the date is needed.
                return OutputStream.nullOutputStream()
            }
            override fun saveReservedAreaData(data: ByteArray?, dataLength: Int): Boolean {
                return false
            }
        }).extractStream()
        return timestamp
    }
    /**
     * Classifies a Java library (JAR/Pack200/CAB) by the well-known classes
     * it contains, deriving the game, build number, artifact type and any
     * links to further downloads.
     *
     * @throws IllegalArgumentException if the library contains none of the
     * recognised classes.
     */
    private fun parseLibrary(
        buf: ByteBuf,
        reader: LibraryReader,
        format: ArtifactFormat,
        timestamp: Instant? = null
    ): Artifact {
        val library = Library.read("client", ByteBufInputStream(buf.slice()), reader)
        val game: String
        val build: CacheExporter.Build?
        val type: ArtifactType
        val links: List<ArtifactLink>
        // Marker classes used to classify the library below.
        val mudclient = library["mudclient"]
        val client = library["client"]
        val loader = library["loader"]
        if (mudclient != null) {
            game = "classic"
            build = null // TODO(gpe): classic support
            type = ArtifactType.CLIENT
            links = emptyList()
        } else if (client != null) {
            game = "runescape"
            build = parseClientBuild(library, client)
            // Before the combined build, a client that makes GL calls is the
            // separate GL client variant.
            type = if (build != null && build.major < COMBINED_BUILD && isClientGl(library)) {
                ArtifactType.CLIENT_GL
            } else {
                ArtifactType.CLIENT
            }
            links = emptyList()
        } else if (loader != null) {
            if (isLoaderClassic(loader)) {
                game = "classic"
                build = null // TODO(gpe): classic support
                type = ArtifactType.LOADER
                links = emptyList() // TODO(gpe): classic support
            } else {
                game = "runescape"
                build = parseSignLinkBuild(library)
                // Loaders predating the combined client that reference GL
                // native libraries are the separate GL loader variant.
                type = if (timestamp != null && timestamp < COMBINED_TIMESTAMP && isLoaderGl(library)) {
                    ArtifactType.LOADER_GL
                } else {
                    ArtifactType.LOADER
                }
                links = parseLinks(library)
            }
        } else if (library.contains("mapview")) {
            game = "mapview"
            build = null
            type = ArtifactType.CLIENT
            links = emptyList()
        } else if (library.contains("loginapplet")) {
            game = "loginapplet"
            build = null
            type = ArtifactType.CLIENT
            links = emptyList()
        } else if (library.contains("passwordapp")) {
            game = "passapplet"
            build = null
            type = ArtifactType.CLIENT
            links = emptyList()
        } else if (library.contains("jaggl/opengl")) {
            game = "shared"
            type = ArtifactType.JAGGL
            build = null
            links = emptyList()
        } else if (library.contains("com/sun/opengl/impl/GLImpl")) {
            game = "shared"
            type = ArtifactType.JOGL
            build = null
            links = emptyList()
        } else if (library.contains("unpackclass")) {
            game = "shared"
            type = ArtifactType.UNPACKCLASS
            build = null
            links = emptyList()
        } else {
            throw IllegalArgumentException()
        }
        return Artifact(
            buf.retain(),
            game,
            "live",
            build,
            timestamp,
            type,
            format,
            OperatingSystem.INDEPENDENT,
            Architecture.INDEPENDENT,
            Jvm.INDEPENDENT,
            links
        )
    }
private fun isClientGl(library: Library): Boolean {
for (clazz in library) {
for (method in clazz.methods) {
if (!method.hasCode) {
continue
}
for (insn in method.instructions) {
if (insn is MethodInsnNode && insn.name == "glBegin") {
return true
}
}
}
}
return false
}
private fun isLoaderClassic(clazz: ClassNode): Boolean {
for (method in clazz.methods) {
if (!method.hasCode) {
continue
}
for (insn in method.instructions) {
if (insn is LdcInsnNode && insn.cst == "mudclient") {
return true
}
}
}
return false
}
private fun isLoaderGl(library: Library): Boolean {
for (clazz in library) {
for (method in clazz.methods) {
if (!method.hasCode || method.name != "<clinit>") {
continue
}
for (insn in method.instructions) {
if (insn !is LdcInsnNode) {
continue
}
if (insn.cst == "jaggl.dll" || insn.cst == "jogl.dll") {
return true
}
}
}
}
return false
}
    /**
     * Attempts to recover the client's build number from the client class's
     * main method, falling back to the signlink class if that fails.
     */
    private fun parseClientBuild(library: Library, clazz: ClassNode): CacheExporter.Build? {
        for (method in clazz.methods) {
            if (!method.hasCode || method.name != "main") {
                continue
            }
            // Old engine clients load the "RS2 user client - release #"
            // banner followed by the build number constant.
            for (match in OLD_ENGINE_VERSION_MATCHER.match(method)) {
                val ldc = match[0] as LdcInsnNode
                if (ldc.cst != OLD_ENGINE_VERSION_STRING) {
                    continue
                }
                val version = match[2].intConstant
                if (version != null) {
                    return CacheExporter.Build(version, null)
                }
            }
            // New engine clients: collect int constants appearing between
            // `new client` and the method's RETURN that fall in the plausible
            // build range.
            var betweenNewAndReturn = false
            val candidates = mutableListOf<Int>()
            for (insn in method.instructions) {
                if (insn is TypeInsnNode && insn.desc == "client") {
                    betweenNewAndReturn = true
                } else if (insn.opcode == Opcodes.RETURN) {
                    break
                } else if (betweenNewAndReturn) {
                    val candidate = insn.intConstant
                    if (candidate != null && candidate in NEW_ENGINE_BUILDS) {
                        candidates += candidate
                    }
                }
            }
            // Applet resolution constants overlap the build range; drop them.
            for (build in NEW_ENGINE_RESOLUTIONS) {
                candidates -= build
            }
            // Only trust the result if exactly one candidate remains.
            val version = candidates.singleOrNull()
            if (version != null) {
                return CacheExporter.Build(version, null)
            }
        }
        return parseSignLinkBuild(library)
    }
private fun parseSignLinkBuild(library: Library): CacheExporter.Build? {
val clazz = library["sign/signlink"] ?: return null
for (field in clazz.fields) {
val value = field.value
if (field.name == "clientversion" && field.desc == "I" && value is Int) {
return CacheExporter.Build(value, null)
}
}
return null
}
    /**
     * Extracts links to the artifacts a loader downloads and verifies.
     *
     * Two schemes are handled: a `sig` class holding a single expected SHA-1
     * and length, and loaders that compare downloaded files byte-by-byte
     * against inline SHA-1 constants.
     */
    private fun parseLinks(library: Library): List<ArtifactLink> {
        val sig = library["sig"]
        if (sig != null) {
            var size: Int? = null
            var sha1: ByteArray? = null
            // The expected file length is stored in the sig.len field.
            for (field in sig.fields) {
                val value = field.value
                if (field.name == "len" && field.desc == "I" && value is Int) {
                    size = value
                }
            }
            // The expected SHA-1 is built element-by-element in <clinit>.
            for (method in sig.methods) {
                if (!method.hasCode || method.name != "<clinit>") {
                    continue
                }
                for (match in SHA1_MATCHER.match(method)) {
                    val len = match[0].intConstant
                    if (len != SHA1_BYTES) {
                        continue
                    }
                    sha1 = ByteArray(SHA1_BYTES)
                    // The matcher repeats as (DUP, index, value, IASTORE)
                    // groups starting at offset 2.
                    for (i in 2 until match.size step 4) {
                        val k = match[i + 1].intConstant!!
                        val v = match[i + 2].intConstant!!
                        sha1[k] = v.toByte()
                    }
                }
            }
            require(size != null && sha1 != null)
            return listOf(
                ArtifactLink(
                    ArtifactType.CLIENT,
                    ArtifactFormat.JAR,
                    OperatingSystem.INDEPENDENT,
                    Architecture.INDEPENDENT,
                    Jvm.INDEPENDENT,
                    crc32 = null,
                    sha1,
                    size
                )
            )
        }
        val loader = library["loader"]
        if (loader != null) {
            val links = mutableListOf<ArtifactLink>()
            val paths = mutableSetOf<String>()
            // Collect candidate file names passed as string constants to the
            // loader's own byte-array-returning methods in run().
            for (method in loader.methods) {
                if (method.name != "run" || method.desc != "()V") {
                    continue
                }
                for (insn in method.instructions) {
                    if (insn !is MethodInsnNode || insn.owner != loader.name || !insn.desc.endsWith(")[B")) {
                        continue
                    }
                    // TODO(gpe): extract file size too (tricky due to dummy arguments)
                    val exprs = getArgumentExpressions(insn) ?: continue
                    for (expr in exprs) {
                        val single = expr.singleOrNull() ?: continue
                        if (single !is LdcInsnNode) {
                            continue
                        }
                        val cst = single.cst
                        if (cst is String && FILE_NAME_REGEX.matches(cst)) {
                            paths += cst
                        }
                    }
                }
            }
            // Recover each inline SHA-1 from its comparison sequence, keyed
            // by the first instruction of the match.
            val hashes = mutableMapOf<AbstractInsnNode, ByteArray>()
            for (method in loader.methods) {
                for (match in SHA1_CMP_MATCHER.match(method)) {
                    val sha1 = ByteArray(SHA1_BYTES)
                    var i = 0
                    while (i < match.size) {
                        // The expected value may precede the array load.
                        var n = match[i++].intConstant
                        if (n != null) {
                            i++ // ALOAD
                        }
                        val index = match[i++].intConstant!!
                        i++ // BALOAD
                        // Bytes may be compared via `b ^ -1` instead of directly.
                        var xor = false
                        if (i + 1 < match.size && match[i + 1].opcode == Opcodes.IXOR) {
                            i += 2 // ICONST_M1, IXOR
                            xor = true
                        }
                        if (match[i].opcode == Opcodes.IFNE) {
                            // A bare IFNE means the expected value is zero.
                            n = 0
                            i++
                        } else {
                            if (n == null) {
                                n = match[i++].intConstant!!
                            }
                            i++ // ICMP_IFNE
                        }
                        if (xor) {
                            n = n.inv()
                        }
                        sha1[index] = n.toByte()
                    }
                    hashes[match[0]] = sha1
                }
            }
            // Pair each path with the hash check reached when the path
            // comparison succeeds.
            for (method in loader.methods) {
                for (match in PATH_CMP_MATCHER.match(method)) {
                    val first = match[0]
                    val ldc = if (first is LdcInsnNode) {
                        first
                    } else {
                        match[1] as LdcInsnNode
                    }
                    val path = ldc.cst
                    if (path !is String) {
                        continue
                    }
                    val acmp = match[2] as JumpInsnNode
                    val target = if (acmp.opcode == Opcodes.IF_ACMPNE) {
                        acmp.nextReal
                    } else {
                        acmp.label.nextReal
                    }
                    val hash = hashes.remove(target) ?: continue
                    if (!paths.remove(path)) {
                        logger.warn { "Adding link for unused file $path" }
                    }
                    links += parseLink(path, hash)
                }
            }
            // At most one unmatched path/hash pair may remain; pair them up.
            if (paths.size != hashes.size || paths.size > 1) {
                throw IllegalArgumentException()
            } else if (paths.size == 1) {
                links += parseLink(paths.single(), hashes.values.single())
            }
            return links
        }
        // TODO(gpe)
        return emptyList()
    }
private fun parseLink(path: String, sha1: ByteArray): ArtifactLink {
val m = FILE_NAME_REGEX.matchEntire(path) ?: throw IllegalArgumentException()
val (name, crc1, ext, crc2) = m.destructured
val type = when (name) {
// TODO(gpe): funorb loaders
"runescape", "client" -> ArtifactType.CLIENT
"unpackclass" -> ArtifactType.UNPACKCLASS
"jogl", "jogltrimmed" -> ArtifactType.JOGL
"jogl_awt" -> ArtifactType.JOGL_AWT
else -> throw IllegalArgumentException()
}
val format = when (ext) {
"pack200" -> ArtifactFormat.PACK200
"js5" -> ArtifactFormat.PACKCLASS
"jar", "pack" -> ArtifactFormat.JAR
"dll" -> ArtifactFormat.NATIVE
else -> throw IllegalArgumentException()
}
val os = if (format == ArtifactFormat.NATIVE) OperatingSystem.WINDOWS else OperatingSystem.INDEPENDENT
val arch = if (format == ArtifactFormat.NATIVE) Architecture.X86 else Architecture.INDEPENDENT
val jvm = if (format == ArtifactFormat.NATIVE) Jvm.SUN else Jvm.INDEPENDENT
val crc = crc1.toIntOrNull() ?: crc2.toIntOrNull() ?: throw IllegalArgumentException()
return ArtifactLink(
type,
format,
os,
arch,
jvm,
crc,
sha1,
null
)
}
private fun ByteBuf.hasPrefix(bytes: ByteArray): Boolean {
Unpooled.wrappedBuffer(bytes).use { prefix ->
val len = prefix.readableBytes()
if (readableBytes() < len) {
return false
}
return slice(readerIndex(), len) == prefix
}
}
    private companion object {
        private val logger = InlineLogger()
        // Magic numbers used to sniff an artifact's container format.
        private val CAB = byteArrayOf('M'.code.toByte(), 'S'.code.toByte(), 'C'.code.toByte(), 'F'.code.toByte())
        private val ELF = byteArrayOf(0x7F, 'E'.code.toByte(), 'L'.code.toByte(), 'F'.code.toByte())
        private val JAR = byteArrayOf('P'.code.toByte(), 'K'.code.toByte(), 0x03, 0x04)
        private val MACHO32BE = byteArrayOf(0xFE.toByte(), 0xED.toByte(), 0xFA.toByte(), 0xCE.toByte())
        private val MACHO32LE = byteArrayOf(0xCE.toByte(), 0xFA.toByte(), 0xED.toByte(), 0xFE.toByte())
        private val MACHO64BE = byteArrayOf(0xFE.toByte(), 0xED.toByte(), 0xFA.toByte(), 0xCF.toByte())
        private val MACHO64LE = byteArrayOf(0xCF.toByte(), 0xFA.toByte(), 0xED.toByte(), 0xFE.toByte())
        private val MACHO_UNIVERSAL = byteArrayOf(0xCA.toByte(), 0xFE.toByte(), 0xBA.toByte(), 0xBE.toByte())
        // Single-byte prefixes — presumably compression type markers; the
        // sniffing call sites are earlier in this file, confirm there.
        private val PACK200 = byteArrayOf(0x08)
        private val PACKCLASS_UNCOMPRESSED = byteArrayOf(0x00)
        private val PACKCLASS_BZIP2 = byteArrayOf(0x01)
        private val PACKCLASS_GZIP = byteArrayOf(0x02)
        private val PE = byteArrayOf('M'.code.toByte(), 'Z'.code.toByte())
        // Old engine clients embed this banner before the build number.
        private const val OLD_ENGINE_VERSION_STRING = "RS2 user client - release #"
        private val OLD_ENGINE_VERSION_MATCHER =
            InsnMatcher.compile("LDC INVOKESPECIAL (ICONST | BIPUSH | SIPUSH | LDC)")
        // Applet resolutions (765x503 and 1024x768) excluded from the
        // build-number candidates collected in parseClientBuild.
        private val NEW_ENGINE_RESOLUTIONS = listOf(765, 503, 1024, 768)
        // Plausible range of new engine build numbers.
        private val NEW_ENGINE_BUILDS = 402..916
        // Build/date cutoffs before which separate GL client/loader variants
        // exist (see parseLibrary).
        private const val COMBINED_BUILD = 555
        private val COMBINED_TIMESTAMP = LocalDate.of(2009, Month.SEPTEMBER, 2)
            .atStartOfDay(ZoneOffset.UTC)
            .toInstant()
        // ELF e_machine value for SPARC V9 (EM_SPARCV9).
        private const val ARCH_SPARCV9 = 43
        // .comment section marker identifying Solaris-built binaries.
        private const val SOLARIS_COMMENT = "Solaris Link Editors:"
        private const val SHA1_BYTES = 20
        // Matches a sig-style SHA-1 array initialisation in <clinit>.
        private val SHA1_MATCHER =
            InsnMatcher.compile("BIPUSH NEWARRAY (DUP (ICONST | BIPUSH) (ICONST | BIPUSH | SIPUSH) IASTORE)+")
        private val FILE_NAME_REGEX = Regex("([a-z_]+)(?:_(-?[0-9]+))?[.]([a-z0-9]+)(?:\\?crc=(-?[0-9]+))?")
        // Matches byte-by-byte SHA-1 comparisons in loaders.
        private val SHA1_CMP_MATCHER =
            InsnMatcher.compile("((ICONST | BIPUSH)? ALOAD (ICONST | BIPUSH) BALOAD (ICONST IXOR)? (ICONST | BIPUSH)? (IF_ICMPEQ | IF_ICMPNE | IFEQ | IFNE))+")
        private val PATH_CMP_MATCHER = InsnMatcher.compile("(LDC ALOAD | ALOAD LDC) (IF_ACMPEQ | IF_ACMPNE)")
    }
}

@ -1,30 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.types.defaultStdout
import com.github.ajalt.clikt.parameters.types.long
import com.github.ajalt.clikt.parameters.types.outputStream
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
import java.io.FileNotFoundException
/**
 * Exports a previously imported client artifact by ID, writing its raw
 * contents to the given output stream (stdout by default).
 */
public class ExportCommand : CliktCommand(name = "export") {
    private val id by argument().long()
    private val output by argument().outputStream().defaultStdout()

    override fun run(): Unit = runBlocking {
        CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
            val artifact = injector.getInstance(ClientExporter::class.java).export(id)
                ?: throw FileNotFoundException()
            try {
                val content = artifact.content()
                content.readBytes(output, content.readableBytes())
            } finally {
                artifact.release()
            }
        }
    }
}

@ -1,32 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.multiple
import com.github.ajalt.clikt.parameters.options.flag
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * Imports one or more client files into the archive, with optional name,
 * description and source URL metadata.
 */
public class ImportCommand : CliktCommand(name = "import") {
    private val name by option()
    private val description by option()
    private val url by option()
    private val skipErrors by option().flag()
    private val input by argument().path(
        mustExist = true,
        canBeDir = false,
        mustBeReadable = true,
    ).multiple()

    override fun run(): Unit = runBlocking {
        CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
            injector.getInstance(ClientImporter::class.java)
                .import(input, name, description, url, skipErrors)
        }
    }
}

@ -1,7 +0,0 @@
package org.openrs2.archive.client
public enum class Jvm {
    /** Artifact is not specific to any JVM. */
    INDEPENDENT,
    /** Sun's JVM (JNI entry points). */
    SUN,
    /** Microsoft's JVM (RNI entry points). */
    MICROSOFT
}

@ -1,116 +0,0 @@
package org.openrs2.archive.client
import io.netty.buffer.ByteBuf
import org.openrs2.buffer.readString
/**
 * Minimal Mach-O parser that extracts only the CPU architecture and the
 * symbol names from a thin or universal ("fat") binary.
 */
public data class MachO(
    public val architecture: Architecture,
    public val symbols: Set<String>,
) {
    public companion object {
        // Magic numbers for universal binaries and each thin Mach-O variant.
        private const val MACHO_UNIVERSAL = 0xCAFEBABE.toInt()
        private const val MACHO32BE = 0xFEEDFACE.toInt()
        private const val MACHO32LE = 0xCEFAEDFE.toInt()
        private const val MACHO64BE = 0xFEEDFACF.toInt()
        private const val MACHO64LE = 0xCFFAEDFE.toInt()
        // cputype values.
        private const val CPU_TYPE_X86 = 0x7
        private const val CPU_TYPE_AMD64 = 0x1000007
        private const val CPU_TYPE_POWERPC = 0x12
        // Symbol table load command (LC_SYMTAB).
        private const val COMMAND_SYMTAB = 0x2
        /**
         * Parses a thin or universal Mach-O binary.
         */
        public fun parse(buf: ByteBuf): MachO {
            val magic = buf.getInt(buf.readerIndex())
            return if (magic == MACHO_UNIVERSAL) {
                parseFat(buf)
            } else {
                parseMachO(buf)
            }
        }
        /**
         * Parses a universal binary: symbols are merged across all embedded
         * architectures and the architecture is reported as UNIVERSAL.
         */
        private fun parseFat(buf: ByteBuf): MachO {
            buf.skipBytes(4) // magic
            val symbols = mutableSetOf<String>()
            val count = buf.readInt()
            for (i in 0 until count) {
                buf.skipBytes(8) // cputype, cpusubtype
                val offset = buf.readInt()
                val size = buf.readInt()
                buf.skipBytes(4) // align
                symbols += parseMachO(buf.slice(offset, size)).symbols
            }
            return MachO(Architecture.UNIVERSAL, symbols)
        }
        /**
         * Parses a thin Mach-O binary. Multi-byte fields after the magic are
         * byte-swapped when the magic indicates little-endian.
         */
        private fun parseMachO(buf: ByteBuf): MachO {
            val magic = buf.readInt()
            require(magic == MACHO32BE || magic == MACHO32LE || magic == MACHO64BE || magic == MACHO64LE)
            val big = magic == MACHO32BE || magic == MACHO64BE
            val x64 = magic == MACHO64LE || magic == MACHO64BE
            val arch = when (if (big) buf.readInt() else buf.readIntLE()) {
                CPU_TYPE_X86 -> Architecture.X86
                CPU_TYPE_AMD64 -> Architecture.AMD64
                CPU_TYPE_POWERPC -> Architecture.POWERPC
                else -> throw IllegalArgumentException()
            }
            buf.skipBytes(4) // cpuSubType
            buf.skipBytes(4) // fileType
            val nCmds = if (big) buf.readInt() else buf.readIntLE()
            buf.skipBytes(4) // sizeOfCmds
            buf.skipBytes(4) // flags
            if (x64) {
                buf.skipBytes(4) // reserved
            }
            val symbols = parseCommands(buf, big, nCmds)
            return MachO(arch, symbols)
        }
        /**
         * Scans the load commands for the symbol table and returns its
         * strings, or an empty set if there is no symbol table.
         */
        private fun parseCommands(buf: ByteBuf, big: Boolean, count: Int): Set<String> {
            for (i in 0 until count) {
                val base = buf.readerIndex()
                val command = if (big) buf.readInt() else buf.readIntLE()
                val size = if (big) buf.readInt() else buf.readIntLE()
                if (command == COMMAND_SYMTAB) {
                    buf.skipBytes(8) // symoff, nsyms
                    val strOff = if (big) buf.readInt() else buf.readIntLE()
                    val strSize = if (big) buf.readInt() else buf.readIntLE()
                    return parseStringTable(buf.slice(strOff, strSize))
                }
                // Skip to the next load command using the declared size.
                buf.readerIndex(base + size)
            }
            return emptySet()
        }
        /**
         * Reads all non-empty NUL-terminated ASCII strings from the string
         * table.
         */
        private fun parseStringTable(buf: ByteBuf): Set<String> {
            return buildSet {
                while (buf.isReadable) {
                    val str = buf.readString(Charsets.US_ASCII)
                    if (str.isNotEmpty()) {
                        add(str)
                    }
                }
            }
        }
    }
}

@ -1,43 +0,0 @@
package org.openrs2.archive.client
import io.ktor.http.ContentType
/**
 * Operating systems a native artifact may target, with helpers for deriving
 * the platform's native library naming scheme and content type.
 */
public enum class OperatingSystem {
    INDEPENDENT,
    WINDOWS,
    MACOS,
    LINUX,
    SOLARIS;

    /**
     * Returns the native library file name prefix for this OS.
     *
     * @throws IllegalArgumentException for [INDEPENDENT].
     */
    public fun getPrefix(): String = when (this) {
        INDEPENDENT -> throw IllegalArgumentException()
        WINDOWS -> ""
        MACOS, LINUX, SOLARIS -> "lib"
    }

    /**
     * Returns the native library file extension for this OS.
     *
     * @throws IllegalArgumentException for [INDEPENDENT].
     */
    public fun getExtension(): String = when (this) {
        INDEPENDENT -> throw IllegalArgumentException()
        WINDOWS -> "dll"
        MACOS -> "dylib"
        LINUX, SOLARIS -> "so"
    }

    /**
     * Returns the content type of native libraries for this OS.
     *
     * @throws IllegalArgumentException for [INDEPENDENT].
     */
    public fun getContentType(): ContentType = when (this) {
        INDEPENDENT -> throw IllegalArgumentException()
        WINDOWS -> PE
        MACOS -> MACHO
        LINUX, SOLARIS -> ELF_SHARED
    }

    private companion object {
        private val ELF_SHARED = ContentType("application", "x-sharedlib")
        private val MACHO = ContentType("application", "x-mach-binary")
        private val PE = ContentType("application", "vnd.microsoft.portable-executable")
    }
}

@ -1,16 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * Runs [ClientImporter.refresh].
 */
public class RefreshCommand : CliktCommand(name = "refresh") {
    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            it.getInstance(ClientImporter::class.java).refresh()
        }
    }
}

@ -1,11 +0,0 @@
package org.openrs2.archive.game
/**
 * Metadata for a single game variant, as returned by [GameDatabase.getGame].
 * Nullable fields correspond to nullable database columns.
 */
public data class Game(
    public val id: Int,
    public val url: String?,
    public val buildMajor: Int?,
    public val buildMinor: Int?,
    public val lastMasterIndexId: Int?,
    public val languageId: Int,
    public val scopeId: Int
)

@ -1,58 +0,0 @@
package org.openrs2.archive.game
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.db.Database
/**
 * Read-only access to game variant metadata in the archive database.
 */
@Singleton
public class GameDatabase @Inject constructor(
    private val database: Database
) {
    /**
     * Looks up a game variant by game name, environment name and language
     * ISO code, or returns null if no such variant exists.
     */
    public suspend fun getGame(name: String, environment: String, language: String): Game? {
        return database.execute { connection ->
            connection.prepareStatement(
                """
                SELECT v.id, v.url, v.build_major, v.build_minor, v.last_master_index_id, v.language_id, g.scope_id
                FROM game_variants v
                JOIN games g ON g.id = v.game_id
                JOIN environments e ON e.id = v.environment_id
                JOIN languages l ON l.id = v.language_id
                WHERE g.name = ? AND e.name = ? AND l.iso_code = ?
                """.trimIndent()
            ).use { stmt ->
                stmt.setString(1, name)
                stmt.setString(2, environment)
                stmt.setString(3, language)
                stmt.executeQuery().use { rows ->
                    if (!rows.next()) {
                        return@execute null
                    }
                    val id = rows.getInt(1)
                    val url: String? = rows.getString(2)
                    // getInt() returns 0 for SQL NULL, so wasNull() is used to
                    // map nullable columns back to null.
                    var buildMajor: Int? = rows.getInt(3)
                    if (rows.wasNull()) {
                        buildMajor = null
                    }
                    var buildMinor: Int? = rows.getInt(4)
                    if (rows.wasNull()) {
                        buildMinor = null
                    }
                    var lastMasterIndexId: Int? = rows.getInt(5)
                    if (rows.wasNull()) {
                        lastMasterIndexId = null
                    }
                    val languageId = rows.getInt(6)
                    val scopeId = rows.getInt(7)
                    return@execute Game(id, url, buildMajor, buildMinor, lastMasterIndexId, languageId, scopeId)
                }
            }
        }
    }
}

@ -1,70 +0,0 @@
package org.openrs2.archive.jav
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.future.await
import kotlinx.coroutines.withContext
import org.openrs2.http.checkStatusCode
import java.io.BufferedReader
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.time.Duration
/**
 * Parsed contents of a jav_config file: top-level settings, applet
 * parameters (`param=` lines) and messages (`msg=` lines).
 */
public data class JavConfig(
    public val config: Map<String, String>,
    public val params: Map<String, String>,
    public val messages: Map<String, String>
) {
    public companion object {
        /**
         * Fetches a jav_config file over HTTP and parses it.
         */
        public suspend fun download(client: HttpClient, url: String): JavConfig {
            val request = HttpRequest.newBuilder(URI(url))
                .GET()
                .timeout(Duration.ofSeconds(30))
                .build()

            val response = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()).await()
            response.checkStatusCode()

            return withContext(Dispatchers.IO) {
                response.body().bufferedReader().use(::read)
            }
        }

        /**
         * Parses a jav_config file. Comment lines (`//` or `#`) are skipped,
         * and lines without a `key=value` payload are ignored.
         */
        public fun read(reader: BufferedReader): JavConfig {
            val config = mutableMapOf<String, String>()
            val params = mutableMapOf<String, String>()
            val messages = mutableMapOf<String, String>()

            for (rawLine in reader.lineSequence()) {
                val line = rawLine.trim()
                if (line.startsWith("//") || line.startsWith("#")) {
                    continue
                }

                // Route the line to the right map, stripping any prefix.
                val (dest, payload) = when {
                    line.startsWith("msg=") -> messages to line.substring("msg=".length)
                    line.startsWith("param=") -> params to line.substring("param=".length)
                    else -> config to line
                }

                val parts = payload.split("=", limit = 2)
                if (parts.size == 2) {
                    dest[parts[0]] = parts[1]
                }
            }

            return JavConfig(config, params, messages)
        }
    }
}

@ -1,65 +0,0 @@
package org.openrs2.archive.key
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import org.openrs2.buffer.use
import org.openrs2.crypto.SymmetricKey
import java.io.InputStream
public object BinaryKeyReader : KeyReader {
    /**
     * Reads XTEA keys from a binary key file. Three layouts are supported:
     *
     * - exactly 256 KiB: 128x128 map squares of bare 16-byte keys
     * - a multiple of 18 bytes: 2-byte map square ID before each key
     * - a multiple of 20 bytes: 4-byte map square ID before each key
     *
     * A valid dump is required to contain the all-zero key, which also
     * disambiguates the 2- and 4-byte ID layouts when the length is a
     * multiple of both 18 and 20.
     *
     * @throws IllegalArgumentException if the layout cannot be determined.
     */
    override fun read(input: InputStream): Sequence<SymmetricKey> {
        Unpooled.wrappedBuffer(input.readBytes()).use { buf ->
            val len = buf.readableBytes()
            if (len == (128 * 128 * 16)) {
                val keys = read(buf, 0)
                require(SymmetricKey.ZERO in keys)
                return keys.asSequence()
            }
            val maybeShort = (len % 18) == 0
            val maybeInt = (len % 20) == 0
            if (maybeShort && !maybeInt) {
                val keys = read(buf, 2)
                require(SymmetricKey.ZERO in keys)
                return keys.asSequence()
            } else if (!maybeShort && maybeInt) {
                // Kept symmetrical with the 2-byte branch: membership is
                // checked against the Set (O(1)), not a Sequence.
                val keys = read(buf, 4)
                require(SymmetricKey.ZERO in keys)
                return keys.asSequence()
            } else if (maybeShort && maybeInt) {
                // Ambiguous length: parse both ways and pick the layout that
                // yields the zero key.
                val shortKeys = read(buf, 2)
                val intKeys = read(buf, 4)
                return if (SymmetricKey.ZERO in shortKeys && SymmetricKey.ZERO !in intKeys) {
                    shortKeys.asSequence()
                } else if (SymmetricKey.ZERO !in shortKeys && SymmetricKey.ZERO in intKeys) {
                    intKeys.asSequence()
                } else {
                    throw IllegalArgumentException("Failed to determine if map square IDs are 2 or 4 bytes")
                }
            } else {
                throw IllegalArgumentException(
                    "Binary XTEA files must be exactly 256 KiB or a multiple of 18 or 20 bytes long"
                )
            }
        }
    }
    /**
     * Reads keys from [buf], skipping a [mapSquareLen]-byte map square ID
     * before each 16-byte (four-int) key.
     */
    private fun read(buf: ByteBuf, mapSquareLen: Int): Set<SymmetricKey> {
        val keys = mutableSetOf<SymmetricKey>()
        while (buf.isReadable) {
            buf.skipBytes(mapSquareLen)
            val k0 = buf.readInt()
            val k1 = buf.readInt()
            val k2 = buf.readInt()
            val k3 = buf.readInt()
            keys += SymmetricKey(k0, k1, k2, k3)
        }
        return keys
    }
}

@ -1,16 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * Runs [KeyBruteForcer.bruteForce].
 */
public class BruteForceCommand : CliktCommand(name = "brute-force") {
    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            it.getInstance(KeyBruteForcer::class.java).bruteForce()
        }
    }
}

@ -1,16 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * Runs [KeyImporter.download].
 */
public class DownloadCommand : CliktCommand(name = "download") {
    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            it.getInstance(KeyImporter::class.java).download()
        }
    }
}

@ -1,16 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * Prints the result of [KeyExporter.analyse].
 */
public class EntCommand : CliktCommand(name = "ent") {
    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            println(it.getInstance(KeyExporter::class.java).analyse())
        }
    }
}

@ -1,57 +0,0 @@
package org.openrs2.archive.key
import jakarta.inject.Inject
import jakarta.inject.Singleton
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.future.await
import kotlinx.coroutines.withContext
import org.openrs2.crypto.SymmetricKey
import org.openrs2.http.checkStatusCode
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.time.Duration
/**
 * Downloads XTEA keys from the HDOS key API. Keys are parsed from the third
 * comma-separated field of each response line.
 */
@Singleton
public class HdosKeyDownloader @Inject constructor(
    private val client: HttpClient
) : KeyDownloader(KeySource.HDOS) {
    override suspend fun getMissingUrls(seenUrls: Set<String>): Set<String> {
        // The endpoint is always fetched, regardless of what has been seen.
        return setOf(ENDPOINT)
    }

    override suspend fun download(url: String): Sequence<SymmetricKey> {
        val request = HttpRequest.newBuilder(URI(url))
            .GET()
            .timeout(Duration.ofSeconds(30))
            .build()

        val response = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()).await()
        response.checkStatusCode()

        return withContext(Dispatchers.IO) {
            response.body().use { input ->
                input.bufferedReader().use { reader ->
                    // Deduplicate while preserving the original order.
                    reader.lineSequence()
                        .mapNotNull { line ->
                            val parts = line.split(',')
                            if (parts.size < 3) {
                                null
                            } else {
                                SymmetricKey.fromHexOrNull(parts[2])
                            }
                        }
                        .toSet()
                        .asSequence()
                }
            }
        }
    }

    private companion object {
        private const val ENDPOINT = "https://api.hdos.dev/keys/get"
    }
}

@ -1,13 +0,0 @@
package org.openrs2.archive.key
import org.openrs2.crypto.SymmetricKey
import java.io.InputStream
public object HexKeyReader : KeyReader {
    /**
     * Reads one hex-encoded key per line, silently skipping lines that do
     * not parse as a key.
     */
    override fun read(input: InputStream): Sequence<SymmetricKey> {
        // mapNotNull replaces the original map + filterNotNull pair.
        return input.bufferedReader()
            .lineSequence()
            .mapNotNull(SymmetricKey::fromHexOrNull)
    }
}

@ -1,23 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
/**
 * Imports keys from a file into the archive database via [KeyImporter].
 */
public class ImportCommand : CliktCommand(name = "import") {
    private val input by argument().path(
        mustExist = true,
        mustBeReadable = true
    )

    override fun run(): Unit = runBlocking {
        val injector = Guice.createInjector(ArchiveModule)
        CloseableInjector(injector).use {
            it.getInstance(KeyImporter::class.java).import(input)
        }
    }
}

@ -1,34 +0,0 @@
package org.openrs2.archive.key
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.future.await
import kotlinx.coroutines.withContext
import org.openrs2.crypto.SymmetricKey
import org.openrs2.http.checkStatusCode
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.time.Duration
/**
 * Base class for key downloaders whose endpoints return JSON parsable by
 * [JsonKeyReader].
 */
public abstract class JsonKeyDownloader(
    source: KeySource,
    private val client: HttpClient,
    private val jsonKeyReader: JsonKeyReader
) : KeyDownloader(source) {
    override suspend fun download(url: String): Sequence<SymmetricKey> {
        val request = HttpRequest.newBuilder(URI(url))
            .GET()
            .timeout(Duration.ofSeconds(30))
            .build()

        val response = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()).await()
        response.checkStatusCode()

        return withContext(Dispatchers.IO) {
            response.body().use(jsonKeyReader::read)
        }
    }
}

@ -1,39 +0,0 @@
package org.openrs2.archive.key
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.treeToValue
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.crypto.SymmetricKey
import org.openrs2.json.Json
import java.io.IOException
import java.io.InputStream
/**
 * Reads keys from JSON: either an array of objects with a `key`/`keys`
 * field, or an object whose values are keys.
 */
@Singleton
public class JsonKeyReader @Inject constructor(
    @Json private val mapper: ObjectMapper
) : KeyReader {
    override fun read(input: InputStream): Sequence<SymmetricKey> {
        val root = mapper.readTree(input)
        val keys = mutableSetOf<SymmetricKey>()

        if (root.isArray) {
            for (entry in root) {
                val node = entry["key"] ?: entry["keys"] ?: throw IOException("Missing 'key' or 'keys' field")
                keys += mapper.treeToValue<SymmetricKey?>(node) ?: throw IOException("Key must be non-null")
            }
        } else if (root.isObject) {
            for (entry in root.fields()) {
                keys += mapper.treeToValue<SymmetricKey?>(entry.value) ?: throw IOException("Key must be non-null")
            }
        } else {
            throw IOException("Root element must be an array or object")
        }

        return keys.asSequence()
    }
}

@ -1,403 +0,0 @@
package org.openrs2.archive.key
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.cache.Js5Compression
import org.openrs2.crypto.SymmetricKey
import org.openrs2.db.Database
import java.sql.Connection
import java.sql.Types
/**
 * Tries candidate XTEA keys from the `keys` table against encrypted
 * containers until one decrypts successfully, recording the match on the
 * container row.
 *
 * Progress is persisted in the single-row `brute_force_iterator` table (the
 * last container ID and last key ID tested), so runs are incremental and no
 * (container, key) pair is ever tested twice — see the quadrant diagram on
 * [bruteForce].
 */
@Singleton
public class KeyBruteForcer @Inject constructor(
    private val database: Database
) {
    // A (key, container) pair that decrypted successfully, together with the
    // uncompressed length/CRC-32 used to update the containers table.
    private data class ValidatedKey(
        val keyId: Long,
        val containerId: Long,
        val uncompressedLength: Int,
        val uncompressedChecksum: Int
    )

    /*
     * Copy XTEA keys from key_queue to keys. The queue exists so that we don't
     * block the /keys API endpoint from working while the brute forcer is
     * running.
     *
     * This has to be a different transaction as it needs to lock the keys
     * table in EXCLUSIVE mode, but we want to downgrade that to SHARE mode as
     * soon as possible. Locks can only be released on commit in Postgres.
     */
    private suspend fun assignKeyIds() {
        database.execute { connection ->
            // Forbid concurrent writers so key IDs are allocated monotonically
            // (bruteForce() relies on this — see its comment block).
            connection.prepareStatement(
                """
                LOCK TABLE keys IN EXCLUSIVE MODE
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }

            // Stage the queued keys in a transaction-scoped temporary table so
            // the dedup insert, source upsert and queue delete all see the
            // same snapshot of the queue.
            connection.prepareStatement(
                """
                CREATE TEMPORARY TABLE tmp_keys (
                    key xtea_key NOT NULL,
                    source key_source NOT NULL,
                    first_seen TIMESTAMPTZ NOT NULL,
                    last_seen TIMESTAMPTZ NOT NULL
                ) ON COMMIT DROP
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }

            // SKIP LOCKED lets a concurrent import keep appending to the queue
            // without blocking this pass.
            connection.prepareStatement(
                """
                INSERT INTO tmp_keys (key, source, first_seen, last_seen)
                SELECT key, source, first_seen, last_seen
                FROM key_queue
                FOR UPDATE SKIP LOCKED
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }

            // Insert only keys not already present, assigning new IDs.
            connection.prepareStatement(
                """
                INSERT INTO keys (key)
                SELECT t.key
                FROM tmp_keys t
                LEFT JOIN keys k ON k.key = t.key
                WHERE k.key IS NULL
                ON CONFLICT DO NOTHING
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }

            // Record/merge provenance: widen the first_seen/last_seen window
            // when the same (key, source) pair is seen again.
            connection.prepareStatement(
                """
                INSERT INTO key_sources AS s (key_id, source, first_seen, last_seen)
                SELECT k.id, t.source, t.first_seen, t.last_seen
                FROM tmp_keys t
                JOIN keys k ON k.key = t.key
                ON CONFLICT (key_id, source) DO UPDATE SET
                    first_seen = LEAST(s.first_seen, EXCLUDED.first_seen),
                    last_seen = GREATEST(s.last_seen, EXCLUDED.last_seen)
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }

            // Drop only the queue rows we actually staged; rows skipped by
            // SKIP LOCKED above survive for the next pass.
            connection.prepareStatement(
                """
                DELETE FROM key_queue k
                USING tmp_keys t
                WHERE k.key = t.key AND k.source = t.source
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }
        }
    }

    /*
     * The code for writing to the containers and keys tables ensures that the
     * row IDs are allocated monotonically (by forbidding any other
     * transactions from writing simultaneously with an EXCLUSIVE table lock).
     *
     * Rather than storing a list of (container, key) pairs which have yet to
     * be tested (or have already been tested), which would take O(n*m) space,
     * the containers/keys are tested in order. This means we only need to
     * store the IDs of the last container/key tested.
     *
     * If the container/key combinations are represented as a matrix, it looks
     * like the diagram below:
     *
     *     containers ->
     *     +----------+----------+
     *   k |##########|          |
     *   e |##########|    A     |
     *   y |##########|          |
     *   s +----------+----------+
     *     |          |          |
     *   | |    C     |    B     |
     *   v |          |          |
     *     +----------+----------+
     *
     * The shaded area represents combinations that have already been tried.
     *
     * When a new container is inserted, we test it against every key in the
     * shaded area (quadrant A).
     *
     * When a new key is inserted, we test it against every container in the
     * shaded area (quadrant C).
     *
     * If keys and containers are inserted simultaneously, we take care to
     * avoid testing them twice (quadrant B) by testing new containers against
     * all keys but not vice-versa.
     *
     * This code can't tolerate new IDs being inserted while it runs, so it
     * locks the tables in SHARE mode. This prevents the import process from
     * running (which takes EXCLUSIVE locks) but allows other processes to read
     * from the tables.
     */
    public suspend fun bruteForce() {
        assignKeyIds()

        database.execute { connection ->
            connection.prepareStatement(
                """
                LOCK TABLE containers, keys IN SHARE MODE
                """.trimIndent()
            ).use { stmt ->
                stmt.execute()
            }

            bruteForceNewContainers(connection) // A, B
            bruteForceNewKeys(connection) // C
        }
    }

    /**
     * Tests every container inserted since the last run (quadrants A and B)
     * against all keys, advancing `last_container_id` as it goes.
     */
    private fun bruteForceNewContainers(connection: Connection) {
        var lastContainerId: Long?

        // FOR UPDATE serialises concurrent brute forcer runs on the
        // single-row iterator table.
        connection.prepareStatement(
            """
            SELECT last_container_id
            FROM brute_force_iterator
            FOR UPDATE
            """.trimIndent()
        ).use { stmt ->
            stmt.executeQuery().use { rows ->
                check(rows.next())

                lastContainerId = rows.getLong(1)
                if (rows.wasNull()) {
                    // NULL means no container has been tested yet.
                    lastContainerId = null
                }
            }
        }

        while (true) {
            val pair = nextContainer(connection, lastContainerId) ?: break
            val (containerId, data) = pair

            var validatedKey: ValidatedKey? = null

            connection.prepareStatement(
                """
                SELECT id, (key).k0, (key).k1, (key).k2, (key).k3
                FROM keys
                """.trimIndent()
            ).use { stmt ->
                stmt.fetchSize = BATCH_SIZE

                stmt.executeQuery().use { rows ->
                    while (rows.next()) {
                        val keyId = rows.getLong(1)
                        val k0 = rows.getInt(2)
                        val k1 = rows.getInt(3)
                        val k2 = rows.getInt(4)
                        val k3 = rows.getInt(5)
                        val key = SymmetricKey(k0, k1, k2, k3)

                        validatedKey = validateKey(data, key, keyId, containerId)
                        if (validatedKey != null) {
                            // First matching key wins; stop scanning.
                            break
                        }
                    }
                }

                if (validatedKey != null) {
                    updateContainers(connection, listOf(validatedKey!!))
                }

                // Advance the cursor even on failure so the container is
                // never retried against the same key range.
                lastContainerId = containerId
            }
        }

        connection.prepareStatement(
            """
            UPDATE brute_force_iterator
            SET last_container_id = ?
            """.trimIndent()
        ).use { stmt ->
            stmt.setObject(1, lastContainerId, Types.BIGINT)
            stmt.execute()
        }
    }

    /**
     * Returns the next still-encrypted, keyless container after
     * [lastContainerId] (or the first one when it is `null`), or `null` when
     * the end of the table is reached.
     */
    private fun nextContainer(connection: Connection, lastContainerId: Long?): Pair<Long, ByteArray>? {
        connection.prepareStatement(
            """
            SELECT id, data
            FROM containers
            WHERE (? IS NULL OR id > ?) AND encrypted AND key_id IS NULL
            ORDER BY id ASC
            LIMIT 1
            """.trimIndent()
        ).use { stmt ->
            stmt.setObject(1, lastContainerId, Types.BIGINT)
            stmt.setObject(2, lastContainerId, Types.BIGINT)

            stmt.executeQuery().use { rows ->
                if (!rows.next()) {
                    return null
                }

                val containerId = rows.getLong(1)
                val data = rows.getBytes(2)
                return Pair(containerId, data)
            }
        }
    }

    /**
     * Tests every key inserted since the last run (quadrant C) against the
     * containers already covered by previous container passes
     * (id <= last_container_id), advancing `last_key_id` as it goes.
     */
    private fun bruteForceNewKeys(connection: Connection) {
        var lastKeyId: Long?
        var lastContainerId: Long

        connection.prepareStatement(
            """
            SELECT last_key_id, last_container_id
            FROM brute_force_iterator
            FOR UPDATE
            """.trimIndent()
        ).use { stmt ->
            stmt.executeQuery().use { rows ->
                check(rows.next())

                lastKeyId = rows.getLong(1)
                if (rows.wasNull()) {
                    lastKeyId = null
                }

                lastContainerId = rows.getLong(2)
                if (rows.wasNull()) {
                    // No container pass has run yet, so quadrant C is empty —
                    // nothing to test new keys against. Non-local return from
                    // the inline use {} lambdas exits the whole function.
                    return@bruteForceNewKeys
                }
            }
        }

        while (true) {
            val pair = nextKey(connection, lastKeyId) ?: break
            val (keyId, key) = pair

            val validatedKeys = mutableListOf<ValidatedKey>()

            // Only containers at or below lastContainerId: newer containers
            // were already tested against all keys (quadrant B) by
            // bruteForceNewContainers.
            connection.prepareStatement(
                """
                SELECT id, data
                FROM containers
                WHERE encrypted AND key_id IS NULL AND id <= ?
                """.trimIndent()
            ).use { stmt ->
                stmt.fetchSize = BATCH_SIZE
                stmt.setLong(1, lastContainerId)

                stmt.executeQuery().use { rows ->
                    while (rows.next()) {
                        val containerId = rows.getLong(1)
                        val data = rows.getBytes(2)

                        val validatedKey = validateKey(data, key, keyId, containerId)
                        if (validatedKey != null) {
                            validatedKeys += validatedKey
                        }
                    }
                }
            }

            updateContainers(connection, validatedKeys)
            lastKeyId = keyId
        }

        connection.prepareStatement(
            """
            UPDATE brute_force_iterator
            SET last_key_id = ?
            """.trimIndent()
        ).use { stmt ->
            stmt.setObject(1, lastKeyId, Types.BIGINT)
            stmt.execute()
        }
    }

    /**
     * Returns the next key after [lastKeyId] (or the first key when it is
     * `null`), or `null` when the end of the table is reached.
     */
    private fun nextKey(connection: Connection, lastKeyId: Long?): Pair<Long, SymmetricKey>? {
        connection.prepareStatement(
            """
            SELECT id, (key).k0, (key).k1, (key).k2, (key).k3
            FROM keys
            WHERE ? IS NULL OR id > ?
            ORDER BY id ASC
            LIMIT 1
            """.trimIndent()
        ).use { stmt ->
            stmt.setObject(1, lastKeyId, Types.BIGINT)
            stmt.setObject(2, lastKeyId, Types.BIGINT)

            stmt.executeQuery().use { rows ->
                if (!rows.next()) {
                    return null
                }

                val keyId = rows.getLong(1)
                val k0 = rows.getInt(2)
                val k1 = rows.getInt(3)
                val k2 = rows.getInt(4)
                val k3 = rows.getInt(5)
                val key = SymmetricKey(k0, k1, k2, k3)
                return Pair(keyId, key)
            }
        }
    }

    /**
     * Attempts to decrypt and uncompress [data] with [key]. Returns a
     * [ValidatedKey] carrying the uncompressed length and CRC-32 on success,
     * or `null` if the key is not valid for this container.
     */
    private fun validateKey(
        data: ByteArray,
        key: SymmetricKey,
        keyId: Long,
        containerId: Long
    ): ValidatedKey? {
        Unpooled.wrappedBuffer(data).use { buf ->
            Js5Compression.uncompressIfKeyValid(buf, key).use { uncompressed ->
                return if (uncompressed != null) {
                    ValidatedKey(keyId, containerId, uncompressed.readableBytes(), uncompressed.crc32())
                } else {
                    null
                }
            }
        }
    }

    /**
     * Batch-writes the matched key ID and uncompressed length/CRC-32 onto the
     * corresponding container rows. No-op for an empty list.
     */
    private fun updateContainers(connection: Connection, keys: List<ValidatedKey>) {
        if (keys.isEmpty()) {
            return
        }

        connection.prepareStatement(
            """
            UPDATE containers
            SET key_id = ?, uncompressed_length = ?, uncompressed_crc32 = ?
            WHERE id = ?""".trimIndent()
        ).use { stmt ->
            for (key in keys) {
                stmt.setLong(1, key.keyId)
                stmt.setInt(2, key.uncompressedLength)
                stmt.setInt(3, key.uncompressedChecksum)
                stmt.setLong(4, key.containerId)
                stmt.addBatch()
            }

            stmt.executeBatch()
        }
    }

    private companion object {
        // JDBC fetch size for the full-table key/container scans, bounding
        // memory while brute forcing.
        private const val BATCH_SIZE = 1024
    }
}

@ -1,15 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
/**
 * Parent CLI command grouping the XTEA key management subcommands
 * (brute-force, download, ent analysis and import) under `key`.
 *
 * The command itself performs no work; Clikt dispatches to the subcommands.
 */
public class KeyCommand : NoOpCliktCommand(name = "key") {
    init {
        subcommands(
            BruteForceCommand(),
            DownloadCommand(),
            EntCommand(),
            ImportCommand()
        )
    }
}

@ -1,10 +0,0 @@
package org.openrs2.archive.key
import org.openrs2.crypto.SymmetricKey
/**
 * Base class for services that fetch XTEA keys from an external endpoint.
 *
 * @param source the [KeySource] recorded against every key this downloader
 * produces.
 */
public abstract class KeyDownloader(
    public val source: KeySource
) {
    /**
     * Returns the URLs that should be downloaded, excluding any already
     * present in [seenUrls] (URLs fetched on previous runs).
     */
    public abstract suspend fun getMissingUrls(seenUrls: Set<String>): Set<String>

    /** Downloads and parses all keys available at [url]. */
    public abstract suspend fun download(url: String): Sequence<SymmetricKey>
}

@ -1,159 +0,0 @@
package org.openrs2.archive.key
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.crypto.SymmetricKey
import org.openrs2.db.Database
import java.io.BufferedOutputStream
import java.io.DataOutputStream
/**
 * Exports XTEA keys from the database and reports coverage statistics,
 * optionally piping the keys through the external `ent` entropy tool.
 */
@Singleton
public class KeyExporter @Inject constructor(
    private val database: Database
) {
    /**
     * Key/group coverage statistics.
     *
     * @param allKeys total number of keys in the database.
     * @param validKeys number of keys that decrypt at least one container.
     * @param encryptedGroups total number of encrypted containers.
     * @param validGroups encrypted containers with a known key.
     * @param emptyGroups encrypted containers without a key but flagged as
     * empty location groups.
     */
    public data class Stats(
        val allKeys: Long,
        val validKeys: Long,
        val encryptedGroups: Long,
        val validGroups: Long,
        val emptyGroups: Long
    ) {
        // Each fraction defaults to 1.0 when the denominator is zero to avoid
        // division by zero (an empty database is trivially fully covered).
        val validKeysFraction: Double = if (allKeys == 0L) {
            1.0
        } else {
            validKeys.toDouble() / allKeys
        }

        val validGroupsFraction: Double = if (encryptedGroups == 0L) {
            1.0
        } else {
            validGroups.toDouble() / encryptedGroups
        }

        val emptyGroupsFraction: Double = if (encryptedGroups == 0L) {
            1.0
        } else {
            emptyGroups.toDouble() / encryptedGroups
        }
    }

    /** Computes [Stats] over the current contents of the database. */
    public suspend fun count(): Stats {
        return database.execute { connection ->
            val encryptedGroups: Long
            val validGroups: Long
            val emptyGroups: Long

            connection.prepareStatement(
                """
                SELECT
                    COUNT(*),
                    COUNT(*) FILTER (WHERE c.key_id IS NOT NULL),
                    COUNT(*) FILTER (WHERE c.key_id IS NULL AND c.empty_loc)
                FROM containers c
                WHERE c.encrypted
                """.trimIndent()
            ).use { stmt ->
                stmt.executeQuery().use { rows ->
                    check(rows.next())

                    encryptedGroups = rows.getLong(1)
                    validGroups = rows.getLong(2)
                    emptyGroups = rows.getLong(3)
                }
            }

            connection.prepareStatement(
                """
                SELECT
                    COUNT(DISTINCT k.id),
                    COUNT(DISTINCT k.id) FILTER (WHERE c.key_id IS NOT NULL)
                FROM keys k
                LEFT JOIN containers c ON c.key_id = k.id
                """.trimIndent()
            ).use { stmt ->
                stmt.executeQuery().use { rows ->
                    check(rows.next())

                    val allKeys = rows.getLong(1)
                    val validKeys = rows.getLong(2)
                    Stats(allKeys, validKeys, encryptedGroups, validGroups, emptyGroups)
                }
            }
        }
    }

    /** Exports every key, whether or not it decrypts a container. */
    public suspend fun exportAll(): List<SymmetricKey> {
        return export(validOnly = false)
    }

    /** Exports only keys known to decrypt at least one container. */
    public suspend fun exportValid(): List<SymmetricKey> {
        return export(validOnly = true)
    }

    /**
     * Pipes every valid key through the external `ent` entropy analysis tool
     * and returns its textual report.
     *
     * @throws IllegalStateException if `ent` exits with a non-zero status.
     */
    public suspend fun analyse(): String {
        val keys = exportValid()

        val process = ProcessBuilder("ent")
            .redirectError(ProcessBuilder.Redirect.INHERIT)
            .start()

        // use {} closes the stream, signalling EOF to ent so it can finish.
        DataOutputStream(BufferedOutputStream(process.outputStream)).use { out ->
            for (key in keys) {
                out.writeInt(key.k0)
                out.writeInt(key.k1)
                out.writeInt(key.k2)
                out.writeInt(key.k3)
            }
        }

        val analysis = process.inputStream.readAllBytes().toString(Charsets.UTF_8)

        val status = process.waitFor()
        // check() throws IllegalStateException (a subclass of the bare
        // Exception thrown previously, so existing catch blocks still work).
        check(status == 0) { "ent failed: $status" }
        return analysis
    }

    /** Runs the appropriate export query and materialises the key list. */
    private suspend fun export(validOnly: Boolean): List<SymmetricKey> {
        return database.execute { connection ->
            val query = if (validOnly) {
                EXPORT_VALID_QUERY
            } else {
                EXPORT_ALL_QUERY
            }

            connection.prepareStatement(query).use { stmt ->
                stmt.executeQuery().use { rows ->
                    val keys = mutableListOf<SymmetricKey>()

                    while (rows.next()) {
                        val k0 = rows.getInt(1)
                        val k1 = rows.getInt(2)
                        val k2 = rows.getInt(3)
                        val k3 = rows.getInt(4)
                        keys += SymmetricKey(k0, k1, k2, k3)
                    }

                    keys
                }
            }
        }
    }

    private companion object {
        private val EXPORT_ALL_QUERY = """
            SELECT (k.key).k0, (k.key).k1, (k.key).k2, (k.key).k3
            FROM keys k
            ORDER BY k.id ASC
        """.trimIndent()

        // k.id is selected so DISTINCT is compatible with ORDER BY k.id; the
        // JOIN restricts the result to keys that decrypt some container.
        private val EXPORT_VALID_QUERY = """
            SELECT DISTINCT (k.key).k0, (k.key).k1, (k.key).k2, (k.key).k3, k.id
            FROM keys k
            JOIN containers c ON c.key_id = k.id
            ORDER BY k.id ASC
        """.trimIndent()
    }
}

@ -1,155 +0,0 @@
package org.openrs2.archive.key
import com.github.michaelbull.logging.InlineLogger
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.crypto.SymmetricKey
import org.openrs2.db.Database
import java.io.IOException
import java.nio.file.Files
import java.nio.file.Path
import java.sql.Connection
import java.sql.Types
import java.time.Instant
import java.time.ZoneOffset
/**
 * Imports XTEA keys into the `key_queue` table, either from files on disk or
 * by polling the registered [KeyDownloader]s.
 *
 * Keys land in a queue rather than the `keys` table directly so imports do
 * not contend with the brute forcer's table locks.
 */
@Singleton
public class KeyImporter @Inject constructor(
    private val database: Database,
    private val jsonKeyReader: JsonKeyReader,
    private val downloaders: Set<KeyDownloader>
) {
    // A key paired with the provenance it was obtained from.
    private data class Key(val key: SymmetricKey, val source: KeySource)

    /**
     * Recursively imports every recognised key file under [path], choosing a
     * reader by file extension. Zero keys are discarded.
     */
    public suspend fun import(path: Path) {
        val keys = mutableSetOf<SymmetricKey>()

        // Files.walk returns a lazily-populated Stream backed by open
        // directory handles; it must be closed to avoid leaking them.
        Files.walk(path).use { files ->
            for (file in files) {
                if (!Files.isRegularFile(file)) {
                    continue
                }

                val name = file.fileName.toString()
                val reader = when {
                    name.endsWith(".bin") -> BinaryKeyReader
                    name.endsWith(".dat") -> BinaryKeyReader
                    name.endsWith(".hex") -> HexKeyReader
                    name.endsWith(".json") -> jsonKeyReader
                    name.endsWith(".mcx") -> BinaryKeyReader
                    name.endsWith(".txt") -> TextKeyReader
                    else -> continue
                }

                Files.newInputStream(file).use { input ->
                    keys += reader.read(input)
                }
            }
        }

        keys -= SymmetricKey.ZERO

        logger.info { "Importing ${keys.size} keys" }

        import(keys, KeySource.DISK)
    }

    /**
     * Polls every registered downloader for URLs not yet recorded in
     * `keysets`, downloads their keys and queues them. A downloader that
     * fails with an [IOException] is logged and skipped so the others still
     * run.
     */
    public suspend fun download() {
        val now = Instant.now()

        val seenUrls = database.execute { connection ->
            connection.prepareStatement(
                """
                SELECT url FROM keysets
                """.trimIndent()
            ).use { stmt ->
                stmt.executeQuery().use { rows ->
                    val urls = mutableSetOf<String>()

                    while (rows.next()) {
                        urls += rows.getString(1)
                    }

                    return@execute urls
                }
            }
        }

        val keys = mutableSetOf<Key>()
        val urls = mutableSetOf<String>()

        for (downloader in downloaders) {
            try {
                for (url in downloader.getMissingUrls(seenUrls)) {
                    keys += downloader.download(url).map { key ->
                        Key(key, downloader.source)
                    }
                    urls += url
                }
            } catch (ex: IOException) {
                logger.warn(ex) { "Failed to download keys from ${downloader.source.name}" }
                continue
            }
        }

        database.execute { connection ->
            // Record the fetched URLs so they are skipped on the next run,
            // then queue the keys in the same transaction.
            connection.prepareStatement(
                """
                INSERT INTO keysets (url)
                VALUES (?)
                ON CONFLICT DO NOTHING
                """.trimIndent()
            ).use { stmt ->
                for (url in urls) {
                    stmt.setString(1, url)
                    stmt.addBatch()
                }

                stmt.executeBatch()
            }

            import(connection, keys, now)
        }
    }

    /** Queues [keys] with the given [source], timestamped with the current time. */
    public suspend fun import(keys: Iterable<SymmetricKey>, source: KeySource) {
        val now = Instant.now()

        database.execute { connection ->
            import(connection, keys.map { key ->
                Key(key, source)
            }, now)
        }
    }

    /**
     * Batch-inserts keys into `key_queue`, widening the first_seen/last_seen
     * window when a (key, source) pair is seen again. Zero keys are skipped.
     */
    private fun import(connection: Connection, keys: Iterable<Key>, now: Instant) {
        val timestamp = now.atOffset(ZoneOffset.UTC)

        connection.prepareStatement(
            """
            INSERT INTO key_queue AS K (key, source, first_seen, last_seen)
            VALUES (ROW(?, ?, ?, ?), ?::key_source, ?, ?)
            ON CONFLICT (key, source) DO UPDATE SET
                first_seen = LEAST(k.first_seen, EXCLUDED.first_seen),
                last_seen = GREATEST(k.last_seen, EXCLUDED.last_seen)
            """.trimIndent()
        ).use { stmt ->
            // NOTE(review): the alias is written 'AS K' but referenced as 'k'
            // below — harmless, as Postgres folds unquoted identifiers to
            // lowercase, but worth normalising upstream.
            for (key in keys) {
                if (key.key.isZero) {
                    continue
                }

                stmt.setInt(1, key.key.k0)
                stmt.setInt(2, key.key.k1)
                stmt.setInt(3, key.key.k2)
                stmt.setInt(4, key.key.k3)
                stmt.setString(5, key.source.name.lowercase())
                stmt.setObject(6, timestamp, Types.TIMESTAMP_WITH_TIMEZONE)
                stmt.setObject(7, timestamp, Types.TIMESTAMP_WITH_TIMEZONE)
                stmt.addBatch()
            }

            stmt.executeBatch()
        }
    }

    private companion object {
        private val logger = InlineLogger()
    }
}

@ -1,8 +0,0 @@
package org.openrs2.archive.key
import org.openrs2.crypto.SymmetricKey
import java.io.InputStream
/**
 * Parses XTEA keys from a binary or textual stream.
 */
public interface KeyReader {
    /**
     * Reads every key from [input]. The caller is responsible for closing
     * the stream.
     */
    public fun read(input: InputStream): Sequence<SymmetricKey>
}

@ -1,10 +0,0 @@
package org.openrs2.archive.key
/**
 * Provenance of an XTEA key. Stored in the database as the lowercase enum
 * name (see KeyImporter), so entries must not be renamed or reordered.
 */
public enum class KeySource {
    // Submitted via the HTTP keys API endpoint (referenced by KeyBruteForcer).
    API,
    // Imported from key files on disk (see KeyImporter.import(Path)).
    DISK,
    // Fetched from a third-party endpoint — presumably the OpenOSRS client; verify against its downloader.
    OPENOSRS,
    // Fetched from a third-party endpoint — presumably Polar's key dump; verify against its downloader.
    POLAR,
    // Fetched from the RuneLite XTEA endpoint (see RuneLiteKeyDownloader).
    RUNELITE,
    // Fetched from a third-party endpoint — presumably the HDOS client; verify against its downloader.
    HDOS
}

@ -1,54 +0,0 @@
package org.openrs2.archive.key
import jakarta.inject.Inject
import jakarta.inject.Singleton
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.future.await
import kotlinx.coroutines.withContext
import org.jdom2.input.SAXBuilder
import org.openrs2.http.checkStatusCode
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.time.Duration
/**
 * Downloads XTEA keys from the RuneLite API.
 *
 * The endpoint is versioned, so the current RuneLite release is first
 * resolved from the project's Maven metadata.
 */
@Singleton
public class RuneLiteKeyDownloader @Inject constructor(
    private val client: HttpClient,
    jsonKeyReader: JsonKeyReader
) : JsonKeyDownloader(KeySource.RUNELITE, client, jsonKeyReader) {
    override suspend fun getMissingUrls(seenUrls: Set<String>): Set<String> {
        return setOf(getXteaEndpoint(getVersion()))
    }

    /** Resolves the latest RuneLite release from the Maven metadata XML. */
    private suspend fun getVersion(): String {
        val request = HttpRequest.newBuilder(VERSION_ENDPOINT)
            .timeout(Duration.ofSeconds(30))
            .GET()
            .build()

        val response = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()).await()
        response.checkStatusCode()

        // Parse on the IO dispatcher as SAXBuilder reads the stream eagerly.
        val document = withContext(Dispatchers.IO) {
            response.body().use { body ->
                SAXBuilder().build(body)
            }
        }

        val versioning = document.rootElement.getChild("versioning")
        return versioning.getChild("release").textTrim
    }

    private companion object {
        private val VERSION_ENDPOINT = URI("https://repo.runelite.net/net/runelite/runelite-parent/maven-metadata.xml")

        private fun getXteaEndpoint(version: String): String {
            return "https://api.runelite.net/runelite-$version/xtea"
        }
    }
}

@ -1,17 +0,0 @@
package org.openrs2.archive.key
import org.openrs2.crypto.SymmetricKey
import java.io.InputStream
/**
 * Reads a single XTEA key from a plain-text stream: one decimal component
 * per line, four lines in total.
 *
 * Returns an empty sequence if any of the four lines is missing or is not a
 * valid integer.
 */
public object TextKeyReader : KeyReader {
    override fun read(input: InputStream): Sequence<SymmetricKey> {
        val reader = input.bufferedReader()

        val components = IntArray(4)
        for (i in 0 until 4) {
            components[i] = reader.readLine()?.toIntOrNull() ?: return emptySequence()
        }

        return sequenceOf(SymmetricKey(components[0], components[1], components[2], components[3]))
    }
}

@ -1,107 +0,0 @@
package org.openrs2.archive.map
import kotlin.math.max
import kotlin.math.min
import kotlin.math.pow
/**
 * HSL to RGB colour conversion backed by a precomputed 65536-entry table
 * (6-bit hue, 3-bit saturation, 7-bit lightness packed into a 16-bit index).
 *
 * Converted values are gamma-adjusted with a fixed brightness exponent, and
 * 0 is reserved (mapped to 1) so callers can use 0 as a sentinel.
 */
public object Colors {
    private const val BRIGHTNESS = 0.8
    private val HSL_TO_RGB = IntArray(65536)

    init {
        // Index order matches the packed format: hue is the most significant
        // field, lightness the least.
        var index = 0
        for (hue in 0 until 64) {
            for (saturation in 0 until 8) {
                for (lightness in 0 until 128) {
                    // Small offsets centre each bucket within its range.
                    val h = hue.toDouble() / 64 + 0.0078125
                    val s = saturation.toDouble() / 8 + 0.0625
                    val l = lightness.toDouble() / 128
                    HSL_TO_RGB[index++] = computeRgb(h, s, l)
                }
            }
        }
    }

    // One channel of the standard HSL->RGB hue ramp.
    private fun channel(t: Double, p: Double, q: Double): Double {
        return when {
            t * 6 < 1 -> t * (q - p) * 6 + p
            t * 2 < 1 -> q
            t * 3 < 2 -> (2.0 / 3 - t) * (q - p) * 6 + p
            else -> p
        }
    }

    private fun computeRgb(h: Double, s: Double, l: Double): Int {
        var r = l
        var g = l
        var b = l

        if (s != 0.0) {
            val q = if (l * 2 < 1) {
                l * (s + 1)
            } else {
                l + s - (l * s)
            }
            val p = l * 2 - q

            var tr = h + (1.0 / 3)
            if (tr > 1) {
                tr--
            }

            var tb = h - (1.0 / 3)
            if (tb < 0) {
                tb++
            }

            r = channel(tr, p, q)
            g = channel(h, p, q)
            b = channel(tb, p, q)
        }

        val red = (r.pow(BRIGHTNESS) * 256).toInt()
        val green = (g.pow(BRIGHTNESS) * 256).toInt()
        val blue = (b.pow(BRIGHTNESS) * 256).toInt()

        val rgb = (red shl 16) or (green shl 8) or blue

        // 0 is reserved as a sentinel, so pure black becomes 1.
        return if (rgb == 0) 1 else rgb
    }

    /** Looks up the RGB value for a packed 16-bit HSL value. */
    public fun hslToRgb(hsl: Int): Int {
        return HSL_TO_RGB[hsl]
    }

    /**
     * Scales the lightness component of a packed HSL value by factor/128,
     * preserving the two special sentinel values -2 and -1.
     */
    public fun multiplyLightness(hsl: Int, factor: Int): Int {
        return when (hsl) {
            -2 -> 12345678
            -1 -> 127 - factor.coerceIn(0, 127)
            else -> {
                val scaled = ((hsl and 0x7F) * factor) shr 7
                (hsl and 0xFF80) or scaled.coerceIn(2, 126)
            }
        }
    }
}

@ -1,57 +0,0 @@
package org.openrs2.archive.map
import io.netty.buffer.ByteBuf
/**
 * Decoded floor overlay/underlay configuration.
 *
 * @param color the base colour as a 24-bit RGB value.
 * @param texture the texture ID, or -1 for none.
 * @param blendColor the blend colour as a 24-bit RGB value, or -1 for none.
 */
public data class FloType(
    var color: Int = 0,
    var texture: Int = -1,
    var blendColor: Int = -1
) {
    public companion object {
        /**
         * Decodes a [FloType] from its opcode-based binary config format:
         * a stream of opcode bytes, each followed by its payload, terminated
         * by opcode 0.
         *
         * @throws IllegalArgumentException on an unrecognised opcode.
         */
        public fun read(buf: ByteBuf): FloType {
            val type = FloType()

            while (true) {
                when (val code = buf.readUnsignedByte().toInt()) {
                    0 -> return type
                    1 -> type.color = buf.readUnsignedMedium()
                    2 -> type.texture = buf.readUnsignedByte().toInt()
                    3 -> {
                        type.texture = buf.readUnsignedShort()
                        // 65535 is the wire encoding for "no texture".
                        if (type.texture == 65535) {
                            type.texture = -1
                        }
                    }
                    7 -> type.blendColor = buf.readUnsignedMedium()
                    // Flag opcodes with no payload; values are not retained.
                    5, 8, 10, 12 -> Unit
                    // Opcodes whose payloads are not needed here; skip them so
                    // the stream stays aligned.
                    11, 14, 16 -> buf.skipBytes(1)
                    9, 15 -> buf.skipBytes(2)
                    13 -> buf.skipBytes(3)
                    else -> throw IllegalArgumentException("Unsupported code: $code")
                }
            }
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save