Compare commits

..

No commits in common. 'master' and 'master' have entirely different histories.

Files changed in this comparison, with the number of changed lines in parentheses:

  1. .editorconfig (29)
  2. .github/workflows/build.yaml (34)
  3. .gitignore (6)
  4. .idea/.gitignore (1)
  5. .idea/codeInsightSettings.xml (28)
  6. .idea/codeStyles/Project.xml (14)
  7. .idea/fileTemplates/internal/Kotlin Class.kt (6)
  8. .idea/fileTemplates/internal/Kotlin Enum.kt (6)
  9. .idea/fileTemplates/internal/Kotlin Interface.kt (6)
  10. .idea/inspectionProfiles/Project_Default.xml (5)
  11. .idea/runConfigurations/AstDeobfuscator.xml (13)
  12. .idea/runConfigurations/Bundler.xml (15)
  13. .idea/runConfigurations/BytecodeDeobfuscator.xml (10)
  14. .idea/runConfigurations/Decompiler.xml (14)
  15. .idea/runConfigurations/Deobfuscator.xml (14)
  16. .idea/runConfigurations/GameServer.xml (12)
  17. .idea/runConfigurations/GenerateBuffer.xml (10)
  18. .idea/runConfigurations/Patcher.xml (10)
  19. .idea/runConfigurations/XteaPluginTest.xml (16)
  20. .idea/runConfigurations/client.xml (4)
  21. .idea/runConfigurations/dev_openrs2.xml (12)
  22. .idea/scopes/exclude_nonfree.xml (2)
  23. .mailmap (2)
  24. CONTRIBUTING.md (80)
  25. COPYING (674)
  26. DCO (37)
  27. LICENSE (13)
  28. README.md (140)
  29. all/build.gradle.kts (125)
  30. all/pom.xml (90)
  31. all/src/assembly/bin.xml (30)
  32. all/src/bin/openrs2-bundle (3)
  33. all/src/bin/openrs2-bundle.cmd (3)
  34. all/src/bin/openrs2-decompiler (3)
  35. all/src/bin/openrs2-decompiler.cmd (3)
  36. all/src/bin/openrs2-deob (3)
  37. all/src/bin/openrs2-deob-ast (3)
  38. all/src/bin/openrs2-deob-ast.cmd (3)
  39. all/src/bin/openrs2-deob.cmd (3)
  40. all/src/bin/openrs2-game (3)
  41. all/src/bin/openrs2-game.cmd (3)
  42. all/src/main/kotlin/org/openrs2/Command.kt (29)
  43. archive/build.gradle.kts (64)
  44. archive/src/main/kotlin/org/openrs2/archive/ArchiveCommand.kt (23)
  45. archive/src/main/kotlin/org/openrs2/archive/ArchiveConfig.kt (3)
  46. archive/src/main/kotlin/org/openrs2/archive/ArchiveConfigProvider.kt (27)
  47. archive/src/main/kotlin/org/openrs2/archive/ArchiveModule.kt (55)
  48. archive/src/main/kotlin/org/openrs2/archive/DataSourceProvider.kt (28)
  49. archive/src/main/kotlin/org/openrs2/archive/DatabaseProvider.kt (15)
  50. archive/src/main/kotlin/org/openrs2/archive/cache/CacheCommand.kt (19)
  51. archive/src/main/kotlin/org/openrs2/archive/cache/CacheDownloader.kt (158)
  52. archive/src/main/kotlin/org/openrs2/archive/cache/CacheExporter.kt (806)
  53. archive/src/main/kotlin/org/openrs2/archive/cache/CacheImporter.kt (1490)
  54. archive/src/main/kotlin/org/openrs2/archive/cache/CrossPollinateCommand.kt (16)
  55. archive/src/main/kotlin/org/openrs2/archive/cache/CrossPollinator.kt (223)
  56. archive/src/main/kotlin/org/openrs2/archive/cache/DownloadCommand.kt (25)
  57. archive/src/main/kotlin/org/openrs2/archive/cache/ExportCommand.kt (34)
  58. archive/src/main/kotlin/org/openrs2/archive/cache/ImportCommand.kt (53)
  59. archive/src/main/kotlin/org/openrs2/archive/cache/ImportMasterIndexCommand.kt (116)
  60. archive/src/main/kotlin/org/openrs2/archive/cache/Js5ChannelHandler.kt (376)
  61. archive/src/main/kotlin/org/openrs2/archive/cache/NxtJs5ChannelHandler.kt (158)
  62. archive/src/main/kotlin/org/openrs2/archive/cache/NxtJs5ChannelInitializer.kt (22)
  63. archive/src/main/kotlin/org/openrs2/archive/cache/OsrsJs5ChannelHandler.kt (76)
  64. archive/src/main/kotlin/org/openrs2/archive/cache/OsrsJs5ChannelInitializer.kt (22)
  65. archive/src/main/kotlin/org/openrs2/archive/cache/RefreshViewsCommand.kt (16)
  66. archive/src/main/kotlin/org/openrs2/archive/cache/finder/CacheFinderExtractor.kt (149)
  67. archive/src/main/kotlin/org/openrs2/archive/cache/finder/ExtractCommand.kt (25)
  68. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/ClientOutOfDateCodec.kt (8)
  69. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/InitJs5RemoteConnection.kt (10)
  70. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/InitJs5RemoteConnectionCodec.kt (27)
  71. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5OkCodec.kt (8)
  72. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5Request.kt (14)
  73. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5RequestEncoder.kt (37)
  74. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5Response.kt (11)
  75. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/Js5ResponseDecoder.kt (121)
  76. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/LoginResponse.kt (8)
  77. archive/src/main/kotlin/org/openrs2/archive/cache/nxt/MusicStreamClient.kt (32)
  78. archive/src/main/kotlin/org/openrs2/archive/client/Architecture.kt (11)
  79. archive/src/main/kotlin/org/openrs2/archive/client/Artifact.kt (35)
  80. archive/src/main/kotlin/org/openrs2/archive/client/ArtifactFormat.kt (46)
  81. archive/src/main/kotlin/org/openrs2/archive/client/ArtifactType.kt (16)
  82. archive/src/main/kotlin/org/openrs2/archive/client/ClientCommand.kt (14)
  83. archive/src/main/kotlin/org/openrs2/archive/client/ClientExporter.kt (455)
  84. archive/src/main/kotlin/org/openrs2/archive/client/ClientImporter.kt (997)
  85. archive/src/main/kotlin/org/openrs2/archive/client/ExportCommand.kt (30)
  86. archive/src/main/kotlin/org/openrs2/archive/client/ImportCommand.kt (32)
  87. archive/src/main/kotlin/org/openrs2/archive/client/Jvm.kt (7)
  88. archive/src/main/kotlin/org/openrs2/archive/client/MachO.kt (116)
  89. archive/src/main/kotlin/org/openrs2/archive/client/OperatingSystem.kt (43)
  90. archive/src/main/kotlin/org/openrs2/archive/client/RefreshCommand.kt (16)
  91. archive/src/main/kotlin/org/openrs2/archive/game/Game.kt (11)
  92. archive/src/main/kotlin/org/openrs2/archive/game/GameDatabase.kt (58)
  93. archive/src/main/kotlin/org/openrs2/archive/jav/JavConfig.kt (70)
  94. archive/src/main/kotlin/org/openrs2/archive/key/BinaryKeyReader.kt (65)
  95. archive/src/main/kotlin/org/openrs2/archive/key/BruteForceCommand.kt (16)
  96. archive/src/main/kotlin/org/openrs2/archive/key/DownloadCommand.kt (16)
  97. archive/src/main/kotlin/org/openrs2/archive/key/EntCommand.kt (16)
  98. archive/src/main/kotlin/org/openrs2/archive/key/HdosKeyDownloader.kt (57)
  99. archive/src/main/kotlin/org/openrs2/archive/key/HexKeyReader.kt (13)
  100. archive/src/main/kotlin/org/openrs2/archive/key/ImportCommand.kt (23)

Some files were not shown because too many files have changed in this diff.

@@ -7,34 +7,13 @@ trim_trailing_whitespace = true
max_line_length = 120
indent_style = tab
# Prevent IDEA from translating *.{kt,kts} -> *.{kt, kts}. This confuses ktlint.
# @formatter:off
[*.{kt,kts}]
# @formatter:on
[*.kt]
indent_style = space
indent_size = 4
# see https://github.com/pinterest/ktlint/issues/527
# noinspection EditorConfigKeyCorrectness
ktlint_standard_argument-list-wrapping = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_indent = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_parameter-list-wrapping = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_trailing-comma-on-call-site = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_trailing-comma-on-declaration-site = disabled
# noinspection EditorConfigKeyCorrectness
ktlint_standard_wrapping = disabled
[*.md]
max_line_length = 80
[*.sql]
indent_style = space
indent_size = 4
disabled_rules = import-ordering
# @formatter:off
[*.{json,xml,yaml,yml}]
# @formatter:on
[*.xml]
indent_style = space
indent_size = 2

@@ -1,34 +0,0 @@
---
on: push
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/cache@v3
        with:
          path: ~/.ssh/known_hosts
          key: ssh-known-hosts
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 11
      - uses: gradle/wrapper-validation-action@v1
      - uses: gradle/gradle-build-action@v2
        with:
          arguments: build
      - if: github.ref == 'refs/heads/master'
        run: |
          install -dm0700 ~/.ssh
          touch ~/.ssh/id_ed25519
          chmod 0600 ~/.ssh/id_ed25519
          echo "${SSH_KEY}" > ~/.ssh/id_ed25519
        env:
          SSH_KEY: ${{ secrets.SSH_KEY }}
      - if: github.ref == 'refs/heads/master'
        uses: gradle/gradle-build-action@v2
        with:
          arguments: publish
        env:
          ORG_GRADLE_PROJECT_openrs2Username: ${{ secrets.REPO_USERNAME }}
          ORG_GRADLE_PROJECT_openrs2Password: ${{ secrets.REPO_PASSWORD }}

.gitignore (6 lines changed, vendored)

@@ -4,12 +4,10 @@
!.idea
!.mailmap
*.iml
*/build
*/out
*/target
*~
/nonfree
/build
/out
/target
hs_err_pid*.log
pom.xml.releaseBackup
release.properties

.idea/.gitignore (1 line changed, vendored)

@@ -1,6 +1,5 @@
/*
!.gitignore
!codeInsightSettings.xml
!codeStyles
!fileTemplates
!inspectionProfiles

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaProjectCodeInsightSettings">
<excluded-names>
<name>com.google.inject.BindingAnnotation</name>
<name>com.google.inject.Inject</name>
<name>com.google.inject.Named</name>
<name>com.google.inject.Provider</name>
<name>com.google.inject.ScopeAnnotation</name>
<name>com.google.inject.Singleton</name>
<name>java.nio.file.Paths.get</name>
<name>org.junit.jupiter.api.AfterEach</name>
<name>org.junit.jupiter.api.Assertions.assertEquals</name>
<name>org.junit.jupiter.api.Assertions.assertFalse</name>
<name>org.junit.jupiter.api.Assertions.assertNotEquals</name>
<name>org.junit.jupiter.api.Assertions.assertNotNull</name>
<name>org.junit.jupiter.api.Assertions.assertNotSame</name>
<name>org.junit.jupiter.api.Assertions.assertNull</name>
<name>org.junit.jupiter.api.Assertions.assertSame</name>
<name>org.junit.jupiter.api.Assertions.assertThrows</name>
<name>org.junit.jupiter.api.Assertions.assertTrue</name>
<name>org.junit.jupiter.api.Assertions.fail</name>
<name>org.junit.jupiter.api.BeforeEach</name>
<name>org.junit.jupiter.api.Disabled</name>
<name>org.junit.jupiter.api.Test</name>
</excluded-names>
</component>
</project>

@@ -7,10 +7,6 @@
</value>
</option>
<option name="FORMATTER_TAGS_ENABLED" value="true" />
<HTMLCodeStyleSettings>
<option name="HTML_SPACE_INSIDE_EMPTY_TAG" value="true" />
<option name="HTML_DO_NOT_INDENT_CHILDREN_OF" value="" />
</HTMLCodeStyleSettings>
<JavaCodeStyleSettings>
<option name="CLASS_NAMES_IN_JAVADOC" value="3" />
<option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="9999" />
@@ -47,13 +43,6 @@
<option name="XML_ALIGN_ATTRIBUTES" value="false" />
<option name="XML_SPACE_INSIDE_EMPTY_TAG" value="true" />
</XML>
<codeStyleSettings language="HTML">
<indentOptions>
<option name="CONTINUATION_INDENT_SIZE" value="4" />
<option name="USE_TAB_CHARACTER" value="true" />
<option name="SMART_TABS" value="true" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="JAVA">
<option name="KEEP_FIRST_COLUMN_COMMENT" value="false" />
<option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
@@ -81,9 +70,6 @@
<option name="SMART_TABS" value="true" />
</indentOptions>
</codeStyleSettings>
<codeStyleSettings language="Markdown">
<option name="RIGHT_MARGIN" value="80" />
</codeStyleSettings>
<codeStyleSettings language="XML">
<indentOptions>
<option name="INDENT_SIZE" value="2" />

@@ -1,6 +0,0 @@
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME}
#end
#parse("File Header.java")
public class ${NAME} {
}

@@ -1,6 +0,0 @@
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME}
#end
#parse("File Header.java")
public enum class ${NAME} {
}

@@ -1,6 +0,0 @@
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME}
#end
#parse("File Header.java")
public interface ${NAME} {
}

@@ -1,11 +1,6 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="SpellCheckingInspection" enabled="true" level="INFORMATION" enabled_by_default="true">
<option name="processCode" value="true" />
<option name="processLiterals" value="true" />
<option name="processComments" value="true" />
</inspection_tool>
<inspection_tool class="WeakerAccess" enabled="true" level="WARNING" enabled_by_default="true">
<option name="SUGGEST_PACKAGE_LOCAL_FOR_MEMBERS" value="false" />
<option name="SUGGEST_PACKAGE_LOCAL_FOR_TOP_CLASSES" value="false" />

@@ -1,8 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="AstDeobfuscator" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.deob.ast.DeobfuscateAstCommandKt" />
<module name="openrs2.deob-ast.main" />
<shortenClasspath name="NONE" />
<configuration default="false" name="AstDeobfuscator" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2-deob-ast" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.deob.ast.AstDeobfuscatorKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@@ -0,0 +1,15 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Bundler" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2-bundler" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.bundler.BundlerKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@@ -1,10 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="BytecodeDeobfuscator" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.deob.bytecode.DeobfuscateBytecodeCommandKt" />
<module name="openrs2.deob-bytecode.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@@ -1,9 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Decompiler" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.decompiler.DecompileCommandKt" />
<module name="openrs2.decompiler.main" />
<shortenClasspath name="NONE" />
<option name="VM_PARAMETERS" value="-Xmx3G" />
<configuration default="false" name="Decompiler" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2-decompiler" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.decompiler.DecompilerKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@@ -1,9 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Deobfuscator" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.deob.DeobfuscateCommandKt" />
<module name="openrs2.deob.main" />
<shortenClasspath name="NONE" />
<option name="VM_PARAMETERS" value="-Xmx3G" />
<configuration default="false" name="Deobfuscator" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2-deob" />
<option name="VM_PARAMETERS" value="" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.deob.DeobfuscatorKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@@ -1,9 +1,13 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="GameServer" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.game.GameCommandKt" />
<module name="openrs2.game.main" />
<shortenClasspath name="NONE" />
<configuration default="false" name="GameServer" type="JetRunConfigurationType" factoryName="Kotlin">
<module name="openrs2-game" />
<option name="VM_PARAMETERS" value="-Dio.netty.leakDetection.level=PARANOID" />
<option name="PROGRAM_PARAMETERS" value="" />
<option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
<option name="ALTERNATIVE_JRE_PATH" />
<option name="PASS_PARENT_ENVS" value="true" />
<option name="MAIN_CLASS_NAME" value="dev.openrs2.game.GameServerKt" />
<option name="WORKING_DIRECTORY" value="" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@@ -1,10 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="GenerateBuffer" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.buffer.generator.GenerateBufferCommandKt" />
<module name="openrs2.buffer-generator.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@@ -1,10 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Patcher" type="JetRunConfigurationType">
<option name="MAIN_CLASS_NAME" value="org.openrs2.patcher.PatchCommandKt" />
<module name="openrs2.patcher.main" />
<shortenClasspath name="NONE" />
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@@ -1,16 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="XteaPluginTest" type="Application" factoryName="Application" nameIsGenerated="true">
<option name="MAIN_CLASS_NAME" value="org.openrs2.xtea.XteaPluginTest" />
<module name="openrs2.xtea-plugin.test" />
<option name="VM_PARAMETERS" value="-ea" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="org.openrs2.xtea.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

@@ -1,8 +1,8 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="client" type="Application" factoryName="Application" nameIsGenerated="true">
<option name="MAIN_CLASS_NAME" value="client" />
<module name="openrs2.nonfree.client.main" />
<option name="PROGRAM_PARAMETERS" value="1 live en game0" />
<module name="openrs2-client" />
<option name="PROGRAM_PARAMETERS" value="1 live english game0" />
<method v="2">
<option name="Make" enabled="true" />
</method>

@@ -1,19 +1,11 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="org.openrs2" type="JUnit" factoryName="JUnit">
<useClassPathOnly />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="org.openrs2.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<option name="PACKAGE_NAME" value="org.openrs2" />
<configuration default="false" name="dev.openrs2" type="JUnit" factoryName="JUnit">
<option name="PACKAGE_NAME" value="dev.openrs2" />
<option name="MAIN_CLASS_NAME" value="" />
<option name="METHOD_NAME" value="" />
<option name="TEST_OBJECT" value="package" />
<option name="VM_PARAMETERS" value="-ea -Dio.netty.leakDetection.level=PARANOID" />
<option name="PARAMETERS" value="" />
<option name="WORKING_DIRECTORY" value="" />
<option name="TEST_SEARCH_SCOPE">
<value defaultName="wholeProject" />
</option>

@@ -1,3 +1,3 @@
<component name="DependencyValidationManager">
<scope name="exclude-nonfree" pattern="!file[openrs2.nonfree*]:*//*" />
<scope name="exclude-nonfree" pattern="!file[openrs2-client]:*/&amp;&amp;!file[openrs2-gl]:*/&amp;&amp;!file[openrs2-gl-dri]:*/&amp;&amp;!file[openrs2-loader]:*/&amp;&amp;!file[openrs2-nonfree]:*/&amp;&amp;!file[openrs2-signlink]:*/&amp;&amp;!file[openrs2-unpack]:*/&amp;&amp;!file[openrs2-unpacker]:*/" />
</component>

@@ -1,2 +0,0 @@
Graham <gpe@openrs2.org> <gpe@openrs2.dev>
Scu11 <scu11@openrs2.org> <scu11@openrs2.dev>

@@ -1,80 +0,0 @@
# Contributing to OpenRS2
## Introduction
OpenRS2 is still in the early stages of development. The current focus is on
building underlying infrastructure, such as the deobfuscator, rather than game
content. This approach will make it much quicker to build game content in the
long run, but it does mean OpenRS2 won't be particularly useful in the short
term.
If you're interested in contributing new features, you should discuss your plans
in our [Discord][discord] server first. I have rough plans in my head for the
future development direction. Communicating beforehand will avoid the need for
significant changes to be made at the code review stage and make it less likely
for your contribution to be dropped entirely.
## Code style
All source code must be formatted with [IntelliJ IDEA][idea]'s built-in
formatter before each commit. The 'Optimize imports' option should also be
selected. Do not select 'Rearrange entries'.
OpenRS2's code style settings are held in `.idea/codeStyles/Project.xml` in the
repository, and IDEA should use them automatically after importing the Gradle
project.
Kotlin code must pass all of [ktlint][ktlint]'s tests.
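As a quick local check, the standalone ktlint CLI can be run over the Kotlin
sources (this assumes ktlint is installed on your PATH; the Gradle build may
also run it for you):
```
ktlint "**/*.kt" "**/*.kts"
```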
Always use `//` for single-line comments and `/*` for multi-line comments.
## Commit messages
Commit messages should follow the ['seven rules'][commitmsg] described in
'How to Write a Git Commit Message', with the exception that the summary line
can be up to 72 characters in length (as OpenRS2 does not use email-based
patches).
You should use tools like [interactive rebase][rewriting-history] to ensure the
commit history is tidy.
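For example, the last few commits can be squashed, reworded or reordered
before review with interactive rebase (the commit count below is
illustrative):
```
git rebase -i HEAD~3
```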
## Developer Certificate of Origin
OpenRS2 uses version 1.1 of the [Developer Certificate of Origin][dco] (DCO) to
certify that contributors agree to license their code under OpenRS2's license
(see the License section below). To confirm that a contribution meets the
requirements of the DCO, a `Signed-off-by:` line must be added to the Git commit
message by passing `--signoff` to the `git commit` invocation.
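For example, a single commit can be signed off like this (the commit message
is illustrative):
```
git commit --signoff -m "Describe your change here"
```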
If you intend to make a large number of contributions, run the following
commands from the repository root to add a `Signed-off-by:` line to all your
commit messages by default:
```
echo -e "\n\nSigned-off-by: $(git config user.name) <$(git config user.email)>" > .git/commit-template
git config commit.template .git/commit-template
```
The full text of the DCO is available in the `DCO` file.
OpenRS2 does not distribute any of Jagex's intellectual property in this
repository, and care should be taken to avoid inadvertently including any in
contributions.
## Versioning
OpenRS2 uses [Semantic Versioning][semver].
## Gitea
OpenRS2 only uses GitHub as a mirror. Issues and pull requests should be
submitted to [OpenRS2's self-hosted Gitea instance][gitea].
[commitmsg]: https://chris.beams.io/posts/git-commit/#seven-rules
[dco]: https://developercertificate.org/
[discord]: https://chat.openrs2.org/
[gitea]: https://git.openrs2.org/openrs2/openrs2
[idea]: https://www.jetbrains.com/idea/
[ktlint]: https://github.com/pinterest/ktlint#readme
[rewriting-history]: https://git-scm.com/book/en/v2/Git-Tools-Rewriting-History
[semver]: https://semver.org/

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

37
DCO

@ -1,37 +0,0 @@
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

@ -1,13 +0,0 @@
Copyright (c) 2019-2023 OpenRS2 Authors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

@ -1,88 +1,114 @@
# OpenRS2
[![GitHub Actions][actions-badge]][actions] [![Discord][discord-badge]][discord] [![ISC license][isc-badge]][isc]
[![Build status badge](https://build.openrs2.dev/buildStatus/icon?job=openrs2&build=lastCompleted)](https://build.openrs2.dev/job/openrs2/)
## Introduction
OpenRS2 is an open-source multiplayer game server and suite of associated tools.
It is compatible with build 550 of the RuneScape client, which was released in
late 2009.
OpenRS2 is an open-source multiplayer game server and suite of associated
tools. It is compatible with build 550 of the RuneScape client, which was
released in late 2009.
## Prerequisites
OpenRS2 requires version 11 or later of the [Java Development Kit][jdk].
Building OpenRS2 requires the following pieces of software:
The JDK is required even if a pre-built copy of OpenRS2 is used, as it depends
on JDK-only tools, such as `jarsigner`, at runtime.
* [Java Development Kit][jdk] (version 8 or later)
* [Apache Maven][maven] (version 3.3.9 or later)
### Non-free components
OpenRS2 requires the original RuneScape client code, data and location file
encryption keys, which we cannot legally distribute.
These files must be manually placed in the `nonfree` directory (directly beneath
the root of the repository), in the following structure:
These files must be manually placed in the `nonfree` directory (directly
beneath the root of the repository), in the following structure:
```
nonfree
├── lib
│ ├── jaggl.pack200
│ ├── loader_gl.jar
│ ├── loader.jar
│ ├── runescape_gl.pack200
│ ├── runescape.jar
│ └── unpackclass.pack
└── share
├── cache
│ ├── 0
│ │ ├── 0.dat
│ │ └── ...
│ ├── ...
│ └── 255
│ ├── ...
│ └── 28.dat
└── keys.json
└── code
   ├── game_unpacker.dat
   ├── jaggl.pack200
   ├── loader_gl.jar
   ├── loader.jar
   ├── runescape_gl.pack200
   └── runescape.jar
```
The CRC-32 checksums and SHA-256 digests of the correct files are:
The SHA-256 checksums of the correct files are:
| CRC-32 checksum | SHA-256 digest | File |
|----------------:|--------------------------------------------------------------------|------------------------|
| `-1418094567` | `d39578f4a88a376bcb2571f05da1939a14a80d8c4ed89a4eb172d9e525795fe2` | `jaggl.pack200` |
| `-2129469231` | `31182683ba04dc0ad45859161c13f66424b10deb0b2df10aa58b48bba57402db` | `loader_gl.jar` |
| `-1516355035` | `ccdfaa86be07452ddd69f869ade86ea900dbb916fd853db16602edf2eb54211b` | `loader.jar` |
| `-132784534` | `4a5032ea8079d2154617ae1f21dfcc46a10e023c8ba23a4827d5e25e75c73045` | `runescape_gl.pack200` |
| `1692522675` | `0ab28a95e7c5993860ff439ebb331c0df02ad40aa1f544777ed91b46d30d3d24` | `runescape.jar` |
| `-1911426584` | `7c090e07f8d754d09804ff6e9733ef3ba227893b6b639436db90977b39122590` | `unpackclass.pack` |
```
7c090e07f8d754d09804ff6e9733ef3ba227893b6b639436db90977b39122590 nonfree/code/game_unpacker.dat
d39578f4a88a376bcb2571f05da1939a14a80d8c4ed89a4eb172d9e525795fe2 nonfree/code/jaggl.pack200
31182683ba04dc0ad45859161c13f66424b10deb0b2df10aa58b48bba57402db nonfree/code/loader_gl.jar
ccdfaa86be07452ddd69f869ade86ea900dbb916fd853db16602edf2eb54211b nonfree/code/loader.jar
4a5032ea8079d2154617ae1f21dfcc46a10e023c8ba23a4827d5e25e75c73045 nonfree/code/runescape_gl.pack200
0ab28a95e7c5993860ff439ebb331c0df02ad40aa1f544777ed91b46d30d3d24 nonfree/code/runescape.jar
```
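The digests can be re-computed with nothing more than the JDK. The snippet below is a minimal sketch, not part of the build: the `nonfree/lib` paths are an assumption based on the layout shown above, so substitute the `nonfree/code` paths if that is the layout your checkout uses.

```
import java.nio.file.Files
import java.nio.file.Path
import java.security.MessageDigest

// Prints the SHA-256 digest of each non-free file so it can be compared
// against the table above. The paths are assumptions based on the layout
// shown earlier in this README.
fun main() {
    val files = listOf(
        "nonfree/lib/jaggl.pack200",
        "nonfree/lib/loader_gl.jar",
        "nonfree/lib/loader.jar",
        "nonfree/lib/runescape_gl.pack200",
        "nonfree/lib/runescape.jar",
        "nonfree/lib/unpackclass.pack"
    )
    val digest = MessageDigest.getInstance("SHA-256")
    for (name in files) {
        val hash = digest.digest(Files.readAllBytes(Path.of(name)))
        println(hash.joinToString("") { "%02x".format(it) } + "  " + name)
    }
}
```

Each line of output should match the corresponding digest above.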
The `.gitignore` file includes the `nonfree` directory to prevent any non-free
material from being accidentally included in the repository.
The `nonfree` directory is included in the `.gitignore` file to prevent any
non-free material from being accidentally included in the repository.
## Building
Run `./gradlew` to download the dependencies, build the code, run the unit tests
and package it.
Run `mvn verify` to download the dependencies, build the code, run the unit
tests and package it.
## Contributing
### Code style
All source code must be formatted with [IntelliJ IDEA][idea]'s built-in
formatter before each commit. The 'Optimize imports' option should also be
selected. Do not select 'Rearrange entries'.
OpenRS2's code style settings are held in `.idea/codeStyles/Project.xml` in the
repository, and IDEA should use them automatically after importing the Maven
project.
Kotlin code must pass all of [ktlint][ktlint]'s tests.
### Commit messages
## Links
Commit messages should follow the ['seven rules'][commitmsg] described in
'How to Write a Git Commit Message', with the exception that the summary line
can be up to 72 characters in length (as OpenRS2 does not use email-based
patches).
* [Discord][discord]
* [Issue tracker][issue-tracker]
* [KDoc][kdoc]
* [Website][www]
### Versioning
OpenRS2 uses [Semantic Versioning][semver].
## License
OpenRS2 is available under the terms of the [ISC license][isc], which is similar
to the 2-clause BSD license. The full copyright notice and terms are available
in the `LICENSE` file.
[actions-badge]: https://github.com/openrs2/openrs2/actions/workflows/build.yaml/badge.svg?branch=master
[actions]: https://github.com/openrs2/openrs2/actions
[discord-badge]: https://img.shields.io/discord/684495254145335298
[discord]: https://chat.openrs2.org/
[isc-badge]: https://img.shields.io/badge/license-ISC-informational
[isc]: https://opensource.org/licenses/ISC
[issue-tracker]: https://git.openrs2.org/openrs2/openrs2/issues
Unless otherwise stated, all code and data is licensed under version 3.0 (and
only version 3.0) of the [GNU General Public License][gpl]. The full terms are
available in the `COPYING` file.
The `deob-annotations` and `jsobject` modules are instead licensed under
version 3.0 (and only version 3.0) of the
[GNU Lesser General Public License][lgpl]. The full terms are available in the
`COPYING.LESSER` file in each module's directory.
## Copyright
Copyright (c) 2019-2020 OpenRS2 Authors
OpenRS2 is free software: you can redistribute it and/or modify it under the
terms of version 3.0 of the GNU General Public License as published by the Free
Software Foundation.
OpenRS2 is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
OpenRS2. If not, see <https://www.gnu.org/licenses/>.
[commitmsg]: https://chris.beams.io/posts/git-commit/#seven-rules
[gpl]: https://www.gnu.org/licenses/gpl-3.0.html
[idea]: https://www.jetbrains.com/idea/
[jdk]: https://jdk.java.net/
[kdoc]: https://docs.openrs2.org/
[www]: https://www.openrs2.org/
[ktlint]: https://github.com/pinterest/ktlint#readme
[lgpl]: https://www.gnu.org/licenses/lgpl-3.0.html
[maven]: https://maven.apache.org/
[semver]: https://semver.org/

@ -1,125 +0,0 @@
import com.github.jk1.license.render.TextReportRenderer
import java.nio.file.Files
plugins {
`maven-publish`
application
alias(libs.plugins.dependencyLicenseReport)
alias(libs.plugins.shadow)
kotlin("jvm")
}
application {
applicationName = "openrs2"
mainClass.set("org.openrs2.CommandKt")
}
dependencies {
implementation(projects.archive)
implementation(projects.bufferGenerator)
implementation(projects.cacheCli)
implementation(projects.compressCli)
implementation(projects.crc32)
implementation(projects.deob)
implementation(projects.game)
implementation(projects.log)
implementation(projects.patcher)
}
tasks.shadowJar {
archiveFileName.set("openrs2.jar")
minimize {
exclude(dependency("ch.qos.logback:logback-classic"))
exclude(dependency("com.github.jnr:jnr-ffi"))
exclude(dependency("org.flywaydb:flyway-core"))
exclude(dependency("org.jetbrains.kotlin:kotlin-reflect"))
}
}
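// Regenerates the AUTHORS file from the Git history (git shortlog -esn HEAD) so it can be bundled into the shadow distribution below.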
tasks.register("generateAuthors") {
inputs.dir("$rootDir/.git")
outputs.file(layout.buildDirectory.file("AUTHORS"))
doLast {
Files.newOutputStream(layout.buildDirectory.file("AUTHORS").get().asFile.toPath()).use { out ->
exec {
commandLine("git", "shortlog", "-esn", "HEAD")
standardOutput = out
}.assertNormalExitValue()
}
}
}
licenseReport {
renderers = arrayOf(TextReportRenderer())
}
val distTasks = listOf(
"distTar",
"distZip",
"installDist"
)
configure(tasks.filter { it.name in distTasks }) {
enabled = false
}
val shadowDistTasks = listOf(
"installShadowDist",
"shadowDistTar",
"shadowDistZip"
)
configure(tasks.filter { it.name in shadowDistTasks }) {
dependsOn("generateAuthors", "generateLicenseReport")
}
distributions {
named("shadow") {
distributionBaseName.set("openrs2")
contents {
from(layout.buildDirectory.file("AUTHORS"))
from("$rootDir/CONTRIBUTING.md")
from("$rootDir/DCO")
from("$rootDir/LICENSE")
from("$rootDir/README.md")
from("$rootDir/etc/archive.example.yaml") {
rename { "archive.yaml" }
into("etc")
}
from("$rootDir/etc/config.example.yaml") {
rename { "config.yaml" }
into("etc")
}
from("$rootDir/share") {
exclude(".*", "*~")
into("share")
}
from(layout.buildDirectory.file("reports/dependency-license/THIRD-PARTY-NOTICES.txt")) {
rename { "third-party-licenses.txt" }
into("share/doc")
}
}
}
}
publishing {
publications.create<MavenPublication>("maven") {
artifactId = "openrs2"
setArtifacts(listOf(tasks.named("shadowDistZip").get()))
pom {
packaging = "zip"
name.set("OpenRS2")
description.set(
"""
OpenRS2 is an open-source multiplayer game server and suite of
associated tools. It is compatible with build 550 of the
RuneScape client, which was released in mid-2009.
""".trimIndent()
)
}
}
}

@ -0,0 +1,90 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>dev.openrs2</groupId>
<artifactId>openrs2</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>openrs2-all</artifactId>
<packaging>jar</packaging>
<name>OpenRS2 All</name>
<description>
Zip file for end users, containing OpenRS2, all of its transitive
dependencies and launcher scripts.
</description>
<dependencies>
<dependency>
<groupId>dev.openrs2</groupId>
<artifactId>openrs2-decompiler</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>dev.openrs2</groupId>
<artifactId>openrs2-deob</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>dev.openrs2</groupId>
<artifactId>openrs2-deob-ast</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>dev.openrs2</groupId>
<artifactId>openrs2-game</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>jar-with-dependencies</shadedClassifierName>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<manifestEntries>
<Main-Class>dev.openrs2.game.GameServerKt</Main-Class>
</manifestEntries>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
</transformers>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
<configuration>
<descriptors>
<descriptor>${project.basedir}/src/assembly/bin.xml</descriptor>
</descriptors>
<appendAssemblyId>false</appendAssemblyId>
</configuration>
</plugin>
</plugins>
</build>
</project>

@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">
<id>bin</id>
<formats>
<format>zip</format>
</formats>
<fileSets>
<fileSet>
<directory>${project.basedir}/src/bin</directory>
<outputDirectory>bin</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.parent.basedir}/docs</directory>
<outputDirectory>docs</outputDirectory>
</fileSet>
</fileSets>
<files>
<file>
<source>${project.build.directory}/${project.artifactId}-${project.version}-jar-with-dependencies.jar</source>
<outputDirectory>lib</outputDirectory>
<destName>openrs2.jar</destName>
</file>
<file>
<source>${project.parent.basedir}/COPYING</source>
</file>
<file>
<source>${project.parent.basedir}/README.md</source>
</file>
</files>
</assembly>

@ -0,0 +1,3 @@
#!/bin/sh -e
cd `dirname "$0"`/..
exec java -cp lib/openrs2.jar dev.openrs2.bundler.BundlerKt "$@"

@ -0,0 +1,3 @@
@echo off
cd /d %~dp0\..
java -cp lib\openrs2.jar dev.openrs2.bundler.BundlerKt %*

@ -0,0 +1,3 @@
#!/bin/sh -e
cd `dirname "$0"`/..
exec java -cp lib/openrs2.jar dev.openrs2.decompiler.DecompilerKt "$@"

@ -0,0 +1,3 @@
@echo off
cd /d %~dp0\..
java -cp lib\openrs2.jar dev.openrs2.decompiler.DecompilerKt %*

@ -0,0 +1,3 @@
#!/bin/sh -e
cd `dirname "$0"`/..
exec java -cp lib/openrs2.jar dev.openrs2.deob.DeobfuscatorKt "$@"

@ -0,0 +1,3 @@
#!/bin/sh -e
cd `dirname "$0"`/..
exec java -cp lib/openrs2.jar dev.openrs2.deob.ast.AstDeobfuscatorKt "$@"

@ -0,0 +1,3 @@
@echo off
cd /d %~dp0\..
java -cp lib\openrs2.jar dev.openrs2.deob.ast.AstDeobfuscatorKt %*

@ -0,0 +1,3 @@
@echo off
cd /d %~dp0\..
java -cp lib\openrs2.jar dev.openrs2.deob.DeobfuscatorKt %*

@ -0,0 +1,3 @@
#!/bin/sh -e
cd `dirname "$0"`/..
exec java -cp lib/openrs2.jar dev.openrs2.game.GameServerKt "$@"

@ -0,0 +1,3 @@
@echo off
cd /d %~dp0\..
java -cp lib\openrs2.jar dev.openrs2.game.GameServerKt %*

@ -1,29 +0,0 @@
package org.openrs2
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
import org.openrs2.archive.ArchiveCommand
import org.openrs2.buffer.generator.GenerateBufferCommand
import org.openrs2.cache.cli.CacheCommand
import org.openrs2.compress.cli.CompressCommand
import org.openrs2.crc32.Crc32Command
import org.openrs2.deob.DeobfuscateCommand
import org.openrs2.game.GameCommand
import org.openrs2.patcher.PatchCommand
public fun main(args: Array<String>): Unit = Command().main(args)
public class Command : NoOpCliktCommand(name = "openrs2") {
init {
subcommands(
ArchiveCommand(),
CacheCommand(),
CompressCommand(),
Crc32Command(),
DeobfuscateCommand(),
GameCommand(),
GenerateBufferCommand(),
PatchCommand()
)
}
}

@ -1,64 +0,0 @@
plugins {
`maven-publish`
application
kotlin("jvm")
}
application {
mainClass.set("org.openrs2.archive.ArchiveCommandKt")
}
dependencies {
api(libs.bundles.guice)
api(libs.clikt)
implementation(projects.asm)
implementation(projects.buffer)
implementation(projects.cache550)
implementation(projects.cli)
implementation(projects.compress)
implementation(projects.db)
implementation(projects.http)
implementation(projects.inject)
implementation(projects.json)
implementation(projects.log)
implementation(projects.net)
implementation(projects.protocol)
implementation(projects.util)
implementation(projects.yaml)
implementation(libs.bootstrap)
implementation(libs.bootstrapTable)
implementation(libs.bundles.ktor)
implementation(libs.bundles.thymeleaf)
implementation(libs.byteUnits)
implementation(libs.cabParser)
implementation(libs.flyway)
implementation(libs.guava)
implementation(libs.hikaricp)
implementation(libs.jackson.jsr310)
implementation(libs.jdom)
implementation(libs.jelf)
implementation(libs.jquery)
implementation(libs.jsoup)
implementation(libs.kotlin.coroutines.core)
implementation(libs.netty.handler)
implementation(libs.pecoff4j)
implementation(libs.postgres)
}
publishing {
publications.create<MavenPublication>("maven") {
from(components["java"])
pom {
packaging = "jar"
name.set("OpenRS2 Archive")
description.set(
"""
Service for archiving clients, caches and XTEA keys in an
efficient deduplicated format.
""".trimIndent()
)
}
}
}

@ -1,23 +0,0 @@
package org.openrs2.archive
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
import org.openrs2.archive.cache.CacheCommand
import org.openrs2.archive.client.ClientCommand
import org.openrs2.archive.key.KeyCommand
import org.openrs2.archive.name.NameCommand
import org.openrs2.archive.web.WebCommand
public fun main(args: Array<String>): Unit = ArchiveCommand().main(args)
public class ArchiveCommand : NoOpCliktCommand(name = "archive") {
init {
subcommands(
CacheCommand(),
ClientCommand(),
KeyCommand(),
NameCommand(),
WebCommand()
)
}
}

@ -1,3 +0,0 @@
package org.openrs2.archive
public data class ArchiveConfig(val databaseUrl: String)

@ -1,27 +0,0 @@
package org.openrs2.archive
import com.fasterxml.jackson.databind.ObjectMapper
import jakarta.inject.Inject
import jakarta.inject.Provider
import org.openrs2.yaml.Yaml
import java.nio.file.Files
import java.nio.file.Path
public class ArchiveConfigProvider @Inject constructor(
@Yaml private val mapper: ObjectMapper
) : Provider<ArchiveConfig> {
override fun get(): ArchiveConfig {
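// Seed etc/archive.yaml from the bundled etc/archive.example.yaml on first run, then deserialize it with the YAML-configured ObjectMapper.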
if (Files.notExists(CONFIG_PATH)) {
Files.copy(EXAMPLE_CONFIG_PATH, CONFIG_PATH)
}
return Files.newBufferedReader(CONFIG_PATH).use { reader ->
mapper.readValue(reader, ArchiveConfig::class.java)
}
}
private companion object {
private val CONFIG_PATH = Path.of("etc/archive.yaml")
private val EXAMPLE_CONFIG_PATH = Path.of("etc/archive.example.yaml")
}
}

@ -1,55 +0,0 @@
package org.openrs2.archive
import com.fasterxml.jackson.databind.Module
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.google.inject.AbstractModule
import com.google.inject.Scopes
import com.google.inject.multibindings.Multibinder
import org.openrs2.archive.key.HdosKeyDownloader
import org.openrs2.archive.key.KeyDownloader
import org.openrs2.archive.key.RuneLiteKeyDownloader
import org.openrs2.archive.name.NameDownloader
import org.openrs2.archive.name.RuneStarNameDownloader
import org.openrs2.asm.AsmModule
import org.openrs2.buffer.BufferModule
import org.openrs2.cache.CacheModule
import org.openrs2.db.Database
import org.openrs2.http.HttpModule
import org.openrs2.json.JsonModule
import org.openrs2.net.NetworkModule
import org.openrs2.yaml.YamlModule
import javax.sql.DataSource
public object ArchiveModule : AbstractModule() {
override fun configure() {
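// Install the shared infrastructure modules, bind the config, data source and database providers as singletons, and register the key/name downloaders via multibindings so further sources can be added without touching this module.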
install(AsmModule)
install(BufferModule)
install(CacheModule)
install(HttpModule)
install(JsonModule)
install(NetworkModule)
install(YamlModule)
bind(ArchiveConfig::class.java)
.toProvider(ArchiveConfigProvider::class.java)
.`in`(Scopes.SINGLETON)
bind(DataSource::class.java)
.toProvider(DataSourceProvider::class.java)
.`in`(Scopes.SINGLETON)
bind(Database::class.java)
.toProvider(DatabaseProvider::class.java)
.`in`(Scopes.SINGLETON)
Multibinder.newSetBinder(binder(), Module::class.java)
.addBinding().to(JavaTimeModule::class.java)
val keyBinder = Multibinder.newSetBinder(binder(), KeyDownloader::class.java)
keyBinder.addBinding().to(HdosKeyDownloader::class.java)
keyBinder.addBinding().to(RuneLiteKeyDownloader::class.java)
val nameBinder = Multibinder.newSetBinder(binder(), NameDownloader::class.java)
nameBinder.addBinding().to(RuneStarNameDownloader::class.java)
}
}

@ -1,28 +0,0 @@
package org.openrs2.archive
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import jakarta.inject.Inject
import jakarta.inject.Provider
import org.flywaydb.core.Flyway
import org.postgresql.ds.PGSimpleDataSource
import javax.sql.DataSource
public class DataSourceProvider @Inject constructor(
private val config: ArchiveConfig
) : Provider<DataSource> {
override fun get(): DataSource {
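// Apply any pending Flyway migrations against the plain PostgreSQL data source, then hand out a HikariCP-pooled wrapper around it.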
val dataSource = PGSimpleDataSource()
dataSource.setUrl(config.databaseUrl)
Flyway.configure()
.dataSource(dataSource)
.locations("classpath:/org/openrs2/archive/migrations")
.load()
.migrate()
val config = HikariConfig()
config.dataSource = dataSource
return HikariDataSource(config)
}
}

@ -1,15 +0,0 @@
package org.openrs2.archive
import jakarta.inject.Inject
import jakarta.inject.Provider
import org.openrs2.db.Database
import org.openrs2.db.PostgresDeadlockDetector
import javax.sql.DataSource
public class DatabaseProvider @Inject constructor(
private val dataSource: DataSource
) : Provider<Database> {
override fun get(): Database {
return Database(dataSource, deadlockDetector = PostgresDeadlockDetector)
}
}

@ -1,19 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
import org.openrs2.archive.cache.finder.ExtractCommand
public class CacheCommand : NoOpCliktCommand(name = "cache") {
init {
subcommands(
CrossPollinateCommand(),
DownloadCommand(),
ExtractCommand(),
ImportCommand(),
ImportMasterIndexCommand(),
ExportCommand(),
RefreshViewsCommand()
)
}
}

@ -1,158 +0,0 @@
package org.openrs2.archive.cache
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.archive.cache.nxt.MusicStreamClient
import org.openrs2.archive.game.GameDatabase
import org.openrs2.archive.jav.JavConfig
import org.openrs2.archive.world.World
import org.openrs2.archive.world.WorldList
import org.openrs2.buffer.ByteBufBodyHandler
import org.openrs2.buffer.use
import org.openrs2.net.BootstrapFactory
import org.openrs2.net.awaitSuspend
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine
@Singleton
public class CacheDownloader @Inject constructor(
private val client: HttpClient,
private val byteBufBodyHandler: ByteBufBodyHandler,
private val bootstrapFactory: BootstrapFactory,
private val gameDatabase: GameDatabase,
private val importer: CacheImporter
) {
public suspend fun download(gameName: String, environment: String, language: String) {
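// Look up the game variant and its jav_config, then open a JS5 connection using the engine-specific handler (OSRS or NXT); the handler drives the download and resumes the continuation when the import finishes or fails.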
val game = gameDatabase.getGame(gameName, environment, language) ?: throw Exception("Game not found")
val url = game.url ?: throw Exception("URL not set")
val config = JavConfig.download(client, url)
val group = bootstrapFactory.createEventLoopGroup()
try {
suspendCoroutine { continuation ->
val bootstrap = bootstrapFactory.createBootstrap(group)
val hostname: String
val initializer = when (gameName) {
"oldschool" -> {
var buildMajor = game.buildMajor
hostname = if (environment == "beta") {
findOsrsWorld(config, World::isBeta) ?: throw Exception("Failed to find beta world")
} else {
val codebase = config.config[CODEBASE] ?: throw Exception("Codebase missing")
URI(codebase).host ?: throw Exception("Hostname missing")
}
val serverVersion = config.params[OSRS_SERVER_VERSION]
if (serverVersion != null) {
buildMajor = serverVersion.toInt()
}
OsrsJs5ChannelInitializer(
OsrsJs5ChannelHandler(
bootstrap,
game.scopeId,
game.id,
hostname,
PORT,
buildMajor ?: throw Exception("Current major build not set"),
game.lastMasterIndexId,
continuation,
importer
)
)
}
"runescape" -> {
var buildMajor = game.buildMajor
var buildMinor = game.buildMinor
val serverVersion = config.config[NXT_SERVER_VERSION]
if (serverVersion != null) {
val n = serverVersion.toInt()
/*
* Only reset buildMinor if buildMajor changes, so
* we don't have to keep retrying minor versions.
*/
if (buildMajor != n) {
buildMajor = n
buildMinor = 1
}
}
val tokens = config.params.values.filter { TOKEN_REGEX.matches(it) }
val token = tokens.singleOrNull() ?: throw Exception("Expected exactly one candidate token, got: $tokens")
hostname = if (environment == "beta") {
NXT_BETA_HOSTNAME
} else {
NXT_LIVE_HOSTNAME
}
val musicStreamClient = MusicStreamClient(client, byteBufBodyHandler, "http://$hostname")
NxtJs5ChannelInitializer(
NxtJs5ChannelHandler(
bootstrap,
game.scopeId,
game.id,
hostname,
PORT,
buildMajor ?: throw Exception("Current major build not set"),
buildMinor ?: throw Exception("Current minor build not set"),
game.lastMasterIndexId,
continuation,
importer,
token,
game.languageId,
musicStreamClient
)
)
}
else -> throw UnsupportedOperationException()
}
bootstrap.handler(initializer)
.connect(hostname, PORT)
.addListener { future ->
if (!future.isSuccess) {
continuation.resumeWithException(future.cause())
}
}
}
} finally {
group.shutdownGracefully().awaitSuspend()
}
}
private fun findOsrsWorld(config: JavConfig, predicate: (World) -> Boolean): String? {
val url = config.params[OSRS_WORLD_LIST_URL] ?: throw Exception("World list URL missing")
val list = client.send(HttpRequest.newBuilder(URI(url)).build(), byteBufBodyHandler).body().use { buf ->
WorldList.read(buf)
}
return list.worlds
.filter(predicate)
.map(World::hostname)
.shuffled()
.firstOrNull()
}
private companion object {
private const val CODEBASE = "codebase"
private const val OSRS_WORLD_LIST_URL = "17"
private const val OSRS_SERVER_VERSION = "25"
private const val NXT_SERVER_VERSION = "server_version"
private const val NXT_LIVE_HOSTNAME = "content.runescape.com"
private const val NXT_BETA_HOSTNAME = "content.beta.runescape.com"
private const val PORT = 443
private val TOKEN_REGEX = Regex("[A-Za-z0-9*-]{32}")
}
}

@ -1,806 +0,0 @@
package org.openrs2.archive.cache
import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.annotation.JsonUnwrapped
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.buffer.use
import org.openrs2.cache.ChecksumTable
import org.openrs2.cache.DiskStore
import org.openrs2.cache.Js5Archive
import org.openrs2.cache.Js5Compression
import org.openrs2.cache.Js5MasterIndex
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.cache.Store
import org.openrs2.crypto.SymmetricKey
import org.openrs2.db.Database
import org.postgresql.util.PGobject
import java.sql.Connection
import java.time.Instant
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.SortedSet
@Singleton
public class CacheExporter @Inject constructor(
private val database: Database,
private val alloc: ByteBufAllocator
) {
public data class Stats(
val validIndexes: Long,
val indexes: Long,
val validGroups: Long,
val groups: Long,
val validKeys: Long,
val keys: Long,
val size: Long,
val blocks: Long
) {
@JsonIgnore
public val allIndexesValid: Boolean = indexes == validIndexes && indexes != 0L
@JsonIgnore
public val validIndexesFraction: Double = if (indexes == 0L) {
1.0
} else {
validIndexes.toDouble() / indexes
}
@JsonIgnore
public val allGroupsValid: Boolean = groups == validGroups
@JsonIgnore
public val validGroupsFraction: Double = if (groups == 0L) {
1.0
} else {
validGroups.toDouble() / groups
}
@JsonIgnore
public val allKeysValid: Boolean = keys == validKeys
@JsonIgnore
public val validKeysFraction: Double = if (keys == 0L) {
1.0
} else {
validKeys.toDouble() / keys
}
/*
* The max block ID is conveniently also the max number of blocks, as
* zero is reserved.
*/
public val diskStoreValid: Boolean = blocks <= DiskStore.MAX_BLOCK
}
public data class Archive(
val resolved: Boolean,
val stats: ArchiveStats?
)
public data class ArchiveStats(
val validGroups: Long,
val groups: Long,
val validKeys: Long,
val keys: Long,
val size: Long,
val blocks: Long
) {
public val allGroupsValid: Boolean = groups == validGroups
public val validGroupsFraction: Double = if (groups == 0L) {
1.0
} else {
validGroups.toDouble() / groups
}
public val allKeysValid: Boolean = keys == validKeys
public val validKeysFraction: Double = if (keys == 0L) {
1.0
} else {
validKeys.toDouble() / keys
}
}
public data class IndexStats(
val validFiles: Long,
val files: Long,
val size: Long,
val blocks: Long
) {
public val allFilesValid: Boolean = files == validFiles
public val validFilesFraction: Double = if (files == 0L) {
1.0
} else {
validFiles.toDouble() / files
}
}
public data class Build(val major: Int, val minor: Int?) : Comparable<Build> {
override fun compareTo(other: Build): Int {
return compareValuesBy(this, other, Build::major, Build::minor)
}
override fun toString(): String {
return if (minor != null) {
"$major.$minor"
} else {
major.toString()
}
}
internal companion object {
internal fun fromPgObject(o: PGobject): Build? {
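// The value is PostgreSQL row-literal text such as "(550,1)" or "(550,)": strip the surrounding parentheses, split on the comma, and treat an empty major component as the absence of a build.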
val value = o.value!!
require(value.length >= 2)
val parts = value.substring(1, value.length - 1).split(",")
require(parts.size == 2)
val major = parts[0]
val minor = parts[1]
if (major.isEmpty()) {
return null
}
return Build(major.toInt(), if (minor.isEmpty()) null else minor.toInt())
}
}
}
public data class CacheSummary(
val id: Int,
val scope: String,
val game: String,
val environment: String,
val language: String,
val builds: SortedSet<Build>,
val timestamp: Instant?,
val sources: SortedSet<String>,
@JsonUnwrapped
val stats: Stats?
)
public data class Cache(
val id: Int,
val sources: List<Source>,
val updates: List<String>,
val stats: Stats?,
val archives: List<Archive>,
val indexes: List<IndexStats>?,
val masterIndex: Js5MasterIndex?,
val checksumTable: ChecksumTable?
)
public data class Source(
val game: String,
val environment: String,
val language: String,
val build: Build?,
val timestamp: Instant?,
val name: String?,
val description: String?,
val url: String?
)
public data class Key(
val archive: Int,
val group: Int,
val nameHash: Int?,
val name: String?,
@JsonProperty("mapsquare") val mapSquare: Int?,
val key: SymmetricKey
)
public suspend fun totalSize(): Long {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT SUM(size)
FROM cache_stats
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
if (rows.next()) {
rows.getLong(1)
} else {
0
}
}
}
}
}
public suspend fun list(): List<CacheSummary> {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT *
FROM (
SELECT
c.id,
g.name AS game,
sc.name AS scope,
e.name AS environment,
l.iso_code AS language,
array_remove(array_agg(DISTINCT ROW(s.build_major, s.build_minor)::build ORDER BY ROW(s.build_major, s.build_minor)::build ASC), NULL) builds,
MIN(s.timestamp) AS timestamp,
array_remove(array_agg(DISTINCT s.name ORDER BY s.name ASC), NULL) sources,
cs.valid_indexes,
cs.indexes,
cs.valid_groups,
cs.groups,
cs.valid_keys,
cs.keys,
cs.size,
cs.blocks
FROM caches c
JOIN sources s ON s.cache_id = c.id
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
LEFT JOIN cache_stats cs ON cs.scope_id = sc.id AND cs.cache_id = c.id
WHERE NOT c.hidden
GROUP BY sc.name, c.id, g.name, e.name, l.iso_code, cs.valid_indexes, cs.indexes, cs.valid_groups,
cs.groups, cs.valid_keys, cs.keys, cs.size, cs.blocks
) t
ORDER BY t.game ASC, t.environment ASC, t.language ASC, t.builds[1] ASC, t.timestamp ASC
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
val caches = mutableListOf<CacheSummary>()
while (rows.next()) {
val id = rows.getInt(1)
val game = rows.getString(2)
val scope = rows.getString(3)
val environment = rows.getString(4)
val language = rows.getString(5)
val builds = rows.getArray(6).array as Array<*>
val timestamp = rows.getTimestamp(7)?.toInstant()
@Suppress("UNCHECKED_CAST")
val sources = rows.getArray(8).array as Array<String>
val validIndexes = rows.getLong(9)
val stats = if (!rows.wasNull()) {
val indexes = rows.getLong(10)
val validGroups = rows.getLong(11)
val groups = rows.getLong(12)
val validKeys = rows.getLong(13)
val keys = rows.getLong(14)
val size = rows.getLong(15)
val blocks = rows.getLong(16)
Stats(validIndexes, indexes, validGroups, groups, validKeys, keys, size, blocks)
} else {
null
}
caches += CacheSummary(
id,
scope,
game,
environment,
language,
builds.mapNotNull { o -> Build.fromPgObject(o as PGobject) }.toSortedSet(),
timestamp,
sources.toSortedSet(),
stats
)
}
caches
}
}
}
}
public suspend fun get(scope: String, id: Int): Cache? {
return database.execute { connection ->
val masterIndex: Js5MasterIndex?
val checksumTable: ChecksumTable?
val stats: Stats?
connection.prepareStatement(
"""
SELECT
m.format,
mc.data,
b.data,
cs.valid_indexes,
cs.indexes,
cs.valid_groups,
cs.groups,
cs.valid_keys,
cs.keys,
cs.size,
cs.blocks
FROM caches c
CROSS JOIN scopes s
LEFT JOIN master_indexes m ON m.id = c.id
LEFT JOIN containers mc ON mc.id = m.container_id
LEFT JOIN crc_tables t ON t.id = c.id
LEFT JOIN blobs b ON b.id = t.blob_id
LEFT JOIN cache_stats cs ON cs.scope_id = s.id AND cs.cache_id = c.id
WHERE s.name = ? AND c.id = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val formatString = rows.getString(1)
masterIndex = if (formatString != null) {
Unpooled.wrappedBuffer(rows.getBytes(2)).use { compressed ->
Js5Compression.uncompress(compressed).use { uncompressed ->
val format = MasterIndexFormat.valueOf(formatString.uppercase())
Js5MasterIndex.readUnverified(uncompressed, format)
}
}
} else {
null
}
val blob = rows.getBytes(3)
checksumTable = if (blob != null) {
Unpooled.wrappedBuffer(blob).use { buf ->
ChecksumTable.read(buf)
}
} else {
null
}
val validIndexes = rows.getLong(4)
stats = if (rows.wasNull()) {
null
} else {
val indexes = rows.getLong(5)
val validGroups = rows.getLong(6)
val groups = rows.getLong(7)
val validKeys = rows.getLong(8)
val keys = rows.getLong(9)
val size = rows.getLong(10)
val blocks = rows.getLong(11)
Stats(validIndexes, indexes, validGroups, groups, validKeys, keys, size, blocks)
}
}
}
val sources = mutableListOf<Source>()
connection.prepareStatement(
"""
SELECT g.name, e.name, l.iso_code, s.build_major, s.build_minor, s.timestamp, s.name, s.description, s.url
FROM sources s
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE sc.name = ? AND s.cache_id = ?
ORDER BY s.name ASC
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val game = rows.getString(1)
val environment = rows.getString(2)
val language = rows.getString(3)
var buildMajor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(5)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(6)?.toInstant()
val name = rows.getString(7)
val description = rows.getString(8)
val url = rows.getString(9)
sources += Source(game, environment, language, build, timestamp, name, description, url)
}
}
}
val updates = mutableListOf<String>()
connection.prepareStatement(
"""
SELECT url
FROM updates
WHERE cache_id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
updates += rows.getString(1)
}
}
}
val archives = mutableListOf<Archive>()
connection.prepareStatement(
"""
SELECT a.archive_id, c.id IS NOT NULL, s.valid_groups, s.groups, s.valid_keys, s.keys, s.size, s.blocks
FROM master_index_archives a
LEFT JOIN resolve_index((SELECT id FROM scopes WHERE name = ?), a.archive_id, a.crc32, a.version) c ON TRUE
LEFT JOIN index_stats s ON s.container_id = c.id
WHERE a.master_index_id = ?
UNION ALL
SELECT a.archive_id, b.id IS NOT NULL, NULL, NULL, NULL, NULL, length(b.data), group_blocks(a.archive_id, length(b.data))
FROM crc_table_archives a
LEFT JOIN resolve_archive(a.archive_id, a.crc32) b ON TRUE
WHERE a.crc_table_id = ?
ORDER BY archive_id ASC
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.setInt(3, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val resolved = rows.getBoolean(2)
val size = rows.getLong(7)
val archiveStats = if (!rows.wasNull()) {
val validGroups = rows.getLong(3)
val groups = rows.getLong(4)
val validKeys = rows.getLong(5)
val keys = rows.getLong(6)
val blocks = rows.getLong(8)
ArchiveStats(validGroups, groups, validKeys, keys, size, blocks)
} else {
null
}
archives += Archive(resolved, archiveStats)
}
}
}
val indexes = if (checksumTable != null && archives[5].resolved) {
connection.prepareStatement(
"""
SELECT s.valid_files, s.files, s.size, s.blocks
FROM crc_table_archives a
JOIN resolve_archive(a.archive_id, a.crc32) b ON TRUE
JOIN version_list_stats s ON s.blob_id = b.id
WHERE a.crc_table_id = ? AND a.archive_id = 5
ORDER BY s.index_id ASC
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
val indexes = mutableListOf<IndexStats>()
while (rows.next()) {
val validFiles = rows.getLong(1)
val files = rows.getLong(2)
val size = rows.getLong(3)
val blocks = rows.getLong(4)
indexes += IndexStats(validFiles, files, size, blocks)
}
indexes
}
}
} else {
null
}
Cache(id, sources, updates, stats, archives, indexes, masterIndex, checksumTable)
}
}
public suspend fun getFileName(scope: String, id: Int): String? {
return database.execute { connection ->
// TODO(gpe): what if a cache is from multiple games?
connection.prepareStatement(
"""
SELECT
g.name AS game,
e.name AS environment,
l.iso_code AS language,
array_remove(array_agg(DISTINCT ROW(s.build_major, s.build_minor)::build ORDER BY ROW(s.build_major, s.build_minor)::build ASC), NULL) builds,
MIN(s.timestamp) AS timestamp
FROM sources s
JOIN game_variants v ON v.id = s.game_id
JOIN games g ON g.id = v.game_id
JOIN scopes sc ON sc.id = g.scope_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE sc.name = ? AND s.cache_id = ?
GROUP BY g.name, e.name, l.iso_code
LIMIT 1
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val game = rows.getString(1)
val environment = rows.getString(2)
val language = rows.getString(3)
val name = StringBuilder("$game-$environment-$language")
val builds = rows.getArray(4).array as Array<*>
for (build in builds.mapNotNull { o -> Build.fromPgObject(o as PGobject) }.toSortedSet()) {
name.append("-b")
name.append(build)
}
val timestamp = rows.getTimestamp(5)
if (!rows.wasNull()) {
name.append('-')
name.append(
timestamp.toInstant()
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss"))
)
}
name.append("-openrs2#")
name.append(id)
name.toString()
}
}
}
}
public suspend fun exportGroup(scope: String, id: Int, archive: Int, group: Int): ByteBuf? {
return database.execute { connection ->
if (archive == Store.ARCHIVESET && group == Store.ARCHIVESET) {
connection.prepareStatement(
"""
SELECT c.data
FROM master_indexes m
JOIN containers c ON c.id = m.container_id
WHERE m.id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
if (rows.next()) {
val data = rows.getBytes(1)
return@execute Unpooled.wrappedBuffer(data)
}
}
}
}
connection.prepareStatement(
"""
SELECT g.data
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
WHERE s.name = ? AND g.master_index_id = ? AND g.archive_id = ? AND g.group_id = ?
UNION ALL
SELECT f.data
FROM resolved_files f
WHERE f.crc_table_id = ? AND f.index_id = ? AND f.file_id = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.setInt(3, archive)
stmt.setInt(4, group)
stmt.setInt(5, id)
stmt.setInt(6, archive)
stmt.setInt(7, group)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val data = rows.getBytes(1)
return@execute Unpooled.wrappedBuffer(data)
}
}
}
}
public fun export(scope: String, id: Int, storeFactory: (Boolean) -> Store) {
database.executeOnce { connection ->
val legacy = connection.prepareStatement(
"""
SELECT id
FROM crc_tables
WHERE id = ?
""".trimIndent()
).use { stmt ->
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
rows.next()
}
}
storeFactory(legacy).use { store ->
if (legacy) {
exportLegacy(connection, id, store)
} else {
export(connection, scope, id, store)
}
}
}
}
private fun export(connection: Connection, scope: String, id: Int, store: Store) {
connection.prepareStatement(
"""
SELECT g.archive_id, g.group_id, g.data, g.version
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
WHERE s.name = ? AND g.master_index_id = ?
""".trimIndent()
).use { stmt ->
stmt.fetchSize = BATCH_SIZE
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
alloc.buffer(2, 2).use { versionBuf ->
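// The two-byte scratch buffer holds each group's optional version trailer, which is re-appended to the group data before it is written to the store.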
store.create(Js5Archive.ARCHIVESET)
while (rows.next()) {
val archive = rows.getInt(1)
val group = rows.getInt(2)
val bytes = rows.getBytes(3)
val version = rows.getInt(4)
val versionNull = rows.wasNull()
versionBuf.clear()
if (!versionNull) {
versionBuf.writeShort(version)
}
Unpooled.wrappedBuffer(Unpooled.wrappedBuffer(bytes), versionBuf.retain()).use { buf ->
store.write(archive, group, buf)
// ensure the .idx file exists even if it is empty
if (archive == Js5Archive.ARCHIVESET) {
store.create(group)
}
}
}
}
}
}
}
private fun exportLegacy(connection: Connection, id: Int, store: Store) {
connection.prepareStatement(
"""
SELECT index_id, file_id, data, version
FROM resolved_files
WHERE crc_table_id = ?
""".trimIndent()
).use { stmt ->
stmt.fetchSize = BATCH_SIZE
stmt.setInt(1, id)
stmt.executeQuery().use { rows ->
alloc.buffer(2, 2).use { versionBuf ->
store.create(0)
while (rows.next()) {
val index = rows.getInt(1)
val file = rows.getInt(2)
val bytes = rows.getBytes(3)
val version = rows.getInt(4)
val versionNull = rows.wasNull()
versionBuf.clear()
if (!versionNull) {
versionBuf.writeShort(version)
}
Unpooled.wrappedBuffer(Unpooled.wrappedBuffer(bytes), versionBuf.retain()).use { buf ->
store.write(index, file, buf)
}
}
}
}
}
}
public suspend fun exportKeys(scope: String, id: Int): List<Key> {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT g.archive_id, g.group_id, g.name_hash, n.name, (k.key).k0, (k.key).k1, (k.key).k2, (k.key).k3
FROM resolved_groups g
JOIN scopes s ON s.id = g.scope_id
JOIN keys k ON k.id = g.key_id
LEFT JOIN names n ON n.hash = g.name_hash AND n.name ~ '^l(?:[0-9]|[1-9][0-9])_(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$'
WHERE s.name = ? AND g.master_index_id = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, scope)
stmt.setInt(2, id)
stmt.executeQuery().use { rows ->
val keys = mutableListOf<Key>()
while (rows.next()) {
val archive = rows.getInt(1)
val group = rows.getInt(2)
var nameHash: Int? = rows.getInt(3)
if (rows.wasNull()) {
nameHash = null
}
val name = rows.getString(4)
val k0 = rows.getInt(5)
val k1 = rows.getInt(6)
val k2 = rows.getInt(7)
val k3 = rows.getInt(8)
val mapSquare = getMapSquare(name)
keys += Key(archive, group, nameHash, name, mapSquare, SymmetricKey(k0, k1, k2, k3))
}
keys
}
}
}
}
private companion object {
private const val BATCH_SIZE = 256
private val LOC_NAME_REGEX = Regex("l(\\d+)_(\\d+)")
private fun getMapSquare(name: String?): Int? {
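// Group names matching LOC_NAME_REGEX take the form l<x>_<z> (e.g. l50_50); the X and Z coordinates are packed into a single map square ID as (x shl 8) or z.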
if (name == null) {
return null
}
val match = LOC_NAME_REGEX.matchEntire(name) ?: return null
val x = match.groupValues[1].toInt()
val z = match.groupValues[2].toInt()
return (x shl 8) or z
}
}
}

File diff suppressed because it is too large

@ -1,16 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class CrossPollinateCommand : CliktCommand(name = "cross-pollinate") {
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val crossPollinator = injector.getInstance(CrossPollinator::class.java)
crossPollinator.crossPollinate()
}
}
}

@ -1,223 +0,0 @@
package org.openrs2.archive.cache
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufInputStream
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.cache.Js5Compression
import org.openrs2.cache.Js5CompressionType
import org.openrs2.db.Database
import java.sql.Connection
import java.util.zip.GZIPInputStream
@Singleton
public class CrossPollinator @Inject constructor(
private val database: Database,
private val alloc: ByteBufAllocator,
private val importer: CacheImporter
) {
public suspend fun crossPollinate() {
database.execute { connection ->
for ((index, archive) in OLD_TO_NEW_ENGINE) {
crossPollinate(connection, index, archive)
}
}
}
private fun crossPollinate(connection: Connection, index: Int, archive: Int) {
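// For the given old-engine index and new-engine archive, find entries that resolve on only one side (same ID, with the old file version one ahead of the new group version) and convert whichever container is present into the other engine's format, importing the result below.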
val scopeId: Int
connection.prepareStatement(
"""
SELECT id
FROM scopes
WHERE name = 'runescape'
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
check(rows.next())
scopeId = rows.getInt(1)
}
}
val groups = mutableListOf<CacheImporter.Group>()
val files = mutableListOf<CacheImporter.File>()
try {
connection.prepareStatement(
"""
SELECT
new.group_id AS id,
old.version AS old_version,
old.crc32 AS old_crc32,
b.data AS old_data,
new.version AS new_version,
new.crc32 AS new_crc32,
c.data AS new_data
FROM (
SELECT DISTINCT vf.index_id, vf.file_id, vf.version, vf.crc32
FROM version_list_files vf
WHERE vf.blob_id IN (
SELECT v.blob_id
FROM version_lists v
JOIN resolved_archives a ON a.blob_id = v.blob_id AND a.archive_id = 5
) AND vf.index_id = ?
) old
JOIN (
SELECT DISTINCT ig.group_id, ig.version, ig.crc32
FROM index_groups ig
WHERE ig.container_id IN (
SELECT i.container_id
FROM resolved_indexes i
WHERE i.scope_id = ? AND i.archive_id = ?
)
) new ON old.file_id = new.group_id AND old.version = new.version + 1
LEFT JOIN resolve_file(old.index_id, old.file_id, old.version, old.crc32) b ON TRUE
LEFT JOIN resolve_group(?, ?::uint1, new.group_id, new.crc32, new.version) c ON TRUE
WHERE (b.data IS NULL AND c.data IS NOT NULL) OR (b.data IS NOT NULL AND c.data IS NULL)
""".trimIndent()
).use { stmt ->
stmt.setInt(1, index)
stmt.setInt(2, scopeId)
stmt.setInt(3, archive)
stmt.setInt(4, scopeId)
stmt.setInt(5, archive)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val id = rows.getInt(1)
val oldVersion = rows.getInt(2)
val oldChecksum = rows.getInt(3)
val newVersion = rows.getInt(5)
val newChecksum = rows.getInt(6)
val oldData = rows.getBytes(4)
if (oldData != null) {
Unpooled.wrappedBuffer(oldData).use { oldBuf ->
fileToGroup(oldBuf, newChecksum).use { newBuf ->
if (newBuf != null) {
val uncompressed = Js5Compression.uncompressUnlessEncrypted(newBuf.slice())
groups += CacheImporter.Group(
archive,
id,
newBuf.retain(),
uncompressed,
newVersion,
false
)
}
}
}
}
val newData = rows.getBytes(7)
if (newData != null) {
Unpooled.wrappedBuffer(newData).use { newBuf ->
val oldBuf = groupToFile(newBuf, oldChecksum)
if (oldBuf != null) {
files += CacheImporter.File(index, id, oldBuf, oldVersion)
}
}
}
}
}
}
if (groups.isEmpty() && files.isEmpty()) {
return
}
importer.prepare(connection)
val sourceId = importer.addSource(
connection,
type = CacheImporter.SourceType.CROSS_POLLINATION,
cacheId = null,
gameId = null,
buildMajor = null,
buildMinor = null,
timestamp = null,
name = null,
description = null,
url = null,
)
if (groups.isNotEmpty()) {
importer.addGroups(connection, scopeId, sourceId, groups)
}
if (files.isNotEmpty()) {
importer.addFiles(connection, sourceId, files)
}
} finally {
groups.forEach(CacheImporter.Group::release)
files.forEach(CacheImporter.File::release)
}
}
private fun getUncompressedLength(buf: ByteBuf): Int {
GZIPInputStream(ByteBufInputStream(buf)).use { input ->
var len = 0
val temp = ByteArray(4096)
while (true) {
val n = input.read(temp)
if (n == -1) {
break
}
len += n
}
return len
}
}
private fun fileToGroup(input: ByteBuf, expectedChecksum: Int): ByteBuf? {
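// Wraps a legacy GZIP-compressed file in a JS5 container header (compression type, compressed length, uncompressed length) and keeps the result only if its CRC-32 matches the checksum recorded for the corresponding new-engine group.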
val len = input.readableBytes()
val lenWithHeader = len + JS5_COMPRESSION_HEADER_LEN
val uncompressedLen = getUncompressedLength(input.slice())
alloc.buffer(lenWithHeader, lenWithHeader).use { output ->
output.writeByte(Js5CompressionType.GZIP.ordinal)
output.writeInt(len)
output.writeInt(uncompressedLen)
output.writeBytes(input)
return if (output.crc32() == expectedChecksum) {
output.retain()
} else {
null
}
}
}
private fun groupToFile(input: ByteBuf, expectedChecksum: Int): ByteBuf? {
val type = Js5CompressionType.fromOrdinal(input.readUnsignedByte().toInt())
if (type != Js5CompressionType.GZIP) {
return null
}
input.skipBytes(JS5_COMPRESSION_HEADER_LEN - 1)
return if (input.crc32() == expectedChecksum) {
input.retainedSlice()
} else {
null
}
}
private companion object {
private val OLD_TO_NEW_ENGINE = mapOf(
1 to 7, // MODELS
3 to 6, // MIDI_SONGS
4 to 5, // MAPS
)
private const val JS5_COMPRESSION_HEADER_LEN = 9
}
}

@ -1,25 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.default
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class DownloadCommand : CliktCommand(name = "download") {
private val environment by option().default("live")
private val language by option().default("en")
private val game by argument().default("oldschool")
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val downloader = injector.getInstance(CacheDownloader::class.java)
downloader.download(game, environment, language)
}
}
}

@ -1,34 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.cache.DiskStore
import org.openrs2.inject.CloseableInjector
public class ExportCommand : CliktCommand(name = "export") {
private val scope by option().default("runescape")
private val id by argument().int()
private val output by argument().path(
mustExist = true,
canBeFile = false,
mustBeReadable = true,
mustBeWritable = true
)
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val exporter = injector.getInstance(CacheExporter::class.java)
exporter.export(scope, id) { legacy ->
DiskStore.create(output, legacy = legacy)
}
}
}
}

@ -1,53 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.cache.Store
import org.openrs2.cli.instant
import org.openrs2.inject.CloseableInjector
public class ImportCommand : CliktCommand(name = "import") {
private val buildMajor by option().int()
private val buildMinor by option().int()
private val timestamp by option().instant()
private val name by option()
private val description by option()
private val url by option()
private val environment by option().default("live")
private val language by option().default("en")
private val game by argument()
private val input by argument().path(
mustExist = true,
canBeFile = false,
mustBeReadable = true
)
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(CacheImporter::class.java)
Store.open(input).use { store ->
importer.import(
store,
game,
environment,
language,
buildMajor,
buildMinor,
timestamp,
name,
description,
url
)
}
}
}
}

@ -1,116 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.options.default
import com.github.ajalt.clikt.parameters.options.flag
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.enum
import com.github.ajalt.clikt.parameters.types.int
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.buffer.use
import org.openrs2.cache.Js5CompressionType
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.cli.instant
import org.openrs2.inject.CloseableInjector
import java.io.IOException
import java.nio.file.Files
import kotlin.math.min
public class ImportMasterIndexCommand : CliktCommand(name = "import-master-index") {
private val buildMajor by option().int()
private val buildMinor by option().int()
private val timestamp by option().instant()
private val name by option()
private val description by option()
private val url by option()
private val environment by option().default("live")
private val language by option().default("en")
private val decodeJs5Response by option().flag()
private val game by argument()
private val format by argument().enum<MasterIndexFormat>()
private val input by argument().path(
mustExist = true,
canBeDir = false,
mustBeReadable = true
)
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(CacheImporter::class.java)
Unpooled.wrappedBuffer(Files.readAllBytes(input)).use { buf ->
if (decodeJs5Response) {
decodeJs5Response(buf)
} else {
buf.retain()
}.use { decodedBuf ->
importer.importMasterIndex(
decodedBuf,
format,
game,
environment,
language,
buildMajor,
buildMinor,
timestamp,
name,
description,
url
)
}
}
}
}
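/*
 * A note on the framing handled below (derived from the code): a captured
 * JS5 response starts with a 3 byte archive/group prefix, followed by the
 * compression type and length, with the payload delivered in 512 byte
 * blocks separated by 0xFF markers (504 payload bytes fit in the first
 * block, 511 in each subsequent block).
 */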
private fun decodeJs5Response(input: ByteBuf): ByteBuf {
input.skipBytes(3) // archive and group
val compression = input.readUnsignedByte().toInt()
val len = input.readInt()
if (len < 0) {
throw IOException("Length is negative: $len")
}
val lenWithHeader = if (compression == Js5CompressionType.UNCOMPRESSED.ordinal) {
len + 5
} else {
len + 9
}
input.alloc().buffer(lenWithHeader, lenWithHeader).use { output ->
output.writeByte(compression)
output.writeInt(len)
var blockLen = 504
while (true) {
val n = min(blockLen, output.writableBytes())
if (input.readableBytes() < n) {
throw IOException("Input truncated (expecting $n bytes, got ${input.readableBytes()})")
}
output.writeBytes(input, n)
if (!output.isWritable) {
break
} else if (!input.isReadable) {
throw IOException("Input truncated (expecting block trailer)")
}
if (input.readUnsignedByte().toInt() != 0xFF) {
throw IOException("Invalid block trailer")
}
blockLen = 511
}
return output.retain()
}
}
}

@@ -1,376 +0,0 @@
package org.openrs2.archive.cache
import com.github.michaelbull.logging.InlineLogger
import io.netty.bootstrap.Bootstrap
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelException
import io.netty.channel.ChannelHandler
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPipeline
import io.netty.channel.SimpleChannelInboundHandler
import kotlinx.coroutines.runBlocking
import org.openrs2.buffer.crc32
import org.openrs2.buffer.use
import org.openrs2.cache.Js5Archive
import org.openrs2.cache.Js5Compression
import org.openrs2.cache.Js5Index
import org.openrs2.cache.Js5MasterIndex
import org.openrs2.cache.MasterIndexFormat
import java.io.IOException
import java.nio.channels.ClosedChannelException
import java.time.Instant
import kotlin.coroutines.Continuation
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
@ChannelHandler.Sharable
public abstract class Js5ChannelHandler(
private val bootstrap: Bootstrap,
private val scopeId: Int,
private val gameId: Int,
private val hostname: String,
private val port: Int,
protected var buildMajor: Int,
protected var buildMinor: Int?,
private val lastMasterIndexId: Int?,
private val continuation: Continuation<Unit>,
private val importer: CacheImporter,
private val masterIndexFormat: MasterIndexFormat,
private val maxInFlightRequests: Int,
private val maxBuildAttempts: Int = 10,
private val maxReconnectionAttempts: Int = 1
) : SimpleChannelInboundHandler<Any>(Object::class.java) {
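// Drives a JS5 download end-to-end: connect, fetch the master index, fetch
// any out-of-date indexes, queue the missing groups reported by the importer
// and stream them into the database in batches, reconnecting or bumping the
// build number as required.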
protected data class InFlightRequest(val prefetch: Boolean, val archive: Int, val group: Int)
protected data class PendingRequest(
val prefetch: Boolean,
val archive: Int,
val group: Int,
val version: Int,
val checksum: Int
)
private enum class State {
CONNECTING,
CLIENT_OUT_OF_DATE,
CONNECTED,
RESUMING_CONTINUATION
}
private var state = State.CONNECTING
private var buildAttempts = 0
private var reconnectionAttempts = 0
private val inFlightRequests = mutableSetOf<InFlightRequest>()
private val pendingRequests = ArrayDeque<PendingRequest>()
private var masterIndexId: Int = 0
private var sourceId: Int = 0
private var masterIndex: Js5MasterIndex? = null
private lateinit var indexes: Array<Js5Index?>
private val groups = mutableListOf<CacheImporter.Group>()
protected abstract fun createInitMessage(): Any
protected abstract fun createRequestMessage(prefetch: Boolean, archive: Int, group: Int): Any
protected abstract fun createConnectedMessage(): Any?
protected abstract fun configurePipeline(pipeline: ChannelPipeline)
protected abstract fun incrementVersion()
override fun channelActive(ctx: ChannelHandlerContext) {
assert(state == State.CONNECTING)
ctx.writeAndFlush(createInitMessage(), ctx.voidPromise())
ctx.read()
}
override fun channelReadComplete(ctx: ChannelHandlerContext) {
/*
* Wait for us to receive the OK message before we send JS5 requests,
* as the RS3 JS5 server ignores any JS5 requests sent before the OK
* message is received.
*/
if (state != State.CONNECTED) {
return
}
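// Keep the connection saturated: pull pending requests into the in-flight
// set until maxInFlightRequests is reached, then flush them in one batch.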
var flush = false
while (inFlightRequests.size < maxInFlightRequests) {
val request = pendingRequests.removeFirstOrNull() ?: break
inFlightRequests += InFlightRequest(request.prefetch, request.archive, request.group)
logger.info { "Requesting archive ${request.archive} group ${request.group}" }
ctx.write(createRequestMessage(request.prefetch, request.archive, request.group), ctx.voidPromise())
flush = true
}
if (flush) {
ctx.flush()
}
if (inFlightRequests.isNotEmpty()) {
ctx.read()
}
}
override fun channelInactive(ctx: ChannelHandlerContext) {
if (state == State.CLIENT_OUT_OF_DATE) {
state = State.CONNECTING
bootstrap.connect(hostname, port)
} else if (state != State.RESUMING_CONTINUATION) {
if (isComplete()) {
throw Exception("Connection closed unexpectedly")
} else if (++reconnectionAttempts > maxReconnectionAttempts) {
throw Exception("Connection closed unexpectedly after maximum number of reconnection attempts")
}
// move in-flight requests back to the pending queue
for (request in inFlightRequests) {
val prefetch = request.prefetch
val archive = request.archive
val group = request.group
pendingRequests += if (archive == Js5Archive.ARCHIVESET && group == Js5Archive.ARCHIVESET) {
PendingRequest(prefetch, archive, group, 0, 0)
} else if (archive == Js5Archive.ARCHIVESET) {
val entry = masterIndex!!.entries[group]
val version = entry.version
val checksum = entry.checksum
PendingRequest(prefetch, archive, group, version, checksum)
} else {
val entry = indexes[archive]!![group]!!
val version = entry.version
val checksum = entry.checksum
PendingRequest(prefetch, archive, group, version, checksum)
}
}
inFlightRequests.clear()
// re-connect
state = State.CONNECTING
bootstrap.connect(hostname, port)
}
}
@Suppress("OVERRIDE_DEPRECATION")
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
releaseGroups()
if (state == State.RESUMING_CONTINUATION) {
logger.warn(cause) { "Swallowing exception as continuation has already resumed" }
} else if (cause !is ChannelException && cause !is IOException) {
/*
* We skip continuation resumption if there's an I/O error or
* timeout - this allows channelInactive() to attempt to reconnect
* if we haven't used too many reconnection attempts.
*/
state = State.RESUMING_CONTINUATION
continuation.resumeWithException(cause)
}
if (cause !is ClosedChannelException) {
ctx.close()
}
}
protected fun handleOk(ctx: ChannelHandlerContext) {
assert(state == State.CONNECTING)
configurePipeline(ctx.pipeline())
val msg = createConnectedMessage()
if (msg != null) {
ctx.write(msg, ctx.voidPromise())
}
state = State.CONNECTED
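// Bootstrap the download by requesting the master index (ARCHIVESET,
// ARCHIVESET) unless one was already fetched or requests are still queued
// from a previous connection.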
if (masterIndex == null && pendingRequests.isEmpty()) {
request(ctx, Js5Archive.ARCHIVESET, Js5Archive.ARCHIVESET, 0, 0)
}
}
protected fun handleClientOutOfDate(ctx: ChannelHandlerContext) {
assert(state == State.CONNECTING)
if (++buildAttempts > maxBuildAttempts) {
throw Exception("Failed to identify current version")
}
state = State.CLIENT_OUT_OF_DATE
incrementVersion()
ctx.close()
}
protected fun handleResponse(
ctx: ChannelHandlerContext,
prefetch: Boolean,
archive: Int,
group: Int,
data: ByteBuf
) {
val request = InFlightRequest(prefetch, archive, group)
val removed = inFlightRequests.remove(request)
if (!removed) {
val type = if (prefetch) {
"prefetch"
} else {
"urgent"
}
throw Exception("Received response for $type request (archive $archive group $group) not in-flight")
}
processResponse(ctx, archive, group, data)
}
protected fun processResponse(ctx: ChannelHandlerContext, archive: Int, group: Int, data: ByteBuf) {
if (archive == Js5Archive.ARCHIVESET && group == Js5Archive.ARCHIVESET) {
processMasterIndex(ctx, data)
} else if (archive == Js5Archive.ARCHIVESET) {
processIndex(ctx, group, data)
} else {
processGroup(archive, group, data)
}
val complete = isComplete()
if (groups.size >= CacheImporter.BATCH_SIZE || complete) {
runBlocking {
importer.importGroups(scopeId, sourceId, groups)
}
releaseGroups()
}
if (complete) {
runBlocking {
importer.setLastMasterIndexId(gameId, masterIndexId)
}
state = State.RESUMING_CONTINUATION
continuation.resume(Unit)
ctx.close()
} else {
/*
* Reset the number of reconnection attempts as we are making
* progress.
*/
reconnectionAttempts = 0
}
}
protected open fun isComplete(): Boolean {
return pendingRequests.isEmpty() && inFlightRequests.isEmpty()
}
private fun processMasterIndex(ctx: ChannelHandlerContext, buf: ByteBuf) {
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
masterIndex = Js5MasterIndex.readUnverified(uncompressed.slice(), masterIndexFormat)
val (masterIndexId, sourceId, rawIndexes) = runBlocking {
importer.importMasterIndexAndGetIndexes(
masterIndex!!,
buf,
uncompressed,
gameId,
scopeId,
buildMajor,
buildMinor,
lastMasterIndexId,
timestamp = Instant.now()
)
}
this.masterIndexId = masterIndexId
this.sourceId = sourceId
try {
indexes = arrayOfNulls(rawIndexes.size)
for ((archive, index) in rawIndexes.withIndex()) {
val entry = masterIndex!!.entries[archive]
if (entry.version == 0 && entry.checksum == 0) {
continue
}
if (index != null) {
processIndex(ctx, archive, index)
} else {
request(ctx, Js5Archive.ARCHIVESET, archive, entry.version, entry.checksum)
}
}
} finally {
rawIndexes.filterNotNull().forEach(ByteBuf::release)
}
}
}
private fun processIndex(ctx: ChannelHandlerContext, archive: Int, buf: ByteBuf) {
val checksum = buf.crc32()
val entry = masterIndex!!.entries[archive]
if (checksum != entry.checksum) {
throw Exception("Index $archive checksum invalid (expected ${entry.checksum}, actual $checksum)")
}
Js5Compression.uncompress(buf.slice()).use { uncompressed ->
val index = Js5Index.read(uncompressed.slice())
indexes[archive] = index
if (index.version != entry.version) {
throw Exception("Index $archive version invalid (expected ${entry.version}, actual ${index.version})")
}
val groups = runBlocking {
importer.importIndexAndGetMissingGroups(
scopeId,
sourceId,
archive,
index,
buf,
uncompressed,
lastMasterIndexId
)
}
for (group in groups) {
val groupEntry = index[group]!!
request(ctx, archive, group, groupEntry.version, groupEntry.checksum)
}
}
}
private fun processGroup(archive: Int, group: Int, buf: ByteBuf) {
val checksum = buf.crc32()
val entry = indexes[archive]!![group]!!
if (checksum != entry.checksum) {
val expected = entry.checksum
throw Exception("Archive $archive group $group checksum invalid (expected $expected, actual $checksum)")
}
val uncompressed = Js5Compression.uncompressUnlessEncrypted(buf.slice())
groups += CacheImporter.Group(
archive,
group,
buf.retain(),
uncompressed,
entry.version,
versionTruncated = false
)
}
protected open fun request(ctx: ChannelHandlerContext, archive: Int, group: Int, version: Int, checksum: Int) {
pendingRequests += PendingRequest(false, archive, group, version, checksum)
}
private fun releaseGroups() {
groups.forEach(CacheImporter.Group::release)
groups.clear()
}
private companion object {
private val logger = InlineLogger()
}
}

@@ -1,158 +0,0 @@
package org.openrs2.archive.cache
import com.github.michaelbull.logging.InlineLogger
import io.netty.bootstrap.Bootstrap
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPipeline
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.asCoroutineDispatcher
import kotlinx.coroutines.cancel
import kotlinx.coroutines.launch
import org.openrs2.archive.cache.nxt.InitJs5RemoteConnection
import org.openrs2.archive.cache.nxt.Js5Request
import org.openrs2.archive.cache.nxt.Js5RequestEncoder
import org.openrs2.archive.cache.nxt.Js5Response
import org.openrs2.archive.cache.nxt.Js5ResponseDecoder
import org.openrs2.archive.cache.nxt.LoginResponse
import org.openrs2.archive.cache.nxt.MusicStreamClient
import org.openrs2.buffer.use
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
import org.openrs2.protocol.js5.downstream.XorDecoder
import kotlin.coroutines.Continuation
public class NxtJs5ChannelHandler(
bootstrap: Bootstrap,
scopeId: Int,
gameId: Int,
hostname: String,
port: Int,
buildMajor: Int,
buildMinor: Int,
lastMasterIndexId: Int?,
continuation: Continuation<Unit>,
importer: CacheImporter,
private val token: String,
private val languageId: Int,
private val musicStreamClient: MusicStreamClient,
private val maxMinorBuildAttempts: Int = 5
) : Js5ChannelHandler(
bootstrap,
scopeId,
gameId,
hostname,
port,
buildMajor,
buildMinor,
lastMasterIndexId,
continuation,
importer,
MasterIndexFormat.LENGTHS,
maxInFlightRequests = 500
) {
private data class MusicRequest(val archive: Int, val group: Int, val version: Int, val checksum: Int)
private var inFlightRequests = 0
private val pendingRequests = ArrayDeque<MusicRequest>()
private var scope: CoroutineScope? = null
private var minorBuildAttempts = 0
override fun createInitMessage(): Any {
return InitJs5RemoteConnection(buildMajor, buildMinor!!, token, languageId)
}
override fun createRequestMessage(prefetch: Boolean, archive: Int, group: Int): Any {
return Js5Request.Group(prefetch, archive, group, buildMajor)
}
override fun createConnectedMessage(): Any? {
return Js5Request.Connected(buildMajor)
}
override fun configurePipeline(pipeline: ChannelPipeline) {
pipeline.addBefore("handler", null, Js5RequestEncoder)
pipeline.addBefore("handler", null, XorDecoder())
pipeline.addBefore("handler", null, Js5ResponseDecoder())
pipeline.remove(Rs2Encoder::class.java)
pipeline.remove(Rs2Decoder::class.java)
}
override fun incrementVersion() {
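// Probe for the current client version: bump the minor build first, and
// after maxMinorBuildAttempts attempts bump the major build and reset the
// minor build to 1.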
buildMinor = buildMinor!! + 1
if (++minorBuildAttempts >= maxMinorBuildAttempts) {
buildMajor++
buildMinor = 1
}
}
override fun channelActive(ctx: ChannelHandlerContext) {
super.channelActive(ctx)
scope = CoroutineScope(ctx.channel().eventLoop().asCoroutineDispatcher())
}
override fun channelInactive(ctx: ChannelHandlerContext) {
super.channelInactive(ctx)
scope!!.cancel()
}
override fun channelRead0(ctx: ChannelHandlerContext, msg: Any) {
when (msg) {
is LoginResponse.Js5Ok -> handleOk(ctx)
is LoginResponse.ClientOutOfDate -> handleClientOutOfDate(ctx)
is LoginResponse -> throw Exception("Invalid response: $msg")
is Js5Response -> handleResponse(ctx, msg.prefetch, msg.archive, msg.group, msg.data)
else -> throw Exception("Unknown message type: ${msg.javaClass.name}")
}
}
override fun channelReadComplete(ctx: ChannelHandlerContext) {
super.channelReadComplete(ctx)
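// Music groups are fetched out of band over HTTP (see MusicStreamClient),
// with at most 6 requests in flight at once.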
while (inFlightRequests < 6) {
val request = pendingRequests.removeFirstOrNull() ?: break
inFlightRequests++
logger.info { "Requesting archive ${request.archive} group ${request.group}" }
scope!!.launch {
val archive = request.archive
val group = request.group
val version = request.version
val checksum = request.checksum
musicStreamClient.request(archive, group, version, checksum, buildMajor).use { buf ->
inFlightRequests--
processResponse(ctx, archive, group, buf)
/*
* Inject a fake channelReadComplete event to ensure we
* don't time out and to send any new music requests.
*/
ctx.channel().pipeline().fireChannelReadComplete()
}
}
}
}
override fun isComplete(): Boolean {
return super.isComplete() && pendingRequests.isEmpty() && inFlightRequests == 0
}
override fun request(ctx: ChannelHandlerContext, archive: Int, group: Int, version: Int, checksum: Int) {
if (archive == MUSIC_ARCHIVE) {
pendingRequests += MusicRequest(archive, group, version, checksum)
} else {
super.request(ctx, archive, group, version, checksum)
}
}
private companion object {
private val logger = InlineLogger()
private const val MUSIC_ARCHIVE = 40
}
}

@@ -1,22 +0,0 @@
package org.openrs2.archive.cache
import io.netty.channel.Channel
import io.netty.channel.ChannelInitializer
import io.netty.handler.timeout.ReadTimeoutHandler
import org.openrs2.archive.cache.nxt.ClientOutOfDateCodec
import org.openrs2.archive.cache.nxt.InitJs5RemoteConnectionCodec
import org.openrs2.archive.cache.nxt.Js5OkCodec
import org.openrs2.protocol.Protocol
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
public class NxtJs5ChannelInitializer(private val handler: NxtJs5ChannelHandler) : ChannelInitializer<Channel>() {
override fun initChannel(ch: Channel) {
ch.pipeline().addLast(
ReadTimeoutHandler(30),
Rs2Encoder(Protocol(InitJs5RemoteConnectionCodec)),
Rs2Decoder(Protocol(Js5OkCodec, ClientOutOfDateCodec))
)
ch.pipeline().addLast("handler", handler)
}
}

@@ -1,76 +0,0 @@
package org.openrs2.archive.cache
import io.netty.bootstrap.Bootstrap
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPipeline
import org.openrs2.cache.MasterIndexFormat
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
import org.openrs2.protocol.js5.downstream.Js5LoginResponse
import org.openrs2.protocol.js5.downstream.Js5Response
import org.openrs2.protocol.js5.downstream.Js5ResponseDecoder
import org.openrs2.protocol.js5.downstream.XorDecoder
import org.openrs2.protocol.js5.upstream.Js5Request
import org.openrs2.protocol.js5.upstream.Js5RequestEncoder
import org.openrs2.protocol.login.upstream.LoginRequest
import kotlin.coroutines.Continuation
public class OsrsJs5ChannelHandler(
bootstrap: Bootstrap,
scopeId: Int,
gameId: Int,
hostname: String,
port: Int,
build: Int,
lastMasterIndexId: Int?,
continuation: Continuation<Unit>,
importer: CacheImporter
) : Js5ChannelHandler(
bootstrap,
scopeId,
gameId,
hostname,
port,
build,
null,
lastMasterIndexId,
continuation,
importer,
MasterIndexFormat.VERSIONED,
maxInFlightRequests = 200
) {
override fun createInitMessage(): Any {
return LoginRequest.InitJs5RemoteConnection(buildMajor)
}
override fun createRequestMessage(prefetch: Boolean, archive: Int, group: Int): Any {
return Js5Request.Group(prefetch, archive, group)
}
override fun createConnectedMessage(): Any? {
return null
}
override fun configurePipeline(pipeline: ChannelPipeline) {
pipeline.addBefore("handler", null, Js5RequestEncoder)
pipeline.addBefore("handler", null, XorDecoder())
pipeline.addBefore("handler", null, Js5ResponseDecoder())
pipeline.remove(Rs2Encoder::class.java)
pipeline.remove(Rs2Decoder::class.java)
}
override fun incrementVersion() {
buildMajor++
}
override fun channelRead0(ctx: ChannelHandlerContext, msg: Any) {
when (msg) {
is Js5LoginResponse.Ok -> handleOk(ctx)
is Js5LoginResponse.ClientOutOfDate -> handleClientOutOfDate(ctx)
is Js5LoginResponse -> throw Exception("Invalid response: $msg")
is Js5Response -> handleResponse(ctx, msg.prefetch, msg.archive, msg.group, msg.data)
else -> throw Exception("Unknown message type: ${msg.javaClass.name}")
}
}
}

@@ -1,22 +0,0 @@
package org.openrs2.archive.cache
import io.netty.channel.Channel
import io.netty.channel.ChannelInitializer
import io.netty.handler.timeout.ReadTimeoutHandler
import org.openrs2.protocol.Protocol
import org.openrs2.protocol.Rs2Decoder
import org.openrs2.protocol.Rs2Encoder
import org.openrs2.protocol.js5.downstream.Js5ClientOutOfDateCodec
import org.openrs2.protocol.js5.downstream.Js5OkCodec
import org.openrs2.protocol.login.upstream.InitJs5RemoteConnectionCodec
public class OsrsJs5ChannelInitializer(private val handler: OsrsJs5ChannelHandler) : ChannelInitializer<Channel>() {
override fun initChannel(ch: Channel) {
ch.pipeline().addLast(
ReadTimeoutHandler(30),
Rs2Encoder(Protocol(InitJs5RemoteConnectionCodec())),
Rs2Decoder(Protocol(Js5OkCodec(), Js5ClientOutOfDateCodec()))
)
ch.pipeline().addLast("handler", handler)
}
}

@@ -1,16 +0,0 @@
package org.openrs2.archive.cache
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class RefreshViewsCommand : CliktCommand(name = "refresh-views") {
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(CacheImporter::class.java)
importer.refreshViews()
}
}
}

@@ -1,149 +0,0 @@
package org.openrs2.archive.cache.finder
import com.github.michaelbull.logging.InlineLogger
import com.google.common.io.ByteStreams
import com.google.common.io.LittleEndianDataInputStream
import org.openrs2.util.charset.Cp1252Charset
import java.io.Closeable
import java.io.EOFException
import java.io.IOException
import java.io.InputStream
import java.io.PushbackInputStream
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.attribute.BasicFileAttributeView
import java.nio.file.attribute.FileTime
import java.time.Instant
public class CacheFinderExtractor(
input: InputStream
) : Closeable {
private val pushbackInput = PushbackInputStream(input)
private val input = LittleEndianDataInputStream(pushbackInput)
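// Reads a Windows FILETIME (100 nanosecond ticks since 1601-01-01 UTC,
// stored as two little-endian 32-bit halves, low word first) and converts
// it to a FileTime relative to the Unix epoch.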
private fun readTimestamp(): FileTime {
val lo = input.readInt().toLong() and 0xFFFFFFFF
val hi = input.readInt().toLong() and 0xFFFFFFFF
val seconds = (((hi shl 32) or lo) / 10_000_000) - FILETIME_TO_UNIX_EPOCH
return FileTime.from(Instant.ofEpochSecond(seconds, lo))
}
private fun readName(): String {
val bytes = ByteArray(MAX_PATH)
input.readFully(bytes)
var len = bytes.size
for ((i, b) in bytes.withIndex()) {
if (b.toInt() == 0) {
len = i
break
}
}
return String(bytes, 0, len, Cp1252Charset)
}
private fun peekUnsignedByte(): Int {
val n = pushbackInput.read()
pushbackInput.unread(n)
return n
}
public fun extract(destination: Path) {
val newVersion = peekUnsignedByte() == 0xFE
if (newVersion) {
val signature = input.readInt()
if (signature != 0x435352FE) {
throw IOException("Invalid signature")
}
}
var readDirectoryPath = true
var number = 0
var directorySuffix: String? = null
while (true) {
if (newVersion && readDirectoryPath) {
val len = try {
input.readInt()
} catch (ex: EOFException) {
break
}
val bytes = ByteArray(len)
input.readFully(bytes)
val path = String(bytes, Cp1252Charset)
logger.info { "Extracting $path" }
readDirectoryPath = false
directorySuffix = path.substring(path.lastIndexOf('\\') + 1)
.replace(INVALID_CHARS, "_")
continue
}
if (peekUnsignedByte() == 0xFF) {
input.skipBytes(1)
readDirectoryPath = true
number++
continue
}
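// Each file record matches the layout of a little-endian WIN32_FIND_DATA
// structure: attributes, creation/access/write FILETIMEs, the size split
// into high and low 32-bit words, 8 reserved bytes, a MAX_PATH (260) byte
// name, a 14 byte 8.3 alternate name and 2 bytes of padding.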
val attributes = try {
input.readInt()
} catch (ex: EOFException) {
break
}
val btime = readTimestamp()
val atime = readTimestamp()
val mtime = readTimestamp()
val sizeHi = input.readInt().toLong() and 0xFFFFFFFF
val sizeLo = input.readInt().toLong() and 0xFFFFFFFF
val size = (sizeHi shl 32) or sizeLo
input.skipBytes(8) // reserved
val name = readName()
input.skipBytes(14) // alternate name
input.skipBytes(2) // padding
val dir = if (directorySuffix != null) {
destination.resolve("cache${number}_$directorySuffix")
} else {
destination.resolve("cache$number")
}
Files.createDirectories(dir)
if ((attributes and FILE_ATTRIBUTE_DIRECTORY) == 0) {
val file = dir.resolve(name)
Files.newOutputStream(file).use { output ->
ByteStreams.copy(ByteStreams.limit(input, size), output)
}
val view = Files.getFileAttributeView(file, BasicFileAttributeView::class.java)
view.setTimes(mtime, atime, btime)
}
}
}
override fun close() {
input.close()
}
private companion object {
private const val FILETIME_TO_UNIX_EPOCH: Long = 11644473600
private const val MAX_PATH = 260
private const val FILE_ATTRIBUTE_DIRECTORY = 0x10
private val INVALID_CHARS = Regex("[^A-Za-z0-9-]")
private val logger = InlineLogger()
}
}

@@ -1,25 +0,0 @@
package org.openrs2.archive.cache.finder
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.default
import com.github.ajalt.clikt.parameters.types.inputStream
import com.github.ajalt.clikt.parameters.types.path
import java.nio.file.Path
public class ExtractCommand : CliktCommand(name = "extract") {
private val input by argument().inputStream()
private val output by argument().path(
mustExist = false,
canBeFile = false,
canBeDir = true,
mustBeReadable = true,
mustBeWritable = true
).default(Path.of("."))
override fun run() {
CacheFinderExtractor(input).use { extractor ->
extractor.extract(output)
}
}
}

@@ -1,8 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.EmptyPacketCodec
public object ClientOutOfDateCodec : EmptyPacketCodec<LoginResponse.ClientOutOfDate>(
opcode = 6,
packet = LoginResponse.ClientOutOfDate
)

@@ -1,10 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.Packet
public data class InitJs5RemoteConnection(
public val buildMajor: Int,
public val buildMinor: Int,
public val token: String,
public val language: Int
) : Packet

@@ -1,27 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import org.openrs2.buffer.readString
import org.openrs2.buffer.writeString
import org.openrs2.crypto.StreamCipher
import org.openrs2.protocol.VariableBytePacketCodec
public object InitJs5RemoteConnectionCodec : VariableBytePacketCodec<InitJs5RemoteConnection>(
type = InitJs5RemoteConnection::class.java,
opcode = 15
) {
override fun decode(input: ByteBuf, cipher: StreamCipher): InitJs5RemoteConnection {
val buildMajor = input.readInt()
val buildMinor = input.readInt()
val token = input.readString()
val language = input.readUnsignedByte().toInt()
return InitJs5RemoteConnection(buildMajor, buildMinor, token, language)
}
override fun encode(input: InitJs5RemoteConnection, output: ByteBuf, cipher: StreamCipher) {
output.writeInt(input.buildMajor)
output.writeInt(input.buildMinor)
output.writeString(input.token)
output.writeByte(input.language)
}
}

@@ -1,8 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.EmptyPacketCodec
public object Js5OkCodec : EmptyPacketCodec<LoginResponse.Js5Ok>(
opcode = 0,
packet = LoginResponse.Js5Ok
)

@@ -1,14 +0,0 @@
package org.openrs2.archive.cache.nxt
public sealed class Js5Request {
public data class Group(
public val prefetch: Boolean,
public val archive: Int,
public val group: Int,
public val build: Int
) : Js5Request()
public data class Connected(
public val build: Int
) : Js5Request()
}

@@ -1,37 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandler
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.MessageToByteEncoder
@ChannelHandler.Sharable
public object Js5RequestEncoder : MessageToByteEncoder<Js5Request>(Js5Request::class.java) {
override fun encode(ctx: ChannelHandlerContext, msg: Js5Request, out: ByteBuf) {
when (msg) {
is Js5Request.Group -> {
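// Opcode 32 (prefetch) or 33 (urgent), then the archive (1 byte),
// group (4 bytes), client build (2 bytes) and a 2 byte zero field.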
out.writeByte(if (msg.prefetch) 32 else 33)
out.writeByte(msg.archive)
out.writeInt(msg.group)
out.writeShort(msg.build)
out.writeShort(0)
}
is Js5Request.Connected -> {
out.writeByte(6)
out.writeMedium(5)
out.writeShort(0)
out.writeShort(msg.build)
out.writeShort(0)
}
}
}
override fun allocateBuffer(ctx: ChannelHandlerContext, msg: Js5Request, preferDirect: Boolean): ByteBuf {
return if (preferDirect) {
ctx.alloc().ioBuffer(10, 10)
} else {
ctx.alloc().heapBuffer(10, 10)
}
}
}

@@ -1,11 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import io.netty.buffer.DefaultByteBufHolder
public data class Js5Response(
public val prefetch: Boolean,
public val archive: Int,
public val group: Int,
public val data: ByteBuf
) : DefaultByteBufHolder(data)

@@ -1,121 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.ByteToMessageDecoder
import io.netty.handler.codec.DecoderException
import kotlin.math.min
public class Js5ResponseDecoder : ByteToMessageDecoder() {
private data class Request(val prefetch: Boolean, val archive: Int, val group: Int)
private enum class State {
READ_HEADER,
READ_LEN,
READ_DATA
}
private var state = State.READ_HEADER
private val buffers = mutableMapOf<Request, ByteBuf>()
private var request: Request? = null
override fun decode(ctx: ChannelHandlerContext, input: ByteBuf, out: MutableList<Any>) {
if (state == State.READ_HEADER) {
if (input.readableBytes() < 5) {
return
}
val prefetch: Boolean
val archive = input.readUnsignedByte().toInt()
var group = input.readInt()
if (group and 0x80000000.toInt() != 0) {
prefetch = true
group = group and 0x7FFFFFFF
} else {
prefetch = false
}
request = Request(prefetch, archive, group)
state = if (buffers.containsKey(request)) {
State.READ_DATA
} else {
State.READ_LEN
}
}
if (state == State.READ_LEN) {
if (input.readableBytes() < 5) {
return
}
val type = input.readUnsignedByte().toInt()
val len = input.readInt()
if (len < 0) {
throw DecoderException("Length is negative: $len")
}
val totalLen = if (type == 0) {
len + 5
} else {
len + 9
}
if (totalLen < 0) {
throw DecoderException("Total length exceeds maximum ByteBuf size")
}
val data = ctx.alloc().buffer(totalLen, totalLen)
data.writeByte(type)
data.writeInt(len)
buffers[request!!] = data
state = State.READ_DATA
}
if (state == State.READ_DATA) {
val data = buffers[request!!]!!
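// Responses are streamed in 102400 byte blocks: the first block carries
// 10 bytes of headers (archive, group, compression type and length) and
// every subsequent block re-sends the 5 byte archive/group header.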
var blockLen = if (data.writerIndex() == 5) {
102400 - 10
} else {
102400 - 5
}
blockLen = min(blockLen, data.writableBytes())
if (input.readableBytes() < blockLen) {
return
}
data.writeBytes(input, blockLen)
if (!data.isWritable) {
out += Js5Response(request!!.prefetch, request!!.archive, request!!.group, data)
buffers.remove(request!!)
request = null
}
state = State.READ_HEADER
}
}
override fun channelInactive(ctx: ChannelHandlerContext) {
super.channelInactive(ctx)
reset()
}
override fun handlerRemoved0(ctx: ChannelHandlerContext?) {
reset()
}
private fun reset() {
buffers.values.forEach(ByteBuf::release)
buffers.clear()
state = State.READ_HEADER
}
}

@@ -1,8 +0,0 @@
package org.openrs2.archive.cache.nxt
import org.openrs2.protocol.Packet
public sealed class LoginResponse : Packet {
public object Js5Ok : LoginResponse()
public object ClientOutOfDate : LoginResponse()
}

@@ -1,32 +0,0 @@
package org.openrs2.archive.cache.nxt
import io.netty.buffer.ByteBuf
import kotlinx.coroutines.future.await
import org.openrs2.buffer.ByteBufBodyHandler
import org.openrs2.buffer.use
import org.openrs2.http.checkStatusCode
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.time.Duration
public class MusicStreamClient(
private val client: HttpClient,
private val byteBufBodyHandler: ByteBufBodyHandler,
private val origin: String
) {
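// Fetches a music group over HTTP instead of JS5, passing the archive (a),
// group (g), checksum (c), version (v) and client build (k) as query
// parameters, along with a constant m=0.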
public suspend fun request(archive: Int, group: Int, version: Int, checksum: Int, build: Int): ByteBuf {
val uri = URI("$origin/ms?m=0&a=$archive&k=$build&g=$group&c=$checksum&v=$version")
val request = HttpRequest.newBuilder(uri)
.GET()
.timeout(Duration.ofSeconds(30))
.build()
val response = client.sendAsync(request, byteBufBodyHandler).await()
response.body().use { buf ->
response.checkStatusCode()
return buf.retain()
}
}
}

@@ -1,11 +0,0 @@
package org.openrs2.archive.client
public enum class Architecture {
INDEPENDENT,
UNIVERSAL,
X86,
AMD64,
POWERPC,
SPARC,
SPARCV9
}

@@ -1,35 +0,0 @@
package org.openrs2.archive.client
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufUtil
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.archive.cache.CacheImporter
import java.time.Instant
public class Artifact(
data: ByteBuf,
public val game: String,
public val environment: String,
public val build: CacheExporter.Build?,
public val timestamp: Instant?,
public val type: ArtifactType,
public val format: ArtifactFormat,
public val os: OperatingSystem,
public val arch: Architecture,
public val jvm: Jvm,
public val links: List<ArtifactLink>
) : CacheImporter.Blob(data)
public data class ArtifactLink(
val type: ArtifactType,
val format: ArtifactFormat,
val os: OperatingSystem,
val arch: Architecture,
val jvm: Jvm,
val crc32: Int?,
val sha1: ByteArray,
val size: Int?
) {
public val sha1Hex: String
get() = ByteBufUtil.hexDump(sha1)
}

@@ -1,46 +0,0 @@
package org.openrs2.archive.client
import io.ktor.http.ContentType
public enum class ArtifactFormat {
CAB,
JAR,
NATIVE,
PACK200,
PACKCLASS;
public fun getPrefix(os: OperatingSystem): String {
return when (this) {
NATIVE -> os.getPrefix()
else -> ""
}
}
public fun getExtension(os: OperatingSystem): String {
return when (this) {
CAB -> "cab"
JAR -> "jar"
NATIVE -> os.getExtension()
PACK200 -> "pack200"
PACKCLASS -> "js5"
}
}
public fun getContentType(os: OperatingSystem): ContentType {
return when (this) {
CAB -> CAB_MIME_TYPE
JAR -> JAR_MIME_TYPE
NATIVE -> os.getContentType()
PACK200, PACKCLASS -> ContentType.Application.OctetStream
}
}
public fun isJar(): Boolean {
return this != NATIVE
}
private companion object {
private val CAB_MIME_TYPE = ContentType("application", "vnd.ms-cab-compressed")
private val JAR_MIME_TYPE = ContentType("application", "java-archive")
}
}

@@ -1,16 +0,0 @@
package org.openrs2.archive.client
public enum class ArtifactType {
BROWSERCONTROL,
CLIENT,
CLIENT_GL,
GLUEGEN_RT,
JAGGL,
JAGGL_DRI,
JAGMISC,
JOGL,
JOGL_AWT,
LOADER,
LOADER_GL,
UNPACKCLASS
}

@@ -1,14 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.NoOpCliktCommand
import com.github.ajalt.clikt.core.subcommands
public class ClientCommand : NoOpCliktCommand(name = "client") {
init {
subcommands(
ExportCommand(),
ImportCommand(),
RefreshCommand()
)
}
}

@@ -1,455 +0,0 @@
package org.openrs2.archive.client
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufUtil
import io.netty.buffer.DefaultByteBufHolder
import io.netty.buffer.Unpooled
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.db.Database
import java.time.Instant
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
@Singleton
public class ClientExporter @Inject constructor(
private val database: Database
) {
public data class ArtifactSummary(
public val id: Long,
public val game: String,
public val environment: String,
public val build: CacheExporter.Build?,
public val timestamp: Instant?,
public val type: ArtifactType,
public val format: ArtifactFormat,
public val os: OperatingSystem,
public val arch: Architecture,
public val jvm: Jvm,
public val size: Int
) {
public val name: String
get() {
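// Builds a descriptive file name of the form
// <prefix><base>[ms][-<os>][-<arch>][-b<build>][-<timestamp>]-openrs2#<id>.<ext>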
val builder = StringBuilder()
builder.append(format.getPrefix(os))
when (type) {
ArtifactType.CLIENT -> builder.append(game)
ArtifactType.CLIENT_GL -> builder.append("${game}_gl")
ArtifactType.GLUEGEN_RT -> builder.append("gluegen-rt")
else -> builder.append(type.name.lowercase())
}
if (jvm == Jvm.MICROSOFT) {
builder.append("ms")
}
if (os != OperatingSystem.INDEPENDENT) {
builder.append('-')
builder.append(os.name.lowercase())
}
if (arch != Architecture.INDEPENDENT) {
builder.append('-')
builder.append(arch.name.lowercase())
}
if (build != null) {
builder.append("-b")
builder.append(build)
}
if (timestamp != null) {
builder.append('-')
builder.append(
timestamp
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss"))
)
}
builder.append("-openrs2#")
builder.append(id)
builder.append('.')
builder.append(format.getExtension(os))
return builder.toString()
}
}
public data class ArtifactSource(
public val name: String?,
public val description: String?,
public val url: String?
)
public data class ArtifactLinkExport(
public val id: Long?,
public val build: CacheExporter.Build?,
public val timestamp: Instant?,
public val link: ArtifactLink
)
public class Artifact(
public val summary: ArtifactSummary,
public val crc32: Int,
public val sha1: ByteArray,
public val sources: List<ArtifactSource>,
public val links: List<ArtifactLinkExport>
) {
public val sha1Hex: String
get() = ByteBufUtil.hexDump(sha1)
}
public class ArtifactExport(
public val summary: ArtifactSummary,
buf: ByteBuf
) : DefaultByteBufHolder(buf)
public suspend fun list(): List<ArtifactSummary> {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT
a.blob_id,
g.name,
e.name,
a.build_major,
a.build_minor,
a.timestamp,
a.type,
a.format,
a.os,
a.arch,
a.jvm,
length(b.data) AS size
FROM artifacts a
JOIN blobs b ON b.id = a.blob_id
JOIN games g ON g.id = a.game_id
JOIN environments e ON e.id = a.environment_id
ORDER BY a.build_major ASC, a.timestamp ASC, a.type ASC, a.format ASC, a.os ASC, a.arch ASC, a.jvm ASC
""".trimIndent()
).use { stmt ->
stmt.executeQuery().use { rows ->
val artifacts = mutableListOf<ArtifactSummary>()
while (rows.next()) {
val id = rows.getLong(1)
val game = rows.getString(2)
val environment = rows.getString(3)
var buildMajor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(5)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(6)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(7).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(8).uppercase())
val os = OperatingSystem.valueOf(rows.getString(9).uppercase())
val arch = Architecture.valueOf(rows.getString(10).uppercase())
val jvm = Jvm.valueOf(rows.getString(11).uppercase())
val size = rows.getInt(12)
artifacts += ArtifactSummary(
id,
game,
environment,
build,
timestamp,
type,
format,
os,
arch,
jvm,
size
)
}
return@execute artifacts
}
}
}
}
public suspend fun get(id: Long): Artifact? {
return database.execute { connection ->
val sources = mutableListOf<ArtifactSource>()
val links = mutableListOf<ArtifactLinkExport>()
connection.prepareStatement(
"""
SELECT DISTINCT name, description, url
FROM artifact_sources
WHERE blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val name = rows.getString(1)
val description = rows.getString(2)
val url = rows.getString(3)
sources += ArtifactSource(name, description, url)
}
}
}
connection.prepareStatement(
"""
SELECT
a.blob_id,
a.build_major,
a.build_minor,
a.timestamp,
l.type,
l.format,
l.os,
l.arch,
l.jvm,
COALESCE(l.crc32, b.crc32),
l.sha1,
COALESCE(l.size, length(b.data))
FROM artifact_links l
LEFT JOIN blobs b ON b.sha1 = l.sha1
LEFT JOIN artifacts a ON a.blob_id = b.id
WHERE l.blob_id = ?
ORDER BY l.type, l.format, l.os, l.arch, l.jvm
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
while (rows.next()) {
var linkId: Long? = rows.getLong(1)
if (rows.wasNull()) {
linkId = null
}
var buildMajor: Int? = rows.getInt(2)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(4)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(5).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(6).uppercase())
val os = OperatingSystem.valueOf(rows.getString(7).uppercase())
val arch = Architecture.valueOf(rows.getString(8).uppercase())
val jvm = Jvm.valueOf(rows.getString(9).uppercase())
var crc32: Int? = rows.getInt(10)
if (rows.wasNull()) {
crc32 = null
}
val sha1 = rows.getBytes(11)
var size: Int? = rows.getInt(12)
if (rows.wasNull()) {
size = null
}
links += ArtifactLinkExport(
linkId,
build,
timestamp,
ArtifactLink(
type,
format,
os,
arch,
jvm,
crc32,
sha1,
size
)
)
}
}
}
connection.prepareStatement(
"""
SELECT
g.name,
e.name,
a.build_major,
a.build_minor,
a.timestamp,
a.type,
a.format,
a.os,
a.arch,
a.jvm,
length(b.data) AS size,
b.crc32,
b.sha1
FROM artifacts a
JOIN games g ON g.id = a.game_id
JOIN environments e ON e.id = a.environment_id
JOIN blobs b ON b.id = a.blob_id
WHERE a.blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val game = rows.getString(1)
val environment = rows.getString(2)
var buildMajor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor!!, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(5)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(6).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(7).uppercase())
val os = OperatingSystem.valueOf(rows.getString(8).uppercase())
val arch = Architecture.valueOf(rows.getString(9).uppercase())
val jvm = Jvm.valueOf(rows.getString(10).uppercase())
val size = rows.getInt(11)
val crc32 = rows.getInt(12)
val sha1 = rows.getBytes(13)
return@execute Artifact(
ArtifactSummary(
id,
game,
environment,
build,
timestamp,
type,
format,
os,
arch,
jvm,
size
), crc32, sha1, sources, links
)
}
}
}
}
public suspend fun export(id: Long): ArtifactExport? {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT
g.name,
e.name,
a.build_major,
a.build_minor,
a.timestamp,
a.type,
a.format,
a.os,
a.arch,
a.jvm,
b.data
FROM artifacts a
JOIN games g ON g.id = a.game_id
JOIN environments e ON e.id = a.environment_id
JOIN blobs b ON b.id = a.blob_id
WHERE a.blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val game = rows.getString(1)
val environment = rows.getString(2)
var buildMajor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMinor = null
}
val build = if (buildMajor != null) {
CacheExporter.Build(buildMajor, buildMinor)
} else {
null
}
val timestamp = rows.getTimestamp(5)?.toInstant()
val type = ArtifactType.valueOf(rows.getString(6).uppercase())
val format = ArtifactFormat.valueOf(rows.getString(7).uppercase())
val os = OperatingSystem.valueOf(rows.getString(8).uppercase())
val arch = Architecture.valueOf(rows.getString(9).uppercase())
val jvm = Jvm.valueOf(rows.getString(10).uppercase())
val buf = Unpooled.wrappedBuffer(rows.getBytes(11))
val size = buf.readableBytes()
return@execute ArtifactExport(
ArtifactSummary(
id,
game,
environment,
build,
timestamp,
type,
format,
os,
arch,
jvm,
size
), buf
)
}
}
}
}
}

@@ -1,997 +0,0 @@
package org.openrs2.archive.client
import com.github.michaelbull.logging.InlineLogger
import com.kichik.pecoff4j.PE
import com.kichik.pecoff4j.constant.MachineType
import com.kichik.pecoff4j.io.PEParser
import dorkbox.cabParser.CabParser
import dorkbox.cabParser.CabStreamSaver
import dorkbox.cabParser.structure.CabFileEntry
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.ByteBufInputStream
import io.netty.buffer.ByteBufOutputStream
import io.netty.buffer.Unpooled
import io.netty.util.ByteProcessor
import jakarta.inject.Inject
import jakarta.inject.Singleton
import net.fornwall.jelf.ElfFile
import net.fornwall.jelf.ElfSymbol
import org.objectweb.asm.Opcodes
import org.objectweb.asm.tree.AbstractInsnNode
import org.objectweb.asm.tree.ClassNode
import org.objectweb.asm.tree.JumpInsnNode
import org.objectweb.asm.tree.LdcInsnNode
import org.objectweb.asm.tree.MethodInsnNode
import org.objectweb.asm.tree.TypeInsnNode
import org.openrs2.archive.cache.CacheExporter
import org.openrs2.archive.cache.CacheImporter
import org.openrs2.asm.InsnMatcher
import org.openrs2.asm.classpath.Library
import org.openrs2.asm.getArgumentExpressions
import org.openrs2.asm.hasCode
import org.openrs2.asm.intConstant
import org.openrs2.asm.io.CabLibraryReader
import org.openrs2.asm.io.JarLibraryReader
import org.openrs2.asm.io.LibraryReader
import org.openrs2.asm.io.Pack200LibraryReader
import org.openrs2.asm.io.PackClassLibraryReader
import org.openrs2.asm.nextReal
import org.openrs2.buffer.use
import org.openrs2.compress.gzip.Gzip
import org.openrs2.db.Database
import org.openrs2.util.io.entries
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.OutputStream
import java.nio.file.Files
import java.nio.file.Path
import java.sql.Connection
import java.sql.Types
import java.time.Instant
import java.time.LocalDate
import java.time.Month
import java.time.ZoneOffset
import java.util.jar.JarInputStream
import java.util.jar.JarOutputStream
import java.util.jar.Pack200
import kotlin.io.path.getLastModifiedTime
@Singleton
public class ClientImporter @Inject constructor(
private val database: Database,
private val alloc: ByteBufAllocator,
private val packClassLibraryReader: PackClassLibraryReader,
private val importer: CacheImporter
) {
public suspend fun import(
paths: Iterable<Path>,
name: String?,
description: String?,
url: String?,
skipErrors: Boolean
) {
alloc.buffer().use { buf ->
for (path in paths) {
buf.clear()
Files.newInputStream(path).use { input ->
ByteBufOutputStream(buf).use { output ->
input.copyTo(output)
}
}
logger.info { "Importing $path" }
try {
import(
parse(buf),
name,
description,
url,
path.fileName.toString(),
path.getLastModifiedTime().toInstant()
)
} catch (t: Throwable) {
if (skipErrors) {
logger.warn(t) { "Failed to import $path" }
continue
}
throw t
}
}
}
}
public suspend fun import(
artifact: Artifact,
name: String?,
description: String?,
url: String?,
fileName: String,
timestamp: Instant
) {
database.execute { connection ->
importer.prepare(connection)
val id = import(connection, artifact)
connection.prepareStatement(
"""
INSERT INTO artifact_sources (blob_id, name, description, url, file_name, timestamp)
VALUES (?, ?, ?, ?, ?, ?)
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.setString(2, name)
stmt.setString(3, description)
stmt.setString(4, url)
stmt.setString(5, fileName)
stmt.setObject(6, timestamp.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
stmt.execute()
}
}
}
private fun import(connection: Connection, artifact: Artifact): Long {
val id = importer.addBlob(connection, artifact)
val gameId = connection.prepareStatement(
"""
SELECT id
FROM games
WHERE name = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, artifact.game)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
throw IllegalArgumentException()
}
rows.getInt(1)
}
}
val environmentId = connection.prepareStatement(
"""
SELECT id
FROM environments
WHERE name = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, artifact.environment)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
throw IllegalArgumentException()
}
rows.getInt(1)
}
}
connection.prepareStatement(
"""
INSERT INTO artifacts (blob_id, game_id, environment_id, build_major, build_minor, timestamp, type, format, os, arch, jvm)
VALUES (?, ?, ?, ?, ?, ?, ?::artifact_type, ?::artifact_format, ?::os, ?::arch, ?::jvm)
ON CONFLICT (blob_id) DO UPDATE SET
game_id = EXCLUDED.game_id,
environment_id = EXCLUDED.environment_id,
build_major = EXCLUDED.build_major,
build_minor = EXCLUDED.build_minor,
timestamp = EXCLUDED.timestamp,
type = EXCLUDED.type,
format = EXCLUDED.format,
os = EXCLUDED.os,
arch = EXCLUDED.arch,
jvm = EXCLUDED.jvm
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.setInt(2, gameId)
stmt.setInt(3, environmentId)
stmt.setObject(4, artifact.build?.major, Types.INTEGER)
stmt.setObject(5, artifact.build?.minor, Types.INTEGER)
stmt.setObject(6, artifact.timestamp?.atOffset(ZoneOffset.UTC), Types.TIMESTAMP_WITH_TIMEZONE)
stmt.setString(7, artifact.type.name.lowercase())
stmt.setString(8, artifact.format.name.lowercase())
stmt.setString(9, artifact.os.name.lowercase())
stmt.setString(10, artifact.arch.name.lowercase())
stmt.setString(11, artifact.jvm.name.lowercase())
stmt.execute()
}
connection.prepareStatement(
"""
DELETE FROM artifact_links
WHERE blob_id = ?
""".trimIndent()
).use { stmt ->
stmt.setLong(1, id)
stmt.execute()
}
connection.prepareStatement(
"""
INSERT INTO artifact_links (blob_id, type, format, os, arch, jvm, sha1, crc32, size)
VALUES (?, ?::artifact_type, ?::artifact_format, ?::os, ?::arch, ?::jvm, ?, ?, ?)
""".trimIndent()
).use { stmt ->
for (link in artifact.links) {
stmt.setLong(1, id)
stmt.setString(2, link.type.name.lowercase())
stmt.setString(3, link.format.name.lowercase())
stmt.setString(4, link.os.name.lowercase())
stmt.setString(5, link.arch.name.lowercase())
stmt.setString(6, link.jvm.name.lowercase())
stmt.setBytes(7, link.sha1)
stmt.setObject(8, link.crc32, Types.INTEGER)
stmt.setObject(9, link.size, Types.INTEGER)
stmt.addBatch()
}
stmt.executeBatch()
}
return id
}
public suspend fun refresh() {
data class Blob(val id: Long, val bytes: ByteArray)
database.execute { connection ->
importer.prepare(connection)
var lastId: Long? = null
val blobs = mutableListOf<Blob>()
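// Re-parses every stored artifact in batches of 1024 rows, keyed on the
// last blob ID seen, and re-imports the derived metadata.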
while (true) {
blobs.clear()
connection.prepareStatement(
"""
SELECT a.blob_id, b.data
FROM artifacts a
JOIN blobs b ON b.id = a.blob_id
WHERE ? IS NULL OR a.blob_id > ?
ORDER BY a.blob_id ASC
LIMIT 1024
""".trimIndent()
).use { stmt ->
stmt.setObject(1, lastId, Types.BIGINT)
stmt.setObject(2, lastId, Types.BIGINT)
stmt.executeQuery().use { rows ->
while (rows.next()) {
val id = rows.getLong(1)
lastId = id
blobs += Blob(id, rows.getBytes(2))
}
}
}
if (blobs.isEmpty()) {
return@execute
}
for (blob in blobs) {
logger.info { "Refreshing artifact ${blob.id}" }
Unpooled.wrappedBuffer(blob.bytes).use { buf ->
import(connection, parse(buf))
}
}
}
}
}
private fun parse(buf: ByteBuf): Artifact {
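// Sniff the artifact's format from its magic bytes: JAR, Pack200, CAB,
// packclass (uncompressed, bzip2 or gzip), ELF, PE or Mach-O.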
return if (buf.hasPrefix(JAR)) {
parseJar(buf)
} else if (buf.hasPrefix(PACK200)) {
parsePack200(buf)
} else if (buf.hasPrefix(CAB)) {
parseCab(buf)
} else if (
buf.hasPrefix(PACKCLASS_UNCOMPRESSED) ||
buf.hasPrefix(PACKCLASS_BZIP2) ||
buf.hasPrefix(PACKCLASS_GZIP)
) {
parseLibrary(buf, packClassLibraryReader, ArtifactFormat.PACKCLASS)
} else if (buf.hasPrefix(ELF)) {
parseElf(buf)
} else if (buf.hasPrefix(PE)) {
parsePe(buf)
} else if (
buf.hasPrefix(MACHO32BE) ||
buf.hasPrefix(MACHO32LE) ||
buf.hasPrefix(MACHO64BE) ||
buf.hasPrefix(MACHO64LE) ||
buf.hasPrefix(MACHO_UNIVERSAL)
) {
parseMachO(buf)
} else {
throw IllegalArgumentException()
}
}
private fun parseElf(buf: ByteBuf): Artifact {
val elf = ElfFile.from(ByteBufInputStream(buf.slice()))
val arch = when (elf.e_machine.toInt()) {
ElfFile.ARCH_i386 -> Architecture.X86
ElfFile.ARCH_X86_64 -> Architecture.AMD64
ElfFile.ARCH_SPARC -> Architecture.SPARC
ARCH_SPARCV9 -> Architecture.SPARCV9
else -> throw IllegalArgumentException()
}
val comment = String(elf.firstSectionByName(".comment").data)
val os = if (comment.contains(SOLARIS_COMMENT)) {
OperatingSystem.SOLARIS
} else {
OperatingSystem.LINUX
}
val symbols = elf.dynamicSymbolTableSection ?: throw IllegalArgumentException()
val type = getArtifactType(symbols.symbols.asSequence().mapNotNull(ElfSymbol::getName))
return Artifact(
buf.retain(),
"shared",
"live",
null,
null,
type,
ArtifactFormat.NATIVE,
os,
arch,
Jvm.SUN,
emptyList()
)
}
private fun getArtifactType(symbols: Sequence<String>): ArtifactType {
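// Classifies a native library by the prefixes of its exported JNI/RNI
// symbol names.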
for (symbol in symbols) {
var name = symbol
if (name.startsWith('_')) {
name = name.substring(1)
}
if (name.startsWith("Java_")) { // RNI methods don't have a Java_ prefix
name = name.substring("Java_".length)
}
if (name.startsWith("jaggl_X11_dri_")) {
return ArtifactType.JAGGL_DRI
} else if (name.startsWith("jaggl_opengl_")) {
return ArtifactType.JAGGL
} else if (name.startsWith("com_sun_opengl_impl_GLImpl_")) {
return ArtifactType.JOGL
} else if (name.startsWith("com_sun_opengl_impl_JAWT_")) {
return ArtifactType.JOGL_AWT
} else if (name.startsWith("com_sun_gluegen_runtime_")) {
return ArtifactType.GLUEGEN_RT
} else if (name.startsWith("jagex3_jagmisc_jagmisc_")) {
return ArtifactType.JAGMISC
} else if (name.startsWith("nativeadvert_browsercontrol_")) {
return ArtifactType.BROWSERCONTROL
}
}
throw IllegalArgumentException()
}
private fun parsePe(buf: ByteBuf): Artifact {
val pe = PEParser.parse(ByteBufInputStream(buf.slice()))
val arch = when (pe.coffHeader.machine) {
MachineType.IMAGE_FILE_MACHINE_I386 -> Architecture.X86
MachineType.IMAGE_FILE_MACHINE_AMD64 -> Architecture.AMD64
else -> throw IllegalArgumentException()
}
val symbols = parsePeExportNames(buf, pe).toSet()
val type = getArtifactType(symbols.asSequence())
val jvm = if (symbols.contains("RNIGetCompatibleVersion")) {
Jvm.MICROSOFT
} else {
Jvm.SUN
}
return Artifact(
buf.retain(),
"shared",
"live",
null,
Instant.ofEpochSecond(pe.coffHeader.timeDateStamp.toLong()),
type,
ArtifactFormat.NATIVE,
OperatingSystem.WINDOWS,
arch,
jvm,
emptyList()
)
}
private fun parsePeExportNames(buf: ByteBuf, pe: PE): Sequence<String> {
return sequence {
val exportTable = pe.imageData.exportTable
val namePointerTable =
pe.sectionTable.rvaConverter.convertVirtualAddressToRawDataPointer(exportTable.namePointerRVA.toInt())
for (i in 0 until exportTable.numberOfNamePointers.toInt()) {
val namePointerRva = buf.readerIndex() + buf.getIntLE(buf.readerIndex() + namePointerTable + 4 * i)
val namePointer = pe.sectionTable.rvaConverter.convertVirtualAddressToRawDataPointer(namePointerRva)
val end = buf.forEachByte(namePointer, buf.writerIndex() - namePointer, ByteProcessor.FIND_NUL)
require(end != -1) {
"Unterminated string"
}
yield(buf.toString(namePointer, end - namePointer, Charsets.US_ASCII))
}
}
}
private fun parseMachO(buf: ByteBuf): Artifact {
val (arch, symbols) = MachO.parse(buf.slice())
val type = getArtifactType(symbols.asSequence())
return Artifact(
buf.retain(),
"shared",
"live",
null,
null,
type,
ArtifactFormat.NATIVE,
OperatingSystem.MACOS,
arch,
Jvm.SUN,
emptyList()
)
}
private fun parseJar(buf: ByteBuf): Artifact {
val timestamp = getJarTimestamp(ByteBufInputStream(buf.slice()))
return parseLibrary(buf, JarLibraryReader, ArtifactFormat.JAR, timestamp)
}
private fun parsePack200(buf: ByteBuf): Artifact {
val timestamp = ByteArrayOutputStream().use { tempOutput ->
Gzip.createHeaderlessInputStream(ByteBufInputStream(buf.slice())).use { gzipInput ->
JarOutputStream(tempOutput).use { jarOutput ->
Pack200.newUnpacker().unpack(gzipInput, jarOutput)
}
}
getJarTimestamp(ByteArrayInputStream(tempOutput.toByteArray()))
}
return parseLibrary(buf, Pack200LibraryReader, ArtifactFormat.PACK200, timestamp)
}
private fun parseCab(buf: ByteBuf): Artifact {
val timestamp = getCabTimestamp(ByteBufInputStream(buf.slice()))
return parseLibrary(buf, CabLibraryReader, ArtifactFormat.CAB, timestamp)
}
private fun getJarTimestamp(input: InputStream): Instant? {
var timestamp: Instant? = null
JarInputStream(input).use { jar ->
for (entry in jar.entries) {
val t = entry.lastModifiedTime?.toInstant()
if (timestamp == null || (t != null && t < timestamp)) {
timestamp = t
}
}
}
return timestamp
}
private fun getCabTimestamp(input: InputStream): Instant? {
var timestamp: Instant? = null
CabParser(input, object : CabStreamSaver {
override fun closeOutputStream(outputStream: OutputStream, entry: CabFileEntry) {
// no-op: only the entry timestamps are needed
}
override fun openOutputStream(entry: CabFileEntry): OutputStream {
val t = entry.date.toInstant()
if (timestamp == null || t < timestamp) {
timestamp = t
}
return OutputStream.nullOutputStream()
}
override fun saveReservedAreaData(data: ByteArray?, dataLength: Int): Boolean {
return false
}
}).extractStream()
return timestamp
}
private fun parseLibrary(
buf: ByteBuf,
reader: LibraryReader,
format: ArtifactFormat,
timestamp: Instant? = null
): Artifact {
val library = Library.read("client", ByteBufInputStream(buf.slice()), reader)
val game: String
val build: CacheExporter.Build?
val type: ArtifactType
val links: List<ArtifactLink>
val mudclient = library["mudclient"]
val client = library["client"]
val loader = library["loader"]
if (mudclient != null) {
game = "classic"
build = null // TODO(gpe): classic support
type = ArtifactType.CLIENT
links = emptyList()
} else if (client != null) {
game = "runescape"
build = parseClientBuild(library, client)
type = if (build != null && build.major < COMBINED_BUILD && isClientGl(library)) {
ArtifactType.CLIENT_GL
} else {
ArtifactType.CLIENT
}
links = emptyList()
} else if (loader != null) {
if (isLoaderClassic(loader)) {
game = "classic"
build = null // TODO(gpe): classic support
type = ArtifactType.LOADER
links = emptyList() // TODO(gpe): classic support
} else {
game = "runescape"
build = parseSignLinkBuild(library)
type = if (timestamp != null && timestamp < COMBINED_TIMESTAMP && isLoaderGl(library)) {
ArtifactType.LOADER_GL
} else {
ArtifactType.LOADER
}
links = parseLinks(library)
}
} else if (library.contains("mapview")) {
game = "mapview"
build = null
type = ArtifactType.CLIENT
links = emptyList()
} else if (library.contains("loginapplet")) {
game = "loginapplet"
build = null
type = ArtifactType.CLIENT
links = emptyList()
} else if (library.contains("passwordapp")) {
game = "passapplet"
build = null
type = ArtifactType.CLIENT
links = emptyList()
} else if (library.contains("jaggl/opengl")) {
game = "shared"
type = ArtifactType.JAGGL
build = null
links = emptyList()
} else if (library.contains("com/sun/opengl/impl/GLImpl")) {
game = "shared"
type = ArtifactType.JOGL
build = null
links = emptyList()
} else if (library.contains("unpackclass")) {
game = "shared"
type = ArtifactType.UNPACKCLASS
build = null
links = emptyList()
} else {
throw IllegalArgumentException()
}
return Artifact(
buf.retain(),
game,
"live",
build,
timestamp,
type,
format,
OperatingSystem.INDEPENDENT,
Architecture.INDEPENDENT,
Jvm.INDEPENDENT,
links
)
}
private fun isClientGl(library: Library): Boolean {
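// A reference to glBegin anywhere in the library marks the build as the OpenGL (CLIENT_GL) variant.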
for (clazz in library) {
for (method in clazz.methods) {
if (!method.hasCode) {
continue
}
for (insn in method.instructions) {
if (insn is MethodInsnNode && insn.name == "glBegin") {
return true
}
}
}
}
return false
}
private fun isLoaderClassic(clazz: ClassNode): Boolean {
for (method in clazz.methods) {
if (!method.hasCode) {
continue
}
for (insn in method.instructions) {
if (insn is LdcInsnNode && insn.cst == "mudclient") {
return true
}
}
}
return false
}
private fun isLoaderGl(library: Library): Boolean {
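// The GL loader's static initialisers reference the native library names jaggl.dll or jogl.dll.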
for (clazz in library) {
for (method in clazz.methods) {
if (!method.hasCode || method.name != "<clinit>") {
continue
}
for (insn in method.instructions) {
if (insn !is LdcInsnNode) {
continue
}
if (insn.cst == "jaggl.dll" || insn.cst == "jogl.dll") {
return true
}
}
}
}
return false
}
private fun parseClientBuild(library: Library, clazz: ClassNode): CacheExporter.Build? {
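// Old-engine clients embed the build number after the 'RS2 user client - release #' string.
// New-engine clients pass it as a constant argument to the client constructor in main(), so collect
// int constants between 'new client' and RETURN, discard the known applet resolutions and accept a
// single remaining candidate in the plausible build range, falling back to the signlink clientversion.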
for (method in clazz.methods) {
if (!method.hasCode || method.name != "main") {
continue
}
for (match in OLD_ENGINE_VERSION_MATCHER.match(method)) {
val ldc = match[0] as LdcInsnNode
if (ldc.cst != OLD_ENGINE_VERSION_STRING) {
continue
}
val version = match[2].intConstant
if (version != null) {
return CacheExporter.Build(version, null)
}
}
var betweenNewAndReturn = false
val candidates = mutableListOf<Int>()
for (insn in method.instructions) {
if (insn is TypeInsnNode && insn.desc == "client") {
betweenNewAndReturn = true
} else if (insn.opcode == Opcodes.RETURN) {
break
} else if (betweenNewAndReturn) {
val candidate = insn.intConstant
if (candidate != null && candidate in NEW_ENGINE_BUILDS) {
candidates += candidate
}
}
}
for (build in NEW_ENGINE_RESOLUTIONS) {
candidates -= build
}
val version = candidates.singleOrNull()
if (version != null) {
return CacheExporter.Build(version, null)
}
}
return parseSignLinkBuild(library)
}
private fun parseSignLinkBuild(library: Library): CacheExporter.Build? {
val clazz = library["sign/signlink"] ?: return null
for (field in clazz.fields) {
val value = field.value
if (field.name == "clientversion" && field.desc == "I" && value is Int) {
return CacheExporter.Build(value, null)
}
}
return null
}
private fun parseLinks(library: Library): List<ArtifactLink> {
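// Recover the links the loader verifies: old loaders expose the client JAR's expected length and
// SHA-1 via the 'sig' class; newer loaders are scanned for downloaded file names and the hard-coded
// digests they are compared against.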
val sig = library["sig"]
if (sig != null) {
var size: Int? = null
var sha1: ByteArray? = null
for (field in sig.fields) {
val value = field.value
if (field.name == "len" && field.desc == "I" && value is Int) {
size = value
}
}
for (method in sig.methods) {
if (!method.hasCode || method.name != "<clinit>") {
continue
}
for (match in SHA1_MATCHER.match(method)) {
val len = match[0].intConstant
if (len != SHA1_BYTES) {
continue
}
sha1 = ByteArray(SHA1_BYTES)
for (i in 2 until match.size step 4) {
val k = match[i + 1].intConstant!!
val v = match[i + 2].intConstant!!
sha1[k] = v.toByte()
}
}
}
require(size != null && sha1 != null)
return listOf(
ArtifactLink(
ArtifactType.CLIENT,
ArtifactFormat.JAR,
OperatingSystem.INDEPENDENT,
Architecture.INDEPENDENT,
Jvm.INDEPENDENT,
crc32 = null,
sha1,
size
)
)
}
val loader = library["loader"]
if (loader != null) {
val links = mutableListOf<ArtifactLink>()
val paths = mutableSetOf<String>()
for (method in loader.methods) {
if (method.name != "run" || method.desc != "()V") {
continue
}
for (insn in method.instructions) {
if (insn !is MethodInsnNode || insn.owner != loader.name || !insn.desc.endsWith(")[B")) {
continue
}
// TODO(gpe): extract file size too (tricky due to dummy arguments)
val exprs = getArgumentExpressions(insn) ?: continue
for (expr in exprs) {
val single = expr.singleOrNull() ?: continue
if (single !is LdcInsnNode) {
continue
}
val cst = single.cst
if (cst is String && FILE_NAME_REGEX.matches(cst)) {
paths += cst
}
}
}
}
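// Rebuild each hard-coded SHA-1 digest from the byte-by-byte comparison sequences the loader uses
// to verify downloaded files.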
val hashes = mutableMapOf<AbstractInsnNode, ByteArray>()
for (method in loader.methods) {
for (match in SHA1_CMP_MATCHER.match(method)) {
val sha1 = ByteArray(SHA1_BYTES)
var i = 0
while (i < match.size) {
var n = match[i++].intConstant
if (n != null) {
i++ // ALOAD
}
val index = match[i++].intConstant!!
i++ // BALOAD
var xor = false
if (i + 1 < match.size && match[i + 1].opcode == Opcodes.IXOR) {
i += 2 // ICONST_M1, IXOR
xor = true
}
if (match[i].opcode == Opcodes.IFNE) {
n = 0
i++
} else {
if (n == null) {
n = match[i++].intConstant!!
}
i++ // ICMP_IFNE
}
if (xor) {
n = n.inv()
}
sha1[index] = n.toByte()
}
hashes[match[0]] = sha1
}
}
for (method in loader.methods) {
for (match in PATH_CMP_MATCHER.match(method)) {
val first = match[0]
val ldc = if (first is LdcInsnNode) {
first
} else {
match[1] as LdcInsnNode
}
val path = ldc.cst
if (path !is String) {
continue
}
val acmp = match[2] as JumpInsnNode
val target = if (acmp.opcode == Opcodes.IF_ACMPNE) {
acmp.nextReal
} else {
acmp.label.nextReal
}
val hash = hashes.remove(target) ?: continue
if (!paths.remove(path)) {
logger.warn { "Adding link for unused file $path" }
}
links += parseLink(path, hash)
}
}
if (paths.size != hashes.size || paths.size > 1) {
throw IllegalArgumentException()
} else if (paths.size == 1) {
links += parseLink(paths.single(), hashes.values.single())
}
return links
}
// TODO(gpe)
return emptyList()
}
private fun parseLink(path: String, sha1: ByteArray): ArtifactLink {
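// File names take the form name_<crc>.ext or name.ext?crc=<crc>; the CRC may appear in either
// position, so try both capture groups.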
val m = FILE_NAME_REGEX.matchEntire(path) ?: throw IllegalArgumentException()
val (name, crc1, ext, crc2) = m.destructured
val type = when (name) {
// TODO(gpe): funorb loaders
"runescape", "client" -> ArtifactType.CLIENT
"unpackclass" -> ArtifactType.UNPACKCLASS
"jogl", "jogltrimmed" -> ArtifactType.JOGL
"jogl_awt" -> ArtifactType.JOGL_AWT
else -> throw IllegalArgumentException()
}
val format = when (ext) {
"pack200" -> ArtifactFormat.PACK200
"js5" -> ArtifactFormat.PACKCLASS
"jar", "pack" -> ArtifactFormat.JAR
"dll" -> ArtifactFormat.NATIVE
else -> throw IllegalArgumentException()
}
val os = if (format == ArtifactFormat.NATIVE) OperatingSystem.WINDOWS else OperatingSystem.INDEPENDENT
val arch = if (format == ArtifactFormat.NATIVE) Architecture.X86 else Architecture.INDEPENDENT
val jvm = if (format == ArtifactFormat.NATIVE) Jvm.SUN else Jvm.INDEPENDENT
val crc = crc1.toIntOrNull() ?: crc2.toIntOrNull() ?: throw IllegalArgumentException()
return ArtifactLink(
type,
format,
os,
arch,
jvm,
crc,
sha1,
null
)
}
private fun ByteBuf.hasPrefix(bytes: ByteArray): Boolean {
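// Compares the first bytes of the buffer against the given magic number without consuming them.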
Unpooled.wrappedBuffer(bytes).use { prefix ->
val len = prefix.readableBytes()
if (readableBytes() < len) {
return false
}
return slice(readerIndex(), len) == prefix
}
}
private companion object {
private val logger = InlineLogger()
private val CAB = byteArrayOf('M'.code.toByte(), 'S'.code.toByte(), 'C'.code.toByte(), 'F'.code.toByte())
private val ELF = byteArrayOf(0x7F, 'E'.code.toByte(), 'L'.code.toByte(), 'F'.code.toByte())
private val JAR = byteArrayOf('P'.code.toByte(), 'K'.code.toByte(), 0x03, 0x04)
private val MACHO32BE = byteArrayOf(0xFE.toByte(), 0xED.toByte(), 0xFA.toByte(), 0xCE.toByte())
private val MACHO32LE = byteArrayOf(0xCE.toByte(), 0xFA.toByte(), 0xED.toByte(), 0xFE.toByte())
private val MACHO64BE = byteArrayOf(0xFE.toByte(), 0xED.toByte(), 0xFA.toByte(), 0xCF.toByte())
private val MACHO64LE = byteArrayOf(0xCF.toByte(), 0xFA.toByte(), 0xED.toByte(), 0xFE.toByte())
private val MACHO_UNIVERSAL = byteArrayOf(0xCA.toByte(), 0xFE.toByte(), 0xBA.toByte(), 0xBE.toByte())
private val PACK200 = byteArrayOf(0x08)
private val PACKCLASS_UNCOMPRESSED = byteArrayOf(0x00)
private val PACKCLASS_BZIP2 = byteArrayOf(0x01)
private val PACKCLASS_GZIP = byteArrayOf(0x02)
private val PE = byteArrayOf('M'.code.toByte(), 'Z'.code.toByte())
private const val OLD_ENGINE_VERSION_STRING = "RS2 user client - release #"
private val OLD_ENGINE_VERSION_MATCHER =
InsnMatcher.compile("LDC INVOKESPECIAL (ICONST | BIPUSH | SIPUSH | LDC)")
private val NEW_ENGINE_RESOLUTIONS = listOf(765, 503, 1024, 768)
private val NEW_ENGINE_BUILDS = 402..916
private const val COMBINED_BUILD = 555
private val COMBINED_TIMESTAMP = LocalDate.of(2009, Month.SEPTEMBER, 2)
.atStartOfDay(ZoneOffset.UTC)
.toInstant()
private const val ARCH_SPARCV9 = 43
private const val SOLARIS_COMMENT = "Solaris Link Editors:"
private const val SHA1_BYTES = 20
private val SHA1_MATCHER =
InsnMatcher.compile("BIPUSH NEWARRAY (DUP (ICONST | BIPUSH) (ICONST | BIPUSH | SIPUSH) IASTORE)+")
private val FILE_NAME_REGEX = Regex("([a-z_]+)(?:_(-?[0-9]+))?[.]([a-z0-9]+)(?:\\?crc=(-?[0-9]+))?")
private val SHA1_CMP_MATCHER =
InsnMatcher.compile("((ICONST | BIPUSH)? ALOAD (ICONST | BIPUSH) BALOAD (ICONST IXOR)? (ICONST | BIPUSH)? (IF_ICMPEQ | IF_ICMPNE | IFEQ | IFNE))+")
private val PATH_CMP_MATCHER = InsnMatcher.compile("(LDC ALOAD | ALOAD LDC) (IF_ACMPEQ | IF_ACMPNE)")
}
}

@@ -1,30 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.types.defaultStdout
import com.github.ajalt.clikt.parameters.types.long
import com.github.ajalt.clikt.parameters.types.outputStream
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
import java.io.FileNotFoundException
public class ExportCommand : CliktCommand(name = "export") {
private val id by argument().long()
private val output by argument().outputStream().defaultStdout()
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val exporter = injector.getInstance(ClientExporter::class.java)
val artifact = exporter.export(id) ?: throw FileNotFoundException()
try {
val buf = artifact.content()
buf.readBytes(output, buf.readableBytes())
} finally {
artifact.release()
}
}
}
}

@@ -1,32 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.arguments.multiple
import com.github.ajalt.clikt.parameters.options.flag
import com.github.ajalt.clikt.parameters.options.option
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class ImportCommand : CliktCommand(name = "import") {
private val name by option()
private val description by option()
private val url by option()
private val skipErrors by option().flag()
private val input by argument().path(
mustExist = true,
canBeDir = false,
mustBeReadable = true,
).multiple()
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(ClientImporter::class.java)
importer.import(input, name, description, url, skipErrors)
}
}
}

@@ -1,7 +0,0 @@
package org.openrs2.archive.client
public enum class Jvm {
INDEPENDENT,
SUN,
MICROSOFT
}

@@ -1,116 +0,0 @@
package org.openrs2.archive.client
import io.netty.buffer.ByteBuf
import org.openrs2.buffer.readString
public data class MachO(
public val architecture: Architecture,
public val symbols: Set<String>,
) {
public companion object {
private const val MACHO_UNIVERSAL = 0xCAFEBABE.toInt()
private const val MACHO32BE = 0xFEEDFACE.toInt()
private const val MACHO32LE = 0xCEFAEDFE.toInt()
private const val MACHO64BE = 0xFEEDFACF.toInt()
private const val MACHO64LE = 0xCFFAEDFE.toInt()
private const val CPU_TYPE_X86 = 0x7
private const val CPU_TYPE_AMD64 = 0x1000007
private const val CPU_TYPE_POWERPC = 0x12
private const val COMMAND_SYMTAB = 0x2
public fun parse(buf: ByteBuf): MachO {
val magic = buf.getInt(buf.readerIndex())
return if (magic == MACHO_UNIVERSAL) {
parseFat(buf)
} else {
parseMachO(buf)
}
}
private fun parseFat(buf: ByteBuf): MachO {
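// Universal (fat) binaries contain one Mach-O image per architecture; merge the symbol tables
// of every slice.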
buf.skipBytes(4)
val symbols = mutableSetOf<String>()
val count = buf.readInt()
for (i in 0 until count) {
buf.skipBytes(8)
val offset = buf.readInt()
val size = buf.readInt()
buf.skipBytes(4)
symbols += parseMachO(buf.slice(offset, size)).symbols
}
return MachO(Architecture.UNIVERSAL, symbols)
}
private fun parseMachO(buf: ByteBuf): MachO {
val magic = buf.readInt()
require(magic == MACHO32BE || magic == MACHO32LE || magic == MACHO64BE || magic == MACHO64LE)
val big = magic == MACHO32BE || magic == MACHO64BE
val x64 = magic == MACHO64LE || magic == MACHO64BE
val arch = when (if (big) buf.readInt() else buf.readIntLE()) {
CPU_TYPE_X86 -> Architecture.X86
CPU_TYPE_AMD64 -> Architecture.AMD64
CPU_TYPE_POWERPC -> Architecture.POWERPC
else -> throw IllegalArgumentException()
}
buf.skipBytes(4) // cpuSubType
buf.skipBytes(4) // fileType
val nCmds = if (big) buf.readInt() else buf.readIntLE()
buf.skipBytes(4) // sizeOfCmds
buf.skipBytes(4) // flags
if (x64) {
buf.skipBytes(4) // reserved
}
val symbols = parseCommands(buf, big, nCmds)
return MachO(arch, symbols)
}
private fun parseCommands(buf: ByteBuf, big: Boolean, count: Int): Set<String> {
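// Walk the load commands until the symbol table command (LC_SYMTAB) is found, then read the
// symbol names from its string table.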
for (i in 0 until count) {
val base = buf.readerIndex()
val command = if (big) buf.readInt() else buf.readIntLE()
val size = if (big) buf.readInt() else buf.readIntLE()
if (command == COMMAND_SYMTAB) {
buf.skipBytes(8)
val strOff = if (big) buf.readInt() else buf.readIntLE()
val strSize = if (big) buf.readInt() else buf.readIntLE()
return parseStringTable(buf.slice(strOff, strSize))
}
buf.readerIndex(base + size)
}
return emptySet()
}
private fun parseStringTable(buf: ByteBuf): Set<String> {
return buildSet {
while (buf.isReadable) {
val str = buf.readString(Charsets.US_ASCII)
if (str.isNotEmpty()) {
add(str)
}
}
}
}
}
}

@@ -1,43 +0,0 @@
package org.openrs2.archive.client
import io.ktor.http.ContentType
public enum class OperatingSystem {
INDEPENDENT,
WINDOWS,
MACOS,
LINUX,
SOLARIS;
public fun getPrefix(): String {
return when (this) {
INDEPENDENT -> throw IllegalArgumentException()
WINDOWS -> ""
else -> "lib"
}
}
public fun getExtension(): String {
return when (this) {
INDEPENDENT -> throw IllegalArgumentException()
WINDOWS -> "dll"
MACOS -> "dylib"
LINUX, SOLARIS -> "so"
}
}
public fun getContentType(): ContentType {
return when (this) {
INDEPENDENT -> throw IllegalArgumentException()
WINDOWS -> PE
MACOS -> MACHO
LINUX, SOLARIS -> ELF_SHARED
}
}
private companion object {
private val ELF_SHARED = ContentType("application", "x-sharedlib")
private val MACHO = ContentType("application", "x-mach-binary")
private val PE = ContentType("application", "vnd.microsoft.portable-executable")
}
}

@@ -1,16 +0,0 @@
package org.openrs2.archive.client
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class RefreshCommand : CliktCommand(name = "refresh") {
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(ClientImporter::class.java)
importer.refresh()
}
}
}

@@ -1,11 +0,0 @@
package org.openrs2.archive.game
public data class Game(
public val id: Int,
public val url: String?,
public val buildMajor: Int?,
public val buildMinor: Int?,
public val lastMasterIndexId: Int?,
public val languageId: Int,
public val scopeId: Int
)

@@ -1,58 +0,0 @@
package org.openrs2.archive.game
import jakarta.inject.Inject
import jakarta.inject.Singleton
import org.openrs2.db.Database
@Singleton
public class GameDatabase @Inject constructor(
private val database: Database
) {
public suspend fun getGame(name: String, environment: String, language: String): Game? {
return database.execute { connection ->
connection.prepareStatement(
"""
SELECT v.id, v.url, v.build_major, v.build_minor, v.last_master_index_id, v.language_id, g.scope_id
FROM game_variants v
JOIN games g ON g.id = v.game_id
JOIN environments e ON e.id = v.environment_id
JOIN languages l ON l.id = v.language_id
WHERE g.name = ? AND e.name = ? AND l.iso_code = ?
""".trimIndent()
).use { stmt ->
stmt.setString(1, name)
stmt.setString(2, environment)
stmt.setString(3, language)
stmt.executeQuery().use { rows ->
if (!rows.next()) {
return@execute null
}
val id = rows.getInt(1)
val url: String? = rows.getString(2)
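// getInt() returns 0 for SQL NULL, so wasNull() is checked to map nullable columns to Kotlin nulls.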
var buildMajor: Int? = rows.getInt(3)
if (rows.wasNull()) {
buildMajor = null
}
var buildMinor: Int? = rows.getInt(4)
if (rows.wasNull()) {
buildMinor = null
}
var lastMasterIndexId: Int? = rows.getInt(5)
if (rows.wasNull()) {
lastMasterIndexId = null
}
val languageId = rows.getInt(6)
val scopeId = rows.getInt(7)
return@execute Game(id, url, buildMajor, buildMinor, lastMasterIndexId, languageId, scopeId)
}
}
}
}
}

@@ -1,70 +0,0 @@
package org.openrs2.archive.jav
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.future.await
import kotlinx.coroutines.withContext
import org.openrs2.http.checkStatusCode
import java.io.BufferedReader
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.time.Duration
public data class JavConfig(
public val config: Map<String, String>,
public val params: Map<String, String>,
public val messages: Map<String, String>
) {
public companion object {
public suspend fun download(client: HttpClient, url: String): JavConfig {
val request = HttpRequest.newBuilder(URI(url))
.GET()
.timeout(Duration.ofSeconds(30))
.build()
val response = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()).await()
response.checkStatusCode()
return withContext(Dispatchers.IO) {
response.body().bufferedReader().use { reader ->
read(reader)
}
}
}
public fun read(reader: BufferedReader): JavConfig {
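// Lines starting with // or # are comments; msg= and param= lines populate their own maps, and
// every other key=value pair goes into the main config map.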
val config = mutableMapOf<String, String>()
val params = mutableMapOf<String, String>()
val messages = mutableMapOf<String, String>()
reader.lineSequence().map(String::trim).forEach { line ->
when {
line.startsWith("//") || line.startsWith("#") -> Unit
line.startsWith("msg=") -> {
val parts = line.substring("msg=".length).split("=", limit = 2)
if (parts.size == 2) {
messages[parts[0]] = parts[1]
}
}
line.startsWith("param=") -> {
val parts = line.substring("param=".length).split("=", limit = 2)
if (parts.size == 2) {
params[parts[0]] = parts[1]
}
}
else -> {
val parts = line.split("=", limit = 2)
if (parts.size == 2) {
config[parts[0]] = parts[1]
}
}
}
}
return JavConfig(config, params, messages)
}
}
}

@@ -1,65 +0,0 @@
package org.openrs2.archive.key
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import org.openrs2.buffer.use
import org.openrs2.crypto.SymmetricKey
import java.io.InputStream
public object BinaryKeyReader : KeyReader {
override fun read(input: InputStream): Sequence<SymmetricKey> {
Unpooled.wrappedBuffer(input.readBytes()).use { buf ->
val len = buf.readableBytes()
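// A 256 KiB file is a full 128x128 map-square table of 16-byte keys. Otherwise each record is a
// map square ID followed by a 16-byte key, with a 2-byte ID (18-byte records) or a 4-byte ID
// (20-byte records); the all-zero key is used to disambiguate when both sizes divide the length.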
if (len == (128 * 128 * 16)) {
val keys = read(buf, 0)
require(SymmetricKey.ZERO in keys)
return keys.asSequence()
}
val maybeShort = (len % 18) == 0
val maybeInt = (len % 20) == 0
if (maybeShort && !maybeInt) {
val keys = read(buf, 2)
require(SymmetricKey.ZERO in keys)
return keys.asSequence()
} else if (!maybeShort && maybeInt) {
val keys = read(buf, 4)
require(SymmetricKey.ZERO in keys)
return keys.asSequence()
} else if (maybeShort && maybeInt) {
val shortKeys = read(buf, 2)
val intKeys = read(buf, 4)
return if (SymmetricKey.ZERO in shortKeys && SymmetricKey.ZERO !in intKeys) {
shortKeys.asSequence()
} else if (SymmetricKey.ZERO !in shortKeys && SymmetricKey.ZERO in intKeys) {
intKeys.asSequence()
} else {
throw IllegalArgumentException("Failed to determine if map square IDs are 2 or 4 bytes")
}
} else {
throw IllegalArgumentException(
"Binary XTEA files must be exactly 256 KiB or a multiple of 18 or 20 bytes long"
)
}
}
}
private fun read(buf: ByteBuf, mapSquareLen: Int): Set<SymmetricKey> {
val keys = mutableSetOf<SymmetricKey>()
while (buf.isReadable) {
buf.skipBytes(mapSquareLen)
val k0 = buf.readInt()
val k1 = buf.readInt()
val k2 = buf.readInt()
val k3 = buf.readInt()
keys += SymmetricKey(k0, k1, k2, k3)
}
return keys
}
}

@@ -1,16 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class BruteForceCommand : CliktCommand(name = "brute-force") {
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val bruteForcer = injector.getInstance(KeyBruteForcer::class.java)
bruteForcer.bruteForce()
}
}
}

@@ -1,16 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class DownloadCommand : CliktCommand(name = "download") {
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(KeyImporter::class.java)
importer.download()
}
}
}

@@ -1,16 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class EntCommand : CliktCommand(name = "ent") {
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val exporter = injector.getInstance(KeyExporter::class.java)
println(exporter.analyse())
}
}
}

@@ -1,57 +0,0 @@
package org.openrs2.archive.key
import jakarta.inject.Inject
import jakarta.inject.Singleton
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.future.await
import kotlinx.coroutines.withContext
import org.openrs2.crypto.SymmetricKey
import org.openrs2.http.checkStatusCode
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.time.Duration
@Singleton
public class HdosKeyDownloader @Inject constructor(
private val client: HttpClient
) : KeyDownloader(KeySource.HDOS) {
override suspend fun getMissingUrls(seenUrls: Set<String>): Set<String> {
return setOf(ENDPOINT)
}
override suspend fun download(url: String): Sequence<SymmetricKey> {
val request = HttpRequest.newBuilder(URI(url))
.GET()
.timeout(Duration.ofSeconds(30))
.build()
val response = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()).await()
response.checkStatusCode()
return withContext(Dispatchers.IO) {
response.body().use { input ->
input.bufferedReader().use { reader ->
val keys = mutableSetOf<SymmetricKey>()
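// Each response line is expected to be CSV with the key in hex in the third column;
// malformed lines are skipped.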
for (line in reader.lineSequence()) {
val parts = line.split(',')
if (parts.size < 3) {
continue
}
val key = SymmetricKey.fromHexOrNull(parts[2]) ?: continue
keys += key
}
keys.asSequence()
}
}
}
}
private companion object {
private const val ENDPOINT = "https://api.hdos.dev/keys/get"
}
}

@@ -1,13 +0,0 @@
package org.openrs2.archive.key
import org.openrs2.crypto.SymmetricKey
import java.io.InputStream
public object HexKeyReader : KeyReader {
override fun read(input: InputStream): Sequence<SymmetricKey> {
return input.bufferedReader()
.lineSequence()
.map(SymmetricKey::fromHexOrNull)
.filterNotNull()
}
}

@@ -1,23 +0,0 @@
package org.openrs2.archive.key
import com.github.ajalt.clikt.core.CliktCommand
import com.github.ajalt.clikt.parameters.arguments.argument
import com.github.ajalt.clikt.parameters.types.path
import com.google.inject.Guice
import kotlinx.coroutines.runBlocking
import org.openrs2.archive.ArchiveModule
import org.openrs2.inject.CloseableInjector
public class ImportCommand : CliktCommand(name = "import") {
private val input by argument().path(
mustExist = true,
mustBeReadable = true
)
override fun run(): Unit = runBlocking {
CloseableInjector(Guice.createInjector(ArchiveModule)).use { injector ->
val importer = injector.getInstance(KeyImporter::class.java)
importer.import(input)
}
}
}

Some files were not shown because too many files have changed in this diff