diff --git a/.ci/Dockerfile.linux b/.ci/Dockerfile.linux deleted file mode 100644 index b46f7ff..0000000 --- a/.ci/Dockerfile.linux +++ /dev/null @@ -1,7 +0,0 @@ -FROM dtmt-ci-base-linux - -COPY . /src/dtmt -COPY --from=dtmt-ci-base-linux /src/*.lib /src/*.so /src/dtmt/lib/oodle/ -RUN --mount=type=cache,id=cargo-registry,target=/cargo/registry \ - --mount=type=cache,id=cargo-target,target=/src/dtmt/target \ - cargo build --release --locked diff --git a/.ci/Dockerfile.msvc b/.ci/Dockerfile.msvc deleted file mode 100644 index e8c9c32..0000000 --- a/.ci/Dockerfile.msvc +++ /dev/null @@ -1,35 +0,0 @@ -FROM dtmt-ci-base-msvc - -# Create dummy crates and copy their Cargo.toml, so that dependencies can be cached -RUN set -e; \ - cargo new --bin crates/dtmt; \ - cargo new --bin crates/dtmm; \ - cargo new --lib lib/dtmt-shared; \ - cargo new --lib lib/nexusmods; \ - cargo new --lib lib/sdk; \ - cargo new --lib lib/serde_sjson; \ - cargo new --lib lib/ansi-parser - -COPY Cargo.toml Cargo.lock /src/dtmt/ -COPY crates/dtmt/Cargo.toml /src/dtmt/crates/dtmt/ -COPY crates/dtmm/Cargo.toml /src/dtmt/crates/dtmm/ -COPY lib/dtmt-shared/Cargo.toml /src/dtmt/lib/dtmt-shared/ -COPY lib/nexusmods/Cargo.toml /src/dtmt/lib/nexusmods/ -COPY lib/sdk/Cargo.toml /src/dtmt/lib/sdk/ -COPY lib/serde_sjson/Cargo.toml /src/dtmt/lib/serde_sjson/ -COPY lib/ansi-parser/Cargo.toml /src/dtmt/lib/ansi-parser/ - -# Crates with build scripts cannot be split that way, but they shouldn't change too often -COPY lib/luajit2-sys /src/dtmt/lib/luajit2-sys -COPY lib/oodle /src/dtmt/lib/oodle -# color-eyre needs to be copied, too, then, as it's used by `oodle` -COPY lib/color-eyre /src/dtmt/lib/color-eyre -COPY --from=dtmt-ci-base-msvc /src/*.lib /src/dtmt/lib/oodle/ - -RUN cargo build --release --target x86_64-pc-windows-msvc --locked -Zbuild-std -RUN rm -r crates lib - -COPY . 
/src/dtmt -COPY --from=dtmt-ci-base-msvc /src/*.lib /src/dtmt/lib/oodle/ - -RUN cargo build --release --target x86_64-pc-windows-msvc --frozen -Zbuild-std diff --git a/.ci/image/Dockerfile b/.ci/image/Dockerfile deleted file mode 100644 index f115929..0000000 --- a/.ci/image/Dockerfile +++ /dev/null @@ -1,138 +0,0 @@ -# https://jake-shadle.github.io/xwin/ -FROM debian:bullseye-slim as xwin - -ARG XWIN_VERSION=0.5.2 -ARG XWIN_PREFIX="xwin-$XWIN_VERSION-x86_64-unknown-linux-musl" -ADD https://github.com/Jake-Shadle/xwin/releases/download/$XWIN_VERSION/$XWIN_PREFIX.tar.gz /root/$XWIN_PREFIX.tar.gz - -RUN set -eux; \ - apt-get update; \ - apt-get install --no-install-recommends -y \ - tar \ - ; \ - # Install xwin to cargo/bin via github release. Note you could also just use `cargo install xwin`. - tar -xzv -f /root/$XWIN_PREFIX.tar.gz -C /usr/bin --strip-components=1 $XWIN_PREFIX/xwin; \ - apt-get remove -y --auto-remove; \ - rm -rf \ - /var/lib/apt/lists/* \ - /root/$XWIN_PREFIX.tar.gz; - -RUN set -eux; \ - # Splat the CRT and SDK files to /xwin/crt and /xwin/sdk respectively - xwin \ - --log-level debug \ - --cache-dir /root/.xwin-cache \ - --manifest-version 16 \ - --accept-license \ - splat \ - --output /xwin; \ - # Even though this build step only exists temporary, to copy the - # final data out of, it still generates a cache entry on the Docker host. - # And to keep that to a minimum, we still delete the stuff we don't need. 
- rm -rf /root/.xwin-cache; - -FROM rust:slim-bullseye as linux - -RUN set -eux; \ - apt-get update; \ - apt-get install --no-install-recommends -y \ - build-essential \ - cmake \ - curl \ - git \ - gpg \ - jq \ - libatk1.0-dev \ - libclang-13-dev \ - libglib2.0-dev \ - libgtk-3-dev \ - libpango1.0-dev \ - libssl-dev \ - libzstd-dev \ - pkg-config; \ - apt-get remove -y --auto-remove; \ - rm -rf /var/lib/apt/lists/*; \ - rustup default nightly - -WORKDIR /src/dtmt - -COPY lib/oodle/*.so lib/oodle/*.a /src/ - -FROM linux as msvc - -ARG LLVM_VERSION=18 -ENV KEYRINGS /usr/local/share/keyrings - -ADD https://apt.llvm.org/llvm-snapshot.gpg.key /root/llvm-snapshot.gpg.key -ADD https://dl.winehq.org/wine-builds/winehq.key /root/winehq.key - -RUN set -eux; \ - mkdir -p $KEYRINGS; \ - # clang/lld/llvm - gpg --dearmor > $KEYRINGS/llvm.gpg < /root/llvm-snapshot.gpg.key; \ - # wine - gpg --dearmor > $KEYRINGS/winehq.gpg < /root/winehq.key; \ - echo "deb [signed-by=$KEYRINGS/llvm.gpg] http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.list; \ - echo "deb [signed-by=$KEYRINGS/winehq.gpg] https://dl.winehq.org/wine-builds/debian/ bullseye main" > /etc/apt/sources.list.d/winehq.list; \ - dpkg --add-architecture i386; \ - apt-get update; \ - apt-get install --no-install-recommends -y \ - libclang-${LLVM_VERSION}-dev \ - gcc-mingw-w64-x86-64 \ - clang-${LLVM_VERSION} \ - llvm-${LLVM_VERSION} \ - lld-${LLVM_VERSION} \ - winehq-staging \ - ; \ - # ensure that clang/clang++ are callable directly - ln -s clang-${LLVM_VERSION} /usr/bin/clang && ln -s clang /usr/bin/clang++ && ln -s lld-${LLVM_VERSION} /usr/bin/ld.lld; \ - # We also need to setup symlinks ourselves for the MSVC shims because they aren't in the debian packages - ln -s clang-${LLVM_VERSION} /usr/bin/clang-cl && ln -s llvm-ar-${LLVM_VERSION} /usr/bin/llvm-lib && ln -s lld-link-${LLVM_VERSION} /usr/bin/lld-link; \ - # Verify the symlinks are correct - clang++ -v; \ - 
ld.lld -v; \ - # Doesn't have an actual -v/--version flag, but it still exits with 0 - llvm-lib -v; \ - clang-cl -v; \ - lld-link --version; \ - # Use clang instead of gcc when compiling and linking binaries targeting the host (eg proc macros, build files) - update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100; \ - update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 100; \ - update-alternatives --install /usr/bin/ld ld /usr/bin/ld.lld 100; \ - rustup target add x86_64-pc-windows-msvc; \ - rustup component add rust-src; \ - # Remove unneeded files to reduce image size - apt-get remove -y --auto-remove; \ - rm -rf \ - /var/lib/apt/lists/* \ - /root/*.key; - -COPY lib/oodle/*.lib /src -COPY --from=xwin /xwin /xwin - -# Note that we're using the full target triple for each variable instead of the -# simple CC/CXX/AR shorthands to avoid issues when compiling any C/C++ code for -# build dependencies that need to compile and execute in the host environment -ENV CC_x86_64_pc_windows_msvc="clang-cl" \ - CXX_x86_64_pc_windows_msvc="clang-cl" \ - AR_x86_64_pc_windows_msvc="llvm-lib" \ - # wine can be quite spammy with log messages and they're generally uninteresting - WINEDEBUG="-all" \ - # Use wine to run test executables - CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUNNER="wine" \ - # Note that we only disable unused-command-line-argument here since clang-cl - # doesn't implement all of the options supported by cl, but the ones it doesn't - # are _generally_ not interesting. 
- CL_FLAGS="-Wno-unused-command-line-argument -fuse-ld=lld-link /imsvc/xwin/crt/include /imsvc/xwin/sdk/include/ucrt /imsvc/xwin/sdk/include/um /imsvc/xwin/sdk/include/shared" \ - # Let cargo know what linker to invoke if you haven't already specified it - # in a .cargo/config.toml file - CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER="lld-link" \ - CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUSTFLAGS="-Lnative=/xwin/crt/lib/x86_64 -Lnative=/xwin/sdk/lib/um/x86_64 -Lnative=/xwin/sdk/lib/ucrt/x86_64" - -# These are separate since docker/podman won't transform environment variables defined in the same ENV block -ENV CFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS" \ - CXXFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS" - -# Run wineboot just to setup the default WINEPREFIX so we don't do it every -# container run -RUN wine wineboot --init diff --git a/.ci/pipelines/base.yml b/.ci/pipelines/base.yml deleted file mode 100644 index 4e7ed14..0000000 --- a/.ci/pipelines/base.yml +++ /dev/null @@ -1,243 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config ---- - -# The actual CI pipeline that is run per branch -resource_types: -- name: gitea-package - type: registry-image - source: - repository: registry.sclu1034.dev/gitea-package - username: ((registry_user)) - password: ((registry_password)) - -- name: gitea-status - type: registry-image - source: - repository: registry.sclu1034.dev/gitea-status - username: ((registry_user)) - password: ((registry_password)) - -- name: gitea-pr - type: registry-image - source: - repository: registry.sclu1034.dev/gitea-pr - username: ((registry_user)) - password: ((registry_password)) - - -resources: -- name: repo - type: git - source: - uri: https://git.sclu1034.dev/bitsquid_dt/dtmt - branch: master - -- name: repo-pr - type: gitea-pr - source: - access_token: ((gitea_api_key)) - owner: ((owner)) - repo: ((repo)) - url: https://git.sclu1034.dev - -- 
name: gitea-package - type: gitea-package - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - type: generic - name: dtmt - - -- name: status-build-msvc - type: gitea-status - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - repo: dtmt - context: build/msvc - description: "Build for the target platform: msvc" - -- name: status-build-linux - type: gitea-status - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - repo: dtmt - context: build/linux - description: "Build for the target platform: linux" - - -jobs: -- name: set-pipelines - plan: - - in_parallel: - - get: repo-pr - trigger: true - - - get: repo - - - load_var: prs - file: repo-pr/prs.json - - - across: - - var: pr - values: ((.:prs)) - set_pipeline: dtmt-pr - file: repo/.ci/pipelines/pr.yml - public: true - vars: - pr: ((.:pr)) - gitea_api_key: ((gitea_api_key)) - registry_user: ((registry_user)) - registry_password: ((registry_password)) - instance_vars: - number: ((.:pr.number)) - - -- name: build-msvc - on_success: - put: state-success - resource: status-build-msvc - no_get: true - params: - state: success - sha: ((.:git_sha)) - - on_failure: - put: state-failure - resource: status-build-msvc - no_get: true - params: - state: failure - sha: ((.:git_sha)) - - plan: - - get: repo - trigger: true - - - load_var: git_sha - file: repo/.git/ref - - - put: state-pending - resource: status-build-msvc - no_get: true - params: - state: pending - sha: ((.:git_sha)) - - - task: build - file: repo/.ci/tasks/build.yml - vars: - pr: "" - target: msvc - registry_user: ((registry_user)) - registry_password: ((registry_password)) - - - load_var: version_number - reveal: true - file: artifact/version - - - put: package - resource: gitea-package - no_get: true - inputs: - - artifact - params: - version: ((.:version_number)) - fail_fast: true - override: true - globs: - - artifact/*.exe 
- - artifact/*.exe.sha256 - - - put: package - resource: gitea-package - no_get: true - inputs: - - artifact - params: - version: master - fail_fast: true - override: true - globs: - - artifact/*.exe - - artifact/*.exe.sha256 - -- name: build-linux - on_success: - put: state-success - resource: status-build-linux - no_get: true - params: - state: success - sha: ((.:git_sha)) - - on_failure: - put: state-failure - resource: status-build-linux - no_get: true - params: - state: failure - sha: ((.:git_sha)) - - plan: - - get: repo - trigger: true - - - load_var: git_sha - file: repo/.git/ref - - - put: state-pending - resource: status-build-linux - no_get: true - params: - state: pending - sha: ((.:git_sha)) - - - task: build - file: repo/.ci/tasks/build.yml - vars: - pr: "" - target: linux - gitea_url: https://git.sclu1034.dev - gitea_api_key: ((gitea_api_key)) - registry_user: ((registry_user)) - registry_password: ((registry_password)) - - - load_var: version_number - reveal: true - file: artifact/version - - - put: package - resource: gitea-package - no_get: true - inputs: - - artifact - params: - version: ((.:version_number)) - fail_fast: true - override: true - globs: - - artifact/dtmt - - artifact/dtmm - - artifact/dtmm.sha256 - - artifact/dtmt.sha256 - - - put: package - resource: gitea-package - no_get: true - inputs: - - artifact - params: - version: master - fail_fast: true - override: true - globs: - - artifact/dtmt - - artifact/dtmm - - artifact/dtmm.sha256 - - artifact/dtmt.sha256 diff --git a/.ci/pipelines/check.yml b/.ci/pipelines/check.yml deleted file mode 100644 index c8d5d47..0000000 --- a/.ci/pipelines/check.yml +++ /dev/null @@ -1,32 +0,0 @@ ---- - -# The actual CI pipeline that is run per branch - -resources: -- name: repo - type: git - source: - uri: https://git.sclu1034.dev/bitsquid_dt/dtmt - branch: ((branch)) - -jobs: -- name: build-msvc - plan: - - get: repo - trigger: true - - task: build - file: repo/.ci/tasks/build.yml - vars: - target: 
msvc - registry_user: ((registry_user)) - registry_password: ((registry_password)) -- name: build-linux - plan: - - get: repo - trigger: true - - task: build - file: repo/.ci/tasks/build.yml - vars: - target: linux - registry_user: ((registry_user)) - registry_password: ((registry_password)) diff --git a/.ci/pipelines/pr.yml b/.ci/pipelines/pr.yml deleted file mode 100644 index 5c8d7cd..0000000 --- a/.ci/pipelines/pr.yml +++ /dev/null @@ -1,227 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config ---- - -# The actual CI pipeline that is run per branch -resource_types: -- name: gitea-package - type: registry-image - source: - repository: registry.sclu1034.dev/gitea-package - username: ((registry_user)) - password: ((registry_password)) - -- name: gitea-status - type: registry-image - source: - repository: registry.sclu1034.dev/gitea-status - username: ((registry_user)) - password: ((registry_password)) - - -resources: -- name: repo - type: git - source: - uri: https://git.sclu1034.dev/bitsquid_dt/dtmt - branch: ((pr.head.ref)) - -- name: gitea-package - type: gitea-package - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - type: generic - name: dtmt - -- name: pr-status-lint-clippy - type: gitea-status - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - repo: dtmt - context: lint/clippy - description: Checking for common mistakes and opportunities for code improvement - -- name: pr-status-build-msvc - type: gitea-status - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - repo: dtmt - context: build/msvc - description: "Build for the target platform: msvc" - -- name: pr-status-build-linux - type: gitea-status - source: - access_token: ((gitea_api_key)) - url: https://git.sclu1034.dev - owner: bitsquid_dt - repo: 
dtmt - context: build/linux - description: "Build for the target platform: linux" - - -jobs: -- name: clippy - on_success: - put: state-success - resource: pr-status-lint-clippy - no_get: true - params: - state: success - sha: ((.:git_sha)) - - on_failure: - put: state-failure - resource: pr-status-lint-clippy - no_get: true - params: - state: failure - sha: ((.:git_sha)) - - plan: - - get: repo - trigger: true - - - load_var: git_sha - file: repo/.git/ref - - - put: state-pending - resource: pr-status-lint-clippy - no_get: true - params: - state: pending - sha: ((.:git_sha)) - - - task: check - file: repo/.ci/tasks/clippy.yml - vars: - gitea_api_key: ((gitea_api_key)) - registry_user: ((registry_user)) - registry_password: ((registry_password)) - - -- name: build-msvc - on_success: - put: state-success - resource: pr-status-build-msvc - no_get: true - params: - state: success - sha: ((.:git_sha)) - - on_failure: - put: state-failure - resource: pr-status-build-msvc - no_get: true - params: - state: failure - sha: ((.:git_sha)) - - plan: - - get: repo - trigger: true - - - load_var: git_sha - file: repo/.git/ref - - - put: state-pending - resource: pr-status-build-msvc - no_get: true - params: - state: pending - sha: ((.:git_sha)) - - - task: build - file: repo/.ci/tasks/build.yml - vars: - target: msvc - pr: ((pr)) - gitea_url: https://git.sclu1034.dev - gitea_api_key: ((gitea_api_key)) - registry_user: ((registry_user)) - registry_password: ((registry_password)) - - - load_var: version_number - reveal: true - file: artifact/version - - - put: package - resource: gitea-package - no_get: true - inputs: - - artifact - params: - version: ((.:version_number)) - fail_fast: true - override: true - globs: - - artifact/dtmt - - artifact/dtmm - - artifact/*.exe - - artifact/*.sha256 - -- name: build-linux - on_success: - put: state-success - resource: pr-status-build-linux - no_get: true - params: - state: success - sha: ((.:git_sha)) - - on_failure: - put: state-failure - 
resource: pr-status-build-linux - no_get: true - params: - state: failure - sha: ((.:git_sha)) - - plan: - - get: repo - trigger: true - - - load_var: git_sha - file: repo/.git/ref - - - put: state-pending - resource: pr-status-build-linux - no_get: true - params: - state: pending - sha: ((.:git_sha)) - - - task: build - file: repo/.ci/tasks/build.yml - vars: - target: linux - pr: ((pr)) - gitea_url: https://git.sclu1034.dev - gitea_api_key: ((gitea_api_key)) - registry_user: ((registry_user)) - registry_password: ((registry_password)) - - - load_var: version_number - reveal: true - file: artifact/version - - - put: package - resource: gitea-package - no_get: true - inputs: - - artifact - params: - version: ((.:version_number)) - fail_fast: true - override: true - globs: - - artifact/dtmt - - artifact/dtmm - - artifact/*.exe - - artifact/*.sha256 - diff --git a/.ci/tasks/build.sh b/.ci/tasks/build.sh deleted file mode 100755 index b362266..0000000 --- a/.ci/tasks/build.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -set -eu - -if [ -n "$OUTPUT" ]; then - OUTPUT="$PWD/$OUTPUT" -else - OUTPUT=$(mktemp -d) -fi - -title() { - printf "\033[1m%s\033[0m\n" "$1" -} - -install_artifact() { - install -v -t "$OUTPUT/" "$1" - sha256sum "$1" | cut -d' ' -f1 > "$OUTPUT/$(basename "$1").sha256" -} - -cd "repo" - -PR=${PR:-} -ref=$(cat .git/ref || echo "HEAD") - -if [ -n "$PR" ]; then - title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')" - ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$ref" 2>/dev/null || echo 'manual')" -elif [ -f ".git/branch" ]; then - ref=$(cat .git/branch)-$(git rev-parse --short "$ref") -else - ref=$(git rev-parse --short "$ref") -fi - -title "Version: '$ref'" -echo "$ref" > "$OUTPUT/version" - -case "$TARGET" in - msvc) - cp /src/*.lib ./lib/oodle/ - - title "Building project for target $TARGET" - cargo build --color always --locked --release --target x86_64-pc-windows-msvc -Zbuild-std - - title "Install artifacts" - 
install_artifact target/x86_64-pc-windows-msvc/release/dtmt.exe - install_artifact target/x86_64-pc-windows-msvc/release/dtmm.exe - ;; - linux) - cp /src/*.a ./lib/oodle/ - - title "Building project for target $TARGET" - cargo build --color always --locked --profile release-lto - - title "Installing artifacts" - install_artifact target/release-lto/dtmt - install_artifact target/release-lto/dtmm - ;; - *) - echo -e "\033[31;1mEnv var 'TARGET' must either be 'msvc' or 'linux'. Got '$TARGET'.\033[0m" >&2 - exit 1 -esac - -title "Done" diff --git a/.ci/tasks/build.yml b/.ci/tasks/build.yml deleted file mode 100644 index 9b9c094..0000000 --- a/.ci/tasks/build.yml +++ /dev/null @@ -1,31 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig ---- -platform: linux - -image_resource: - name: ctmt-bi-base-((target)) - type: registry-image - source: - repository: registry.sclu1034.dev/dtmt-ci-base-((target)) - username: ((registry_user)) - password: ((registry_password)) - tag: latest - -inputs: -- name: repo - -outputs: -- name: artifact - -caches: - - path: repo/target - - path: /usr/local/cargo/registry - -params: - CI: "true" - TARGET: ((target)) - PR: ((pr)) - OUTPUT: artifact - -run: - path: repo/.ci/tasks/build.sh diff --git a/.ci/tasks/clippy.sh b/.ci/tasks/clippy.sh deleted file mode 100755 index 7c27b5f..0000000 --- a/.ci/tasks/clippy.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh - -set -eu - -title() { - printf "\033[1m%s\033[0m\n" "$1" -} - -title "Install clippy" -rustup component add clippy - -title "Run clippy" -cargo clippy --color always --no-deps -- -D warnings - -title "Done" diff --git a/.ci/tasks/clippy.yml b/.ci/tasks/clippy.yml deleted file mode 100644 index 806dfd4..0000000 --- a/.ci/tasks/clippy.yml +++ /dev/null @@ -1,28 +0,0 @@ -# yaml-language-server: 
$schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig ---- -platform: linux - -image_resource: - name: dtmt-ci-base-linux - type: registry-image - source: - repository: registry.sclu1034.dev/dtmt-ci-base-linux - username: ((registry_user)) - password: ((registry_password)) - tag: latest - -inputs: -- name: repo - -caches: - - path: repo/target - - path: /usr/local/cargo/registry - -params: - CI: "true" - GITEA_API_KEY: ((gitea_api_key)) - -run: - path: .ci/tasks/clippy.sh - dir: repo - diff --git a/.ci/util/run.sh b/.ci/util/run.sh deleted file mode 100755 index 60c8112..0000000 --- a/.ci/util/run.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/sh - -set -ux - -script="$1" -context="$2" -desc="$3" - -if [ -z "$script" ]; then - echo "No script to run" >&2 - exit 1 -fi - -if [ -z "$context" ]; then - echo "Missing 'context' for CI status report" >&2 - exit 1 -fi - -if [ -z "$REF" ]; then - echo "Environment variable 'REF' must be set to a valid Git ref." >&2 - exit 1 -fi - -if [ -z "$GITEA_API_KEY" ]; then - echo "Environment variable 'GITEA_API_KEY' must be set." >&2 - exit 1 -fi - -notify() { - curl -X 'POST' \ - -H 'Content-Type: application/json' \ - -H 'Accept: application/json' \ - -H "Authorization: token $GITEA_API_KEY" \ - "https://git.sclu1034.dev/api/v1/repos/bitsquid_dt/dtmt/statuses/$REF" \ - --data @- < 'pr-{{pr}}.yaml' - fly -t main set-pipeline \ - --pipeline dtmt-pr \ - --config .ci/pipelines/pr.yml \ - -v gitea_api_key=${GITEA_API_KEY} \ - -i number={{pr}} \ - -y branch="$(yq -y '.head.ref' 'pr-{{pr}}.yaml')" \ - -y pr="$(cat 'pr-{{pr}}.yaml')" - diff --git a/README.adoc b/README.adoc index 34e0ef0..bc322fd 100644 --- a/README.adoc +++ b/README.adoc @@ -10,18 +10,23 @@ :tip-caption: :bulb: :warning-caption: :warning: -A set of tools to use and develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_. 
+A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_. -== Darktide Mod Manager (DTMM) +== Quickstart -DTMM is a GUI application to install and manage mods for the game. +1. Download the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] for your platform. +2. Place the binary for your system and `dictionary.csv` next to each other. +3. Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`. +4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference]. -image::docs/screenshots/dtmm.png[dtmm main view] +== Runtime dependencies -Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmm[crates/dtmm] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki]. +The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd. -== Darktide Mod Tools (DTMT) +A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environmental variable. -DTMT is a CLI application providing various commands that aid in developing mods for the game. +== Building -Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmt[crates/dtmt] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki]. +1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system. +2. Download or clone this source code. Make sure to include the submodules in `lib/`. +3. Run `cargo build`. 
diff --git a/crates/dtmm/Cargo.toml b/crates/dtmm/Cargo.toml index 52c0522..64361c4 100644 --- a/crates/dtmm/Cargo.toml +++ b/crates/dtmm/Cargo.toml @@ -2,48 +2,18 @@ name = "dtmm" version = "0.1.0" edition = "2021" -authors = ["Lucas Schwiderski "] -description = "DTMM is a GUI application to install and manage mods for the game." -documentation = "https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki" -repository = "https://git.sclu1034.dev/bitsquid_dt/dtmt" -homepage = "https://git.sclu1034.dev/bitsquid_dt/dtmt" -license-file = "LICENSE" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -ansi-parser = { workspace = true } -async-recursion = { workspace = true } -bincode = { workspace = true } -bitflags = { workspace = true } -clap = { workspace = true } -color-eyre = { workspace = true } -colors-transform = { workspace = true } -confy = { workspace = true } -druid = { workspace = true } -druid-widget-nursery = { workspace = true } -dtmt-shared = { workspace = true } -futures = { workspace = true } -interprocess = { workspace = true } -lazy_static = { workspace = true } -luajit2-sys = { workspace = true } -minijinja = { workspace = true } -nexusmods = { workspace = true } -oodle = { workspace = true } -open = { workspace = true } -path-slash = { workspace = true } -sdk = { workspace = true } -serde = { workspace = true } -serde_sjson = { workspace = true } -strip-ansi-escapes = { workspace = true } -time = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } -tracing = { workspace = true } -tracing-error = { workspace = true } -tracing-subscriber = { workspace = true } -usvg = { workspace = true } -zip = { workspace = true } - -[build-dependencies] -winres = "0.1.12" +clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] } +color-eyre = "0.6.2" +confy = "0.5.1" +druid = { git = "https://github.com/linebender/druid.git", features = ["im"] } 
+sdk = { path = "../../lib/sdk", version = "0.2.0" } +serde = "1.0.152" +tokio = "1.23.0" +toml = "0.5.10" +tracing = "0.1.37" +tracing-error = "0.2.0" +tracing-subscriber = { version = "0.3.16", features = ["env-filter"] } diff --git a/crates/dtmm/README.adoc b/crates/dtmm/README.adoc deleted file mode 100644 index 45130f1..0000000 --- a/crates/dtmm/README.adoc +++ /dev/null @@ -1,16 +0,0 @@ -= Darktide Mod Manager (DTMM) -:idprefix: -:idseparator: -:toc: macro -:toclevels: 1 -:!toc-title: -:caution-caption: :fire: -:important-caption: :exclamtion: -:note-caption: :paperclip: -:tip-caption: :bulb: -:warning-caption: :warning: - -DTMM is a GUI application to install and manage mods for the game. - -![dtmm main view](../../docs/screenshots/dtmm.png) - diff --git a/crates/dtmm/assets/DTMM_logo.xcf b/crates/dtmm/assets/DTMM_logo.xcf deleted file mode 100644 index 00de67d..0000000 --- a/crates/dtmm/assets/DTMM_logo.xcf +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:144903129d56235895e433435ecee90528ceb5c4db98f5b02e637a215dde1881 -size 17736337 diff --git a/crates/dtmm/assets/DTMM_logo_256.png b/crates/dtmm/assets/DTMM_logo_256.png deleted file mode 100644 index e53931f..0000000 --- a/crates/dtmm/assets/DTMM_logo_256.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:62096b0f6e3798820c9ad04ba61fdff45522b7c67c4b254dc8fd11bdde984c76 -size 39017 diff --git a/crates/dtmm/assets/DTMM_logo_48.png b/crates/dtmm/assets/DTMM_logo_48.png deleted file mode 100644 index 33c1d11..0000000 --- a/crates/dtmm/assets/DTMM_logo_48.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b0524e05b8f2c6ca061aa7edc6f7e62efb8bcddf347e4e9344187efb437436c -size 3082 diff --git a/crates/dtmm/assets/DTMM_logo_64.png b/crates/dtmm/assets/DTMM_logo_64.png deleted file mode 100644 index e5d5407..0000000 --- a/crates/dtmm/assets/DTMM_logo_64.png +++ /dev/null @@ -1,3 +0,0 @@ -version 
https://git-lfs.github.com/spec/v1 -oid sha256:e0bf9431d5f46f4437c21fe38ebbd86e8b3872acaf3a13f0bc8f4a9e8e78e118 -size 4287 diff --git a/crates/dtmm/assets/DTMM_logo_border.png b/crates/dtmm/assets/DTMM_logo_border.png deleted file mode 100644 index bf610e4..0000000 --- a/crates/dtmm/assets/DTMM_logo_border.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cefec60ffe91eb4d827e1c7c9b4bfebdec528236809e02ccc9f15b15ee290442 -size 537707 diff --git a/crates/dtmm/assets/DTMM_logo_faint_glow.png b/crates/dtmm/assets/DTMM_logo_faint_glow.png deleted file mode 100644 index 1066370..0000000 --- a/crates/dtmm/assets/DTMM_logo_faint_glow.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d579be0297e78ef0c9cfb4ce357dd61ed13cc65084d5a38c322913cdcdbe5b99 -size 605023 diff --git a/crates/dtmm/assets/DTMM_logo_small.png b/crates/dtmm/assets/DTMM_logo_small.png deleted file mode 100644 index 0020520..0000000 --- a/crates/dtmm/assets/DTMM_logo_small.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:be2a3cb6a94828b3df9368bcf9ea335ad08590a69b128a15a92fb1cf751d06b6 -size 502425 diff --git a/crates/dtmm/assets/dtmm.desktop b/crates/dtmm/assets/dtmm.desktop deleted file mode 100644 index cb9185c..0000000 --- a/crates/dtmm/assets/dtmm.desktop +++ /dev/null @@ -1,11 +0,0 @@ -[Desktop Entry] -Name=DTMM -GenericName=Mod Manager -Comment=A graphical mod manager for Warhammer 40,000: Darktide -Exec=dtmm %u -Type=Application -Keywords=Mod; -StartupNotify=true -Categories=Utility; -MimeType=x-scheme-handler/nxm; -Icon=dtmm diff --git a/crates/dtmm/assets/dtmm.ico b/crates/dtmm/assets/dtmm.ico deleted file mode 100644 index b862839..0000000 --- a/crates/dtmm/assets/dtmm.ico +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:02d4bea2d7af89a86c3f052240a2acd137a0df5e9b6e85ecfe3f163032010652 -size 37519 diff --git a/crates/dtmm/assets/init.lua 
b/crates/dtmm/assets/init.lua deleted file mode 100644 index e2acdbd..0000000 --- a/crates/dtmm/assets/init.lua +++ /dev/null @@ -1,70 +0,0 @@ -local StateGame = require("scripts/game_states/state_game") -local StateSplash = require("scripts/game_states/game/state_splash") -local GameStateMachine = require("scripts/foundation/utilities/game_state_machine") - -local function hook(obj, fn_name, cb) - local orig = obj[fn_name] - - obj[fn_name] = function(...) - return cb(orig, ...) - end -end - -function init(mod_data, boot_gui) - local ModLoader = require("scripts/mods/mod_loader") - local mod_loader = ModLoader:new(mod_data, boot_gui) - - -- The mod loader needs to remain active during game play, to - -- enable reloads - hook(StateGame, "update", function(func, dt, ...) - mod_loader:update(dt) - return func(dt, ...) - end) - - -- Skip splash view - hook(StateSplash, "on_enter", function(func, self, ...) - local result = func(self, ...) - - self._should_skip = true - self._continue = true - - return result - end) - - -- Trigger state change events - hook(GameStateMachine, "_change_state", function(func, self, ...) - local old_state = self._state - local old_state_name = old_state and self:current_state_name() - - if old_state_name then - mod_loader:on_game_state_changed("exit", old_state_name, old_state) - end - - local result = func(self, ...) - - local new_state = self._state - local new_state_name = new_state and self:current_state_name() - - if new_state_name then - mod_loader:on_game_state_changed("enter", new_state_name, new_state) - end - - return result - end) - - -- Trigger ending state change event - hook(GameStateMachine, "destroy", function(func, self, ...) - local old_state = self._state - local old_state_name = old_state and self:current_state_name() - - if old_state_name then - mod_loader:on_game_state_changed("exit", old_state_name) - end - - return func(self, ...) 
- end) - - return mod_loader -end - -return init diff --git a/crates/dtmm/assets/mod_data.lua.j2 b/crates/dtmm/assets/mod_data.lua.j2 deleted file mode 100644 index b5e7f17..0000000 --- a/crates/dtmm/assets/mod_data.lua.j2 +++ /dev/null @@ -1,28 +0,0 @@ -return { -{% for mod in mods %} -{ - id = "{{ mod.id }}", - name = "{{ mod.name }}", - bundled = {{ mod.bundled }}, - version = {{ mod.version }}, - packages = { - {% for pkg in mod.packages %} - "{{ pkg }}", - {% endfor %} - }, - run = function() - {% if mod.data is none %} - return dofile("{{ mod.init }}") - {% else %} - new_mod("{{ mod.id }}", { - mod_script = "{{ mod.init }}", - mod_data = "{{ mod.data }}", - {% if not mod.localization is none %} - mod_localization = "{{ mod.localization }}", - {% endif %} - }) - {% endif %} - end, -}, -{% endfor %} -} diff --git a/crates/dtmm/assets/mod_loader.lua b/crates/dtmm/assets/mod_loader.lua deleted file mode 100644 index 126c0eb..0000000 --- a/crates/dtmm/assets/mod_loader.lua +++ /dev/null @@ -1,412 +0,0 @@ --- Copyright on this file is owned by Fatshark. --- It is extracted, used and modified with permission only for --- the purpose of loading mods within Warhammer 40,000: Darktide. -local ModLoader = class("ModLoader") - -local table_unpack = table.unpack or unpack -local table_pack = table.pack or pack - -local ScriptGui = require("scripts/foundation/utilities/script_gui") - -local FONT_MATERIAL = "content/ui/fonts/arial" - -local LOG_LEVELS = { - spew = 4, - info = 3, - warning = 2, - error = 1 -} -local DEFAULT_SETTINGS = { - log_level = LOG_LEVELS.error, - developer_mode = false -} - -local Keyboard = Keyboard -local BUTTON_INDEX_R = Keyboard.button_index("r") -local BUTTON_INDEX_LEFT_SHIFT = Keyboard.button_index("left shift") -local BUTTON_INDEX_LEFT_CTRL = Keyboard.button_index("left ctrl") - -ModLoader.init = function(self, mod_data, boot_gui) - table.dump(mod_data, nil, 5, function(...) Log.info("ModLoader", ...) 
end) - - self._mod_data = mod_data - self._gui = boot_gui - - self._settings = Application.user_setting("mod_settings") or DEFAULT_SETTINGS - - self._mods = {} - self._num_mods = nil - self._chat_print_buffer = {} - self._reload_data = {} - self._ui_time = 0 - - self._state = "scanning" -end - -ModLoader.developer_mode_enabled = function(self) - return self._settings.developer_mode -end - -ModLoader.set_developer_mode = function(self, enabled) - self._settings.developer_mode = enabled -end - -ModLoader._draw_state_to_gui = function(self, gui, dt) - local state = self._state - local t = self._ui_time + dt - self._ui_time = t - local status_str = "Loading mods" - - if state == "scanning" then - status_str = "Scanning for mods" - elseif state == "loading" or state == "initializing" then - local mod = self._mods[self._mod_load_index] - status_str = string.format("Loading mod %q", mod.name) - end - - local msg = status_str .. string.rep(".", (2 * t) % 4) - ScriptGui.text(gui, msg, FONT_MATERIAL, 25, Vector3(20, 30, 1), Color.white()) -end - -ModLoader.remove_gui = function(self) - self._gui = nil -end - -ModLoader.mod_data = function(self, id) - -- Since this primarily exists for DMF, - -- we can optimize the search for its use case of looking for the - -- mod currently being loaded - local mod_data = self._mods[self._mod_load_index] - - if mod_data.id ~= id then - mod_data = nil - - for _, v in ipairs(self._mods) do - if v.id == id then - mod_data = v - end - end - end - - return mod_data -end - -ModLoader._check_reload = function() - return Keyboard.pressed(BUTTON_INDEX_R) and - Keyboard.button(BUTTON_INDEX_LEFT_SHIFT) + - Keyboard.button(BUTTON_INDEX_LEFT_CTRL) == 2 -end - -ModLoader.update = function(self, dt) - local chat_print_buffer = self._chat_print_buffer - local num_delayed_prints = #chat_print_buffer - - if num_delayed_prints > 0 and Managers.chat then - for i = 1, num_delayed_prints, 1 do - -- TODO: Use new chat system - -- 
Managers.chat:add_local_system_message(1, chat_print_buffer[i], true) - - chat_print_buffer[i] = nil - end - end - - local old_state = self._state - - if self._settings.developer_mode and self:_check_reload() then - self._reload_requested = true - end - - if self._reload_requested and old_state == "done" then - self:_reload_mods() - end - - if old_state == "done" then - self:_run_callbacks("update", dt) - elseif old_state == "scanning" then - Log.info("ModLoader", "Scanning for mods") - self:_build_mod_table() - - self._state = self:_load_mod(1) - self._ui_time = 0 - elseif old_state == "loading" then - local handle = self._loading_resource_handle - - if ResourcePackage.has_loaded(handle) then - ResourcePackage.flush(handle) - - local mod = self._mods[self._mod_load_index] - local next_index = mod.package_index + 1 - local mod_data = mod.data - - if next_index <= #mod_data.packages then - self:_load_package(mod, next_index) - else - self._state = "initializing" - end - end - elseif old_state == "initializing" then - local mod = self._mods[self._mod_load_index] - local mod_data = mod.data - - Log.info("ModLoader", "Initializing mod %q", mod.name) - - mod.state = "running" - local ok, object = xpcall(mod_data.run, function(err) - if type(err) == "string" then - return err .. "\n" .. 
Script.callstack() - else - return err - end - end) - - if not ok then - if object.error then - object = string.format( - "%s\n<>\n%s\n<>\n<>\n%s\n<>\n<>\n%s\n<>", - object.error, object.traceback, object.locals, object.self) - end - - Log.error("ModLoader", "Failed 'run' for %q: %s", mod.name, object) - end - - mod.object = object or {} - - self:_run_callback(mod, "init", self._reload_data[mod.id]) - - Log.info("ModLoader", "Finished loading %q", mod.name) - - self._state = self:_load_mod(self._mod_load_index + 1) - end - - local gui = self._gui - if gui then - self:_draw_state_to_gui(gui, dt) - end - - if old_state ~= self._state then - Log.info("ModLoader", "%s -> %s", old_state, self._state) - end -end - -ModLoader.all_mods_loaded = function(self) - return self._state == "done" -end - -ModLoader.destroy = function(self) - self:_run_callbacks("on_destroy") - self:unload_all_mods() -end - -ModLoader._run_callbacks = function(self, callback_name, ...) - for i = 1, self._num_mods, 1 do - local mod = self._mods[i] - - if mod and not mod.callbacks_disabled then - self:_run_callback(mod, callback_name, ...) - end - end -end - -ModLoader._run_callback = function(self, mod, callback_name, ...) - local object = mod.object - local cb = object[callback_name] - - if not cb then - return - end - - local args = table_pack(...) - - local success, val = xpcall( - function() return cb(object, table_unpack(args)) end, - function(err) - if type(err) == "string" then - return err .. "\n" .. Script.callstack() - else - return err - end - end - ) - - if success then - return val - else - Log.error("ModLoader", "Failed to run callback %q for mod %q with id %q. 
Disabling callbacks until reload.", - callback_name, mod.name, mod.id) - if val.error then - Log.error("ModLoader", - "Error: %s\n<>\n%s<>\n<>\n%s<>\n<>\n%s<>", - val.error, val.traceback, val.locals, val.self) - else - Log.error("ModLoader", "Error: %s", val or "[unknown error]") - end - - mod.callbacks_disabled = true - end -end - -ModLoader._start_scan = function(self) - Log.info("ModLoader", "Starting mod scan") - self._state = "scanning" -end - -ModLoader._build_mod_table = function(self) - fassert(table.is_empty(self._mods), "Trying to add mods to non-empty mod table") - - for i, mod_data in ipairs(self._mod_data) do - Log.info( - "ModLoader", - "mods[%d] = id=%q | name=%q | version=%q | bundled=%s", - i, - mod_data.id, - mod_data.name, - mod_data.version, - tostring(mod_data.bundled) - ) - - self._mods[i] = { - id = mod_data.id, - state = "not_loaded", - callbacks_disabled = false, - name = mod_data.name, - loaded_packages = {}, - packages = mod_data.packages, - data = mod_data, - bundled = mod_data.bundled or false, - } - end - - self._num_mods = #self._mods - - Log.info("ModLoader", "Found %i mods", self._num_mods) -end - -ModLoader._load_mod = function(self, index) - self._ui_time = 0 - local mods = self._mods - local mod = mods[index] - - if not mod then - table.clear(self._reload_data) - - return "done" - end - - Log.info("ModLoader", "Loading mod %q", mod.id) - - mod.state = "loading" - - Crashify.print_property(string.format("Mod:%s", mod.name), true) - - self._mod_load_index = index - - if mod.bundled and mod.packages[1] then - self:_load_package(mod, 1) - return "loading" - else - return "initializing" - end -end - -ModLoader._load_package = function(self, mod, index) - mod.package_index = index - local package_name = mod.packages[index] - - if not package_name then - return - end - - Log.info("ModLoader", "Loading package %q", package_name) - - local resource_handle = Application.resource_package(package_name) - self._loading_resource_handle = 
resource_handle - - ResourcePackage.load(resource_handle) - - table.insert(mod.loaded_packages, resource_handle) -end - -ModLoader.unload_all_mods = function(self) - if self._state ~= "done" then - Log.error("ModLoader", "Mods can't be unloaded, mod state is not \"done\". current: %q", self._state) - - return - end - - Log.info("ModLoader", "Unload all mod packages") - - for i = self._num_mods, 1, -1 do - local mod = self._mods[i] - - if mod then - self:unload_mod(i) - end - - self._mods[i] = nil - end - - self._num_mods = nil - self._state = "unloaded" -end - -ModLoader.unload_mod = function(self, index) - local mod = self._mods[index] - - if mod then - Log.info("ModLoader", "Unloading %q.", mod.name) - - for _, handle in ipairs(mod.loaded_packages) do - ResourcePackage.unload(handle) - Application.release_resource_package(handle) - end - - mod.state = "not_loaded" - else - Log.error("ModLoader", "Mod index %i can't be unloaded, has not been loaded", index) - end -end - -ModLoader._reload_mods = function(self) - Log.info("ModLoader", "reloading mods") - - for i = 1, self._num_mods, 1 do - local mod = self._mods[i] - - if mod and mod.state == "running" then - Log.info("ModLoader", "reloading %s", mod.name) - - self._reload_data[mod.id] = self:_run_callback(mod, "on_reload") - else - Log.info("ModLoader", "not reloading mod, state: %s", mod.state) - end - end - - self:unload_all_mods() - self:_start_scan() - - self._reload_requested = false -end - -ModLoader.on_game_state_changed = function(self, status, state_name, state_object) - if self._state == "done" then - self:_run_callbacks("on_game_state_changed", status, state_name, state_object) - else - Log.warning("ModLoader", "Ignored on_game_state_changed call due to being in state %q", self._state) - end -end - -ModLoader.print = function(self, level, str, ...) - local f = Log[level] - if f then - f("ModLoader", str, ...) - else - local message = string.format("[ModLoader][" .. level .. "] " .. str, ...) 
- local log_level = LOG_LEVELS[level] or 99 - - if log_level <= 2 then - print(message) - end - end -end - -return ModLoader diff --git a/crates/dtmm/assets/mod_main.lua.j2 b/crates/dtmm/assets/mod_main.lua.j2 deleted file mode 100644 index 29caa79..0000000 --- a/crates/dtmm/assets/mod_main.lua.j2 +++ /dev/null @@ -1,216 +0,0 @@ -local _G = _G -local rawget = rawget -local rawset = rawset - -local log = function(category, format, ...) - local Log = rawget(_G, "Log") - if Log then - Log.info(category, format, ...) - else - print(string.format("[%s] %s", category or "", string.format(format or "", ...))) - end -end - -log("mod_main", "Initializing mods...") -log("mod_main", "[DTMM] Deployment data:\n{{ deployment_info }}") - -local require_store = {} - --- This token is treated as a string template and filled by DTMM during deployment. --- This allows hiding unsafe I/O functions behind a setting. --- When not replaced, it's also a valid table definition, thereby degrading gracefully. -local is_io_enabled = {{ is_io_enabled }} -- luacheck: ignore 113 -local lua_libs = { - debug = debug, - os = { - date = os.date, - time = os.time, - clock = os.clock, - getenv = os.getenv, - difftime = os.difftime, - }, - load = load, - loadfile = loadfile, - loadstring = loadstring, -} - -if is_io_enabled then - lua_libs.io = io - lua_libs.os = os - lua_libs.ffi = require("ffi") -end - -Mods = { - -- Keep a backup of certain system libraries before - -- Fatshark's code scrubs them. - -- The loader can then decide to pass them on to mods, or ignore them - lua = setmetatable({}, { __index = lua_libs }), - require_store = require_store, - original_require = require, -} - -local can_insert = function(filepath, new_result) - local store = require_store[filepath] - if not store or #store then - return true - end - - if store[#store] ~= new_result then - return true - end -end - -local original_require = require -require = function(filepath, ...) 
- local result = original_require(filepath, ...) - if result and type(result) == "table" then - if can_insert(filepath, result) then - require_store[filepath] = require_store[filepath] or {} - local store = require_store[filepath] - - table.insert(store, result) - - if Mods.hook then - Mods.hook.enable_by_file(filepath, #store) - end - end - end - - return result -end - -require("scripts/boot_init") -require("scripts/foundation/utilities/class") - --- The `__index` metamethod maps a proper identifier `CLASS.MyClassName` to the --- stringified version of the key: `"MyClassName"`. --- This allows using LuaCheck for the stringified class names in hook parameters. -_G.CLASS = setmetatable({}, { - __index = function(_, key) - return key - end -}) - -local original_class = class -class = function(class_name, super_name, ...) - local result = original_class(class_name, super_name, ...) - if not rawget(_G, class_name) then - rawset(_G, class_name, result) - end - if not rawget(_G.CLASS, class_name) then - rawset(_G.CLASS, class_name, result) - end - return result -end - -require("scripts/main") -log("mod_main", "'scripts/main' loaded") - --- We need to inject two states into two different state machines: --- First, we inject one into the `"Main"` state machine at a specific location, so that we're --- still early in the process, but right after `StateRequireScripts` where most game files --- are already available to `require` and hook. --- This is where the `ModLoader` is created initially. --- Then, we inject into the very first position of the `"Game"` state machine. This runs right --- after `StateGame._init_managers`, at which point all the parts needed for DMF and other mods --- have been initialized. --- This is where `ModLoader` will finally start loading mods. 
-local function patch_mod_loading_state() - local StateBootLoadDML = class("StateBootLoadDML", "StateBootSubStateBase") - local StateGameLoadMods = class("StateGameLoadMods") - - StateBootLoadDML.on_enter = function(self, parent, params) - log("StateBootLoadDML", "Entered") - StateBootLoadDML.super.on_enter(self, parent, params) - - local state_params = self:_state_params() - local package_manager = state_params.package_manager - - self._package_manager = package_manager - self._package_handles = { - ["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadDML", nil), - } - end - - StateBootLoadDML._state_update = function(self, _) - local package_manager = self._package_manager - - if package_manager:update() then - local mod_data = require("scripts/mods/mod_data") - - local create_mod_loader = require("scripts/mods/init") - local mod_loader = create_mod_loader(mod_data) - - Managers.mod = mod_loader - - log("StateBootLoadDML", "DML loaded, exiting") - return true, false - end - - return false, false - end - - - function StateGameLoadMods:on_enter(_, params) - log("StateGameLoadMods", "Entered") - self._next_state = require("scripts/game_states/game/state_splash") - self._next_state_params = params - end - - function StateGameLoadMods:update(_) - -- We're relying on the fact that DML internally makes sure - -- that `Managers.mod:update()` is being called appropriately. - -- The implementation as of this writing is to hook `StateGame.update`. 
- if Managers.mod:all_mods_loaded() then - Log.info("StateGameLoadMods", "Mods loaded, exiting") - return self._next_state, self._next_state_params - end - end - - local GameStateMachine = require("scripts/foundation/utilities/game_state_machine") - local GameStateMachine_init = GameStateMachine.init - GameStateMachine.init = function(self, parent, start_state, params, creation_context, state_change_callbacks, name) - if name == "Main" then - log("mod_main", "Injecting StateBootLoadDML") - - -- Hardcoded position after `StateRequireScripts`. - -- We need to wait until then to even begin most of our stuff, - -- so that most of the game's core systems are at least loaded and can be hooked, - -- even if they aren't running, yet. - local pos = 4 - table.insert(params.states, pos, { - StateBootLoadDML, - { - package_manager = params.package_manager, - }, - }) - - GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name) - elseif name == "Game" then - log("mod_main", "Injection StateGameLoadMods") - -- The second time around, we want to be the first, so we pass our own - -- 'start_state'. - -- We can't just have the state machine be initialized and then change its `_next_state`, as by the end of - -- `init`, a bunch of stuff will already be initialized. 
- GameStateMachine_init(self, parent, StateGameLoadMods, params, creation_context, state_change_callbacks, name) - -- And since we're done now, we can revert the function to its original - GameStateMachine.init = GameStateMachine_init - else - -- In all other cases, simply call the original - GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name) - end - end -end - --- Override `init` to run our injection -function init() - patch_mod_loading_state() - - -- As requested by Fatshark - local StateRequireScripts = require("scripts/game_states/boot/state_require_scripts") - StateRequireScripts._get_is_modded = function() return true end - - Main:init() -end - --- vim: ft=lua diff --git a/crates/dtmm/assets/tabler-icons/LICENSE b/crates/dtmm/assets/tabler-icons/LICENSE deleted file mode 100644 index fe62055..0000000 --- a/crates/dtmm/assets/tabler-icons/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020-2023 Paweł Kuna - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/crates/dtmm/assets/tabler-icons/alert-circle.svg b/crates/dtmm/assets/tabler-icons/alert-circle.svg deleted file mode 100644 index 35e7aad..0000000 --- a/crates/dtmm/assets/tabler-icons/alert-circle.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/crates/dtmm/assets/tabler-icons/alert-triangle.svg b/crates/dtmm/assets/tabler-icons/alert-triangle.svg deleted file mode 100644 index 523111c..0000000 --- a/crates/dtmm/assets/tabler-icons/alert-triangle.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/crates/dtmm/assets/tabler-icons/cloud-download.svg b/crates/dtmm/assets/tabler-icons/cloud-download.svg deleted file mode 100644 index 5b62734..0000000 --- a/crates/dtmm/assets/tabler-icons/cloud-download.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - diff --git a/crates/dtmm/build.rs b/crates/dtmm/build.rs deleted file mode 100644 index 9e551d4..0000000 --- a/crates/dtmm/build.rs +++ /dev/null @@ -1,7 +0,0 @@ -fn main() { - if cfg!(target_os = "windows") { - let mut res = winres::WindowsResource::new(); - res.set_icon("assets/dtmm.ico"); - res.compile().unwrap(); - } -} diff --git a/crates/dtmm/src/controller.rs b/crates/dtmm/src/controller.rs new file mode 100644 index 0000000..a2bf429 --- /dev/null +++ b/crates/dtmm/src/controller.rs @@ -0,0 +1,47 @@ +use druid::widget::{Button, Controller}; +use druid::{Data, Env, Event, EventCtx, LifeCycle, LifeCycleCtx, UpdateCtx, Widget}; + +pub struct DisabledButtonController; + +impl Controller> for DisabledButtonController { + fn event( + &mut self, + child: &mut Button, + ctx: &mut EventCtx, + event: &Event, + data: &mut T, + env: &Env, + ) { + if !ctx.is_disabled() { + ctx.set_disabled(true); + ctx.request_paint(); + } + 
child.event(ctx, event, data, env) + } + + fn lifecycle( + &mut self, + child: &mut Button, + ctx: &mut LifeCycleCtx, + event: &LifeCycle, + data: &T, + env: &Env, + ) { + child.lifecycle(ctx, event, data, env) + } + + fn update( + &mut self, + child: &mut Button, + ctx: &mut UpdateCtx, + old_data: &T, + data: &T, + env: &Env, + ) { + if !ctx.is_disabled() { + ctx.set_disabled(true); + ctx.request_paint(); + } + child.update(ctx, old_data, data, env) + } +} diff --git a/crates/dtmm/src/controller/app.rs b/crates/dtmm/src/controller/app.rs deleted file mode 100644 index d5b397c..0000000 --- a/crates/dtmm/src/controller/app.rs +++ /dev/null @@ -1,303 +0,0 @@ -use std::collections::HashMap; -use std::io::ErrorKind; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use color_eyre::eyre::{self, Context}; -use color_eyre::{Help, Report, Result}; -use druid::im::Vector; -use druid::ImageBuf; -use dtmt_shared::ModConfig; -use nexusmods::Api as NexusApi; -use tokio::fs::{self, DirEntry, File}; -use tokio_stream::wrappers::ReadDirStream; -use tokio_stream::StreamExt; - -use crate::state::{ActionState, InitialLoadResult, ModInfo, ModOrder, NexusInfo, PackageInfo}; -use crate::util; -use crate::util::config::{ConfigSerialize, LoadOrderEntry}; - -use super::read_sjson_file; - -#[tracing::instrument(skip(state))] -pub(crate) async fn delete_mod(state: ActionState, info: &ModInfo) -> Result<()> { - let mod_dir = state.mod_dir.join(&info.id); - fs::remove_dir_all(&mod_dir) - .await - .wrap_err_with(|| format!("Failed to remove directory {}", mod_dir.display()))?; - - Ok(()) -} - -#[tracing::instrument(skip(state))] -pub(crate) async fn save_settings(state: ActionState) -> Result<()> { - let cfg = ConfigSerialize::from(&state); - - tracing::info!("Saving settings to '{}'", state.config_path.display()); - tracing::debug!(?cfg); - - let data = serde_sjson::to_string(&cfg).wrap_err("Failed to serialize config")?; - - fs::write(state.config_path.as_ref(), &data) - .await - 
.wrap_err_with(|| { - format!( - "Failed to write config to '{}'", - state.config_path.display() - ) - }) -} - -#[tracing::instrument(skip_all,fields( - name = ?res.as_ref().map(|entry| entry.file_name()) -))] -async fn read_mod_dir_entry(res: Result) -> Result { - let entry = res?; - let config_path = entry.path().join("dtmt.cfg"); - let nexus_path = entry.path().join("nexus.sjson"); - let index_path = entry.path().join("files.sjson"); - - let cfg: ModConfig = read_sjson_file(&config_path) - .await - .wrap_err_with(|| format!("Failed to read mod config '{}'", config_path.display()))?; - - let nexus: Option = match read_sjson_file(&nexus_path) - .await - .wrap_err_with(|| format!("Failed to read Nexus info '{}'", nexus_path.display())) - { - Ok(nexus) => Some(nexus), - Err(err) if err.is::() => match err.downcast_ref::() { - Some(err) if err.kind() == std::io::ErrorKind::NotFound => None, - _ => return Err(err), - }, - Err(err) => return Err(err), - }; - - let files: HashMap> = if cfg.bundled { - read_sjson_file(&index_path) - .await - .wrap_err_with(|| format!("Failed to read file index '{}'", index_path.display()))? - } else { - Default::default() - }; - - let image = if let Some(path) = &cfg.image { - let path = entry.path().join(path); - if let Ok(data) = fs::read(&path).await { - // Druid somehow doesn't return an error compatible with eyre, here. - // So we have to wrap through `Display` manually. 
- let img = match ImageBuf::from_data(&data) { - Ok(img) => img, - Err(err) => { - let err = Report::msg(err.to_string()); - return Err(err) - .wrap_err_with(|| { - format!("Failed to import image file '{}'", path.display()) - }) - .with_suggestion(|| { - "Supported formats are: PNG, JPEG, Bitmap and WebP".to_string() - }); - } - }; - - Some(img) - } else { - None - } - } else { - None - }; - - let packages = files - .into_iter() - .map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect()))) - .collect(); - let info = ModInfo::new(cfg, packages, image, nexus); - Ok(info) -} - -#[tracing::instrument(skip(mod_order))] -pub(crate) async fn load_mods<'a, P, S>(mod_dir: P, mod_order: S) -> Result>> -where - S: Iterator, - P: AsRef + std::fmt::Debug, -{ - let mod_dir = mod_dir.as_ref(); - let read_dir = match fs::read_dir(mod_dir).await { - Ok(read_dir) => read_dir, - Err(err) if err.kind() == ErrorKind::NotFound => { - return Ok(Vector::new()); - } - Err(err) => { - return Err(err) - .wrap_err_with(|| format!("Failed to open directory '{}'", mod_dir.display())); - } - }; - - let stream = ReadDirStream::new(read_dir) - .map(|res| res.wrap_err("Failed to read dir entry")) - .then(read_mod_dir_entry); - tokio::pin!(stream); - - let mut mods: HashMap = HashMap::new(); - - while let Some(res) = stream.next().await { - let info = res?; - mods.insert(info.id.clone(), info); - } - - let mods = mod_order - .filter_map(|entry| { - if let Some(mut info) = mods.remove(&entry.id) { - info.enabled = entry.enabled; - Some(Arc::new(info)) - } else { - None - } - }) - .collect(); - - Ok(mods) -} - -pub(crate) fn check_mod_order(state: &ActionState) -> Result<()> { - if tracing::enabled!(tracing::Level::DEBUG) { - let order = state - .mods - .iter() - .enumerate() - .filter(|(_, i)| i.enabled) - .fold(String::new(), |mut s, (i, info)| { - s.push_str(&format!("{}: {} - {}\n", i, info.id, info.name)); - s - }); - - tracing::debug!("Mod order:\n{}", order); - } - - 
for (i, mod_info) in state.mods.iter().enumerate().filter(|(_, i)| i.enabled) { - for dep in &mod_info.depends { - let dep_info = state.mods.iter().enumerate().find(|(_, m)| m.id == dep.id); - - match dep_info { - Some((_, dep_info)) if !dep_info.enabled => { - eyre::bail!( - "Dependency '{}' ({}) must be enabled.", - dep_info.name, - dep.id - ); - } - Some((j, dep_info)) if dep.order == ModOrder::Before && j >= i => { - eyre::bail!( - "Dependency '{}' ({}) must be loaded before '{}'", - dep_info.name, - dep.id, - mod_info.name - ); - } - Some((j, dep_info)) if dep.order == ModOrder::After && j <= i => { - eyre::bail!( - "Dependency '{}' ({}) must be loaded after '{}'", - dep_info.name, - dep.id, - mod_info.name - ); - } - None => { - eyre::bail!( - "Missing dependency '{}' for mod '{}'", - dep.id, - mod_info.name - ); - } - Some(_) => { - // All good - } - } - } - } - - Ok(()) -} - -#[tracing::instrument(skip(info, api), fields(id = info.id, name = info.name, version = info.version))] -async fn check_mod_update(info: Arc, api: Arc) -> Result> { - let Some(nexus) = &info.nexus else { - return Ok(None); - }; - - let updated_info = api - .mods_id(nexus.id) - .await - .wrap_err_with(|| format!("Failed to query mod {} from Nexus", nexus.id))?; - - let mut info = Arc::unwrap_or_clone(info); - info.nexus = Some(NexusInfo::from(updated_info)); - - Ok(Some(info)) -} - -#[tracing::instrument(skip(state))] -pub(crate) async fn check_updates(state: ActionState) -> Result> { - if state.nexus_api_key.is_empty() { - eyre::bail!("Nexus API key not set. 
Cannot check for updates."); - } - - let api = NexusApi::new(state.nexus_api_key.to_string()) - .wrap_err("Failed to initialize Nexus API")?; - let api = Arc::new(api); - - let tasks = state - .mods - .iter() - .map(|info| check_mod_update(info.clone(), api.clone())); - - let results = futures::future::join_all(tasks).await; - let updates = results - .into_iter() - .filter_map(|res| match res { - Ok(info) => info, - Err(err) => { - tracing::error!("{:?}", err); - None - } - }) - .collect(); - Ok(updates) -} - -pub(crate) async fn load_initial(path: PathBuf, is_default: bool) -> Result { - let config = util::config::read_config(path, is_default) - .await - .wrap_err("Failed to read config file")?; - - // Create or truncate the log file - let log_path = config.data_dir.join("dtmm.log"); - tokio::spawn(async move { - let _ = File::create(&log_path).await; - tracing::debug!("Truncated log file"); - }); - - let game_info = tokio::task::spawn_blocking(dtmt_shared::collect_game_info) - .await - .wrap_err("Failed to spawn task to collect Steam game info")?; - - let game_info = match game_info { - Ok(game_info) => game_info, - Err(err) => { - tracing::error!("Failed to collect game info: {:?}", err); - None - } - }; - - if config.game_dir.is_none() && game_info.is_none() { - tracing::error!("No Game Directory set. 
Head to the 'Settings' tab to set it manually",); - } - - let mod_dir = config.data_dir.join("mods"); - let mods = load_mods(mod_dir, config.mod_order.iter()) - .await - .wrap_err("Failed to load mods")?; - - Ok((config, mods)) -} diff --git a/crates/dtmm/src/controller/deploy.rs b/crates/dtmm/src/controller/deploy.rs deleted file mode 100644 index 481b07c..0000000 --- a/crates/dtmm/src/controller/deploy.rs +++ /dev/null @@ -1,809 +0,0 @@ -use std::io::{Cursor, ErrorKind}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::sync::Arc; - -use color_eyre::eyre::Context; -use color_eyre::{eyre, Help, Report, Result}; -use futures::StreamExt; -use futures::{stream, TryStreamExt}; -use minijinja::Environment; -use sdk::filetype::lua; -use sdk::filetype::package::Package; -use sdk::murmur::Murmur64; -use sdk::{ - Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary, -}; -use serde::{Deserialize, Serialize}; -use time::OffsetDateTime; -use tokio::fs::{self, DirEntry}; -use tokio::io::AsyncWriteExt; -use tracing::Instrument; - -use super::read_sjson_file; -use crate::controller::app::check_mod_order; -use crate::state::{ActionState, PackageInfo}; - -pub const MOD_BUNDLE_NAME: &str = "packages/mods"; -pub const BOOT_BUNDLE_NAME: &str = "packages/boot"; -pub const BUNDLE_DATABASE_NAME: &str = "bundle_database.data"; -pub const MOD_BOOT_SCRIPT: &str = "scripts/mod_main"; -pub const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data"; -pub const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini"; -pub const DEPLOYMENT_DATA_PATH: &str = "dtmm-deployment.sjson"; - -#[derive(Debug, Serialize, Deserialize)] -pub struct DeploymentData { - pub bundles: Vec, - pub mod_folders: Vec, - #[serde(with = "time::serde::iso8601")] - pub timestamp: OffsetDateTime, -} - -#[tracing::instrument] -async fn read_file_with_backup

(path: P) -> Result> -where - P: AsRef + std::fmt::Debug, -{ - let path = path.as_ref(); - let backup_path = { - let mut p = PathBuf::from(path); - let ext = if let Some(ext) = p.extension() { - ext.to_string_lossy().to_string() + ".bak" - } else { - String::from("bak") - }; - p.set_extension(ext); - p - }; - - let file_name = path - .file_name() - .map(|s| s.to_string_lossy().to_string()) - .unwrap_or_else(|| String::from("file")); - - let bin = match fs::read(&backup_path).await { - Ok(bin) => bin, - Err(err) if err.kind() == ErrorKind::NotFound => { - // TODO: This doesn't need to be awaited here, yet. - // I only need to make sure it has finished before writing the changed bundle. - tracing::debug!( - "Backup does not exist. Backing up original {} to '{}'", - file_name, - backup_path.display() - ); - fs::copy(path, &backup_path).await.wrap_err_with(|| { - format!( - "Failed to back up {} '{}' to '{}'", - file_name, - path.display(), - backup_path.display() - ) - })?; - - tracing::debug!("Reading {} from original '{}'", file_name, path.display()); - fs::read(path).await.wrap_err_with(|| { - format!("Failed to read {} file: {}", file_name, path.display()) - })? 
- } - Err(err) => { - return Err(err).wrap_err_with(|| { - format!( - "Failed to read {} from backup '{}'", - file_name, - backup_path.display() - ) - }); - } - }; - Ok(bin) -} - -#[tracing::instrument(skip_all)] -async fn patch_game_settings(state: Arc) -> Result<()> { - let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH); - - let settings = read_file_with_backup(&settings_path) - .await - .wrap_err("Failed to read settings.ini")?; - let settings = String::from_utf8(settings).wrap_err("Settings.ini is not valid UTF-8")?; - - let mut f = fs::File::create(&settings_path) - .await - .wrap_err_with(|| format!("Failed to open {}", settings_path.display()))?; - - let Some(i) = settings.find("boot_script =") else { - eyre::bail!("couldn't find 'boot_script' field"); - }; - - f.write_all(&settings.as_bytes()[0..i]).await?; - f.write_all(b"boot_script = \"scripts/mod_main\"").await?; - - let Some(j) = settings[i..].find('\n') else { - eyre::bail!("couldn't find end of 'boot_script' field"); - }; - - f.write_all(&settings.as_bytes()[(i + j)..]).await?; - - Ok(()) -} - -#[tracing::instrument(skip_all, fields(package = info.name))] -fn make_package(info: &PackageInfo) -> Result { - let mut pkg = Package::new(info.name.clone(), PathBuf::new()); - - for f in &info.files { - let mut it = f.rsplit('.'); - let file_type = it - .next() - .ok_or_else(|| eyre::eyre!("missing file extension")) - .and_then(BundleFileType::from_str) - .wrap_err("Invalid file name in package info")?; - let name: String = it.collect(); - pkg.add_file(file_type, name); - } - - Ok(pkg) -} - -#[tracing::instrument] -async fn copy_recursive( - from: impl Into + std::fmt::Debug, - to: impl AsRef + std::fmt::Debug, -) -> Result<()> { - let to = to.as_ref(); - - #[tracing::instrument] - async fn handle_dir(from: PathBuf) -> Result> { - let mut dir = fs::read_dir(&from) - .await - .wrap_err("Failed to read directory")?; - let mut entries = Vec::new(); - - while let Some(entry) = 
dir.next_entry().await? { - let meta = entry.metadata().await.wrap_err_with(|| { - format!("Failed to get metadata for '{}'", entry.path().display()) - })?; - entries.push((meta.is_dir(), entry)); - } - - Ok(entries) - } - - let base = from.into(); - stream::unfold(vec![base.clone()], |mut state| async { - let from = state.pop()?; - let inner = match handle_dir(from).await { - Ok(entries) => { - for (is_dir, entry) in &entries { - if *is_dir { - state.push(entry.path()); - } - } - stream::iter(entries).map(Ok).left_stream() - } - Err(e) => stream::once(async { Err(e) }).right_stream(), - }; - - Some((inner, state)) - }) - .flatten() - .try_for_each(|(is_dir, entry)| { - let path = entry.path(); - let dest = path - .strip_prefix(&base) - .map(|suffix| to.join(suffix)) - .expect("all entries are relative to the directory we are walking"); - - async move { - if is_dir { - tracing::trace!("Creating directory '{}'", dest.display()); - // Instead of trying to filter "already exists" errors out explicitly, - // we just ignore all. It'll fail eventually with the next copy operation. 
- let _ = fs::create_dir(&dest).await; - Ok(()) - } else { - tracing::trace!("Copying file '{}' -> '{}'", path.display(), dest.display()); - fs::copy(&path, &dest).await.map(|_| ()).wrap_err_with(|| { - format!( - "Failed to copy file '{}' -> '{}'", - path.display(), - dest.display() - ) - }) - } - } - }) - .await - .map(|_| ()) -} - -#[tracing::instrument(skip(state))] -async fn copy_mod_folders(state: Arc) -> Result> { - let game_dir = Arc::clone(&state.game_dir); - - let mut tasks = Vec::new(); - - for mod_info in state.mods.iter().filter(|m| m.enabled && !m.bundled) { - let span = tracing::trace_span!("copying legacy mod", name = mod_info.name); - let _enter = span.enter(); - - let mod_id = mod_info.id.clone(); - let mod_dir = Arc::clone(&state.mod_dir); - let game_dir = Arc::clone(&game_dir); - - let task = async move { - let from = mod_dir.join(&mod_id); - let to = game_dir.join("mods").join(&mod_id); - - tracing::debug!(from = %from.display(), to = %to.display(), "Copying legacy mod '{}'", mod_id); - let _ = fs::create_dir_all(&to).await; - copy_recursive(&from, &to).await.wrap_err_with(|| { - format!( - "Failed to copy legacy mod from '{}' to '{}'", - from.display(), - to.display() - ) - })?; - - Ok::<_, Report>(mod_id) - }; - tasks.push(task); - } - - let ids = futures::future::try_join_all(tasks).await?; - Ok(ids) -} - -fn build_mod_data_lua(state: Arc) -> Result { - #[derive(Serialize)] - struct TemplateDataMod { - id: String, - name: String, - bundled: bool, - version: String, - init: String, - data: Option, - localization: Option, - packages: Vec, - } - - let mut env = Environment::new(); - env.set_trim_blocks(true); - env.set_lstrip_blocks(true); - env.add_template("mod_data.lua", include_str!("../../assets/mod_data.lua.j2")) - .wrap_err("Failed to compile template for `mod_data.lua`")?; - let tmpl = env - .get_template("mod_data.lua") - .wrap_err("Failed to get template `mod_data.lua`")?; - - let data: Vec = state - .mods - .iter() - .filter_map(|m| 
{ - if !m.enabled { - return None; - } - - Some(TemplateDataMod { - id: m.id.clone(), - name: m.name.clone(), - bundled: m.bundled, - version: m.version.clone(), - init: m.resources.init.to_string_lossy().to_string(), - data: m - .resources - .data - .as_ref() - .map(|p| p.to_string_lossy().to_string()), - localization: m - .resources - .localization - .as_ref() - .map(|p| p.to_string_lossy().to_string()), - packages: m.packages.iter().map(|p| p.name.clone()).collect(), - }) - }) - .collect(); - - let lua = tmpl - .render(minijinja::context!(mods => data)) - .wrap_err("Failed to render template `mod_data.lua`")?; - - tracing::debug!("mod_data.lua:\n{}", lua); - - Ok(lua) -} - -#[tracing::instrument(skip_all)] -async fn build_bundles(state: Arc) -> Result> { - let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string()); - let mut tasks = Vec::new(); - - let bundle_dir = Arc::new(state.game_dir.join("bundle")); - - let mut bundles = Vec::new(); - - let mut add_lua_asset = |name: &str, data: &str| { - let span = tracing::info_span!("Compiling Lua", name, data_len = data.len()); - let _enter = span.enter(); - - let file = lua::compile(name.to_string(), data).wrap_err("Failed to compile Lua")?; - - mod_bundle.add_file(file); - - Ok::<_, Report>(()) - }; - - build_mod_data_lua(state.clone()) - .wrap_err("Failed to build 'mod_data.lua'") - .and_then(|data| add_lua_asset(MOD_DATA_SCRIPT, &data))?; - add_lua_asset("scripts/mods/init", include_str!("../../assets/init.lua"))?; - add_lua_asset( - "scripts/mods/mod_loader", - include_str!("../../assets/mod_loader.lua"), - )?; - - tracing::trace!("Preparing tasks to deploy bundle files"); - - for mod_info in state.mods.iter().filter(|m| m.enabled && m.bundled) { - let span = tracing::trace_span!("building mod packages", name = mod_info.name); - let _enter = span.enter(); - - let mod_dir = state.mod_dir.join(&mod_info.id); - for pkg_info in &mod_info.packages { - let span = tracing::trace_span!("building package", name = 
pkg_info.name); - let _enter = span.enter(); - - tracing::trace!( - "Building package {} for mod {}", - pkg_info.name, - mod_info.name - ); - - let pkg = make_package(pkg_info).wrap_err("Failed to make package")?; - let mut variant = BundleFileVariant::new(); - let bin = pkg - .to_binary() - .wrap_err("Failed to serialize package to binary")?; - variant.set_data(bin); - let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package); - file.add_variant(variant); - - tracing::trace!( - "Compiled package {} for mod {}", - pkg_info.name, - mod_info.name - ); - - mod_bundle.add_file(file); - - let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name)); - let src = mod_dir.join(&bundle_name); - let dest = bundle_dir.join(&bundle_name); - let pkg_name = pkg_info.name.clone(); - let mod_name = mod_info.name.clone(); - - // Explicitely drop the guard, so that we can move the span - // into the async operation - drop(_enter); - - let ctx = state.ctx.clone(); - - let task = async move { - let bundle = { - let bin = fs::read(&src).await.wrap_err_with(|| { - format!("Failed to read bundle file '{}'", src.display()) - })?; - let name = Bundle::get_name_from_path(&ctx, &src); - Bundle::from_binary(&ctx, name, bin) - .wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))? - }; - - tracing::debug!( - src = %src.display(), - dest = %dest.display(), - "Copying bundle '{}' for mod '{}'", - pkg_name, - mod_name, - ); - // We attempt to remove any previous file, so that the hard link can be created. - // We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy - // may be possible despite an error here, or the error will be reported by it anyways. - // TODO: There is a chance that we delete an actual game bundle, but with 64bit - // hashes, it's low enough for now, and the setup required to detect - // "game bundle vs mod bundle" is non-trivial. 
- let _ = fs::remove_file(&dest).await; - fs::copy(&src, &dest).await.wrap_err_with(|| { - format!( - "Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}", - src.display(), - dest.display() - ) - })?; - - Ok::(bundle) - } - .instrument(span); - - tasks.push(task); - } - } - - tracing::debug!("Copying {} mod bundles", tasks.len()); - - let mut tasks = stream::iter(tasks).buffer_unordered(10); - - while let Some(res) = tasks.next().await { - let bundle = res?; - bundles.push(bundle); - } - - { - let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64())); - tracing::trace!("Writing mod bundle to '{}'", path.display()); - fs::write(&path, mod_bundle.to_binary()?) - .await - .wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?; - } - - bundles.push(mod_bundle); - - Ok(bundles) -} - -#[tracing::instrument(skip_all)] -async fn patch_boot_bundle(state: Arc, deployment_info: &str) -> Result> { - let bundle_dir = Arc::new(state.game_dir.join("bundle")); - let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()))); - - let mut bundles = Vec::with_capacity(2); - - let mut boot_bundle = async { - let bin = read_file_with_backup(&bundle_path) - .await - .wrap_err("Failed to read boot bundle")?; - - Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin) - .wrap_err("Failed to parse boot bundle") - } - .instrument(tracing::trace_span!("read boot bundle")) - .await - .wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?; - - { - tracing::trace!("Adding mod package file to boot bundle"); - let span = tracing::trace_span!("create mod package file"); - let _enter = span.enter(); - - let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new()); - - for mod_info in &state.mods { - for pkg_info in &mod_info.packages { - pkg.add_file(BundleFileType::Package, &pkg_info.name); - } - } - - pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT); - - let 
mut variant = BundleFileVariant::new(); - variant.set_data(pkg.to_binary()?); - let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package); - f.add_variant(variant); - - boot_bundle.add_file(f); - } - - { - let span = tracing::debug_span!("Importing mod main script"); - let _enter = span.enter(); - - let mut env = Environment::new(); - env.set_trim_blocks(true); - env.set_lstrip_blocks(true); - env.add_template("mod_main.lua", include_str!("../../assets/mod_main.lua.j2")) - .wrap_err("Failed to compile template for `mod_main.lua`")?; - let tmpl = env - .get_template("mod_main.lua") - .wrap_err("Failed to get template `mod_main.lua`")?; - - let is_io_enabled = if state.is_io_enabled { "true" } else { "false" }; - let deployment_info = deployment_info.replace("\"", "\\\"").replace("\n", "\\n"); - let lua = tmpl - .render(minijinja::context!(is_io_enabled => is_io_enabled, deployment_info => deployment_info)) - .wrap_err("Failed to render template `mod_main.lua`")?; - - tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua); - let file = lua::compile(MOD_BOOT_SCRIPT.to_string(), lua) - .wrap_err("Failed to compile mod main Lua file")?; - - boot_bundle.add_file(file); - } - - async { - let bin = boot_bundle - .to_binary() - .wrap_err("Failed to serialize boot bundle")?; - fs::write(&bundle_path, bin) - .await - .wrap_err_with(|| format!("Failed to write main bundle: {}", bundle_path.display())) - } - .instrument(tracing::trace_span!("write boot bundle")) - .await?; - - bundles.push(boot_bundle); - - Ok(bundles) -} - -#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))] -async fn patch_bundle_database(state: Arc, bundles: B) -> Result<()> -where - B: AsRef<[Bundle]>, -{ - let bundle_dir = Arc::new(state.game_dir.join("bundle")); - let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME); - - let mut db = { - let bin = read_file_with_backup(&database_path) - .await - .wrap_err("Failed to read bundle 
database")?; - let mut r = Cursor::new(bin); - let db = BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?; - tracing::trace!("Finished parsing bundle database"); - db - }; - - for bundle in bundles.as_ref() { - tracing::trace!("Adding '{}' to bundle database", bundle.name().display()); - db.add_bundle(bundle); - } - - { - let bin = db - .to_binary() - .wrap_err("Failed to serialize bundle database")?; - fs::write(&database_path, bin).await.wrap_err_with(|| { - format!( - "failed to write bundle database to '{}'", - database_path.display() - ) - })?; - } - - Ok(()) -} - -#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))] -fn build_deployment_data( - bundles: impl AsRef<[Bundle]>, - mod_folders: impl AsRef<[String]>, -) -> Result { - let info = DeploymentData { - timestamp: OffsetDateTime::now_utc(), - bundles: bundles - .as_ref() - .iter() - .map(|bundle| format!("{:x}", bundle.name().to_murmur64())) - .collect(), - // TODO: - mod_folders: mod_folders.as_ref().to_vec(), - }; - serde_sjson::to_string(&info).wrap_err("Failed to serizalize deployment data") -} - -#[tracing::instrument(skip_all, fields( - game_dir = %state.game_dir.display(), - mods = state.mods.len() -))] -pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> { - let state = Arc::new(state); - let bundle_dir = state.game_dir.join("bundle"); - let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())); - - if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999"))) - .await - .is_ok() - { - let err = eyre::eyre!("Found dtkit-patch-based mod installation."); - return Err(err) - .with_suggestion(|| { - "If you're a mod author and saved projects directly in 'mods/', \ - use DTMT to migrate them to the new project structure." 
- .to_string() - }) - .with_suggestion(|| { - "Click 'Reset Game' to remove the previous mod installation.".to_string() - }); - } - - let (_, game_info, deployment_info) = tokio::try_join!( - async { - fs::metadata(&bundle_dir) - .await - .wrap_err("Failed to open game bundle directory") - .with_suggestion(|| "Double-check 'Game Directory' in the Settings tab.") - }, - async { - tokio::task::spawn_blocking(dtmt_shared::collect_game_info) - .await - .map_err(Report::new) - }, - async { - let path = state.game_dir.join(DEPLOYMENT_DATA_PATH); - match read_sjson_file::<_, DeploymentData>(&path).await { - Ok(data) => Ok(Some(data)), - Err(err) => { - if let Some(err) = err.downcast_ref::() - && err.kind() == ErrorKind::NotFound - { - Ok(None) - } else { - Err(err).wrap_err(format!( - "Failed to read deployment data from: {}", - path.display() - )) - } - } - } - } - ) - .wrap_err("Failed to gather deployment information")?; - - let game_info = match game_info { - Ok(game_info) => game_info, - Err(err) => { - tracing::error!("Failed to collect game info: {:#?}", err); - None - } - }; - - tracing::debug!(?game_info, ?deployment_info); - - if let Some(game_info) = game_info { - if deployment_info - .as_ref() - .map(|i| game_info.last_updated > i.timestamp) - .unwrap_or(false) - { - tracing::warn!( - "Game was updated since last mod deployment. \ - Attempting to reconcile game files." 
- ); - - tokio::try_join!( - async { - let path = bundle_dir.join(BUNDLE_DATABASE_NAME); - let backup_path = path.with_extension("data.bak"); - - fs::copy(&path, &backup_path) - .await - .wrap_err("Failed to re-create backup for bundle database.") - }, - async { - let path = bundle_dir.join(boot_bundle_path); - let backup_path = path.with_extension("bak"); - - fs::copy(&path, &backup_path) - .await - .wrap_err("Failed to re-create backup for boot bundle") - } - ) - .with_suggestion(|| { - "Reset the game using 'Reset Game', then verify game files.".to_string() - })?; - - tracing::info!( - "Successfully re-created game file backups. \ - Continuing mod deployment." - ); - } - } - - check_mod_order(&state)?; - - tracing::info!( - "Deploying {} mods to '{}'.", - state.mods.iter().filter(|i| i.enabled).count(), - bundle_dir.display() - ); - - tracing::info!("Copy legacy mod folders"); - let mod_folders = copy_mod_folders(state.clone()) - .await - .wrap_err("Failed to copy mod folders")?; - - tracing::info!("Build mod bundles"); - let mut bundles = build_bundles(state.clone()) - .await - .wrap_err("Failed to build mod bundles")?; - - let new_deployment_info = build_deployment_data(&bundles, &mod_folders) - .wrap_err("Failed to build new deployment data")?; - - tracing::info!("Patch boot bundle"); - let mut boot_bundles = patch_boot_bundle(state.clone(), &new_deployment_info) - .await - .wrap_err("Failed to patch boot bundle")?; - bundles.append(&mut boot_bundles); - - if let Some(info) = &deployment_info { - let bundle_dir = Arc::new(bundle_dir); - // Remove bundles from the previous deployment that don't match the current one. - // I.e. mods that used to be installed/enabled but aren't anymore. 
- { - let tasks = info.bundles.iter().cloned().filter_map(|file_name| { - let is_being_deployed = bundles.iter().any(|b2| { - let name = format!("{:016x}", b2.name()); - file_name == name - }); - - if !is_being_deployed { - let bundle_dir = bundle_dir.clone(); - let task = async move { - let path = bundle_dir.join(&file_name); - - tracing::debug!("Removing unused bundle '{}'", file_name); - - if let Err(err) = fs::remove_file(&path).await.wrap_err_with(|| { - format!("Failed to remove unused bundle '{}'", path.display()) - }) { - tracing::error!("{:?}", err); - } - }; - Some(task) - } else { - None - } - }); - - futures::future::join_all(tasks).await; - } - - // Do the same thing for mod folders - { - let tasks = info.mod_folders.iter().filter_map(|mod_id| { - let is_being_deployed = mod_folders.iter().any(|id| id == mod_id); - - if !is_being_deployed { - let path = bundle_dir.join("mods").join(mod_id); - tracing::debug!("Removing unused mod folder '{}'", path.display()); - - let task = async move { - if let Err(err) = fs::remove_dir_all(&path).await.wrap_err_with(|| { - format!("Failed to remove unused legacy mod '{}'", path.display()) - }) { - tracing::error!("{:?}", err); - } - }; - - Some(task) - } else { - None - } - }); - futures::future::join_all(tasks).await; - } - } - - tracing::info!("Patch game settings"); - patch_game_settings(state.clone()) - .await - .wrap_err("Failed to patch game settings")?; - - tracing::info!("Patching bundle database"); - patch_bundle_database(state.clone(), &bundles) - .await - .wrap_err("Failed to patch bundle database")?; - - tracing::info!("Writing deployment data"); - { - let path = state.game_dir.join(DEPLOYMENT_DATA_PATH); - fs::write(&path, &new_deployment_info) - .await - .wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?; - } - - tracing::info!("Finished deploying mods"); - Ok(()) -} diff --git a/crates/dtmm/src/controller/game.rs b/crates/dtmm/src/controller/game.rs deleted file mode 
100644 index b93d985..0000000 --- a/crates/dtmm/src/controller/game.rs +++ /dev/null @@ -1,259 +0,0 @@ -use std::io::{self, ErrorKind}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use color_eyre::eyre::Context; -use color_eyre::{eyre, Result}; -use sdk::murmur::Murmur64; -use tokio::fs::{self}; -use tokio::io::AsyncWriteExt; - -use crate::controller::deploy::{ - DeploymentData, BOOT_BUNDLE_NAME, BUNDLE_DATABASE_NAME, DEPLOYMENT_DATA_PATH, -}; -use crate::state::ActionState; - -use super::deploy::SETTINGS_FILE_PATH; - -#[tracing::instrument] -async fn read_file_with_backup

(path: P) -> Result> -where - P: AsRef + std::fmt::Debug, -{ - let path = path.as_ref(); - let backup_path = { - let mut p = PathBuf::from(path); - let ext = if let Some(ext) = p.extension() { - ext.to_string_lossy().to_string() + ".bak" - } else { - String::from("bak") - }; - p.set_extension(ext); - p - }; - - let file_name = path - .file_name() - .map(|s| s.to_string_lossy().to_string()) - .unwrap_or_else(|| String::from("file")); - - let bin = match fs::read(&backup_path).await { - Ok(bin) => bin, - Err(err) if err.kind() == ErrorKind::NotFound => { - // TODO: This doesn't need to be awaited here, yet. - // I only need to make sure it has finished before writing the changed bundle. - tracing::debug!( - "Backup does not exist. Backing up original {} to '{}'", - file_name, - backup_path.display() - ); - fs::copy(path, &backup_path).await.wrap_err_with(|| { - format!( - "Failed to back up {} '{}' to '{}'", - file_name, - path.display(), - backup_path.display() - ) - })?; - - tracing::debug!("Reading {} from original '{}'", file_name, path.display()); - fs::read(path).await.wrap_err_with(|| { - format!("Failed to read {} file: {}", file_name, path.display()) - })? 
- } - Err(err) => { - return Err(err).wrap_err_with(|| { - format!( - "Failed to read {} from backup '{}'", - file_name, - backup_path.display() - ) - }); - } - }; - Ok(bin) -} - -#[tracing::instrument(skip_all)] -async fn patch_game_settings(state: Arc) -> Result<()> { - let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH); - - let settings = read_file_with_backup(&settings_path) - .await - .wrap_err("Failed to read settings.ini")?; - let settings = String::from_utf8(settings).wrap_err("Settings.ini is not valid UTF-8")?; - - let mut f = fs::File::create(&settings_path) - .await - .wrap_err_with(|| format!("Failed to open {}", settings_path.display()))?; - - let Some(i) = settings.find("boot_script =") else { - eyre::bail!("couldn't find 'boot_script' field"); - }; - - f.write_all(&settings.as_bytes()[0..i]).await?; - f.write_all(b"boot_script = \"scripts/mod_main\"").await?; - - let Some(j) = settings[i..].find('\n') else { - eyre::bail!("couldn't find end of 'boot_script' field"); - }; - - f.write_all(&settings.as_bytes()[(i + j)..]).await?; - - Ok(()) -} - -#[tracing::instrument(skip_all)] -async fn reset_dtkit_patch(state: ActionState) -> Result<()> { - let bundle_dir = state.game_dir.join("bundle"); - - { - let path = bundle_dir.join(BUNDLE_DATABASE_NAME); - let backup_path = path.with_extension("data.bak"); - fs::rename(&backup_path, &path).await.wrap_err_with(|| { - format!( - "Failed to move bundle database backup '{}' -> '{}'", - backup_path.display(), - path.display() - ) - })?; - tracing::trace!("Reverted bundle database from backup"); - } - - for path in [ - bundle_dir.join(format!( - "{:016x}.patch_999", - Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()) - )), - state.game_dir.join("binaries/mod_loader"), - state.game_dir.join("toggle_darktide_mods.bat"), - state.game_dir.join("README.md"), - ] { - match fs::remove_file(&path).await { - Ok(_) => tracing::trace!("Removed file '{}'", path.display()), - Err(err) if err.kind() != 
io::ErrorKind::NotFound => { - tracing::error!("Failed to remove file '{}': {}", path.display(), err) - } - Err(_) => {} - } - } - - // We deliberately skip the `mods/` directory here. - // Many modders did their development right in there, and as people are prone to not read - // error messages and guides in full, there is bound to be someone who would have - // deleted all their source code if this removed the `mods/` folder. - for path in [state.game_dir.join("tools")] { - match fs::remove_dir_all(&path).await { - Ok(_) => tracing::trace!("Removed directory '{}'", path.display()), - Err(err) if err.kind() != io::ErrorKind::NotFound => { - tracing::error!("Failed to remove directory '{}': {}", path.display(), err) - } - Err(_) => {} - } - } - - tracing::info!("Removed dtkit-patch-based mod installation."); - Ok(()) -} - -#[tracing::instrument(skip(state))] -pub(crate) async fn reset_mod_deployment(state: ActionState) -> Result<()> { - let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())); - let paths = [BUNDLE_DATABASE_NAME, &boot_bundle_path, SETTINGS_FILE_PATH]; - let bundle_dir = state.game_dir.join("bundle"); - - tracing::info!("Resetting mod deployment in {}", bundle_dir.display()); - - if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999"))) - .await - .is_ok() - { - tracing::info!("Found dtkit-patch-based mod installation. 
Removing."); - return reset_dtkit_patch(state).await; - } - - tracing::debug!("Reading mod deployment"); - - let info: DeploymentData = { - let path = state.game_dir.join(DEPLOYMENT_DATA_PATH); - let data = match fs::read(&path).await { - Ok(data) => data, - Err(err) if err.kind() == ErrorKind::NotFound => { - tracing::info!("No deployment to reset"); - return Ok(()); - } - Err(err) => { - return Err(err).wrap_err_with(|| { - format!("Failed to read deployment info at '{}'", path.display()) - }); - } - }; - - let data = String::from_utf8(data).wrap_err("Invalid UTF8 in deployment data")?; - - serde_sjson::from_str(&data).wrap_err("Invalid SJSON in deployment data")? - }; - - for name in info.bundles { - let path = bundle_dir.join(name); - - match fs::remove_file(&path).await { - Ok(_) => {} - Err(err) if err.kind() == ErrorKind::NotFound => {} - Err(err) => { - tracing::error!("Failed to remove '{}': {:?}", path.display(), err); - } - }; - } - - for p in paths { - let path = bundle_dir.join(p); - let backup = bundle_dir.join(format!("{}.bak", p)); - - let res = async { - tracing::debug!( - "Copying from backup: {} -> {}", - backup.display(), - path.display() - ); - - fs::copy(&backup, &path) - .await - .wrap_err_with(|| format!("Failed to copy from '{}'", backup.display()))?; - - tracing::debug!("Deleting backup: {}", backup.display()); - - match fs::remove_file(&backup).await { - Ok(_) => Ok(()), - Err(err) if err.kind() == ErrorKind::NotFound => Ok(()), - Err(err) => { - Err(err).wrap_err_with(|| format!("Failed to remove '{}'", backup.display())) - } - } - } - .await; - - if let Err(err) = res { - tracing::error!( - "Failed to restore '{}' from backup. You may need to verify game files. 
Error: {:?}", - &p, - err - ); - } - } - - { - let path = state.game_dir.join(DEPLOYMENT_DATA_PATH); - if let Err(err) = fs::remove_file(&path).await { - tracing::error!( - "Failed to remove deployment data '{}': {:?}", - path.display(), - err - ); - } - } - - tracing::info!("Reset finished"); - - Ok(()) -} diff --git a/crates/dtmm/src/controller/import.rs b/crates/dtmm/src/controller/import.rs deleted file mode 100644 index 6fc9693..0000000 --- a/crates/dtmm/src/controller/import.rs +++ /dev/null @@ -1,584 +0,0 @@ -use std::collections::HashMap; -use std::ffi::CStr; -use std::io::{Cursor, Read, Seek, Write}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use color_eyre::eyre::{self, Context}; -use color_eyre::{Help, Report, Result}; -use druid::im::Vector; -use druid::{FileInfo, ImageBuf}; -use dtmt_shared::{ModConfig, ModConfigResources}; -use luajit2_sys as lua; -use nexusmods::Api as NexusApi; -use tokio::fs; -use zip::ZipArchive; - -use crate::state::{ActionState, ModInfo, NexusInfo, PackageInfo}; - -fn find_archive_file( - archive: &ZipArchive, - name: impl AsRef, -) -> Option { - let path = archive - .file_names() - .find(|path| path.ends_with(name.as_ref())) - .map(|s| s.to_string()); - path -} - -fn image_data_to_buffer(data: impl AsRef<[u8]>) -> Result { - // Druid somehow doesn't return an error compatible with eyre, here. - // So we have to wrap through `Display` manually. - ImageBuf::from_data(data.as_ref()).map_err(|err| { - Report::msg(err.to_string()) - .wrap_err("Invalid image data") - .suggestion("Supported formats are: PNG, JPEG, Bitmap and WebP") - }) -} - -// Runs the content of a `.mod` file to extract what data we can get -// from legacy mods. -// 1. Create a global function `new_mod` that stores -// the relevant bits in global variables. -// 2. Run the `.mod` file, which will return a table. -// 3. Run the `run` function from that table. -// 4. Access the global variables from #1. 
-#[tracing::instrument] -fn parse_mod_id_file(data: &str) -> Result<(String, ModConfigResources)> { - tracing::debug!("Parsing mod file:\n{}", data); - - let ret = unsafe { - let state = lua::luaL_newstate(); - lua::luaL_openlibs(state); - - let run = b" -function fassert() end -function new_mod(id, resources) - _G.id = id - _G.script = resources.mod_script - _G.data = resources.mod_data - _G.localization = resources.mod_localization -end -\0"; - match lua::luaL_loadstring(state, run.as_ptr() as _) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRSYNTAX => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Invalid syntax: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to create `new_mod`") - } - _ => unreachable!(), - } - - match lua::lua_pcall(state, 0, 0, 0) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRRUN => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Failed to run buffer: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to run buffer") - } - // We don't use an error handler function, so this should be unreachable - lua::LUA_ERRERR => unreachable!(), - _ => unreachable!(), - } - - let name = b".mod\0"; - match lua::luaL_loadbuffer( - state, - data.as_ptr() as _, - data.len() as _, - name.as_ptr() as _, - ) as u32 - { - lua::LUA_OK => {} - lua::LUA_ERRSYNTAX => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Invalid syntax: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to load `.mod` file buffer") - } - _ => unreachable!(), - } - - match lua::lua_pcall(state, 0, 1, 0) 
as u32 { - lua::LUA_OK => {} - lua::LUA_ERRRUN => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Failed to run `.mod` file: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to run `.mod` file") - } - // We don't use an error handler function, so this should be unreachable - lua::LUA_ERRERR => unreachable!(), - _ => unreachable!(), - } - - let key = b"run\0"; - lua::lua_pushstring(state, key.as_ptr() as _); - lua::lua_gettable(state, -2); - - match lua::lua_pcall(state, 0, 0, 0) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRRUN => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Failed to run `.mod.run`: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to run `.mod.run`") - } - // We don't use an error handler function, so this should be unreachable - lua::LUA_ERRERR => unreachable!(), - _ => unreachable!(), - } - - let get_global = |state, key: &[u8]| { - lua::lua_getglobal(state, key.as_ptr() as _); - - if lua::lua_isnil(state, -1) != 0 { - return Ok(None); - } - - let s = lua::lua_tostring(state, -1); - - if s.is_null() { - eyre::bail!("Expected string, got NULL"); - } - - let ret = CStr::from_ptr(s).to_string_lossy().to_string(); - lua::lua_pop(state, 1); - Ok(Some(ret)) - }; - - let mod_id = get_global(state, b"id\0") - .and_then(|s| s.ok_or_else(|| eyre::eyre!("Got `nil`"))) - .wrap_err("Failed to get `id`")?; - - let resources = ModConfigResources { - init: get_global(state, b"script\0") - .and_then(|s| s.map(PathBuf::from).ok_or_else(|| eyre::eyre!("Got `nil`"))) - .wrap_err("Failed to get `script`.")?, - data: get_global(state, b"data\0") - .wrap_err("Failed to get `data`.")? 
- .map(PathBuf::from), - localization: get_global(state, b"localization\0") - .wrap_err("Failed to get `localization`")? - .map(PathBuf::from), - }; - - lua::lua_close(state); - - (mod_id, resources) - }; - - Ok(ret) -} - -// Extracts the mod configuration from the mod archive. -// This may either be a proper `dtmt.cfg`, or the legacy `.mod` ID file. -// -// It also returns the directory where this file was found, used as root path. This -// allows flexibility in what the directory structure is exactly, since many people -// still end up creating tarbombs and Nexus does its own re-packaging. -#[tracing::instrument(skip(archive))] -fn extract_mod_config(archive: &mut ZipArchive) -> Result<(ModConfig, String)> { - let legacy_mod_data = if let Some(name) = find_archive_file(archive, ".mod") { - let (mod_id, resources) = { - let mut f = archive - .by_name(&name) - .wrap_err("Failed to read `.mod` file from archive")?; - - let mut buf = Vec::with_capacity(f.size() as usize); - f.read_to_end(&mut buf) - .wrap_err("Failed to read `.mod` file from archive")?; - - let data = String::from_utf8(buf).wrap_err("`.mod` file is not valid UTF-8")?; - parse_mod_id_file(&data) - .wrap_err("Invalid `.mod` file") - .note( - "The `.mod` file's `run` function may not contain any additional logic \ - besides the default.", - ) - .suggestion("Contact the mod author to fix this.")? 
- }; - - let root = if let Some(index) = name.rfind('/') { - name[..index].to_string() - } else { - String::new() - }; - - Some((mod_id, resources, root)) - } else { - None - }; - - tracing::debug!(?legacy_mod_data); - - if let Some(name) = find_archive_file(archive, "dtmt.cfg") { - let mut f = archive - .by_name(&name) - .wrap_err("Failed to read mod config from archive")?; - - let mut buf = Vec::with_capacity(f.size() as usize); - f.read_to_end(&mut buf) - .wrap_err("Failed to read mod config from archive")?; - - let data = String::from_utf8(buf).wrap_err("Mod config is not valid UTF-8")?; - - let mut cfg: ModConfig = serde_sjson::from_str(&data) - .wrap_err("Failed to deserialize mod config") - .suggestion("Contact the mod author to fix this.")?; - - if let Some((mod_id, resources, root)) = legacy_mod_data { - if cfg.id != mod_id { - let err = eyre::eyre!("Mod ID in `dtmt.cfg` does not match mod ID in `.mod` file"); - return Err(err).suggestion("Contact the mod author to fix this."); - } - - cfg.resources = resources; - - // Enforce that packages are skipped - cfg.bundled = false; - cfg.packages = vec![]; - - Ok((cfg, root)) - } else { - let root = name - .strip_suffix("dtmt.cfg") - .expect("String must end with that suffix") - .to_string(); - - Ok((cfg, root)) - } - } else if let Some((mod_id, resources, root)) = legacy_mod_data { - let cfg = ModConfig { - bundled: false, - dir: PathBuf::new(), - id: mod_id.clone(), - name: mod_id, - summary: "A mod for the game Warhammer 40,000: Darktide".into(), - version: "N/A".into(), - description: None, - author: None, - image: None, - categories: Vec::new(), - packages: Vec::new(), - resources, - depends: Vec::new(), - name_overrides: Default::default(), - }; - - Ok((cfg, root)) - } else { - eyre::bail!( - "Mod needs a config file or `.mod` file. \ - Please get in touch with the author to provide a properly packaged mod." 
- ); - } -} - -#[tracing::instrument(skip(archive))] -fn extract_bundled_mod( - archive: &mut ZipArchive, - root: String, - dest: impl AsRef + std::fmt::Debug, -) -> Result>> { - let files: HashMap> = { - let name = archive - .file_names() - .find(|name| name.ends_with("files.sjson")) - .map(|s| s.to_string()) - .ok_or_else(|| eyre::eyre!("archive does not contain file index"))?; - - let mut f = archive - .by_name(&name) - .wrap_err("Failed to read file index from archive")?; - let mut buf = Vec::with_capacity(f.size() as usize); - f.read_to_end(&mut buf) - .wrap_err("Failed to read file index from archive")?; - - let data = String::from_utf8(buf).wrap_err("File index is not valid UTF-8")?; - serde_sjson::from_str(&data).wrap_err("Failed to deserialize file index")? - }; - - tracing::trace!(?files); - - let dest = dest.as_ref(); - tracing::trace!("Extracting mod archive to {}", dest.display()); - archive - .extract(dest) - .wrap_err_with(|| format!("Failed to extract archive to {}", dest.display()))?; - - let packages = files - .into_iter() - .map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect()))) - .collect(); - - tracing::trace!(?packages); - - Ok(packages) -} - -#[tracing::instrument(skip(archive))] -fn extract_legacy_mod( - archive: &mut ZipArchive, - root: String, - dest: impl Into + std::fmt::Debug, -) -> Result<()> { - let dest = dest.into(); - let file_count = archive.len(); - - for i in 0..file_count { - let mut f = archive - .by_index(i) - .wrap_err_with(|| format!("Failed to get file at index {}", i))?; - - let Some(name) = f.enclosed_name().map(|p| p.to_path_buf()) else { - let err = eyre::eyre!("File name in archive is not a safe path value.").suggestion( - "Only use well-known applications to create the ZIP archive, \ - and don't create paths that point outside the archive directory.", - ); - return Err(err); - }; - - let Ok(suffix) = name.strip_prefix(&root) else { - tracing::warn!( - "Skipping file outside of the mod root 
directory: {}", - name.display() - ); - continue; - }; - let name = dest.join(suffix); - - if f.is_dir() { - // The majority of errors will actually be "X already exists". - // But rather than filter them invidually, we just ignore all of them. - // If there is a legitimate error of "couldn't create X", it will eventually fail when - // we try to put a file in there. - tracing::trace!("Creating directory '{}'", name.display()); - let _ = std::fs::create_dir_all(&name); - } else { - let mut buf = Vec::with_capacity(f.size() as usize); - f.read_to_end(&mut buf) - .wrap_err_with(|| format!("Failed to read file '{}'", name.display()))?; - - tracing::trace!("Writing file '{}'", name.display()); - let mut out = std::fs::OpenOptions::new() - .write(true) - .truncate(true) - .open(&name) - .wrap_err_with(|| format!("Failed to open file '{}'", name.display()))?; - - out.write_all(&buf) - .wrap_err_with(|| format!("Failed to write to '{}'", name.display()))?; - } - } - - Ok(()) -} - -#[tracing::instrument(skip(state))] -pub(crate) async fn import_from_file(state: ActionState, info: FileInfo) -> Result { - let data = fs::read(&info.path) - .await - .wrap_err_with(|| format!("Failed to read file {}", info.path.display()))?; - - let nexus = if let Some((_, id, version, timestamp)) = info - .path - .file_name() - .and_then(|s| s.to_str()) - .and_then(NexusApi::parse_file_name) - { - if !state.nexus_api_key.is_empty() { - let api = NexusApi::new(state.nexus_api_key.to_string())?; - let mod_info = api - .mods_id(id) - .await - .wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?; - - let version = match api.file_version(id, timestamp).await { - Ok(version) => version, - Err(err) => { - let err = Report::new(err); - tracing::warn!( - "Failed to fetch version for Nexus download. 
\ - Falling back to file name:\n{:?}", - err - ); - version - } - }; - - let info = NexusInfo::from(mod_info); - tracing::debug!(version, ?info); - - Some((info, version)) - } else { - None - } - } else { - None - }; - - tracing::trace!(?nexus); - - import_mod(state, nexus, data).await -} - -#[tracing::instrument(skip(state))] -pub(crate) async fn import_from_nxm(state: ActionState, uri: String) -> Result { - let url = uri - .parse() - .wrap_err_with(|| format!("Invalid Uri '{}'", uri))?; - - let api = NexusApi::new(state.nexus_api_key.to_string())?; - let (mod_info, file_info, data) = api - .handle_nxm(url) - .await - .wrap_err_with(|| format!("Failed to download mod from NXM uri '{}'", uri))?; - - let nexus = NexusInfo::from(mod_info); - import_mod(state, Some((nexus, file_info.version)), data).await -} - -#[tracing::instrument(skip(state, data), fields(data = data.len()))] -pub(crate) async fn import_mod( - state: ActionState, - nexus: Option<(NexusInfo, String)>, - data: Vec, -) -> Result { - let data = Cursor::new(data); - let mut archive = ZipArchive::new(data).wrap_err("Failed to open ZIP archive")?; - - if tracing::enabled!(tracing::Level::DEBUG) { - let names = archive.file_names().fold(String::new(), |mut s, name| { - s.push('\n'); - s.push_str(name); - s - }); - tracing::debug!("Archive contents:{}", names); - } - - let (mut mod_cfg, root) = - extract_mod_config(&mut archive).wrap_err("Failed to extract mod configuration")?; - tracing::info!("Importing mod {} ({})", mod_cfg.name, mod_cfg.id); - - let mod_dir = state.data_dir.join(state.mod_dir.as_ref()); - let dest = mod_dir.join(&mod_cfg.id); - tracing::trace!("Creating mods directory {}", dest.display()); - fs::create_dir_all(&dest) - .await - .wrap_err_with(|| format!("Failed to create data directory '{}'", dest.display()))?; - - let image = if let Some(path) = &mod_cfg.image { - let name = archive - .file_names() - .find(|name| name.ends_with(&path.display().to_string())) - .map(|s| s.to_string()) - 
.ok_or_else(|| eyre::eyre!("archive does not contain configured image file"))?; - - let mut f = archive - .by_name(&name) - .wrap_err("Failed to read image file from archive")?; - let mut buf = Vec::with_capacity(f.size() as usize); - f.read_to_end(&mut buf) - .wrap_err("Failed to read file index from archive")?; - - let img = image_data_to_buffer(buf)?; - Some(img) - } else if let Some((nexus, _)) = &nexus { - let api = NexusApi::new(state.nexus_api_key.to_string())?; - let url = nexus.picture_url.as_ref(); - let data = api - .picture(url) - .await - .wrap_err_with(|| format!("Failed to download Nexus image from '{}'", url))?; - - let img = image_data_to_buffer(&data)?; - - let name = "image.bin"; - let path = dest.join(name); - match fs::write(&path, &data).await { - Ok(_) => { - mod_cfg.image = Some(name.into()); - Some(img) - } - Err(err) => { - let err = Report::new(err).wrap_err(format!( - "Failed to write Nexus picture to file '{}'", - path.display() - )); - tracing::error!("{:?}", err); - None - } - } - } else { - None - }; - - tracing::trace!(?image); - tracing::debug!(root, ?mod_cfg); - - let packages = if mod_cfg.bundled { - extract_bundled_mod(&mut archive, root, &mod_dir).wrap_err("Failed to extract mod")? - } else { - extract_legacy_mod(&mut archive, root, &dest).wrap_err("Failed to extract legacy mod")?; - - if let Some((_, version)) = &nexus { - // We use the version number stored in the `ModInfo` to compare against the `NexusInfo` - // for version checks. So for this one, we can't actually rely on merely shadowing, - // like with the other fields. 
- mod_cfg.version = version.clone(); - } - - let data = serde_sjson::to_string(&mod_cfg).wrap_err("Failed to serialize mod config")?; - fs::write(dest.join("dtmt.cfg"), &data) - .await - .wrap_err("Failed to write mod config")?; - - Default::default() - }; - - if let Some((nexus, _)) = &nexus { - let data = serde_sjson::to_string(nexus).wrap_err("Failed to serialize Nexus info")?; - let path = dest.join("nexus.sjson"); - fs::write(&path, data.as_bytes()) - .await - .wrap_err_with(|| format!("Failed to write Nexus info to '{}'", path.display()))?; - } - - let info = ModInfo::new(mod_cfg, packages, image, nexus.map(|(info, _)| info)); - Ok(info) -} diff --git a/crates/dtmm/src/controller/mod.rs b/crates/dtmm/src/controller/mod.rs deleted file mode 100644 index 9c75e84..0000000 --- a/crates/dtmm/src/controller/mod.rs +++ /dev/null @@ -1,25 +0,0 @@ -use std::path::Path; - -use color_eyre::{eyre::Context, Result}; -use serde::Deserialize; -use tokio::fs; - -pub mod app; -pub mod deploy; -pub mod game; -pub mod import; -pub mod worker; - -#[tracing::instrument] -async fn read_sjson_file(path: P) -> Result -where - T: for<'a> Deserialize<'a>, - P: AsRef + std::fmt::Debug, -{ - let path = path.as_ref(); - let buf = fs::read(path) - .await - .wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?; - let data = String::from_utf8(buf).wrap_err("Invalid UTF8")?; - serde_sjson::from_str(&data).wrap_err("Failed to deserialize SJSON") -} diff --git a/crates/dtmm/src/controller/worker.rs b/crates/dtmm/src/controller/worker.rs deleted file mode 100644 index 6ee498f..0000000 --- a/crates/dtmm/src/controller/worker.rs +++ /dev/null @@ -1,246 +0,0 @@ -use std::sync::Arc; - -use color_eyre::eyre::Context; -use color_eyre::Help; -use color_eyre::Report; -use color_eyre::Result; -use druid::{ExtEventSink, SingleUse, Target}; -use tokio::fs::OpenOptions; -use tokio::io::AsyncWriteExt; -use tokio::runtime::Runtime; - -use tokio::sync::mpsc::UnboundedReceiver; -use 
tokio::sync::RwLock; - -use crate::controller::app::*; -use crate::controller::deploy::deploy_mods; -use crate::controller::game::*; -use crate::controller::import::*; -use crate::state::AsyncAction; -use crate::state::ACTION_FINISH_CHECK_UPDATE; -use crate::state::ACTION_FINISH_LOAD_INITIAL; -use crate::state::ACTION_FINISH_SAVE_SETTINGS; -use crate::state::ACTION_SHOW_ERROR_DIALOG; -use crate::state::{ - ACTION_FINISH_ADD_MOD, ACTION_FINISH_DELETE_SELECTED_MOD, ACTION_FINISH_DEPLOY, - ACTION_FINISH_RESET_DEPLOYMENT, ACTION_LOG, -}; - -async fn send_error(sink: Arc>, err: Report) { - sink.write() - .await - .submit_command(ACTION_SHOW_ERROR_DIALOG, SingleUse::new(err), Target::Auto) - .expect("failed to send command"); -} - -async fn handle_action( - event_sink: Arc>, - action_queue: Arc>>, -) { - while let Some(action) = action_queue.write().await.recv().await { - if cfg!(debug_assertions) && !matches!(action, AsyncAction::Log(_)) { - tracing::debug!(?action); - } - - let event_sink = event_sink.clone(); - match action { - AsyncAction::DeployMods(state) => tokio::spawn(async move { - if let Err(err) = deploy_mods(state).await.wrap_err("Failed to deploy mods") { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - } - - event_sink - .write() - .await - .submit_command(ACTION_FINISH_DEPLOY, (), Target::Auto) - .expect("failed to send command"); - }), - AsyncAction::AddMod(state, info) => tokio::spawn(async move { - match import_from_file(state, info) - .await - .wrap_err("Failed to import mod") - { - Ok(mod_info) => { - event_sink - .write() - .await - .submit_command( - ACTION_FINISH_ADD_MOD, - SingleUse::new(Arc::new(mod_info)), - Target::Auto, - ) - .expect("failed to send command"); - } - Err(err) => { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - } - } - }), - AsyncAction::DeleteMod(state, info) => tokio::spawn(async move { - let mod_dir = state.mod_dir.join(&info.id); - if let Err(err) = 
delete_mod(state, &info) - .await - .wrap_err("Failed to delete mod files") - .with_suggestion(|| { - format!("Clean the folder '{}' manually", mod_dir.display()) - }) - { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - } - - event_sink - .write() - .await - .submit_command( - ACTION_FINISH_DELETE_SELECTED_MOD, - SingleUse::new(info), - Target::Auto, - ) - .expect("failed to send command"); - }), - AsyncAction::ResetDeployment(state) => tokio::spawn(async move { - if let Err(err) = reset_mod_deployment(state) - .await - .wrap_err("Failed to reset mod deployment") - { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - } - - event_sink - .write() - .await - .submit_command(ACTION_FINISH_RESET_DEPLOYMENT, (), Target::Auto) - .expect("failed to send command"); - }), - AsyncAction::SaveSettings(state) => tokio::spawn(async move { - if let Err(err) = save_settings(state) - .await - .wrap_err("Failed to save settings") - { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - } - - event_sink - .write() - .await - .submit_command(ACTION_FINISH_SAVE_SETTINGS, (), Target::Auto) - .expect("failed to send command"); - }), - AsyncAction::CheckUpdates(state) => tokio::spawn(async move { - let updates = match check_updates(state) - .await - .wrap_err("Failed to check for updates") - { - Ok(updates) => updates, - Err(err) => { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - vec![] - } - }; - - event_sink - .write() - .await - .submit_command( - ACTION_FINISH_CHECK_UPDATE, - SingleUse::new(updates), - Target::Auto, - ) - .expect("failed to send command"); - }), - AsyncAction::LoadInitial((path, is_default)) => tokio::spawn(async move { - let data = match load_initial(path, is_default) - .await - .wrap_err("Failed to load initial application data") - { - Ok(data) => Some(data), - Err(err) => { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; 
- None - } - }; - - event_sink - .write() - .await - .submit_command( - ACTION_FINISH_LOAD_INITIAL, - SingleUse::new(data), - Target::Auto, - ) - .expect("failed to send command"); - }), - AsyncAction::Log((state, line)) => tokio::spawn(async move { - if let Ok(mut f) = OpenOptions::new() - .append(true) - .open(state.data_dir.join("dtmm.log")) - .await - { - let _ = f.write_all(&line).await; - } - }), - AsyncAction::NxmDownload(state, uri) => tokio::spawn(async move { - match import_from_nxm(state, uri) - .await - .wrap_err("Failed to handle NXM URI") - { - Ok(mod_info) => { - event_sink - .write() - .await - .submit_command( - ACTION_FINISH_ADD_MOD, - SingleUse::new(Arc::new(mod_info)), - Target::Auto, - ) - .expect("failed to send command"); - } - Err(err) => { - tracing::error!("{:?}", err); - send_error(event_sink.clone(), err).await; - } - } - }), - }; - } -} - -async fn handle_log( - event_sink: Arc>, - log_queue: Arc>>>, -) { - while let Some(line) = log_queue.write().await.recv().await { - let event_sink = event_sink.clone(); - event_sink - .write() - .await - .submit_command(ACTION_LOG, SingleUse::new(line), Target::Auto) - .expect("failed to send command"); - } -} - -pub(crate) fn work_thread( - event_sink: Arc>, - action_queue: Arc>>, - log_queue: Arc>>>, -) -> Result<()> { - let rt = Runtime::new()?; - - rt.block_on(async { - loop { - tokio::select! 
{ - _ = handle_action(event_sink.clone(), action_queue.clone()) => {}, - _ = handle_log(event_sink.clone(), log_queue.clone()) => {}, - } - } - }); - - Ok(()) -} diff --git a/crates/dtmm/src/main.rs b/crates/dtmm/src/main.rs index 54e101a..8bf57c1 100644 --- a/crates/dtmm/src/main.rs +++ b/crates/dtmm/src/main.rs @@ -1,222 +1,45 @@ -#![recursion_limit = "256"] #![feature(let_chains)] -#![feature(iterator_try_collect)] -#![windows_subsystem = "windows"] -use std::path::PathBuf; -use std::sync::Arc; - -use clap::parser::ValueSource; -use clap::{command, value_parser, Arg}; -use color_eyre::eyre::{self, Context}; -use color_eyre::{Report, Result, Section}; +use clap::command; +use color_eyre::Report; +use color_eyre::Result; use druid::AppLauncher; -use interprocess::local_socket::{prelude::*, GenericNamespaced, ListenerOptions}; -use tokio::sync::RwLock; +use tracing_error::ErrorLayer; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; -use crate::controller::worker::work_thread; -use crate::state::{AsyncAction, ACTION_HANDLE_NXM}; -use crate::state::{Delegate, State}; -use crate::ui::theme; -use crate::util::log::LogLevel; +use crate::state::State; mod controller; +mod main_window; mod state; -mod util { - pub mod ansi; - pub mod config; - pub mod log; -} -mod ui; - -// As explained in https://docs.rs/interprocess/2.1.0/interprocess/local_socket/struct.Name.html -// namespaces are supported on both platforms we care about: Windows and Linux. 
-const IPC_ADDRESS: &str = "dtmm.sock"; +mod theme; +mod widget; #[tracing::instrument] -fn notify_nxm_download( - uri: impl AsRef + std::fmt::Debug, - level: Option, -) -> Result<()> { - util::log::create_tracing_subscriber(level, None); - - tracing::debug!("Received Uri '{}', sending to main process.", uri.as_ref()); - - let mut stream = LocalSocketStream::connect( - IPC_ADDRESS - .to_ns_name::() - .expect("Invalid socket name"), - ) - .wrap_err_with(|| format!("Failed to connect to '{}'", IPC_ADDRESS)) - .suggestion("Make sure the main window is open.")?; - - tracing::debug!("Connected to main process at '{}'", IPC_ADDRESS); - - let bincode_config = bincode::config::standard(); - - bincode::encode_into_std_write(uri.as_ref(), &mut stream, bincode_config) - .wrap_err("Failed to send URI")?; - - // We don't really care what the message is, we just need an acknowledgement. - let _: String = bincode::decode_from_std_read(&mut stream, bincode_config) - .wrap_err("Failed to receive reply")?; - - tracing::info!( - "Notified DTMM with uri '{}'. 
Check the main window.", - uri.as_ref() - ); - Ok(()) -} - -#[tracing::instrument] -fn main() -> Result<()> { +#[tokio::main] +async fn main() -> Result<()> { color_eyre::install()?; - let default_config_path = util::config::get_default_config_path(); - - tracing::trace!(default_config_path = %default_config_path.display()); - - let matches = command!() - .arg( - Arg::new("config") - .long("config") - .short('c') - .help("Path to the config file") - .value_parser(value_parser!(PathBuf)) - .default_value(default_config_path.to_string_lossy().to_string()), - ) - .arg( - Arg::new("log-level") - .long("log-level") - .help("The maximum level of log events to print") - .value_parser(value_parser!(LogLevel)) - .default_value("info"), - ) - .arg( - Arg::new("nxm") - .help("An `nxm://` URI to download") - .required(false), - ) - .get_matches(); - - let level = if matches.value_source("log-level") == Some(ValueSource::DefaultValue) { - None - } else { - matches.get_one::("log-level").cloned() - }; - - if let Some(uri) = matches.get_one::("nxm") { - return notify_nxm_download(uri, level).wrap_err("Failed to send NXM Uri to main window."); - } - - let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel(); - util::log::create_tracing_subscriber(level, Some(log_tx)); - - let (action_tx, action_rx) = tokio::sync::mpsc::unbounded_channel(); - - let config_path = matches - .get_one::("config") - .cloned() - .expect("argument has default value"); - let is_config_default = matches.value_source("config") == Some(ValueSource::DefaultValue); - if action_tx - .send(AsyncAction::LoadInitial((config_path, is_config_default))) - .is_err() - { - let err = eyre::eyre!("Failed to send action"); - return Err(err); - } - - let launcher = AppLauncher::with_window(ui::window::main::new()) - .delegate(Delegate::new(action_tx)) - .configure_env(theme::set_theme_env); - - let event_sink = launcher.get_external_handle(); + let _matches = command!().get_matches(); { - let span = 
tracing::info_span!(IPC_ADDRESS, "nxm-socket"); - let _guard = span.enter(); + let fmt_layer = tracing_subscriber::fmt::layer().pretty(); + let filter_layer = + EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?; - let event_sink = event_sink.clone(); - let server = ListenerOptions::new() - .name( - IPC_ADDRESS - .to_ns_name::() - .expect("Invalid socket name"), - ) - .create_sync() - .wrap_err("Failed to create IPC listener")?; - - tracing::debug!("IPC server listening on '{}'", IPC_ADDRESS); - - // Drop the guard here, so that we can re-enter the same span in the thread. - drop(_guard); - - std::thread::Builder::new() - .name("nxm-socket".into()) - .spawn(move || { - let _guard = span.enter(); - - loop { - let res = server.accept().wrap_err_with(|| { - format!("IPC server failed to listen on '{}'", IPC_ADDRESS) - }); - - match res { - Ok(mut stream) => { - let res = bincode::decode_from_std_read( - &mut stream, - bincode::config::standard(), - ) - .wrap_err("Failed to read message") - .and_then(|uri: String| { - tracing::trace!(uri, "Received NXM uri"); - - event_sink - .submit_command(ACTION_HANDLE_NXM, uri, druid::Target::Auto) - .wrap_err("Failed to start NXM download") - }); - match res { - Ok(()) => { - let _ = bincode::encode_into_std_write( - "Ok", - &mut stream, - bincode::config::standard(), - ); - } - Err(err) => { - tracing::error!("{:?}", err); - let _ = bincode::encode_into_std_write( - "Error", - &mut stream, - bincode::config::standard(), - ); - } - } - } - Err(err) => { - tracing::error!("Failed to receive client connection: {:?}", err) - } - } - } - }) - .wrap_err("Failed to create thread")?; + tracing_subscriber::registry() + .with(filter_layer) + .with(fmt_layer) + .with(ErrorLayer::new( + tracing_subscriber::fmt::format::Pretty::default(), + )) + .init(); } - std::thread::Builder::new() - .name("work-thread".into()) - .spawn(move || { - let event_sink = Arc::new(RwLock::new(event_sink)); - let action_rx = 
Arc::new(RwLock::new(action_rx)); - let log_rx = Arc::new(RwLock::new(log_rx)); - loop { - if let Err(err) = work_thread(event_sink.clone(), action_rx.clone(), log_rx.clone()) - { - tracing::error!("Work thread failed, restarting: {:?}", err); - } - } - }) - .wrap_err("Failed to create thread")?; + let initial_state = State::new(); - launcher.launch(State::new()).map_err(Report::new) + AppLauncher::with_window(main_window::new()) + .launch(initial_state) + .map_err(Report::new) } diff --git a/crates/dtmm/src/main_window.rs b/crates/dtmm/src/main_window.rs new file mode 100644 index 0000000..385894b --- /dev/null +++ b/crates/dtmm/src/main_window.rs @@ -0,0 +1,232 @@ +use druid::im::Vector; +use druid::widget::{ + Align, Button, CrossAxisAlignment, Flex, Label, List, MainAxisAlignment, Maybe, Scroll, Split, + ViewSwitcher, +}; +use druid::{lens, Insets, LensExt, Widget, WidgetExt, WindowDesc}; + +use crate::state::{ + ModInfo, State, StateController, View, ACTION_DELETE_SELECTED_MOD, ACTION_SELECTED_MOD_DOWN, + ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, +}; +use crate::theme; +use crate::widget::ExtraWidgetExt; + +const TITLE: &str = "Darktide Mod Manager"; +const WINDOW_SIZE: (f64, f64) = (800.0, 600.0); +const MOD_DETAILS_MIN_WIDTH: f64 = 325.0; + +pub(crate) fn new() -> WindowDesc { + WindowDesc::new(build_window()) + .title(TITLE) + .window_size(WINDOW_SIZE) +} + +fn build_top_bar() -> impl Widget { + Flex::row() + .must_fill_main_axis(true) + .main_axis_alignment(MainAxisAlignment::SpaceBetween) + .with_child( + Flex::row() + .with_child( + Button::new("Mods").on_click(|_ctx, state: &mut State, _env| { + state.set_current_view(View::Mods) + }), + ) + .with_default_spacer() + .with_child( + Button::new("Settings") + .on_click(|_ctx, state: &mut State, _env| { + state.set_current_view(View::Settings) + }) + .hidden_if(|_, _| true), + ) + .with_default_spacer() + .with_child( + Button::new("About").on_click(|_ctx, state: &mut State, _env| { + 
state.set_current_view(View::About) + }), + ), + ) + .with_child( + Flex::row() + .with_child(Button::new("Deploy Mods").on_click( + |_ctx, _state: &mut State, _env| { + todo!(); + }, + )) + .with_default_spacer() + .with_child( + Button::new("Run Game").on_click(|_ctx, _state: &mut State, _env| { + todo!(); + }), + ), + ) + .padding(theme::TOP_BAR_INSETS) + .background(theme::TOP_BAR_BACKGROUND_COLOR) + // TODO: Add bottom border. Need a custom widget for that, as the built-in only provides + // uniform borders on all sides +} + +fn build_mod_list() -> impl Widget { + let list = List::new(|| { + Flex::row() + .must_fill_main_axis(true) + .with_child( + Label::dynamic(|enabled, _env| { + if *enabled { + "Enabled".into() + } else { + "Disabled".into() + } + }) + .lens(lens!((usize, ModInfo), 1).then(ModInfo::enabled)), + ) + .with_child(Label::raw().lens(lens!((usize, ModInfo), 1).then(ModInfo::name))) + .on_click(|ctx, (i, _info), _env| ctx.submit_notification(ACTION_SELECT_MOD.with(*i))) + }); + + let scroll = Scroll::new(list) + .vertical() + .lens(State::mods.map( + |mods| { + mods.iter() + .enumerate() + .map(|(i, val)| (i, val.clone())) + .collect::>() + }, + |mods, infos| { + infos.into_iter().for_each(|(i, info)| { + mods.set(i, info); + }); + }, + )) + .content_must_fill(); + + Flex::column() + .must_fill_main_axis(true) + .with_child(Flex::row()) + .with_flex_child(scroll, 1.0) +} + +fn build_mod_details() -> impl Widget { + let details_container = Maybe::new( + || { + Flex::column() + .cross_axis_alignment(CrossAxisAlignment::Start) + .with_child(Label::raw().lens(ModInfo::name)) + .with_flex_child(Label::raw().lens(ModInfo::description), 1.0) + }, + Flex::column, + ) + .padding(Insets::uniform_xy(5.0, 1.0)) + .lens(State::selected_mod); + + let button_move_up = Button::new("Move Up") + .on_click(|ctx, _state, _env| ctx.submit_notification(ACTION_SELECTED_MOD_UP)) + .disabled_if(|state: &State, _env: &druid::Env| state.can_move_mod_up()); + + let 
button_move_down = Button::new("Move Down") + .on_click(|ctx, _state, _env| ctx.submit_notification(ACTION_SELECTED_MOD_DOWN)) + .disabled_if(|state: &State, _env: &druid::Env| state.can_move_mod_down()); + + let button_toggle_mod = Maybe::new( + || { + Button::dynamic(|enabled, _env| { + if *enabled { + "Disable Mod".into() + } else { + "Enable Mod".into() + } + }) + .on_click(|_ctx, enabled: &mut bool, _env| { + *enabled = !(*enabled); + }) + .lens(ModInfo::enabled) + }, + // TODO: Gray out + || Button::new("Enable Mod"), + ) + .disabled_if(|info: &Option, _env: &druid::Env| info.is_none()) + .lens(State::selected_mod); + + let button_add_mod = Button::new("Add Mod").on_click(|_ctx, state: &mut State, _env| { + // TODO: Implement properly + let info = ModInfo::new(); + state.add_mod(info); + }); + + let button_delete_mod = Button::new("Delete Mod") + .on_click(|ctx, _state, _env| ctx.submit_notification(ACTION_DELETE_SELECTED_MOD)) + .disabled_if(|info: &Option, _env: &druid::Env| info.is_none()) + .lens(State::selected_mod); + + let buttons = Flex::column() + .with_child( + Flex::row() + .main_axis_alignment(MainAxisAlignment::End) + .with_child(button_move_up) + .with_default_spacer() + .with_child(button_move_down) + .padding(Insets::uniform_xy(5.0, 2.0)), + ) + .with_child( + Flex::row() + .main_axis_alignment(MainAxisAlignment::End) + .with_child(button_toggle_mod) + .with_default_spacer() + .with_child(button_add_mod) + .with_default_spacer() + .with_child(button_delete_mod) + .padding(Insets::uniform_xy(5.0, 2.0)), + ) + .with_default_spacer(); + + Flex::column() + .must_fill_main_axis(true) + .main_axis_alignment(MainAxisAlignment::SpaceBetween) + .with_flex_child(details_container, 1.0) + .with_child(buttons) +} + +fn build_view_mods() -> impl Widget { + Split::columns(build_mod_list(), build_mod_details()) + .split_point(0.75) + .min_size(0.0, MOD_DETAILS_MIN_WIDTH) + .solid_bar(true) + .bar_size(2.0) + .draggable(true) +} + +fn build_view_settings() -> 
impl Widget { + Label::new("Settings") +} + +fn build_view_about() -> impl Widget { + Align::centered( + Flex::column() + .with_child(Label::new("Darktide Mod Manager")) + .with_child(Label::new( + "Website: https://git.sclu1034.dev/bitsquid_dt/dtmt", + )), + ) +} + +fn build_main() -> impl Widget { + ViewSwitcher::new( + |state: &State, _env| state.get_current_view(), + |selector, _state, _env| match selector { + View::Mods => Box::new(build_view_mods()), + View::Settings => Box::new(build_view_settings()), + View::About => Box::new(build_view_about()), + }, + ) +} + +fn build_window() -> impl Widget { + Flex::column() + .must_fill_main_axis(true) + .with_child(build_top_bar()) + .with_flex_child(build_main(), 1.0) + .controller(StateController::new()) +} diff --git a/crates/dtmm/src/state.rs b/crates/dtmm/src/state.rs new file mode 100644 index 0000000..51f0d40 --- /dev/null +++ b/crates/dtmm/src/state.rs @@ -0,0 +1,226 @@ +use std::sync::Arc; + +use druid::im::Vector; +use druid::widget::Controller; +use druid::{Data, Env, Event, EventCtx, Lens, Selector, Widget}; + +pub const ACTION_SELECT_MOD: Selector = Selector::new("dtmm.action..select-mod"); +pub const ACTION_SELECTED_MOD_UP: Selector = Selector::new("dtmm.action.selected-mod-up"); +pub const ACTION_SELECTED_MOD_DOWN: Selector = Selector::new("dtmm.action.selected-mod-down"); +pub const ACTION_DELETE_SELECTED_MOD: Selector = Selector::new("dtmm.action.delete-selected-mod"); + +#[derive(Copy, Clone, Data, PartialEq)] +pub(crate) enum View { + Mods, + Settings, + About, +} + +impl Default for View { + fn default() -> Self { + Self::Mods + } +} + +#[derive(Clone, Data, Lens)] +pub(crate) struct ModInfo { + name: String, + description: Arc, + enabled: bool, +} +impl ModInfo { + pub fn new() -> Self { + Self { + name: format!("Test Mod: {:?}", std::time::SystemTime::now()), + description: Arc::new(String::from("A test dummy")), + enabled: false, + } + } +} + +impl PartialEq for ModInfo { + fn eq(&self, other: 
&Self) -> bool { + self.name.eq(&other.name) + } +} + +#[derive(Clone, Data, Default, Lens)] +pub(crate) struct State { + current_view: View, + mods: Vector, + selected_mod_index: Option, +} + +pub(crate) struct SelectedModLens; + +impl Lens> for SelectedModLens { + #[tracing::instrument(name = "SelectedModLens::with", skip_all)] + fn with) -> V>(&self, data: &State, f: F) -> V { + let info = data + .selected_mod_index + .and_then(|i| data.mods.get(i).cloned()); + + f(&info) + } + + #[tracing::instrument(name = "SelectedModLens::with_mut", skip_all)] + fn with_mut) -> V>(&self, data: &mut State, f: F) -> V { + match data.selected_mod_index { + Some(i) => { + let mut info = data.mods.get_mut(i).cloned(); + let ret = f(&mut info); + + if let Some(info) = info { + // TODO: Figure out a way to check for equality and + // only update when needed + data.mods.set(i, info); + } else { + data.selected_mod_index = None; + } + + ret + } + None => f(&mut None), + } + } +} + +/// A Lens that maps an `im::Vector` to `im::Vector<(usize, T)>`, +/// where each element in the destination vector includes its index in the +/// source vector. 
+pub(crate) struct IndexedVectorLens; + +impl Lens, Vector<(usize, T)>> for IndexedVectorLens { + #[tracing::instrument(name = "IndexedVectorLens::with", skip_all)] + fn with) -> V>(&self, values: &Vector, f: F) -> V { + let indexed = values + .iter() + .enumerate() + .map(|(i, val)| (i, val.clone())) + .collect(); + f(&indexed) + } + + #[tracing::instrument(name = "IndexedVectorLens::with_mut", skip_all)] + fn with_mut) -> V>( + &self, + values: &mut Vector, + f: F, + ) -> V { + let mut indexed = values + .iter() + .enumerate() + .map(|(i, val)| (i, val.clone())) + .collect(); + let ret = f(&mut indexed); + tracing::trace!("with_mut: {}", indexed.len()); + + *values = indexed.into_iter().map(|(_i, val)| val).collect(); + + ret + } +} + +impl State { + #[allow(non_upper_case_globals)] + pub const selected_mod: SelectedModLens = SelectedModLens; + + pub fn new() -> Self { + Default::default() + } + + pub fn get_current_view(&self) -> View { + self.current_view + } + + pub fn set_current_view(&mut self, view: View) { + self.current_view = view; + } + + pub fn select_mod(&mut self, index: usize) { + self.selected_mod_index = Some(index); + } + + pub fn add_mod(&mut self, info: ModInfo) { + self.mods.push_back(info); + self.selected_mod_index = Some(self.mods.len() - 1); + } + + pub fn can_move_mod_down(&self) -> bool { + self.selected_mod_index + .map(|i| i >= (self.mods.len().saturating_sub(1))) + .unwrap_or(true) + } + + pub fn can_move_mod_up(&self) -> bool { + self.selected_mod_index.map(|i| i == 0).unwrap_or(true) + } +} + +pub struct StateController {} + +impl StateController { + pub fn new() -> Self { + Self {} + } +} + +impl> Controller for StateController { + #[tracing::instrument(name = "StateController::event", skip_all)] + fn event( + &mut self, + child: &mut W, + ctx: &mut EventCtx, + event: &Event, + state: &mut State, + env: &Env, + ) { + match event { + Event::Notification(notif) if notif.is(ACTION_SELECT_MOD) => { + ctx.set_handled(); + let index = 
notif + .get(ACTION_SELECT_MOD) + .expect("notification type didn't match after check"); + + state.select_mod(*index); + } + Event::Notification(notif) if notif.is(ACTION_SELECTED_MOD_UP) => { + ctx.set_handled(); + let Some(i) = state.selected_mod_index else { + return; + }; + + let len = state.mods.len(); + if len == 0 || i == 0 { + return; + } + + state.mods.swap(i, i - 1); + state.selected_mod_index = Some(i - 1); + } + Event::Notification(notif) if notif.is(ACTION_SELECTED_MOD_DOWN) => { + ctx.set_handled(); + let Some(i) = state.selected_mod_index else { + return; + }; + + let len = state.mods.len(); + if len == 0 || i == usize::MAX || i >= len - 1 { + return; + } + + state.mods.swap(i, i + 1); + state.selected_mod_index = Some(i + 1); + } + Event::Notification(notif) if notif.is(ACTION_DELETE_SELECTED_MOD) => { + ctx.set_handled(); + let Some(index) = state.selected_mod_index else { + return; + }; + + state.mods.remove(index); + } + _ => child.event(ctx, event, state, env), + } + } +} diff --git a/crates/dtmm/src/state/data.rs b/crates/dtmm/src/state/data.rs deleted file mode 100644 index 23a4ae0..0000000 --- a/crates/dtmm/src/state/data.rs +++ /dev/null @@ -1,276 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; - -use druid::im::{HashMap, Vector}; -use druid::text::RichText; -use druid::{Data, ImageBuf, Lens, WindowHandle, WindowId}; -use dtmt_shared::ModConfig; -use nexusmods::Mod as NexusMod; - -use super::SelectedModLens; - -#[derive(Copy, Clone, Data, Debug, PartialEq)] -pub(crate) enum View { - Mods, - Settings, -} - -impl Default for View { - fn default() -> Self { - Self::Mods - } -} - -#[derive(Clone, Data, Debug, PartialEq)] -pub struct PackageInfo { - pub name: String, - pub files: Vector, -} - -impl PackageInfo { - pub fn new(name: String, files: Vector) -> Self { - Self { name, files } - } -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct ModResourceInfo { - pub init: PathBuf, - pub data: Option, - pub localization: Option, -} - 
-#[derive(Clone, Data, Debug, PartialEq)] -pub(crate) enum ModOrder { - Before, - After, -} - -#[derive(Clone, Data, Debug, PartialEq)] -pub(crate) struct ModDependency { - pub id: String, - pub order: ModOrder, -} - -impl From for ModDependency { - fn from(value: dtmt_shared::ModDependency) -> Self { - match value { - dtmt_shared::ModDependency::ID(id) => ModDependency { - id, - order: ModOrder::Before, - }, - dtmt_shared::ModDependency::Config { id, order } => ModDependency { - id, - order: match order { - dtmt_shared::ModOrder::Before => ModOrder::Before, - dtmt_shared::ModOrder::After => ModOrder::After, - }, - }, - } - } -} - -#[derive(Clone, Data, Debug, Lens, serde::Serialize, serde::Deserialize)] -pub(crate) struct NexusInfo { - pub author: String, - pub category_id: u64, - pub created_timestamp: i64, - pub description: Arc, - pub id: u64, - pub name: String, - pub picture_url: Arc, - pub summary: Arc, - pub uid: u64, - pub updated_timestamp: i64, - pub uploaded_by: String, - pub version: String, -} - -impl From for NexusInfo { - fn from(value: NexusMod) -> Self { - Self { - author: value.author, - category_id: value.category_id, - created_timestamp: value.created_timestamp.unix_timestamp(), - description: Arc::new(value.description), - id: value.mod_id, - name: value.name, - picture_url: Arc::new(value.picture_url.into()), - summary: Arc::new(value.summary), - uid: value.uid, - updated_timestamp: value.updated_timestamp.unix_timestamp(), - uploaded_by: value.uploaded_by, - version: value.version, - } - } -} - -#[derive(Clone, Data, Lens)] -pub(crate) struct ModInfo { - pub id: String, - pub name: String, - pub summary: Arc, - pub description: Option>, - pub categories: Vector, - pub author: Option, - pub image: Option, - pub version: String, - pub enabled: bool, - pub depends: Vector, - pub bundled: bool, - #[lens(ignore)] - #[data(ignore)] - pub packages: Vector>, - #[lens(ignore)] - #[data(ignore)] - pub resources: ModResourceInfo, - #[data(ignore)] - 
pub nexus: Option, -} - -impl std::fmt::Debug for ModInfo { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ModInfo") - .field("id", &self.id) - .field("name", &self.name) - .field("summary", &self.summary) - .field( - "description", - &(match &self.description { - Some(desc) => format!("Some(String[0..{}])", desc.len()), - None => "None".to_string(), - }), - ) - .field("categories", &self.categories) - .field("author", &self.author) - .field( - "image", - &(match &self.image { - Some(image) => format!("Some(ImageBuf[{}x{}])", image.width(), image.height()), - None => "None".to_string(), - }), - ) - .field("version", &self.version) - .field("enabled", &self.enabled) - .field("packages", &format!("Vec[0..{}]", self.packages.len())) - .field("resources", &self.resources) - .field("depends", &self.depends) - .field("bundled", &self.bundled) - .field("nexus", &self.nexus) - .finish() - } -} - -impl ModInfo { - pub fn new( - cfg: ModConfig, - packages: Vector>, - image: Option, - nexus: Option, - ) -> Self { - Self { - id: cfg.id, - name: cfg.name, - summary: Arc::new(cfg.summary), - description: cfg.description.map(Arc::new), - author: cfg.author, - version: cfg.version, - enabled: false, - packages, - bundled: cfg.bundled, - image, - categories: cfg.categories.into_iter().collect(), - resources: ModResourceInfo { - init: cfg.resources.init, - data: cfg.resources.data, - localization: cfg.resources.localization, - }, - depends: cfg.depends.into_iter().map(ModDependency::from).collect(), - nexus, - } - } -} - -#[derive(Clone, Data, Lens)] -pub(crate) struct State { - pub current_view: View, - pub mods: Vector>, - pub selected_mod_index: Option, - pub dirty: bool, - pub is_deployment_in_progress: bool, - pub is_reset_in_progress: bool, - pub is_save_in_progress: bool, - pub is_next_save_pending: bool, - pub is_update_in_progress: bool, - pub is_io_enabled: bool, - pub game_dir: Arc, - pub data_dir: Arc, - pub nexus_api_key: Arc, - 
pub log: Vector, - // True, when the initial loading of configuration and mods is still in progress - pub loading: bool, - - #[lens(ignore)] - #[data(ignore)] - pub config_path: Arc, - #[lens(ignore)] - #[data(ignore)] - pub windows: HashMap, - #[lens(ignore)] - #[data(ignore)] - pub ctx: Arc, -} - -impl State { - #[allow(non_upper_case_globals)] - pub const selected_mod: SelectedModLens = SelectedModLens; - - pub fn new() -> Self { - let ctx = sdk::Context::new(); - - Self { - ctx: Arc::new(ctx), - current_view: View::default(), - mods: Vector::new(), - selected_mod_index: None, - dirty: false, - is_deployment_in_progress: false, - is_reset_in_progress: false, - is_save_in_progress: false, - is_next_save_pending: false, - is_update_in_progress: false, - is_io_enabled: false, - config_path: Arc::new(PathBuf::new()), - game_dir: Arc::new(PathBuf::new()), - data_dir: Arc::new(PathBuf::new()), - nexus_api_key: Arc::new(String::new()), - log: Vector::new(), - windows: HashMap::new(), - loading: true, - } - } - - pub fn select_mod(&mut self, index: usize) { - self.selected_mod_index = Some(index); - } - - pub fn add_mod(&mut self, info: Arc) { - if let Some(pos) = self.mods.iter().position(|i| i.id == info.id) { - self.mods.set(pos, info); - self.selected_mod_index = Some(pos); - } else { - self.mods.push_back(info); - self.selected_mod_index = Some(self.mods.len() - 1); - } - } - - pub fn can_move_mod_down(&self) -> bool { - self.selected_mod_index - .map(|i| i < (self.mods.len().saturating_sub(1))) - .unwrap_or(false) - } - - pub fn can_move_mod_up(&self) -> bool { - self.selected_mod_index.map(|i| i > 0).unwrap_or(false) - } -} diff --git a/crates/dtmm/src/state/delegate.rs b/crates/dtmm/src/state/delegate.rs deleted file mode 100644 index f3c4711..0000000 --- a/crates/dtmm/src/state/delegate.rs +++ /dev/null @@ -1,475 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; - -use color_eyre::Report; -use druid::im::Vector; -use druid::{ - AppDelegate, Command, 
DelegateCtx, Env, FileInfo, Handled, Selector, SingleUse, Target, - WindowHandle, WindowId, -}; -use tokio::sync::mpsc::UnboundedSender; - -use crate::ui::window; -use crate::util::ansi::ansi_to_rich_text; -use crate::util::config::Config; - -use super::{ModInfo, State}; - -pub(crate) const ACTION_SELECT_MOD: Selector = Selector::new("dtmm.action.select-mod"); -pub(crate) const ACTION_SELECTED_MOD_UP: Selector = Selector::new("dtmm.action.selected-mod-up"); -pub(crate) const ACTION_SELECTED_MOD_DOWN: Selector = - Selector::new("dtmm.action.selected-mod-down"); -pub(crate) const ACTION_START_DELETE_SELECTED_MOD: Selector>> = - Selector::new("dtmm.action.srart-delete-selected-mod"); -pub(crate) const ACTION_FINISH_DELETE_SELECTED_MOD: Selector>> = - Selector::new("dtmm.action.finish-delete-selected-mod"); - -pub(crate) const ACTION_START_DEPLOY: Selector = Selector::new("dtmm.action.start-deploy"); -pub(crate) const ACTION_FINISH_DEPLOY: Selector = Selector::new("dtmm.action.finish-deploy"); - -pub(crate) const ACTION_START_RESET_DEPLOYMENT: Selector = - Selector::new("dtmm.action.start-reset-deployment"); -pub(crate) const ACTION_FINISH_RESET_DEPLOYMENT: Selector = - Selector::new("dtmm.action.finish-reset-deployment"); - -pub(crate) const ACTION_HANDLE_NXM: Selector = Selector::new("dtmm.action.handle-nxm"); -pub(crate) const ACTION_ADD_MOD: Selector = Selector::new("dtmm.action.add-mod"); -pub(crate) const ACTION_FINISH_ADD_MOD: Selector>> = - Selector::new("dtmm.action.finish-add-mod"); - -pub(crate) const ACTION_LOG: Selector>> = Selector::new("dtmm.action.log"); - -pub(crate) const ACTION_START_SAVE_SETTINGS: Selector = - Selector::new("dtmm.action.start-save-settings"); -pub(crate) const ACTION_FINISH_SAVE_SETTINGS: Selector = - Selector::new("dtmm.action.finish-save-settings"); - -pub(crate) const ACTION_START_CHECK_UPDATE: Selector = - Selector::new("dtmm.action.start-check-update"); -pub(crate) const ACTION_FINISH_CHECK_UPDATE: Selector>> = - 
Selector::new("dtmm.action.finish-check-update"); - -pub(crate) const ACTION_SET_DIRTY: Selector = Selector::new("dtmm.action.set-dirty"); - -pub(crate) const ACTION_SHOW_ERROR_DIALOG: Selector> = - Selector::new("dtmm.action.show-error-dialog"); - -pub(crate) const ACTION_SET_WINDOW_HANDLE: Selector> = - Selector::new("dtmm.action.set-window-handle"); - -pub(crate) type InitialLoadResult = (Config, Vector>); -pub(crate) const ACTION_FINISH_LOAD_INITIAL: Selector>> = - Selector::new("dtmm.action.finish-load-initial"); - -pub(crate) const ACTION_OPEN_LINK: Selector> = Selector::new("dtmm.action.open-link"); - -// A sub-selection of `State`'s fields that are required in `AsyncAction`s and that are -// `Send + Sync` -pub(crate) struct ActionState { - pub mods: Vector>, - pub game_dir: Arc, - pub data_dir: Arc, - pub mod_dir: Arc, - pub config_path: Arc, - pub ctx: Arc, - pub nexus_api_key: Arc, - pub is_io_enabled: bool, -} - -impl From for ActionState { - fn from(state: State) -> Self { - Self { - mods: state.mods, - game_dir: state.game_dir, - mod_dir: Arc::new(state.data_dir.join("mods")), - data_dir: state.data_dir, - config_path: state.config_path, - ctx: state.ctx, - nexus_api_key: state.nexus_api_key, - is_io_enabled: state.is_io_enabled, - } - } -} - -pub(crate) enum AsyncAction { - DeployMods(ActionState), - ResetDeployment(ActionState), - AddMod(ActionState, FileInfo), - DeleteMod(ActionState, Arc), - SaveSettings(ActionState), - CheckUpdates(ActionState), - LoadInitial((PathBuf, bool)), - Log((ActionState, Vec)), - NxmDownload(ActionState, String), -} - -impl std::fmt::Debug for AsyncAction { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - AsyncAction::DeployMods(_) => write!(f, "AsyncAction::DeployMods(_state)"), - AsyncAction::ResetDeployment(_) => write!(f, "AsyncAction::ResetDeployment(_state)"), - AsyncAction::AddMod(_, info) => write!(f, "AsyncAction::AddMod(_state, {:?})", info), - AsyncAction::DeleteMod(_, 
info) => { - write!(f, "AsyncAction::DeleteMod(_state, {:?})", info) - } - AsyncAction::SaveSettings(_) => write!(f, "AsyncAction::SaveSettings(_state)"), - AsyncAction::CheckUpdates(_) => write!(f, "AsyncAction::CheckUpdates(_state)"), - AsyncAction::LoadInitial((path, is_default)) => write!( - f, - "AsyncAction::LoadInitial(({:?}, {:?}))", - path, is_default - ), - AsyncAction::Log(_) => write!(f, "AsyncAction::Log(_)"), - AsyncAction::NxmDownload(_, uri) => { - write!(f, "AsyncAction::NxmDownload(_state, {})", uri) - } - } - } -} - -pub(crate) struct Delegate { - sender: UnboundedSender, -} - -impl Delegate { - pub fn new(sender: UnboundedSender) -> Self { - Self { sender } - } -} - -impl AppDelegate for Delegate { - #[tracing::instrument(name = "Delegate", skip_all)] - fn command( - &mut self, - ctx: &mut DelegateCtx, - _target: Target, - cmd: &Command, - state: &mut State, - _env: &Env, - ) -> Handled { - if cfg!(debug_assertions) && !cmd.is(ACTION_LOG) { - tracing::trace!(?cmd); - } - - match cmd { - cmd if cmd.is(ACTION_START_DEPLOY) => { - if self - .sender - .send(AsyncAction::DeployMods(state.clone().into())) - .is_ok() - { - state.is_deployment_in_progress = true; - } else { - tracing::error!("Failed to queue action to deploy mods"); - } - - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_DEPLOY) => { - state.is_deployment_in_progress = false; - state.dirty = false; - Handled::Yes - } - cmd if cmd.is(ACTION_START_RESET_DEPLOYMENT) => { - if self - .sender - .send(AsyncAction::ResetDeployment(state.clone().into())) - .is_ok() - { - state.is_reset_in_progress = true; - } else { - tracing::error!("Failed to queue action to reset mod deployment"); - } - - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_RESET_DEPLOYMENT) => { - state.is_reset_in_progress = false; - Handled::Yes - } - cmd if cmd.is(ACTION_SELECT_MOD) => { - let index = cmd - .get(ACTION_SELECT_MOD) - .expect("command type matched but didn't contain the expected value"); - - 
state.select_mod(*index); - // ctx.submit_command(ACTION_START_SAVE_SETTINGS); - Handled::Yes - } - cmd if cmd.is(ACTION_SELECTED_MOD_UP) => { - let Some(i) = state.selected_mod_index else { - return Handled::No; - }; - - let len = state.mods.len(); - if len == 0 || i == 0 { - return Handled::No; - } - - state.mods.swap(i, i - 1); - state.selected_mod_index = Some(i - 1); - // ctx.submit_command(ACTION_START_SAVE_SETTINGS); - Handled::Yes - } - cmd if cmd.is(ACTION_SELECTED_MOD_DOWN) => { - let Some(i) = state.selected_mod_index else { - return Handled::No; - }; - - let len = state.mods.len(); - if len == 0 || i == usize::MAX || i >= len - 1 { - return Handled::No; - } - - state.mods.swap(i, i + 1); - state.selected_mod_index = Some(i + 1); - // ctx.submit_command(ACTION_START_SAVE_SETTINGS); - Handled::Yes - } - cmd if cmd.is(ACTION_START_DELETE_SELECTED_MOD) => { - let info = cmd - .get(ACTION_START_DELETE_SELECTED_MOD) - .and_then(SingleUse::take) - .expect("command type matched but didn't contain the expected value"); - - if self - .sender - .send(AsyncAction::DeleteMod(state.clone().into(), info)) - .is_err() - { - tracing::error!("Failed to queue action to deploy mods"); - } - - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_DELETE_SELECTED_MOD) => { - let info = cmd - .get(ACTION_FINISH_DELETE_SELECTED_MOD) - .and_then(SingleUse::take) - .expect("command type matched but didn't contain the expected value"); - - let found = state.mods.iter().enumerate().find(|(_, i)| i.id == info.id); - let Some((index, _)) = found else { - return Handled::No; - }; - - state.mods.remove(index); - - Handled::Yes - } - cmd if cmd.is(ACTION_HANDLE_NXM) => { - let uri = cmd - .get(ACTION_HANDLE_NXM) - .expect("command type match but didn't contain the expected value"); - - if self - .sender - .send(AsyncAction::NxmDownload(state.clone().into(), uri.clone())) - .is_err() - { - tracing::error!("Failed to queue action to download NXM mod"); - } - Handled::Yes - } - cmd if 
cmd.is(ACTION_ADD_MOD) => { - let info = cmd - .get(ACTION_ADD_MOD) - .expect("command type matched but didn't contain the expected value"); - - if self - .sender - .send(AsyncAction::AddMod(state.clone().into(), info.clone())) - .is_err() - { - tracing::error!("Failed to queue action to add mod"); - } - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_ADD_MOD) => { - let info = cmd - .get(ACTION_FINISH_ADD_MOD) - .expect("command type matched but didn't contain the expected value"); - - if let Some(info) = info.take() { - state.add_mod(info); - } - - Handled::Yes - } - cmd if cmd.is(ACTION_LOG) => { - let line = cmd - .get(ACTION_LOG) - .expect("command type matched but didn't contain the expected value"); - - if let Some(line) = line.take() { - { - let line = String::from_utf8_lossy(&line); - state.log.push_back(ansi_to_rich_text(line.trim())); - } - - if self - .sender - .send(AsyncAction::Log((state.clone().into(), line))) - .is_err() - { - tracing::error!("Failed to queue action to add mod"); - } - } - - Handled::Yes - } - cmd if cmd.is(ACTION_START_SAVE_SETTINGS) => { - if state.is_save_in_progress { - state.is_next_save_pending = true; - } else if self - .sender - .send(AsyncAction::SaveSettings(state.clone().into())) - .is_ok() - { - state.is_save_in_progress = true; - } else { - tracing::error!("Failed to queue action to save settings"); - } - - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_SAVE_SETTINGS) => { - tracing::trace!( - in_progress = state.is_save_in_progress, - next_pending = state.is_next_save_pending, - "Finished saving settings", - ); - state.is_save_in_progress = false; - - if state.is_next_save_pending { - state.is_next_save_pending = false; - ctx.submit_command(ACTION_START_SAVE_SETTINGS); - } - - Handled::Yes - } - cmd if cmd.is(ACTION_SET_DIRTY) => { - state.dirty = true; - Handled::Yes - } - cmd if cmd.is(ACTION_SHOW_ERROR_DIALOG) => { - let err = cmd - .get(ACTION_SHOW_ERROR_DIALOG) - .and_then(SingleUse::take) - .expect("command type 
matched but didn't contain the expected value"); - - let window = state - .windows - .get(&window::main::WINDOW_ID) - .expect("root window does not exist"); - - let dialog = window::dialog::error::(err, window.clone()); - ctx.new_window(dialog); - - Handled::Yes - } - cmd if cmd.is(ACTION_SET_WINDOW_HANDLE) => { - let (id, handle) = cmd - .get(ACTION_SET_WINDOW_HANDLE) - .and_then(SingleUse::take) - .expect("command type matched but didn't contain the expected value"); - - state.windows.insert(id, handle); - Handled::Yes - } - cmd if cmd.is(ACTION_START_CHECK_UPDATE) => { - if self - .sender - .send(AsyncAction::CheckUpdates(state.clone().into())) - .is_ok() - { - state.is_update_in_progress = true; - } else { - tracing::error!("Failed to queue action to check updates"); - } - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_CHECK_UPDATE) => { - let mut updates = cmd - .get(ACTION_FINISH_CHECK_UPDATE) - .and_then(SingleUse::take) - .expect("command type matched but didn't contain the expected value"); - - if tracing::enabled!(tracing::Level::DEBUG) { - let mods: Vec<_> = updates - .iter() - .map(|info| { - format!( - "{}: {} -> {:?}", - info.name, - info.version, - info.nexus.as_ref().map(|n| &n.version) - ) - }) - .collect(); - - tracing::info!("Mod updates:\n{}", mods.join("\n")); - } - - for mod_info in state.mods.iter_mut() { - if let Some(index) = updates.iter().position(|i2| i2.id == mod_info.id) { - let update = updates.swap_remove(index); - *mod_info = Arc::new(update); - } - } - - state.is_update_in_progress = false; - Handled::Yes - } - cmd if cmd.is(ACTION_FINISH_LOAD_INITIAL) => { - let data = cmd - .get(ACTION_FINISH_LOAD_INITIAL) - .and_then(SingleUse::take) - .expect("command type matched but didn't contain the expected value"); - - if let Some((config, mods)) = data { - state.mods = mods; - state.config_path = Arc::new(config.path); - state.data_dir = Arc::new(config.data_dir); - state.game_dir = Arc::new(config.game_dir.unwrap_or_default()); - 
state.nexus_api_key = Arc::new(config.nexus_api_key.unwrap_or_default()); - state.is_io_enabled = config.unsafe_io; - } - - state.loading = false; - - Handled::Yes - } - cmd if cmd.is(ACTION_OPEN_LINK) => { - let url = cmd - .get(ACTION_OPEN_LINK) - .expect("command type matched but didn't contain the expected value"); - - if let Err(err) = open::that_detached(Arc::as_ref(url)) { - tracing::error!( - "{:?}", - Report::new(err).wrap_err(format!("Failed to open url '{}'", url)) - ); - } - - Handled::Yes - } - _ => Handled::No, - } - } - - fn window_added( - &mut self, - id: WindowId, - handle: WindowHandle, - data: &mut State, - _: &Env, - _: &mut DelegateCtx, - ) { - data.windows.insert(id, handle); - } - - fn window_removed(&mut self, id: WindowId, data: &mut State, _: &Env, _: &mut DelegateCtx) { - data.windows.remove(&id); - } -} diff --git a/crates/dtmm/src/state/lens.rs b/crates/dtmm/src/state/lens.rs deleted file mode 100644 index 983cb2e..0000000 --- a/crates/dtmm/src/state/lens.rs +++ /dev/null @@ -1,124 +0,0 @@ -use std::sync::Arc; - -use druid::im::Vector; -use druid::{Data, Lens}; - -use super::{ModInfo, NexusInfo, State}; - -pub(crate) struct SelectedModLens; - -impl Lens>> for SelectedModLens { - #[tracing::instrument(name = "SelectedModLens::with", skip_all)] - fn with>) -> V>(&self, data: &State, f: F) -> V { - let info = data - .selected_mod_index - .and_then(|i| data.mods.get(i).cloned()); - - f(&info) - } - - #[tracing::instrument(name = "SelectedModLens::with_mut", skip_all)] - fn with_mut>) -> V>(&self, data: &mut State, f: F) -> V { - match data.selected_mod_index { - Some(i) => { - let mut info = data.mods.get_mut(i).cloned(); - let ret = f(&mut info); - - if let Some(new) = info { - // TODO: Figure out a way to check for equality and - // only update when needed - data.mods.set(i, new); - } else { - data.selected_mod_index = None; - } - - ret - } - None => f(&mut None), - } - } -} - -/// A Lens that maps an `im::Vector` to `im::Vector<(usize, 
T)>`, -/// where each element in the destination vector includes its index in the -/// source vector. -#[allow(dead_code)] -pub(crate) struct IndexedVectorLens; - -impl Lens, Vector<(usize, T)>> for IndexedVectorLens { - #[tracing::instrument(name = "IndexedVectorLens::with", skip_all)] - fn with) -> V>(&self, values: &Vector, f: F) -> V { - let indexed = values - .iter() - .enumerate() - .map(|(i, val)| (i, val.clone())) - .collect(); - f(&indexed) - } - - #[tracing::instrument(name = "IndexedVectorLens::with_mut", skip_all)] - fn with_mut) -> V>( - &self, - values: &mut Vector, - f: F, - ) -> V { - let mut indexed = values - .iter() - .enumerate() - .map(|(i, val)| (i, val.clone())) - .collect(); - let ret = f(&mut indexed); - - *values = indexed.into_iter().map(|(_i, val)| val).collect(); - - ret - } -} - -/// A Lens that first checks a key in a mod's `NexusInfo`, then falls back to -/// the regular one. -pub(crate) struct NexusInfoLens -where - L: Lens, - R: Lens, -{ - value: L, - fallback: R, - _marker: std::marker::PhantomData, -} - -impl NexusInfoLens -where - L: Lens, - R: Lens, -{ - pub fn new(value: L, fallback: R) -> Self { - Self { - value, - fallback, - _marker: std::marker::PhantomData, - } - } -} - -impl Lens for NexusInfoLens -where - L: Lens, - R: Lens, -{ - fn with V>(&self, data: &ModInfo, f: F) -> V { - if let Some(nexus) = &data.nexus { - self.value.with(nexus, f) - } else { - self.fallback.with(data, f) - } - } - - fn with_mut V>(&self, data: &mut ModInfo, f: F) -> V { - if let Some(nexus) = &mut data.nexus { - self.value.with_mut(nexus, f) - } else { - self.fallback.with_mut(data, f) - } - } -} diff --git a/crates/dtmm/src/state/mod.rs b/crates/dtmm/src/state/mod.rs deleted file mode 100644 index f0eb8c3..0000000 --- a/crates/dtmm/src/state/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod data; -mod delegate; -mod lens; - -pub(crate) use data::*; -pub(crate) use delegate::*; -pub(crate) use lens::*; diff --git a/crates/dtmm/src/theme.rs 
b/crates/dtmm/src/theme.rs new file mode 100644 index 0000000..7658f3f --- /dev/null +++ b/crates/dtmm/src/theme.rs @@ -0,0 +1,4 @@ +use druid::{Color, Insets}; + +pub const TOP_BAR_BACKGROUND_COLOR: Color = Color::rgba8(255, 255, 255, 50); +pub const TOP_BAR_INSETS: Insets = Insets::uniform(5.0); diff --git a/crates/dtmm/src/ui/mod.rs b/crates/dtmm/src/ui/mod.rs deleted file mode 100644 index 12f66c9..0000000 --- a/crates/dtmm/src/ui/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub mod theme; -pub mod widget; -pub mod window { - pub mod dialog; - pub mod main; -} diff --git a/crates/dtmm/src/ui/theme/colors.rs b/crates/dtmm/src/ui/theme/colors.rs deleted file mode 100644 index 1051539..0000000 --- a/crates/dtmm/src/ui/theme/colors.rs +++ /dev/null @@ -1,87 +0,0 @@ -use colors_transform::Color as _; -use colors_transform::Rgb; -use druid::Color; - -pub use gruvbox_dark::*; - -macro_rules! make_color { - ($name:ident, $r:literal, $g:literal, $b:literal, $a:literal) => { - pub const $name: Color = Color::rgba8($r, $g, $b, $a); - }; - ($name:ident, $r:literal, $g:literal, $b:literal) => { - pub const $name: Color = Color::rgb8($r, $g, $b); - }; - ($name:ident, $col:expr) => { - pub const $name: Color = $col; - }; -} - -make_color!(TOP_BAR_BACKGROUND_COLOR, COLOR_BG1); -make_color!(LINK_COLOR, COLOR_ACCENT); - -#[allow(dead_code)] -pub mod gruvbox_dark { - use druid::Color; - - make_color!(COLOR_BG0_H, 0x1d, 0x20, 0x21); - make_color!(COLOR_BG0_S, 0x32, 0x20, 0x2f); - make_color!(COLOR_BG0, 0x28, 0x28, 0x28); - make_color!(COLOR_BG1, 0x3c, 0x38, 0x36); - make_color!(COLOR_BG2, 0x50, 0x49, 0x45); - make_color!(COLOR_BG3, 0x66, 0x5c, 0x54); - make_color!(COLOR_BG4, 0x7c, 0x6f, 0x64); - - make_color!(COLOR_FG0, 0xfb, 0xf1, 0xc7); - make_color!(COLOR_FG1, 0xeb, 0xdb, 0xb2); - make_color!(COLOR_FG2, 0xd5, 0xc4, 0xa1); - make_color!(COLOR_FG3, 0xbd, 0xae, 0x93); - make_color!(COLOR_FG4, 0xa8, 0x99, 0x84); - - make_color!(COLOR_BG, COLOR_BG0); - make_color!(COLOR_GRAY_LIGHT, 0x92, 
0x83, 0x74); - - make_color!(COLOR_RED_DARK, 0xcc, 0x24, 0x1d); - make_color!(COLOR_RED_LIGHT, 0xfb, 0x49, 0x34); - - make_color!(COLOR_GREEN_DARK, 0x98, 0x97, 0x1a); - make_color!(COLOR_GREEN_LIGHT, 0xb8, 0xbb, 0x26); - - make_color!(COLOR_YELLOW_DARK, 0xd7, 0x99, 0x21); - make_color!(COLOR_YELLOW_LIGHT, 0xfa, 0xbd, 0x2f); - - make_color!(COLOR_BLUE_DARK, 0x45, 0x85, 0x88); - make_color!(COLOR_BLUE_LIGHT, 0x83, 0xa5, 0x98); - - make_color!(COLOR_PURPLE_DARK, 0xb1, 0x26, 0x86); - make_color!(COLOR_PURPLE_LIGHT, 0xd3, 0x86, 0x9b); - - make_color!(COLOR_AQUA_DARK, 0x68, 0x9d, 0x6a); - make_color!(COLOR_AQUA_LIGHT, 0x8e, 0xc0, 0x7c); - - make_color!(COLOR_GRAY_DARK, 0xa8, 0x99, 0x84); - make_color!(COLOR_FG, COLOR_FG1); - - make_color!(COLOR_ORANGE_DARK, 0xd6, 0x5d, 0x0e); - make_color!(COLOR_ORANGE_LIGHT, 0xfe, 0x80, 0x19); - - make_color!(COLOR_ACCENT, COLOR_BLUE_LIGHT); - make_color!(COLOR_ACCENT_FG, COLOR_BG0_H); -} - -pub trait ColorExt { - fn darken(&self, fac: f32) -> Self; -} - -impl ColorExt for Color { - fn darken(&self, fac: f32) -> Self { - let (r, g, b, a) = self.as_rgba(); - let rgb = Rgb::from(r as f32, g as f32, b as f32); - let rgb = rgb.lighten(-1. 
* fac); - Self::rgba( - rgb.get_red() as f64, - rgb.get_green() as f64, - rgb.get_blue() as f64, - a, - ) - } -} diff --git a/crates/dtmm/src/ui/theme/icons.rs b/crates/dtmm/src/ui/theme/icons.rs deleted file mode 100644 index 50ecbe3..0000000 --- a/crates/dtmm/src/ui/theme/icons.rs +++ /dev/null @@ -1,41 +0,0 @@ -use druid::Color; -use usvg::{ - Error, Fill, LineCap, LineJoin, NodeKind, NonZeroPositiveF64, Options, Paint, Stroke, Tree, -}; - -pub static ALERT_CIRCLE: &str = include_str!("../../../assets/tabler-icons/alert-circle.svg"); -pub static CLOUD_DOWNLOAD: &str = include_str!("../../../assets/tabler-icons/cloud-download.svg"); - -pub fn parse_svg(svg: &str) -> Result { - let opt = Options::default(); - Tree::from_str(svg, &opt.to_ref()) -} - -pub fn recolor_icon(tree: Tree, stroke: bool, color: Color) -> Tree { - let (red, green, blue, _) = color.as_rgba8(); - - let mut children = tree.root.children(); - // The first element is always some kind of background placeholder - children.next(); - - for node in children { - if let NodeKind::Path(ref mut path) = *node.borrow_mut() { - if stroke { - path.stroke = Some(Stroke { - paint: Paint::Color(usvg::Color { red, green, blue }), - width: NonZeroPositiveF64::new(2.).expect("the value is not zero"), - linecap: LineCap::Round, - linejoin: LineJoin::Round, - ..Default::default() - }); - } else { - path.fill = Some(Fill { - paint: Paint::Color(usvg::Color { red, green, blue }), - ..Default::default() - }); - } - } - } - - tree -} diff --git a/crates/dtmm/src/ui/theme/keys.rs b/crates/dtmm/src/ui/theme/keys.rs deleted file mode 100644 index 9d4120b..0000000 --- a/crates/dtmm/src/ui/theme/keys.rs +++ /dev/null @@ -1,13 +0,0 @@ -use druid::{Color, Insets, Key}; - -pub const KEY_BUTTON_BG: Key = Key::new("dtmm.button.bg"); -pub const KEY_BUTTON_BG_HOT: Key = Key::new("dtmm.button.bg-hot"); -pub const KEY_BUTTON_BG_ACTIVE: Key = Key::new("dtmm.button.bg-active"); -pub const KEY_BUTTON_BG_DISABLED: Key = 
Key::new("dtmm.button.bg-disabled"); - -pub const KEY_BUTTON_FG: Key = Key::new("dtmm.button.fg"); -pub const KEY_BUTTON_FG_DISABLED: Key = Key::new("dtmm.button.fg-disabled"); - -pub const KEY_BUTTON_PADDING: Key = Key::new("dtmm.button.padding"); - -pub const KEY_MOD_LIST_ITEM_BG_COLOR: Key = Key::new("dtmm.mod-list.item.background-color"); diff --git a/crates/dtmm/src/ui/theme/mod.rs b/crates/dtmm/src/ui/theme/mod.rs deleted file mode 100644 index 7f93524..0000000 --- a/crates/dtmm/src/ui/theme/mod.rs +++ /dev/null @@ -1,33 +0,0 @@ -use druid::{Env, Insets}; - -use crate::state::State; - -mod colors; -pub mod icons; -pub mod keys; - -pub use colors::*; - -pub const TOP_BAR_INSETS: Insets = Insets::uniform(5.0); -pub const DISABLED_ALPHA: f64 = 0.65; - -pub(crate) fn set_theme_env(env: &mut Env, _: &State) { - env.set(druid::theme::TEXT_COLOR, COLOR_FG); - env.set(druid::theme::SCROLLBAR_COLOR, COLOR_FG); - env.set(druid::theme::BORDER_LIGHT, COLOR_FG); - env.set(druid::theme::BUTTON_BORDER_RADIUS, 2.); - - env.set(keys::KEY_BUTTON_BG, COLOR_ACCENT); - env.set(keys::KEY_BUTTON_BG_HOT, COLOR_ACCENT.darken(0.03)); - env.set(keys::KEY_BUTTON_BG_ACTIVE, COLOR_ACCENT.darken(0.1)); - env.set( - keys::KEY_BUTTON_BG_DISABLED, - COLOR_ACCENT.with_alpha(DISABLED_ALPHA), - ); - env.set(keys::KEY_BUTTON_FG, COLOR_ACCENT_FG); - env.set( - keys::KEY_BUTTON_FG_DISABLED, - COLOR_ACCENT_FG.with_alpha(DISABLED_ALPHA), - ); - env.set(keys::KEY_BUTTON_PADDING, Insets::uniform_xy(8., 2.)); -} diff --git a/crates/dtmm/src/ui/widget/border.rs b/crates/dtmm/src/ui/widget/border.rs deleted file mode 100644 index 2ca7cdb..0000000 --- a/crates/dtmm/src/ui/widget/border.rs +++ /dev/null @@ -1,197 +0,0 @@ -use druid::kurbo::Line; -use druid::widget::prelude::*; -use druid::{Color, KeyOrValue, Point, WidgetPod}; - -pub struct Border { - inner: WidgetPod>>, - color: BorderColor, - width: BorderWidths, - // corner_radius: KeyOrValue, -} - -impl Border { - pub fn new(inner: impl Widget + 
'static) -> Self { - let inner = WidgetPod::new(inner).boxed(); - Self { - inner, - color: Color::TRANSPARENT.into(), - width: 0f64.into(), - } - } - - pub fn set_color(&mut self, color: impl Into>) { - self.color = BorderColor::Uniform(color.into()); - } - - pub fn with_color(mut self, color: impl Into>) -> Self { - self.set_color(color); - self - } - - pub fn set_bottom_border(&mut self, width: impl Into>) { - self.width.bottom = width.into(); - } - - pub fn with_bottom_border(mut self, width: impl Into>) -> Self { - self.set_bottom_border(width); - self - } - - pub fn set_top_border(&mut self, width: impl Into>) { - self.width.top = width.into(); - } - - pub fn with_top_border(mut self, width: impl Into>) -> Self { - self.set_top_border(width); - self - } -} - -impl Widget for Border { - fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) { - self.inner.event(ctx, event, data, env) - } - - fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) { - self.inner.lifecycle(ctx, event, data, env); - } - - fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) { - self.inner.update(ctx, data, env); - } - - fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size { - bc.debug_check("Border"); - - let (left, top, right, bottom) = self.width.resolve(env); - - let inner_bc = bc.shrink((left + right, top + bottom)); - let inner_size = self.inner.layout(ctx, &inner_bc, data, env); - - let origin = Point::new(left, top); - self.inner.set_origin(ctx, origin); - - let size = Size::new( - inner_size.width + left + right, - inner_size.height + top + bottom, - ); - - let insets = self.inner.compute_parent_paint_insets(size); - ctx.set_paint_insets(insets); - - let baseline_offset = self.inner.baseline_offset(); - if baseline_offset > 0. 
{ - ctx.set_baseline_offset(baseline_offset + bottom); - } - - size - } - - fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) { - let size = ctx.size(); - let (left, top, right, bottom) = self.width.resolve(env); - let (col_left, col_top, col_right, col_bottom) = self.color.resolve(env); - - self.inner.paint(ctx, data, env); - - // There's probably a more elegant way to create the various `Line`s, but this works for now. - // The important bit is to move each line inwards by half each side's border width. Otherwise - // it would draw hald of the border outside of the widget's boundary. - - if left > 0. { - ctx.stroke( - Line::new((left / 2., top / 2.), (left / 2., size.height)), - &col_left, - left, - ); - } - - if top > 0. { - ctx.stroke( - Line::new((left / 2., top / 2.), (size.width - (right / 2.), top / 2.)), - &col_top, - top, - ); - } - - if right > 0. { - ctx.stroke( - Line::new( - (size.width - (right / 2.), top / 2.), - (size.width - (right / 2.), size.height - (bottom / 2.)), - ), - &col_right, - right, - ); - } - - if bottom > 0. 
{ - ctx.stroke( - Line::new( - (left / 2., size.height - (bottom / 2.)), - (size.width - (right / 2.), size.height - (bottom / 2.)), - ), - &col_bottom, - bottom, - ); - } - } -} - -#[derive(Clone, Debug)] -pub enum BorderColor { - Uniform(KeyOrValue), - // Individual { - // left: KeyOrValue, - // top: KeyOrValue, - // right: KeyOrValue, - // bottom: KeyOrValue, - // }, -} - -impl BorderColor { - pub fn resolve(&self, env: &Env) -> (Color, Color, Color, Color) { - match self { - Self::Uniform(val) => { - let color = val.resolve(env); - (color, color, color, color) - } - } - } -} - -impl From for BorderColor { - fn from(value: Color) -> Self { - Self::Uniform(value.into()) - } -} - -#[derive(Clone, Debug)] -pub struct BorderWidths { - pub left: KeyOrValue, - pub top: KeyOrValue, - pub right: KeyOrValue, - pub bottom: KeyOrValue, -} - -impl From for BorderWidths { - fn from(value: f64) -> Self { - Self { - left: value.into(), - top: value.into(), - right: value.into(), - bottom: value.into(), - } - } -} - -impl BorderWidths { - pub fn resolve(&self, env: &Env) -> (f64, f64, f64, f64) { - ( - self.left.resolve(env), - self.top.resolve(env), - self.right.resolve(env), - self.bottom.resolve(env), - ) - } -} diff --git a/crates/dtmm/src/ui/widget/button.rs b/crates/dtmm/src/ui/widget/button.rs deleted file mode 100644 index 08e1dec..0000000 --- a/crates/dtmm/src/ui/widget/button.rs +++ /dev/null @@ -1,113 +0,0 @@ -use druid::widget::prelude::*; -use druid::widget::{Click, ControllerHost, Label, LabelText}; -use druid::WidgetPod; -use druid::{Affine, WidgetExt}; - -use crate::ui::theme; - -pub struct Button { - inner: WidgetPod>>, - inner_size: Size, -} - -impl Button { - pub fn new(inner: impl Widget + 'static) -> Self { - let inner = inner.env_scope(|env, _| { - env.set( - druid::theme::TEXT_COLOR, - env.get(theme::keys::KEY_BUTTON_FG), - ); - env.set( - druid::theme::DISABLED_TEXT_COLOR, - env.get(theme::keys::KEY_BUTTON_FG_DISABLED), - ); - }); - let inner = 
WidgetPod::new(inner).boxed(); - Self { - inner, - inner_size: Size::ZERO, - } - } - - pub fn with_label(text: impl Into>) -> Self { - let inner = Label::new(text); - Self::new(inner) - } - - pub fn on_click( - self, - f: impl Fn(&mut EventCtx, &mut T, &Env) + 'static, - ) -> ControllerHost> { - ControllerHost::new(self, Click::new(f)) - } -} - -impl Widget for Button { - fn event(&mut self, ctx: &mut EventCtx, event: &Event, _: &mut T, _: &Env) { - match event { - Event::MouseDown(_) if !ctx.is_disabled() => { - ctx.set_active(true); - ctx.request_paint(); - } - Event::MouseUp(_) => { - if ctx.is_active() && !ctx.is_disabled() { - ctx.request_paint(); - } - ctx.set_active(false); - } - _ => {} - } - } - - fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) { - if let LifeCycle::HotChanged(_) | LifeCycle::DisabledChanged(_) = event { - ctx.request_paint(); - } - self.inner.lifecycle(ctx, event, data, env); - } - - fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) { - self.inner.update(ctx, data, env); - } - - fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size { - bc.debug_check("Button"); - - let padding = env.get(theme::keys::KEY_BUTTON_PADDING).size(); - let inner_bc = bc.shrink(padding).loosen(); - - self.inner_size = self.inner.layout(ctx, &inner_bc, data, env); - - bc.constrain(Size::new( - self.inner_size.width + padding.width, - self.inner_size.height + padding.height, - )) - } - - fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) { - let size = ctx.size(); - - let bg_color = if ctx.is_disabled() { - env.get(theme::keys::KEY_BUTTON_BG_DISABLED) - } else if ctx.is_hot() { - env.get(theme::keys::KEY_BUTTON_BG_HOT) - } else if ctx.is_active() { - env.get(theme::keys::KEY_BUTTON_BG_ACTIVE) - } else { - env.get(theme::keys::KEY_BUTTON_BG) - }; - - ctx.fill( - size.to_rect() - .to_rounded_rect(env.get(druid::theme::BUTTON_BORDER_RADIUS)), - &bg_color, - ); - 
- let inner_pos = (size.to_vec2() - self.inner_size.to_vec2()) / 2.; - - ctx.with_save(|ctx| { - ctx.transform(Affine::translate(inner_pos)); - self.inner.paint(ctx, data, env); - }); - } -} diff --git a/crates/dtmm/src/ui/widget/controller.rs b/crates/dtmm/src/ui/widget/controller.rs deleted file mode 100644 index f789b5a..0000000 --- a/crates/dtmm/src/ui/widget/controller.rs +++ /dev/null @@ -1,136 +0,0 @@ -use druid::widget::{Button, Controller, Image, Scroll}; -use druid::{ - Data, Env, Event, EventCtx, ImageBuf, LifeCycle, LifeCycleCtx, Rect, UpdateCtx, Widget, -}; - -use crate::state::{State, ACTION_SET_DIRTY, ACTION_START_SAVE_SETTINGS}; - -pub struct DisabledButtonController; - -impl Controller> for DisabledButtonController { - fn event( - &mut self, - child: &mut Button, - ctx: &mut EventCtx, - event: &Event, - data: &mut T, - env: &Env, - ) { - if !ctx.is_disabled() { - ctx.set_disabled(true); - ctx.request_paint(); - } - child.event(ctx, event, data, env) - } - - fn update( - &mut self, - child: &mut Button, - ctx: &mut UpdateCtx, - old_data: &T, - data: &T, - env: &Env, - ) { - if !ctx.is_disabled() { - ctx.set_disabled(true); - ctx.request_paint(); - } - child.update(ctx, old_data, data, env) - } -} - -pub struct AutoScrollController; - -impl> Controller> for AutoScrollController { - fn update( - &mut self, - child: &mut Scroll, - ctx: &mut UpdateCtx, - old_data: &T, - data: &T, - env: &Env, - ) { - child.update(ctx, old_data, data, env); - - if !ctx.is_disabled() { - let size = child.child_size(); - let end_region = Rect::new(size.width - 1., size.height - 1., size.width, size.height); - child.scroll_to(ctx, end_region); - } - } -} - -macro_rules! compare_state_fields { - ($old:ident, $new:ident, $($field:ident),+) => { - $(!Data::same(&$old.$field, &$new.$field)) || + - } -} - -/// A controller that tracks state changes for certain fields and submits commands to handle them. 
-pub struct DirtyStateController; - -impl> Controller for DirtyStateController { - fn update( - &mut self, - child: &mut W, - ctx: &mut UpdateCtx, - old_data: &State, - data: &State, - env: &Env, - ) { - // Only start tracking changes after the initial load has finished - if old_data.loading == data.loading { - if compare_state_fields!( - old_data, - data, - mods, - game_dir, - data_dir, - nexus_api_key, - is_io_enabled - ) { - ctx.submit_command(ACTION_START_SAVE_SETTINGS); - } - - if compare_state_fields!(old_data, data, mods, game_dir, is_io_enabled) { - ctx.submit_command(ACTION_SET_DIRTY); - } - } - - child.update(ctx, old_data, data, env) - } -} - -pub struct ImageLensController; - -impl Controller for ImageLensController { - fn lifecycle( - &mut self, - widget: &mut Image, - ctx: &mut LifeCycleCtx, - event: &LifeCycle, - data: &ImageBuf, - env: &Env, - ) { - if let LifeCycle::WidgetAdded = event { - widget.set_image_data(data.clone()); - } - - widget.lifecycle(ctx, event, data, env); - } - - fn update( - &mut self, - widget: &mut Image, - ctx: &mut UpdateCtx, - old_data: &ImageBuf, - data: &ImageBuf, - env: &Env, - ) { - if !Data::same(old_data, data) { - widget.set_image_data(data.clone()); - } - - widget.update(ctx, old_data, data, env); - } -} diff --git a/crates/dtmm/src/ui/widget/mod.rs b/crates/dtmm/src/ui/widget/mod.rs deleted file mode 100644 index 06ccedd..0000000 --- a/crates/dtmm/src/ui/widget/mod.rs +++ /dev/null @@ -1,35 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; - -use druid::text::Formatter; - -pub mod border; -pub mod button; -pub mod controller; - -pub(crate) struct PathBufFormatter; - -impl PathBufFormatter { - pub fn new() -> Self { - Self {} - } -} - -impl Formatter> for PathBufFormatter { - fn format(&self, value: &Arc) -> String { - value.display().to_string() - } - - fn validate_partial_input( - &self, - _input: &str, - _sel: &druid::text::Selection, - ) -> druid::text::Validation { - druid::text::Validation::success() - } - 
- fn value(&self, input: &str) -> Result, druid::text::ValidationError> { - let p = PathBuf::from(input); - Ok(Arc::new(p)) - } -} diff --git a/crates/dtmm/src/ui/window/dialog.rs b/crates/dtmm/src/ui/window/dialog.rs deleted file mode 100644 index 11df4d5..0000000 --- a/crates/dtmm/src/ui/window/dialog.rs +++ /dev/null @@ -1,91 +0,0 @@ -use color_eyre::{Handler, HelpInfo, Report}; -use druid::widget::{CrossAxisAlignment, Flex, Label, LineBreaking}; -use druid::{Data, WidgetExt, WindowDesc, WindowHandle}; - -use crate::ui::theme; -use crate::ui::widget::button::Button; - -const WINDOW_SIZE: (f64, f64) = (600., 250.); - -/// Show an error dialog. -/// The title and message are extracted from the error chain in the given `Report`. -pub fn error(err: Report, _parent: WindowHandle) -> WindowDesc { - let (title, msg) = { - let count = err.chain().count(); - - if count == 1 { - // If there is only one error, that's all we can show. - ( - String::from("An error occurred!"), - err.root_cause().to_string(), - ) - } else { - let first = err.chain().next().unwrap(); - let root = err.root_cause(); - - // If there is more than one error in the chain we want to show - // - The first one: This will describe the overall operation that failed - // - The root cause: The actual thing that failed (e.g. 'No such file or directory') - // - The one before the root cause: With diligent `wrap_err` usage, this will provide - // context to the root cause (e.g. the file name we failed to access) - // - // If there are only two errors, the first one is also the context to the root cause. - if count > 2 { - // The second to last one, the context to the root cause - let context = err.chain().nth(count - 2).unwrap(); - - (format!("{first}!"), format!("{}: {}", context, root)) - } else { - ("An error occurred!".to_string(), format!("{}: {}", first, root)) - } - } - }; - - let title = Label::new(title) - .with_text_size(24.) 
- .with_text_color(theme::COLOR_RED_LIGHT); - let text = Label::new(msg).with_line_break_mode(LineBreaking::WordWrap); - - let button = Button::with_label("Ok") - .on_click(|ctx, _, _| { - ctx.window().close(); - }) - .align_right(); - - let mut widget = Flex::column() - .cross_axis_alignment(CrossAxisAlignment::Start) - .with_child(title) - .with_default_spacer() - .with_child(text); - - if let Some(handler) = err.handler().downcast_ref::() { - let mut first = true; - for section in handler.sections() { - if let HelpInfo::Suggestion(data, _) = section { - if first { - widget.add_default_spacer(); - first = false; - } - - let w = Flex::row() - .cross_axis_alignment(CrossAxisAlignment::Start) - .with_child(Label::new("Suggestion:").with_text_color(theme::COLOR_GREEN_LIGHT)) - .with_spacer(2.) - .with_child( - Label::new(data.to_string()).with_line_break_mode(LineBreaking::WordWrap), - ); - - widget.add_child(w); - } - } - } - - let widget = widget.with_flex_spacer(1.).with_child(button).padding(10.); - - WindowDesc::new(widget) - .title("Critical Error") - .show_titlebar(true) - .with_min_size(WINDOW_SIZE) - .set_always_on_top(true) - .resizable(false) -} diff --git a/crates/dtmm/src/ui/window/main.rs b/crates/dtmm/src/ui/window/main.rs deleted file mode 100644 index 022a780..0000000 --- a/crates/dtmm/src/ui/window/main.rs +++ /dev/null @@ -1,536 +0,0 @@ -use std::str::FromStr; -use std::sync::Arc; - -use druid::im::Vector; -use druid::text::RichTextBuilder; -use druid::widget::{ - Checkbox, CrossAxisAlignment, Either, Flex, Image, Label, LineBreaking, List, - MainAxisAlignment, Maybe, Scroll, SizedBox, Split, Svg, SvgData, TextBox, ViewSwitcher, -}; -use druid::{lens, Env}; -use druid::{ - Color, FileDialogOptions, FileSpec, FontDescriptor, FontFamily, LensExt, SingleUse, Widget, - WidgetExt, WindowDesc, WindowId, -}; -use druid::{Data, ImageBuf, LifeCycleCtx}; -use druid_widget_nursery::WidgetExt as _; -use lazy_static::lazy_static; - -use crate::state::{ - 
ModInfo, NexusInfo, NexusInfoLens, State, View, ACTION_ADD_MOD, ACTION_OPEN_LINK, - ACTION_SELECTED_MOD_DOWN, ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, ACTION_SET_WINDOW_HANDLE, - ACTION_START_CHECK_UPDATE, ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY, - ACTION_START_RESET_DEPLOYMENT, -}; -use crate::ui::theme::{self, ColorExt, COLOR_GREEN_LIGHT}; -use crate::ui::widget::border::Border; -use crate::ui::widget::button::Button; -use crate::ui::widget::controller::{ - AutoScrollController, DirtyStateController, ImageLensController, -}; -use crate::ui::widget::PathBufFormatter; - -lazy_static! { - pub static ref WINDOW_ID: WindowId = WindowId::next(); -} - -const TITLE: &str = "Darktide Mod Manager"; -const WINDOW_SIZE: (f64, f64) = (1080., 720.); -const MOD_DETAILS_MIN_WIDTH: f64 = 325.; - -pub(crate) fn new() -> WindowDesc { - WindowDesc::new(build_window()) - .title(TITLE) - .window_size(WINDOW_SIZE) -} - -fn build_top_bar() -> impl Widget { - let mods_button = Button::with_label("Mods") - .on_click(|_ctx, state: &mut State, _env| state.current_view = View::Mods); - - let settings_button = - Button::with_label("Settings").on_click(|_ctx, state: &mut State, _env| { - state.current_view = View::Settings; - }); - - let check_update_button = { - let make_button = || { - Button::with_label("Check for updates").on_click(|ctx, _: &mut State, _| { - ctx.submit_command(ACTION_START_CHECK_UPDATE); - }) - }; - - Either::new( - |data, _| data.nexus_api_key.is_empty(), - make_button() - .tooltip(|_: &State, _: &Env| "A Nexus API key is required") - .disabled_if(|_, _| true), - make_button().disabled_if(|data, _| data.is_update_in_progress), - ) - }; - - let deploy_button = { - let icon = Svg::new(SvgData::from_str(theme::icons::ALERT_CIRCLE).expect("invalid SVG")) - .fix_height(druid::theme::TEXT_SIZE_NORMAL); - - let inner = Either::new( - |state: &State, _| state.dirty, - Flex::row() - .with_child(icon) - .with_spacer(3.) 
- .with_child(Label::new("Deploy Mods")), - Label::new("Deploy Mods"), - ); - Button::new(inner) - .on_click(|ctx, _state: &mut State, _env| { - ctx.submit_command(ACTION_START_DEPLOY); - }) - .disabled_if(|data, _| data.is_deployment_in_progress || data.is_reset_in_progress) - }; - - let reset_button = Button::with_label("Reset Game") - .on_click(|ctx, _state: &mut State, _env| { - ctx.submit_command(ACTION_START_RESET_DEPLOYMENT); - }) - .disabled_if(|data, _| data.is_deployment_in_progress || data.is_reset_in_progress); - - let bar = Flex::row() - .must_fill_main_axis(true) - .main_axis_alignment(MainAxisAlignment::SpaceBetween) - .with_child( - Flex::row() - .with_child(mods_button) - .with_default_spacer() - .with_child(settings_button), - ) - .with_child( - Flex::row() - .with_child(check_update_button) - .with_default_spacer() - .with_child(deploy_button) - .with_default_spacer() - .with_child(reset_button), - ) - .padding(theme::TOP_BAR_INSETS) - .background(theme::TOP_BAR_BACKGROUND_COLOR); - - Border::new(bar) - .with_color(theme::COLOR_FG2) - .with_bottom_border(1.) 
-} - -fn build_mod_list() -> impl Widget { - let list = List::new(|| { - let checkbox = Checkbox::new("") - .env_scope(|env, selected| { - env.set(druid::theme::BORDER_DARK, theme::COLOR_BG3); - env.set(druid::theme::BORDER_LIGHT, theme::COLOR_BG3); - env.set(druid::theme::TEXT_COLOR, theme::COLOR_ACCENT_FG); - - if *selected { - env.set(druid::theme::BACKGROUND_DARK, theme::COLOR_ACCENT); - env.set(druid::theme::BACKGROUND_LIGHT, theme::COLOR_ACCENT); - } else { - env.set(druid::theme::BACKGROUND_DARK, Color::TRANSPARENT); - env.set(druid::theme::BACKGROUND_LIGHT, Color::TRANSPARENT); - } - }) - .lens(lens!((usize, Arc, bool), 1).then(ModInfo::enabled.in_arc())); - - let name = Label::dynamic(|info: &Arc, _| { - info.nexus - .as_ref() - .map(|n| n.name.clone()) - .unwrap_or_else(|| info.name.clone()) - }) - .lens(lens!((usize, Arc, bool), 1)); - - let version = { - let icon = { - let tree = - theme::icons::parse_svg(theme::icons::CLOUD_DOWNLOAD).expect("invalid SVG"); - - let tree = theme::icons::recolor_icon(tree, true, COLOR_GREEN_LIGHT); - - Svg::new(tree).fix_height(druid::theme::TEXT_SIZE_NORMAL) - }; - - Either::new( - |info, _| { - info.nexus - .as_ref() - .map(|n| info.version != n.version) - .unwrap_or(false) - }, - Flex::row() - .with_child(icon) - .with_spacer(3.) - .with_child(Label::raw().lens(ModInfo::version.in_arc())), - Label::raw().lens(ModInfo::version.in_arc()), - ) - .lens(lens!((usize, Arc, bool), 1)) - }; - - let fields = Flex::row() - .must_fill_main_axis(true) - .main_axis_alignment(MainAxisAlignment::SpaceBetween) - .with_child(name) - .with_child(version); - - Flex::row() - .must_fill_main_axis(true) - .with_child(checkbox) - .with_flex_child(fields, 1.) 
- .padding((5.0, 4.0)) - .background(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR) - .on_click(|ctx, (i, _, _), _env| ctx.submit_command(ACTION_SELECT_MOD.with(*i))) - .env_scope(|env, (i, _, selected)| { - if *selected { - env.set(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR, theme::COLOR_ACCENT); - env.set( - druid::theme::TEXT_COLOR, - theme::COLOR_ACCENT_FG.darken(0.05), - ); - } else { - env.set(druid::theme::TEXT_COLOR, theme::COLOR_FG); - - if (i % 2) == 1 { - env.set(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR, theme::COLOR_BG1); - } else { - env.set(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR, theme::COLOR_BG); - } - } - }) - }); - - let scroll = Scroll::new(list).vertical().lens(lens::Identity.map( - |state: &State| { - state - .mods - .iter() - .enumerate() - .map(|(i, val)| (i, val.clone(), Some(i) == state.selected_mod_index)) - .collect::>() - }, - |state, infos| { - infos.into_iter().for_each(|(i, new, _)| { - if !Data::same(&state.mods.get(i).cloned(), &Some(new.clone())) { - state.mods.set(i, new); - } - }); - }, - )); - - Flex::column() - .must_fill_main_axis(true) - .with_child(Flex::row()) - .with_flex_child(scroll, 1.0) -} - -fn build_mod_details_buttons() -> impl Widget { - let button_move_up = Button::with_label("Move Up") - .on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_UP)) - .disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_up()); - - let button_move_down = Button::with_label("Move Down") - .on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_DOWN)) - .disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_down()); - - let button_toggle_mod = Maybe::new( - || { - let inner = Label::dynamic(|enabled, _env| { - if *enabled { - "Disable Mod".into() - } else { - "Enable Mod".into() - } - }); - Button::new(inner) - .on_click(|_ctx, enabled: &mut bool, _env| { - *enabled = !(*enabled); - }) - .lens(ModInfo::enabled.in_arc()) - }, - // TODO: Gray out - || Button::with_label("Enable Mod"), - ) - 
.disabled_if(|info: &Option>, _env: &druid::Env| info.is_none()) - .lens(State::selected_mod); - - let button_add_mod = Button::with_label("Add Mod").on_click(|ctx, _state: &mut State, _env| { - let zip = FileSpec::new("Zip file", &["zip"]); - let opts = FileDialogOptions::new() - .allowed_types(vec![zip]) - .default_type(zip) - .name_label("Mod Archive") - .title("Choose a mod to add") - .accept_command(ACTION_ADD_MOD); - ctx.submit_command(druid::commands::SHOW_OPEN_PANEL.with(opts)) - }); - - let button_delete_mod = Button::with_label("Delete Mod") - .on_click(|ctx, data: &mut Option>, _env| { - if let Some(info) = data { - ctx.submit_command( - ACTION_START_DELETE_SELECTED_MOD.with(SingleUse::new(info.clone())), - ); - } - }) - .disabled_if(|info: &Option>, _env: &druid::Env| info.is_none()) - .lens(State::selected_mod); - - Flex::column() - .cross_axis_alignment(CrossAxisAlignment::Center) - .with_child( - Flex::row() - .main_axis_alignment(MainAxisAlignment::End) - .with_child(button_move_up) - .with_default_spacer() - .with_child(button_move_down), - ) - .with_default_spacer() - .with_child( - Flex::row() - .main_axis_alignment(MainAxisAlignment::End) - .with_child(button_toggle_mod) - .with_default_spacer() - .with_child(button_add_mod) - .with_default_spacer() - .with_child(button_delete_mod), - ) - .expand_width() -} - -fn build_mod_details_info() -> impl Widget { - Maybe::new( - || { - let name = Label::raw() - .with_text_size(24.) - // Force the label to take up the entire details' pane width, - // so that we can center-align it. 
- .expand_width() - .lens(NexusInfoLens::new(NexusInfo::name, ModInfo::name).in_arc()); - let summary = Label::raw() - .with_line_break_mode(LineBreaking::WordWrap) - .lens(NexusInfoLens::new(NexusInfo::summary, ModInfo::summary).in_arc()); - - let version_line = Label::dynamic(|info: &Arc, _| { - let author = info - .nexus - .as_ref() - .map(|n| &n.author) - .or(info.author.as_ref()); - - if let Some(author) = &author { - format!("Version: {}, by {author}", info.version) - } else { - format!("Version: {}", info.version) - } - }); - - let categories = Label::dynamic(|info: &Arc, _| { - if info.categories.is_empty() { - String::from("Uncategorized") - } else { - info.categories.iter().enumerate().fold( - String::from("Category: "), - |mut s, (i, category)| { - if i > 0 { - s.push_str(", "); - } - s.push_str(category); - s - }, - ) - } - }); - - let nexus_link = Maybe::or_empty(|| { - let link = Label::raw().lens(NexusInfo::id.map( - |id| { - let url = format!("https://nexusmods.com/warhammer40kdarktide/mods/{}", id); - let mut builder = RichTextBuilder::new(); - builder - .push("Open on Nexusmods") - .underline(true) - .text_color(theme::LINK_COLOR) - .link(ACTION_OPEN_LINK.with(Arc::new(url))); - builder.build() - }, - |_, _| {}, - )); - Flex::column() - .cross_axis_alignment(CrossAxisAlignment::Start) - .main_axis_alignment(MainAxisAlignment::Start) - .with_child(link) - .with_spacer(4.) - }) - .lens(ModInfo::nexus.in_arc()); - - let details = Flex::column() - .cross_axis_alignment(CrossAxisAlignment::Start) - .main_axis_alignment(MainAxisAlignment::Start) - .with_child(name) - .with_spacer(4.) - .with_child(summary) - .with_spacer(4.) - .with_child(nexus_link) - .with_child(version_line) - .with_spacer(4.) 
- .with_child(categories) - .padding((4., 4.)); - - let image = - Maybe::or_empty(|| Image::new(ImageBuf::empty()).controller(ImageLensController)) - .lens(ModInfo::image.in_arc()); - - Flex::column() - .main_axis_alignment(MainAxisAlignment::Start) - .must_fill_main_axis(true) - .cross_axis_alignment(CrossAxisAlignment::Start) - .with_child(image) - .with_child(details) - }, - Flex::column, - ) - .lens(State::selected_mod) -} - -fn build_mod_details() -> impl Widget { - Flex::column() - .must_fill_main_axis(true) - .cross_axis_alignment(CrossAxisAlignment::Start) - .main_axis_alignment(MainAxisAlignment::SpaceBetween) - .with_flex_child(build_mod_details_info(), 1.0) - .with_child(build_mod_details_buttons().padding((4., 4., 4., 8.))) -} - -fn build_view_mods() -> impl Widget { - Split::columns(build_mod_list(), build_mod_details()) - .split_point(0.75) - .min_size(0.0, MOD_DETAILS_MIN_WIDTH) - .solid_bar(true) - .bar_size(2.0) - .draggable(true) -} - -fn build_view_settings() -> impl Widget { - let data_dir_setting = Flex::row() - .must_fill_main_axis(true) - .main_axis_alignment(MainAxisAlignment::Start) - .with_child(Label::new("Data Directory:")) - .with_default_spacer() - .with_flex_child( - TextBox::new() - .with_formatter(PathBufFormatter::new()) - .expand_width() - .lens(State::data_dir), - 1., - ) - .expand_width(); - - let game_dir_setting = Flex::row() - .must_fill_main_axis(true) - .main_axis_alignment(MainAxisAlignment::Start) - .with_child(Label::new("Game Directory:")) - .with_default_spacer() - .with_flex_child( - TextBox::new() - .with_formatter(PathBufFormatter::new()) - .expand_width() - .lens(State::game_dir), - 1., - ) - .expand_width(); - - let nexus_apy_key_setting = Flex::row() - .must_fill_main_axis(true) - .main_axis_alignment(MainAxisAlignment::Start) - .with_child(Label::new("Nexus API Key:")) - .with_default_spacer() - .with_flex_child(TextBox::new().expand_width().lens(State::nexus_api_key), 1.) 
- .expand_width(); - - let io_setting = Flex::row() - .must_fill_main_axis(true) - .main_axis_alignment(MainAxisAlignment::Start) - .with_child(Label::new("Enable unsafe I/O:")) - .with_default_spacer() - .with_child(Checkbox::from_label(Label::dynamic( - |enabled: &bool, _: &Env| { - if *enabled { - "Enabled".into() - } else { - "Disabled".into() - } - }, - ))) - .lens(State::is_io_enabled) - .tooltip(|_: &State, _: &Env| { - "Enabling this gives ANY mod full access to your files \ - and the ability to load arbitrary software libraries.\n\ - Only enable this if it is crucial for a mod's functionality, \ - and you are sure none of the ones you have installed are malicious." - }) - .expand_width(); - - let content = Flex::column() - .must_fill_main_axis(true) - .cross_axis_alignment(CrossAxisAlignment::Start) - .with_child(data_dir_setting) - .with_default_spacer() - .with_child(game_dir_setting) - .with_default_spacer() - .with_child(io_setting) - .with_default_spacer() - .with_child(nexus_apy_key_setting); - - SizedBox::new(content) - .width(800.) - .expand_height() - .padding(5.) -} - -fn build_main() -> impl Widget { - ViewSwitcher::new( - |state: &State, _| state.current_view, - |selector, _, _| match selector { - View::Mods => Box::new(build_view_mods()), - View::Settings => Box::new(build_view_settings()), - }, - ) -} - -fn build_log_view() -> impl Widget { - let list = List::new(|| { - Label::raw() - .with_font(FontDescriptor::new(FontFamily::MONOSPACE)) - .with_line_break_mode(LineBreaking::WordWrap) - }) - .lens(State::log) - .padding(4.) 
- .scroll() - .vertical() - .controller(AutoScrollController); - - let inner = Border::new(list) - .with_color(theme::COLOR_FG2) - .with_top_border(1.); - - SizedBox::new(inner).expand_width().height(128.0) -} - -fn build_window() -> impl Widget { - // TODO: Add borders between the sections - Flex::column() - .must_fill_main_axis(true) - .with_child(build_top_bar()) - .with_flex_child(build_main(), 1.0) - .with_child(build_log_view()) - .controller(DirtyStateController) - .on_added(|_, ctx: &mut LifeCycleCtx, _, _| { - ctx.submit_command( - ACTION_SET_WINDOW_HANDLE.with(SingleUse::new((*WINDOW_ID, ctx.window().clone()))), - ); - }) -} diff --git a/crates/dtmm/src/util/ansi.rs b/crates/dtmm/src/util/ansi.rs deleted file mode 100644 index 24855fc..0000000 --- a/crates/dtmm/src/util/ansi.rs +++ /dev/null @@ -1,92 +0,0 @@ -use ansi_parser::{AnsiParser, AnsiSequence, Output}; -use druid::text::{RichText, RichTextBuilder}; -use druid::{Color, FontStyle, FontWeight}; - -use crate::ui::theme; - -#[derive(Default, Debug)] -struct TextState { - color: Option, - dim: bool, - bold: bool, - underline: bool, - strikethrough: bool, - italic: bool, -} - -pub fn ansi_to_rich_text(input: &str) -> RichText { - let mut builder = RichTextBuilder::new(); - - let mut state = TextState::default(); - - for token in input.ansi_parse() { - match token { - Output::TextBlock(text) => { - let mut attr = builder.push(text); - attr.underline(state.underline); - attr.strikethrough(state.strikethrough); - - if state.bold { - attr.weight(FontWeight::BOLD); - } - - if state.italic { - attr.style(FontStyle::Italic); - } - - if let Some(color) = state.color { - attr.text_color(color); - } - } - Output::Escape(AnsiSequence::SetGraphicsMode(values)) => { - for v in values { - match v { - 0 => { - state = Default::default(); - break; - } - 1 => state.bold = true, - 2 => state.dim = true, - 3 => state.italic = true, - 4 => state.underline = true, - 9 => state.strikethrough = true, - 22 => { - state.bold = 
false; - state.dim = false; - } - 23 => state.italic = false, - 24 => state.underline = false, - 29 => state.underline = false, - 30..=40 | 90..=100 => { - let mut col = v - 30; - if col > 9 { - state.bold = true; - col -= 60; - } - - state.color = match col { - // This escape code is usually called 'black', but is actually used - // as "foreground color", in regards to light themes. - 0 => Some(theme::COLOR_FG), - 1 => Some(theme::COLOR_RED_LIGHT), - 2 => Some(theme::COLOR_GREEN_LIGHT), - 3 => Some(theme::COLOR_YELLOW_LIGHT), - 4 => Some(theme::COLOR_BLUE_LIGHT), - 5 => Some(theme::COLOR_PURPLE_LIGHT), - 6 => Some(theme::COLOR_AQUA_LIGHT), - // Similarly, 'white' is the background color - 7 => Some(theme::COLOR_BG), - 9 => None, - _ => unreachable!(), - }; - } - _ => {} - } - } - } - Output::Escape(_) => {} - } - } - - builder.build() -} diff --git a/crates/dtmm/src/util/config.rs b/crates/dtmm/src/util/config.rs deleted file mode 100644 index 9affbb6..0000000 --- a/crates/dtmm/src/util/config.rs +++ /dev/null @@ -1,179 +0,0 @@ -use std::io::ErrorKind; -use std::path::Path; -use std::path::PathBuf; -use std::sync::Arc; - -use color_eyre::{eyre::Context, Result}; -use serde::{Deserialize, Serialize}; -use tokio::fs; - -use crate::state::{ActionState, ModInfo}; - -#[derive(Clone, Debug, Serialize)] -pub(crate) struct LoadOrderEntrySerialize<'a> { - pub id: &'a String, - pub enabled: bool, -} - -impl<'a> From<&'a ModInfo> for LoadOrderEntrySerialize<'a> { - fn from(info: &'a ModInfo) -> Self { - Self { - id: &info.id, - enabled: info.enabled, - } - } -} - -#[derive(Debug, Serialize)] -pub(crate) struct ConfigSerialize<'a> { - game_dir: &'a Path, - data_dir: &'a Path, - nexus_api_key: &'a String, - mod_order: Vec>, - unsafe_io: bool, -} - -impl<'a> From<&'a ActionState> for ConfigSerialize<'a> { - fn from(state: &'a ActionState) -> Self { - Self { - game_dir: &state.game_dir, - data_dir: &state.data_dir, - nexus_api_key: &state.nexus_api_key, - unsafe_io: 
state.is_io_enabled, - mod_order: state - .mods - .iter() - .map(Arc::as_ref) - .map(LoadOrderEntrySerialize::from) - .collect(), - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub(crate) struct LoadOrderEntry { - pub id: String, - pub enabled: bool, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub(crate) struct Config { - #[serde(skip)] - pub path: PathBuf, - #[serde(default = "get_default_data_dir")] - pub data_dir: PathBuf, - pub game_dir: Option, - #[serde(default)] - pub unsafe_io: bool, - pub nexus_api_key: Option, - #[serde(default)] - pub mod_order: Vec, -} - -#[cfg(not(target_os = "windows"))] -pub fn get_default_config_path() -> PathBuf { - let config_dir = std::env::var("XDG_CONFIG_DIR").unwrap_or_else(|_| { - let home = std::env::var("HOME").unwrap_or_else(|_| { - let user = std::env::var("USER").expect("user env variable not set"); - format!("/home/{user}") - }); - format!("{home}/.config") - }); - - PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg") -} - -#[cfg(target_os = "windows")] -pub fn get_default_config_path() -> PathBuf { - let config_dir = std::env::var("APPDATA").expect("appdata env var not set"); - PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg") -} - -#[cfg(not(target_os = "windows"))] -pub fn get_default_data_dir() -> PathBuf { - let data_dir = std::env::var("XDG_DATA_DIR").unwrap_or_else(|_| { - let home = std::env::var("HOME").unwrap_or_else(|_| { - let user = std::env::var("USER").expect("user env variable not set"); - format!("/home/{user}") - }); - format!("{home}/.local/share") - }); - - PathBuf::from(data_dir).join("dtmm") -} - -#[cfg(target_os = "windows")] -pub fn get_default_data_dir() -> PathBuf { - let data_dir = std::env::var("LOCALAPPDATA").expect("appdata env var not set"); - PathBuf::from(data_dir).join("dtmm") -} - -#[tracing::instrument] -pub(crate) async fn read_config

(path: P, is_default: bool) -> Result -where - P: Into + std::fmt::Debug, -{ - let path = path.into(); - let default_path = get_default_config_path(); - - match fs::read(&path).await { - Ok(data) => { - let data = String::from_utf8(data).wrap_err_with(|| { - format!("Config file '{}' contains invalid UTF-8", path.display()) - })?; - let mut cfg: Config = serde_sjson::from_str(&data) - .wrap_err_with(|| format!("Invalid config file {}", path.display()))?; - - cfg.path = path; - - tracing::debug!("Read config file '{}': {:?}", cfg.path.display(), cfg); - - Ok(cfg) - } - Err(err) if err.kind() == ErrorKind::NotFound => { - if !is_default { - return Err(err) - .wrap_err_with(|| format!("Failed to read config file {}", path.display()))?; - } - - tracing::debug!( - "Config file not found at '{}', creating default.", - path.display() - ); - - { - let parent = default_path - .parent() - .expect("a file path always has a parent directory"); - fs::create_dir_all(parent).await.wrap_err_with(|| { - format!("Failed to create directories {}", parent.display()) - })?; - } - - let config = Config { - path: default_path, - data_dir: get_default_data_dir(), - game_dir: None, - nexus_api_key: None, - mod_order: Vec::new(), - unsafe_io: false, - }; - - { - let data = serde_sjson::to_string(&config) - .wrap_err("Failed to serialize default config value")?; - fs::write(&config.path, data).await.wrap_err_with(|| { - format!( - "Failed to write default config to {}", - config.path.display() - ) - })?; - } - - Ok(config) - } - Err(err) => { - Err(err).wrap_err_with(|| format!("Failed to read config file {}", path.display())) - } - } -} diff --git a/crates/dtmm/src/util/log.rs b/crates/dtmm/src/util/log.rs deleted file mode 100644 index 4b7c15a..0000000 --- a/crates/dtmm/src/util/log.rs +++ /dev/null @@ -1,95 +0,0 @@ -use clap::ValueEnum; -use tokio::sync::mpsc::UnboundedSender; -use tracing_error::ErrorLayer; -use tracing_subscriber::filter::FilterFn; -use tracing_subscriber::fmt; -use 
tracing_subscriber::fmt::format::debug_fn; -use tracing_subscriber::layer::SubscriberExt; -use tracing_subscriber::prelude::*; -use tracing_subscriber::EnvFilter; - -#[derive(Clone, Copy, Debug, ValueEnum)] -pub enum LogLevel { - Trace, - Debug, - Info, - Warn, - Error, -} - -impl From for EnvFilter { - fn from(level: LogLevel) -> Self { - let filter = match level { - LogLevel::Trace => "error,dtmm=trace,sdk=trace", - LogLevel::Debug => "error,dtmm=debug,sdk=debug", - LogLevel::Info => "error,dtmm=info", - LogLevel::Warn => "error,dtmm=warn", - LogLevel::Error => "error", - }; - EnvFilter::new(filter) - } -} - -pub struct ChannelWriter { - tx: UnboundedSender>, -} - -impl ChannelWriter { - pub fn new(tx: UnboundedSender>) -> Self { - Self { tx } - } -} - -impl std::io::Write for ChannelWriter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - let tx = self.tx.clone(); - // The `send` errors when the receiving end has closed. - // But there's not much we can do at that point, so we just ignore it. - let _ = tx.send(buf.to_vec()); - - Ok(buf.len()) - } - - fn flush(&mut self) -> std::io::Result<()> { - Ok(()) - } -} - -pub fn create_tracing_subscriber(level: Option, tx: Option>>) { - let mut env_layer = if let Some(level) = level { - EnvFilter::from(level) - } else if cfg!(debug_assertions) { - EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")) - } else { - EnvFilter::new("error,dtmm=info") - }; - - // The internal implementation of Druid's GTK file dialog turns - // cancelling the dialog into an error. The, also internal, wrapper - // then logs and swallows the error. - // Therefore, as a consumer of the library, we don't have any way - // to customize this behavior, and instead have to filter out the - // tracing event. 
- env_layer = env_layer.add_directive( - "druid_shell::backend::gtk::window=off" - .parse() - .expect("Invalid env filter directive"), - ); - - let stdout_layer = fmt::layer().pretty(); - - let channel_layer = tx.map(|tx| { - fmt::layer() - .event_format(dtmt_shared::Formatter) - .fmt_fields(debug_fn(dtmt_shared::format_fields)) - .with_writer(move || ChannelWriter::new(tx.clone())) - .with_filter(FilterFn::new(dtmt_shared::filter_fields)) - }); - - tracing_subscriber::registry() - .with(env_layer) - .with(channel_layer) - .with(stdout_layer) - .with(ErrorLayer::new(fmt::format::Pretty::default())) - .init(); -} diff --git a/crates/dtmm/src/widget/container.rs b/crates/dtmm/src/widget/container.rs new file mode 100644 index 0000000..e58e64c --- /dev/null +++ b/crates/dtmm/src/widget/container.rs @@ -0,0 +1,7 @@ +use druid::{Data, Widget, WidgetPod}; + +pub struct Container { + child: WidgetPod>>, +} + +impl Container {} diff --git a/crates/dtmm/src/ui/widget/fill_container.rs b/crates/dtmm/src/widget/fill_container.rs similarity index 100% rename from crates/dtmm/src/ui/widget/fill_container.rs rename to crates/dtmm/src/widget/fill_container.rs diff --git a/crates/dtmm/src/widget/hidden_if.rs b/crates/dtmm/src/widget/hidden_if.rs new file mode 100644 index 0000000..122eb40 --- /dev/null +++ b/crates/dtmm/src/widget/hidden_if.rs @@ -0,0 +1,60 @@ +use druid::widget::prelude::*; +use druid::{Point, WidgetPod}; + +pub struct HiddenIf { + child: WidgetPod, + hidden_if: Box bool>, +} + +impl> HiddenIf { + pub fn new(child: W, hidden_if: impl Fn(&T, &Env) -> bool + 'static) -> Self { + Self { + hidden_if: Box::new(hidden_if), + child: WidgetPod::new(child), + } + } +} + +impl> Widget for HiddenIf { + #[tracing::instrument(name = "HideContainer", level = "trace", skip_all)] + fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) { + let hidden = (self.hidden_if)(data, env); + ctx.set_disabled(hidden); + self.child.event(ctx, event, data, env); + } 
+ + #[tracing::instrument(name = "HideContainer", level = "trace", skip_all)] + fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) { + let hidden = (self.hidden_if)(data, env); + ctx.set_disabled(hidden); + self.child.lifecycle(ctx, event, data, env) + } + + #[tracing::instrument(name = "HideContainer", level = "trace", skip_all)] + fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) { + self.child.update(ctx, data, env); + } + + #[tracing::instrument(name = "HideContainer", level = "trace", skip_all)] + fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size { + bc.debug_check("HideContainer"); + let hidden = (self.hidden_if)(data, env); + if hidden { + return Size::ZERO; + } + + let child_size = self.child.layout(ctx, bc, data, env); + self.child.set_origin(ctx, Point::new(0.0, 0.0)); + child_size + } + + #[tracing::instrument(name = "HideContainer", level = "trace", skip_all)] + fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) { + let hidden = (self.hidden_if)(data, env); + if hidden { + return; + } + + self.child.paint(ctx, data, env); + } +} diff --git a/crates/dtmm/src/widget/mod.rs b/crates/dtmm/src/widget/mod.rs new file mode 100644 index 0000000..61a8778 --- /dev/null +++ b/crates/dtmm/src/widget/mod.rs @@ -0,0 +1,20 @@ +use druid::{Data, Env, Widget}; + +use self::fill_container::FillContainer; +use self::hidden_if::HiddenIf; + +pub mod container; +pub mod fill_container; +pub mod hidden_if; + +pub trait ExtraWidgetExt: Widget + Sized + 'static { + fn content_must_fill(self) -> FillContainer { + FillContainer::new(self) + } + + fn hidden_if(self, hidden_if: impl Fn(&T, &Env) -> bool + 'static) -> HiddenIf { + HiddenIf::new(self, hidden_if) + } +} + +impl + 'static> ExtraWidgetExt for W {} diff --git a/crates/dtmm/src/ui/widget/table_select.rs b/crates/dtmm/src/widget/table_select.rs similarity index 100% rename from 
crates/dtmm/src/ui/widget/table_select.rs rename to crates/dtmm/src/widget/table_select.rs diff --git a/crates/dtmt/Cargo.toml b/crates/dtmt/Cargo.toml index 183d6a5..1a0a1e6 100644 --- a/crates/dtmt/Cargo.toml +++ b/crates/dtmt/Cargo.toml @@ -1,43 +1,30 @@ [package] name = "dtmt" -version = "0.3.0" +version = "0.2.0" edition = "2021" [dependencies] -async-recursion = { workspace = true } -clap = { workspace = true } -cli-table = { workspace = true } -color-eyre = { workspace = true } -confy = { workspace = true } -csv-async = { workspace = true } -dtmt-shared = { workspace = true } -futures = { workspace = true } -futures-util = { workspace = true } -glob = { workspace = true } -luajit2-sys = { workspace = true } -minijinja = { workspace = true } -nanorand = { workspace = true } -notify = { workspace = true } -oodle = { workspace = true } -path-clean = { workspace = true } -path-slash = { workspace = true } -pin-project-lite = { workspace = true } -promptly = { workspace = true } -sdk = { workspace = true } -serde = { workspace = true } -serde_sjson = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } -tracing = { workspace = true } -tracing-error = { workspace = true } -tracing-subscriber = { workspace = true } -zip = { workspace = true } - -# Cannot be a workspace dependencies when it's optional -shlex = { version = "1.2.0", optional = true } +clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] } +color-eyre = "0.6.2" +csv-async = { version = "1.2.4", features = ["tokio", "serde"] } +sdk = { path = "../../lib/sdk", version = "0.2.0" } +futures = "0.3.25" +futures-util = "0.3.24" +glob = "0.3.0" +libloading = "0.7.4" +nanorand = "0.7.0" +pin-project-lite = "0.2.9" +serde = { version = "1.0.147", features = ["derive"] } +serde_sjson = { path = "../../lib/serde_sjson", version = "*" } +tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", 
"io-std"] } +tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] } +tracing = { version = "0.1.37", features = ["async-await"] } +tracing-error = "0.2.0" +tracing-subscriber = { version = "0.3.16", features = ["env-filter"] } +confy = "0.5.1" +zip = "0.6.3" +string_template = "0.2.1" +promptly = "0.3.1" [dev-dependencies] tempfile = "3.3.0" - -[features] -shlex-bench = ["dep:shlex"] diff --git a/crates/dtmt/README.adoc b/crates/dtmt/README.adoc deleted file mode 100644 index 4304805..0000000 --- a/crates/dtmt/README.adoc +++ /dev/null @@ -1,32 +0,0 @@ -= Darktide Mod Tools (DTMT) -:idprefix: -:idseparator: -:toc: macro -:toclevels: 1 -:!toc-title: -:caution-caption: :fire: -:important-caption: :exclamtion: -:note-caption: :paperclip: -:tip-caption: :bulb: -:warning-caption: :warning: - -A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_. - -== Quickstart - -1. Head to the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] and download the `dtmt` binary for your platform. -2. Place the binary and `dictionary.csv` next to each other. -3. Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`. -4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference]. - -== Runtime dependencies - -The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd. - -A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environmental variable. - -== Building - -1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system. -2. Download or clone this source code. Make sure to include the submodules in `lib/`. -3. 
Run `cargo build`. diff --git a/crates/dtmt/src/cmd/build.rs b/crates/dtmt/src/cmd/build.rs index 74a627d..6cc1469 100644 --- a/crates/dtmt/src/cmd/build.rs +++ b/crates/dtmt/src/cmd/build.rs @@ -1,26 +1,21 @@ -use std::collections::{HashMap, HashSet}; -use std::ops::Deref; use std::path::{Path, PathBuf}; use std::sync::Arc; use clap::{value_parser, Arg, ArgMatches, Command}; use color_eyre::eyre::{self, Context, Result}; use color_eyre::{Help, Report}; -use dtmt_shared::ModConfig; use futures::future::try_join_all; use futures::StreamExt; -use path_slash::PathExt; use sdk::filetype::package::Package; -use sdk::murmur::IdString64; -use sdk::{Bundle, BundleFile}; +use sdk::{Bundle, BundleFile, Oodle}; +use serde::Deserialize; use tokio::fs::{self, File}; use tokio::io::AsyncReadExt; -use tokio::sync::Mutex; + +use crate::mods::archive::Archive; const PROJECT_CONFIG_NAME: &str = "dtmt.cfg"; -type FileIndexMap = HashMap>; - pub(crate) fn command_definition() -> Command { Command::new("build") .about("Build a project") @@ -30,38 +25,36 @@ pub(crate) fn command_definition() -> Command { .value_parser(value_parser!(PathBuf)) .help( "The path to the project to build. \ - If omitted, dtmt will search from the current working directory upward.", + If omitted, dtmt will search from the current working directory upward.", ), ) .arg( - Arg::new("out") - .long("out") - .short('o') - .default_value("out") - .value_parser(value_parser!(PathBuf)) - .help("The directory to write output files to."), - ) - .arg( - Arg::new("deploy") - .long("deploy") - .short('d') - .value_parser(value_parser!(PathBuf)) + Arg::new("oodle") + .long("oodle") + .default_value(super::OODLE_LIB_NAME) .help( - "If the path to the game (without the trailing '/bundle') is specified, \ - deploy the newly built bundles. \ - This will not adjust the bundle database or package files, so if files are \ - added or removed, you will have to import into DTMM and re-deploy there.", + "The oodle library to load. 
This may either be:\n\ + - A library name that will be searched for in the system's default paths.\n\ + - A file path relative to the current working directory.\n\ + - An absolute file path.", ), ) } -/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents. +#[derive(Debug, Default, Deserialize)] +struct ProjectConfig { + #[serde(skip)] + dir: PathBuf, + name: String, + packages: Vec, +} + #[tracing::instrument] -async fn find_project_config(dir: Option) -> Result { +async fn find_project_config(dir: Option) -> Result { let (path, mut file) = if let Some(path) = dir { let file = File::open(&path.join(PROJECT_CONFIG_NAME)) .await - .wrap_err_with(|| format!("Failed to open file: {}", path.display())) + .wrap_err_with(|| format!("failed to open file: {}", path.display())) .with_suggestion(|| { format!( "Make sure the file at '{}' exists and is readable", @@ -85,7 +78,7 @@ async fn find_project_config(dir: Option) -> Result { } Err(err) => { let err = Report::new(err) - .wrap_err(format!("Failed to open file: {}", path.display())); + .wrap_err(format!("failed to open file: {}", path.display())); return Err(err); } } @@ -93,54 +86,46 @@ async fn find_project_config(dir: Option) -> Result { }; let mut buf = String::new(); - file.read_to_string(&mut buf) - .await - .wrap_err("Invalid UTF-8")?; + file.read_to_string(&mut buf).await?; - let mut cfg: ModConfig = - serde_sjson::from_str(&buf).wrap_err("Failed to deserialize mod config")?; + let mut cfg: ProjectConfig = serde_sjson::from_str(&buf)?; cfg.dir = path; Ok(cfg) } -/// Iterate over the paths in the given `Package` and -/// compile each file by its file type. #[tracing::instrument(skip_all)] -async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result> { - let root = Arc::new(&cfg.dir); - let name_overrides = &cfg.name_overrides; +async fn compile_package_files

(pkg: &Package, root: P) -> Result> +where + P: AsRef + std::fmt::Debug, +{ + let root = Arc::new(root.as_ref()); let tasks = pkg .iter() - .flat_map(|(file_type, names)| { - names.iter().map(|name| { + .flat_map(|(file_type, paths)| { + paths.iter().map(|path| { ( *file_type, - name, + path, // Cloning the `Arc` here solves the issue that in the next `.map`, I need to // `move` the closure parameters, but can't `move` `root` before it was cloned. root.clone(), ) }) }) - .map(|(file_type, name, root)| async move { - let path = PathBuf::from(name); - let sjson = fs::read_to_string(&path) - .await - .wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?; + .map(|(file_type, path, root)| async move { + let sjson = fs::read_to_string(&path).await?; - let name = path.with_extension("").to_slash_lossy().to_string(); - let name = if let Some(new_name) = name_overrides.get(&name) { - let new_name = match u64::from_str_radix(new_name, 16) { - Ok(hash) => IdString64::from(hash), - Err(_) => IdString64::from(new_name.clone()), - }; - tracing::info!("Overriding '{}' -> '{}'", name, new_name.display()); - new_name - } else { - IdString64::from(name.clone()) - }; - BundleFile::from_sjson(name, file_type, sjson, root.as_ref()).await + let mut path = path.clone(); + path.set_extension(""); + + BundleFile::from_sjson( + path.to_string_lossy().to_string(), + file_type, + sjson, + root.as_ref(), + ) + .await }); let results = futures::stream::iter(tasks) @@ -151,29 +136,10 @@ async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result + std::fmt::Debug, -) -> Result { - let root = &cfg.dir; - let package = package.as_ref(); +#[tracing::instrument(skip_all, fields(files = files.len()))] +fn compile_bundle(name: String, files: Vec) -> Result { + let mut bundle = Bundle::new(name); - let mut path = root.join(package); - path.set_extension("package"); - let sjson = fs::read_to_string(&path) - .await - .wrap_err_with(|| format!("Failed to read file {}", 
path.display()))?; - - let pkg_name = package.to_slash_lossy().to_string(); - let pkg = Package::from_sjson(sjson, pkg_name.clone(), root) - .await - .wrap_err_with(|| format!("Invalid package file {}", &pkg_name))?; - - let files = compile_package_files(&pkg, cfg).await?; - let mut bundle = Bundle::new(pkg_name); for file in files { bundle.add_file(file); } @@ -181,113 +147,59 @@ async fn build_package( Ok(bundle) } -/// Cleans the path of internal parent (`../`) or self (`./`) components, -/// and ensures that it is relative. -fn normalize_file_path>(path: P) -> Result { - let path = path.as_ref(); - - if path.is_absolute() || path.has_root() { - let err = eyre::eyre!("Path is absolute: {}", path.display()); - return Err(err).with_suggestion(|| "Specify a relative file path.".to_string()); - } - - let path = path_clean::clean(path); - - if path.starts_with("..") { - eyre::bail!("path starts with a parent component: {}", path.display()); - } - - Ok(path) -} - #[tracing::instrument] -pub(crate) async fn read_project_config(dir: Option) -> Result { - let mut cfg = find_project_config(dir).await?; - - if let Some(path) = cfg.image { - let path = normalize_file_path(path) - .wrap_err("Invalid config field 'image'") - .with_suggestion(|| { - "Specify a file path relative to and child path of the \ - directory where 'dtmt.cfg' is." - .to_string() - })?; - cfg.image = Some(path); - } - - cfg.resources.init = normalize_file_path(cfg.resources.init) - .wrap_err("Invalid config field 'resources.init'") - .with_suggestion(|| { - "Specify a file path relative to and child path of the \ - directory where 'dtmt.cfg' is." - .to_string() - }) - .with_suggestion(|| { - "Use 'dtmt new' in a separate directory to generate \ - a valid mod template." 
- .to_string() - })?; - - if let Some(path) = cfg.resources.data { - let path = normalize_file_path(path) - .wrap_err("Invalid config field 'resources.data'") - .with_suggestion(|| { - "Specify a file path relative to and child path of the \ - directory where 'dtmt.cfg' is." - .to_string() - }) - .with_suggestion(|| { - "Use 'dtmt new' in a separate directory to generate \ - a valid mod template." - .to_string() - })?; - cfg.resources.data = Some(path); - } - - if let Some(path) = cfg.resources.localization { - let path = normalize_file_path(path) - .wrap_err("Invalid config field 'resources.localization'") - .with_suggestion(|| { - "Specify a file path relative to and child path of the \ - directory where 'dtmt.cfg' is." - .to_string() - }) - .with_suggestion(|| { - "Use 'dtmt new' in a separate directory to generate \ - a valid mod template." - .to_string() - })?; - cfg.resources.localization = Some(path); - } - - Ok(cfg) -} - -#[tracing::instrument] -pub(crate) async fn build

( - cfg: &ModConfig, - out_path: impl AsRef + std::fmt::Debug, - game_dir: Arc>, -) -> Result<()> +async fn build_package(package: P1, root: P2) -> Result where - P: AsRef + std::fmt::Debug, + P1: AsRef + std::fmt::Debug, + P2: AsRef + std::fmt::Debug, { - let out_path = out_path.as_ref(); + let root = root.as_ref(); + let package = package.as_ref(); - fs::create_dir_all(out_path) + let mut path = root.join(package); + path.set_extension("package"); + let sjson = fs::read_to_string(&path) .await - .wrap_err_with(|| format!("Failed to create output directory '{}'", out_path.display()))?; + .wrap_err_with(|| format!("failed to read file {}", path.display()))?; - let file_map = Arc::new(Mutex::new(FileIndexMap::new())); + let pkg_name = package.to_string_lossy().to_string(); + let pkg = Package::from_sjson(sjson, pkg_name.clone(), root) + .await + .wrap_err_with(|| format!("invalid package file {}", &pkg_name))?; + + compile_package_files(&pkg, root) + .await + .wrap_err("failed to compile package") + .and_then(|files| compile_bundle(pkg_name, files)) + .wrap_err("failed to build bundle") +} + +#[tracing::instrument(skip_all)] +pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { + if let Some(name) = matches.get_one::("oodle") { + let oodle = Oodle::new(name)?; + ctx.oodle = Some(oodle); + } + + let cfg = { + let dir = matches.get_one::("directory").cloned(); + find_project_config(dir).await? + }; + + let dest = { + let mut path = PathBuf::from(&cfg.name); + path.set_extension("zip"); + Arc::new(path) + }; + let cfg = Arc::new(cfg); + + tracing::debug!(?cfg); let tasks = cfg .packages .iter() - // The closure below would capture the `Arc`s before they could be cloned, - // so instead we need to clone them in a non-move block and inject them - // via parameters. 
- .map(|path| (path, cfg.clone(), file_map.clone(), game_dir.clone())) - .map(|(path, cfg, file_map, game_dir)| async move { + .map(|path| (path, cfg.clone())) + .map(|(path, cfg)| async move { if path.extension().is_some() { eyre::bail!( "Package name must be specified without file extension: {}", @@ -295,120 +207,42 @@ where ); } - let bundle = build_package(&cfg, path).await.wrap_err_with(|| { + build_package(path, &cfg.dir).await.wrap_err_with(|| { format!( - "Failed to build package '{}' at '{}'", + "failed to build package {} in {}", path.display(), cfg.dir.display() ) - })?; - - let bundle_name = match bundle.name() { - IdString64::Hash(_) => { - eyre::bail!("bundle name must be known as string. got hash") - } - IdString64::String(s) => s.clone(), - }; - - { - let mut file_map = file_map.lock().await; - let map_entry = file_map.entry(bundle_name).or_default(); - - for file in bundle.files() { - map_entry.insert(file.name(false, None)); - } - } - - let name = bundle.name().to_murmur64().to_string().to_ascii_lowercase(); - let path = out_path.join(&name); - let data = bundle.to_binary()?; - - tracing::trace!( - "Writing bundle {} to '{}'", - bundle.name().display(), - path.display() - ); - fs::write(&path, &data) - .await - .wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?; - - if let Some(game_dir) = game_dir.as_ref() { - let path = game_dir.as_ref().join(&name); - - tracing::trace!( - "Deploying bundle {} to '{}'", - bundle.name().display(), - path.display() - ); - fs::write(&path, &data) - .await - .wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?; - } - - Ok(()) + }) }); - try_join_all(tasks) - .await - .wrap_err("Failed to build mod bundles")?; + let bundles = try_join_all(tasks).await?; + + let mod_file = { + let mut path = cfg.dir.join(&cfg.name); + path.set_extension("mod"); + fs::read(path).await? 
+ }; { - let path = out_path.join("files.sjson"); - tracing::trace!(path = %path.display(), "Writing file index"); - let file_map = file_map.lock().await; - let data = serde_sjson::to_string(file_map.deref())?; - fs::write(&path, data) - .await - .wrap_err_with(|| format!("Failed to write file index to '{}'", path.display()))?; - } - - if let Some(img_path) = &cfg.image { - let path = cfg.dir.join(img_path); - let dest = out_path.join(img_path); - - tracing::trace!(src = %path.display(), dest = %dest.display(), "Copying image file"); - - if let Some(parent) = dest.parent() { - fs::create_dir_all(&parent) - .await - .wrap_err_with(|| format!("Failed to create directory '{}'", parent.display()))?; - } - - fs::copy(&path, &dest).await.wrap_err_with(|| { - format!( - "Failed to copy image from '{}' to '{}'", - path.display(), - dest.display() - ) - })?; - } - - tracing::info!("Compiled bundles written to '{}'", out_path.display()); - - if let Some(game_dir) = game_dir.as_ref() { - tracing::info!("Deployed bundles to '{}'", game_dir.as_ref().display()); + let dest = dest.clone(); + let name = cfg.name.clone(); + tokio::task::spawn_blocking(move || { + let mut archive = Archive::new(name); + + archive.add_mod_file(mod_file); + + for bundle in bundles { + archive.add_bundle(bundle); + } + + archive + .write(&ctx, dest.as_ref()) + .wrap_err("failed to write mod archive") + }) + .await??; } - Ok(()) -} - -#[tracing::instrument(skip_all)] -pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - let cfg = read_project_config(matches.get_one::("directory").cloned()).await?; - - let game_dir = matches - .get_one::("deploy") - .map(|p| p.join("bundle")); - - let out_path = matches - .get_one::("out") - .expect("parameter should have default value"); - - tracing::debug!(?cfg, ?game_dir, ?out_path); - - let game_dir = Arc::new(game_dir); - - build(&cfg, out_path, game_dir).await?; - + tracing::info!("Mod archive written to {}", dest.display()); Ok(()) } 
diff --git a/crates/dtmt/src/cmd/bundle/db.rs b/crates/dtmt/src/cmd/bundle/db.rs deleted file mode 100644 index b537991..0000000 --- a/crates/dtmt/src/cmd/bundle/db.rs +++ /dev/null @@ -1,174 +0,0 @@ -use std::{io::Cursor, path::PathBuf}; - -use clap::{value_parser, Arg, ArgMatches, Command}; -use color_eyre::{eyre::Context as _, Result}; -use sdk::murmur::{HashGroup, IdString64, Murmur64}; -use sdk::{BundleDatabase, FromBinary as _}; -use tokio::fs; - -pub(crate) fn command_definition() -> Command { - Command::new("db") - .about("Various operations regarding `bundle_database.data`.") - .subcommand_required(true) - .subcommand( - Command::new("list-files") - .about("List bundle contents") - .arg( - Arg::new("database") - .required(true) - .help("Path to the bundle database") - .value_parser(value_parser!(PathBuf)), - ) - .arg( - Arg::new("bundle") - .help("The bundle name. If omitted, all bundles will be listed.") - .required(false), - ), - ) - .subcommand( - Command::new("list-bundles").about("List bundles").arg( - Arg::new("database") - .required(true) - .help("Path to the bundle database") - .value_parser(value_parser!(PathBuf)), - ), - ) - .subcommand( - Command::new("find-file") - .about("Find the bundle a file belongs to") - .arg( - Arg::new("database") - .required(true) - .help("Path to the bundle database") - .value_parser(value_parser!(PathBuf)), - ) - .arg( - Arg::new("file-name") - .required(true) - .help("Name of the file. 
May be a hash in hex representation or a string"), - ), - ) -} - -#[tracing::instrument(skip_all)] -pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - let Some((op, sub_matches)) = matches.subcommand() else { - unreachable!("clap is configured to require a subcommand"); - }; - - let database = { - let path = sub_matches - .get_one::("database") - .expect("argument is required"); - - let binary = fs::read(&path) - .await - .wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?; - - let mut r = Cursor::new(binary); - - BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")? - }; - - match op { - "list-files" => { - let index = database.files(); - - if let Some(bundle) = sub_matches.get_one::("bundle") { - let hash = u64::from_str_radix(bundle, 16) - .map(Murmur64::from) - .wrap_err("Invalid hex sequence")?; - - if let Some(files) = index.get(&hash) { - for file in files { - let name = ctx.lookup_hash(file.name, HashGroup::Filename); - let extension = file.extension.ext_name(); - println!("{}.{}", name.display(), extension); - } - } else { - tracing::info!("Bundle {} not found in the database", bundle); - } - } else { - for (bundle_hash, files) in index.iter() { - let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename); - - match bundle_name { - IdString64::String(name) => { - println!("{:016x} {}", bundle_hash, name); - } - IdString64::Hash(hash) => { - println!("{:016x}", hash); - } - } - - for file in files { - let name = ctx.lookup_hash(file.name, HashGroup::Filename); - let extension = file.extension.ext_name(); - - match name { - IdString64::String(name) => { - println!("\t{:016x}.{:<12} {}", file.name, extension, name); - } - IdString64::Hash(hash) => { - println!("\t{:016x}.{}", hash, extension); - } - } - } - - println!(); - } - } - - Ok(()) - } - "list-bundles" => { - for bundle_hash in database.bundles().keys() { - let bundle_name = ctx.lookup_hash(*bundle_hash, 
HashGroup::Filename); - - match bundle_name { - IdString64::String(name) => { - println!("{:016x} {}", bundle_hash, name); - } - IdString64::Hash(hash) => { - println!("{:016x}", hash); - } - } - } - - Ok(()) - } - "find-file" => { - let name = sub_matches - .get_one::("file-name") - .expect("required argument"); - let name = match u64::from_str_radix(name, 16).map(Murmur64::from) { - Ok(hash) => hash, - Err(_) => Murmur64::hash(name), - }; - - let bundles = database.files().iter().filter_map(|(bundle_hash, files)| { - if files.iter().any(|file| file.name == name) { - Some(bundle_hash) - } else { - None - } - }); - - let mut found = false; - - for bundle in bundles { - found = true; - println!("{:016x}", bundle); - } - - if !found { - std::process::exit(1); - } - - Ok(()) - } - _ => unreachable!( - "clap is configured to require a subcommand, and they're all handled above" - ), - } -} diff --git a/crates/dtmt/src/cmd/bundle/extract.rs b/crates/dtmt/src/cmd/bundle/extract.rs index 75f1360..3524f5b 100644 --- a/crates/dtmt/src/cmd/bundle/extract.rs +++ b/crates/dtmt/src/cmd/bundle/extract.rs @@ -1,20 +1,17 @@ -use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::sync::Arc; use clap::{value_parser, Arg, ArgAction, ArgMatches, Command}; -use color_eyre::eyre::{self, bail, Context, Result}; -use color_eyre::{Help, Report}; +use color_eyre::eyre::{self, Context, Result}; +use color_eyre::{Help, Report, SectionExt}; use futures::future::try_join_all; use futures::StreamExt; use glob::Pattern; -use sdk::{Bundle, BundleFile, CmdLine}; +use sdk::{Bundle, BundleFile}; use tokio::fs; use crate::cmd::util::resolve_bundle_paths; -use crate::shell_parse::ShellParser; -#[inline] fn parse_glob_pattern(s: &str) -> Result { match Pattern::new(s) { Ok(p) => Ok(p), @@ -22,7 +19,6 @@ fn parse_glob_pattern(s: &str) -> Result { } } -#[inline] fn flatten_name(s: &str) -> String { s.replace('/', "_") } @@ -37,7 +33,7 @@ pub(crate) fn command_definition() -> Command { 
.value_parser(value_parser!(PathBuf)) .help( "Path to the bundle(s) to read. If this points to a directory instead \ - of a file, all files in that directory will be checked.", + of a file, all files in that directory will be checked.", ), ) .arg( @@ -93,81 +89,30 @@ pub(crate) fn command_definition() -> Command { Arg::new("ljd") .long("ljd") .help( - "A custom command line to execute ljd with. It is treated as follows:\n\ - * if the argument is a valid path to an existing file:\n\ - ** if the file is called 'main.py', it is assumed that 'python.exe' \ - exists in PATH to execute this with.\n\ - ** otherwise it is treated as an executable\n\ - * if it's a single word, it's treated as an executable in PATH\n\ - * otherwise it is treated as a command line template.\n\ - In any case, the application being run must accept ljd's flags '-c' and '-f'.", + "Path to a custom ljd executable. If not set, \ + `ljd` will be called from PATH.", ) .default_value("ljd"), ) - // .arg( - // Arg::new("revorb") - // .long("revorb") - // .help( - // "Path to a custom revorb executable. If not set, \ - // `revorb` will be called from PATH.", - // ) - // .default_value("revorb"), - // ) - // .arg( - // Arg::new("ww2ogg") - // .long("ww2ogg") - // .help( - // "Path to a custom ww2ogg executable. 
If not set, \ - // `ww2ogg` will be called from PATH.\nSee the documentation for how \ - // to set up the script for this.", - // ) - // .default_value("ww2ogg"), - // ) -} - -#[tracing::instrument] -async fn parse_command_line_template(tmpl: &String) -> Result { - if tmpl.trim().is_empty() { - eyre::bail!("Command line template must not be empty"); - } - - let mut cmd = if matches!(fs::try_exists(tmpl).await, Ok(true)) { - let path = PathBuf::from(tmpl); - if path.file_name() == Some(OsStr::new("main.py")) { - let mut cmd = CmdLine::new("python"); - cmd.arg(path); - cmd - } else { - CmdLine::new(path) - } - } else { - let mut parsed = ShellParser::new(tmpl.as_bytes()); - // Safety: The initial `tmpl` was a `&String` (i.e. valid UTF-8), and `shlex` does not - // insert or remove characters, nor does it split UTF-8 characters. - // So the resulting byte stream is still valid UTF-8. - let mut cmd = CmdLine::new(unsafe { - let bytes = parsed.next().expect("Template is not empty"); - String::from_utf8_unchecked(bytes.to_vec()) - }); - - for arg in parsed.by_ref() { - // Safety: See above. - cmd.arg(unsafe { String::from_utf8_unchecked(arg.to_vec()) }); - } - - if parsed.errored { - bail!("Invalid command line template"); - } - - cmd - }; - - // Add ljd flags - cmd.arg("-c"); - - tracing::debug!("Parsed command line template: {:?}", cmd); - - Ok(cmd) + .arg( + Arg::new("revorb") + .long("revorb") + .help( + "Path to a custom revorb executable. If not set, \ + `revorb` will be called from PATH.", + ) + .default_value("revorb"), + ) + .arg( + Arg::new("ww2ogg") + .long("ww2ogg") + .help( + "Path to a custom ww2ogg executable. 
If not set, \ + `ww2ogg` will be called from PATH.\nSee the documentation for how \ + to set up the script for this.", + ) + .default_value("ww2ogg"), + ) } #[tracing::instrument(skip_all)] @@ -176,19 +121,16 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<( let ljd_bin = matches .get_one::("ljd") .expect("no default value for 'ljd' parameter"); - // let revorb_bin = matches - // .get_one::("revorb") - // .expect("no default value for 'revorb' parameter"); - // let ww2ogg_bin = matches - // .get_one::("ww2ogg") - // .expect("no default value for 'ww2ogg' parameter"); + let revorb_bin = matches + .get_one::("revorb") + .expect("no default value for 'revorb' parameter"); + let ww2ogg_bin = matches + .get_one::("ww2ogg") + .expect("no default value for 'ww2ogg' parameter"); - ctx.ljd = parse_command_line_template(ljd_bin) - .await - .map(Option::Some) - .wrap_err("Failed to parse command line template for flag 'ljd'")?; - // ctx.revorb = Some(revorb_bin.clone()); - // ctx.ww2ogg = Some(ww2ogg_bin.clone()); + ctx.ljd = Some(ljd_bin.clone()); + ctx.revorb = Some(revorb_bin.clone()); + ctx.ww2ogg = Some(ww2ogg_bin.clone()); } let includes = match matches.get_many::("include") { @@ -369,25 +311,14 @@ where path.push(name); if options.dry_run { - tracing::info!("Dry Run: Writing file '{}'", path.display()); + tracing::info!(path = %path.display(), "Writing file"); } else { - tracing::info!("Writing file '{}'", path.display()); + tracing::debug!(path = %path.display(), "Writing file"); tasks.push(tokio::spawn(async move { - if let Some(parent) = path.parent() { - fs::create_dir_all(&parent).await.wrap_err_with(|| { - format!( - "failed to create parent directories '{}'", - parent.display() - ) - })?; - } - - fs::write(&path, file.data()).await.wrap_err_with(|| { - format!( - "failed to write extracted file to disc: '{}'", - path.display() - ) - }) + fs::write(&path, file.data()) + .await + .wrap_err("failed to write extracted file to disc") 
+ .with_section(|| path.display().to_string().header("Path")) })); } } @@ -411,9 +342,9 @@ where path.push(name); if options.dry_run { - tracing::info!("Dry Run: Writing file '{}'", path.display()); + tracing::info!(path = %path.display(), "Writing file"); } else { - tracing::info!("Writing file '{}'", path.display()); + tracing::debug!(path = %path.display(), "Writing file"); tasks.push(tokio::spawn(async move { let parent = match path.parent() { Some(parent) => parent, @@ -425,19 +356,17 @@ where } }; - fs::create_dir_all(parent).await.wrap_err_with(|| { - format!( - "failed to create parent directory: '{}'", - parent.display() - ) - })?; + fs::create_dir_all(parent) + .await + .wrap_err("failed to create parent directory") + .with_section(|| { + parent.display().to_string().header("Path") + })?; - fs::write(&path, file.data()).await.wrap_err_with(|| { - format!( - "failed to write extracted file to disc: '{}'", - path.display() - ) - }) + fs::write(&path, file.data()) + .await + .wrap_err("failed to write extracted file to disc") + .with_section(|| path.display().to_string().header("Path")) })); } } @@ -445,7 +374,10 @@ where } } Err(err) => { - let err = err.wrap_err(format!("Failed to decompile file {}", name)); + let err = err + .wrap_err("Failed to decompile") + .with_section(|| name.header("File")); + tracing::error!("{:?}", err); } }; diff --git a/crates/dtmt/src/cmd/bundle/inject.rs b/crates/dtmt/src/cmd/bundle/inject.rs index 21f4a91..f8e001c 100644 --- a/crates/dtmt/src/cmd/bundle/inject.rs +++ b/crates/dtmt/src/cmd/bundle/inject.rs @@ -1,297 +1,112 @@ -use std::path::{Path, PathBuf}; -use std::str::FromStr as _; +use std::path::PathBuf; -use clap::{value_parser, Arg, ArgAction, ArgMatches, Command}; -use color_eyre::eyre::{self, Context, OptionExt, Result}; +use clap::{value_parser, Arg, ArgMatches, Command}; +use color_eyre::eyre::{self, Context, Result}; use color_eyre::Help; -use path_slash::PathBufExt as _; -use sdk::murmur::IdString64; -use 
sdk::{Bundle, BundleFile, BundleFileType}; -use tokio::fs; +use sdk::Bundle; +use tokio::fs::{self, File}; +use tokio::io::AsyncReadExt; pub(crate) fn command_definition() -> Command { Command::new("inject") - .subcommand_required(true) - .about("Inject a file into a bundle.\n\ - Raw binary data can be used to directly replace the file's variant data blob without affecting the metadata.\n\ - Alternatively, a compiler format may be specified, and a complete bundle file is created.") + .about("Inject a file into a bundle.") + .arg( + Arg::new("replace") + .help("The name of a file in the bundle whos content should be replaced.") + .short('r') + .long("replace"), + ) .arg( Arg::new("output") .help( "The path to write the changed bundle to. \ - If omitted, the input bundle will be overwritten.\n\ - Remember to add a `.patch_` suffix if you also use '--patch'.", + If omitted, the input bundle will be overwritten.", ) .short('o') .long("output") .value_parser(value_parser!(PathBuf)), ) .arg( - Arg::new("patch") - .help("Create a patch bundle. Optionally, a patch NUMBER may be specified as \ - '--patch=123'.\nThe maximum number is 999, the default is 1.\n\ - If `--output` is not specified, the `.patch_` suffix is added to \ - the given bundle name.") - .short('p') - .long("patch") - .num_args(0..=1) - .require_equals(true) - .default_missing_value("1") - .value_name("NUMBER") - .value_parser(value_parser!(u16)) + Arg::new("bundle") + .help("Path to the bundle to inject the file into.") + .required(true) + .value_parser(value_parser!(PathBuf)), ) .arg( - Arg::new("type") - .help("Compile the new file as the given TYPE. 
If omitted, the file type is \ - is guessed from the file extension.") - .value_name("TYPE") + Arg::new("file") + .help("Path to the file to inject.") + .required(true) + .value_parser(value_parser!(PathBuf)), ) - .subcommand( - Command::new("replace") - .about("Replace an existing file in the bundle") - .arg( - Arg::new("variant") - .help("In combination with '--raw', specify the variant index to replace.") - .long("variant") - .default_value("0") - .value_parser(value_parser!(u8)) - ) - .arg( - Arg::new("raw") - .help("Insert the given file as raw binary data.\n\ - Cannot be used with '--patch'.") - .long("raw") - .action(ArgAction::SetTrue) - ) - .arg( - Arg::new("bundle") - .help("Path to the bundle to inject the file into.") - .required(true) - .value_parser(value_parser!(PathBuf)), - ) - .arg( - Arg::new("bundle-file") - .help("The name of a file in the bundle whose content should be replaced.") - .required(true), - ) - .arg( - Arg::new("new-file") - .help("Path to the file to inject.") - .required(true) - .value_parser(value_parser!(PathBuf)), - ), - ) - // .subcommand( - // Command::new("add") - // .about("Add a new file to the bundle") - // .arg( - // Arg::new("new-file") - // .help("Path to the file to inject.") - // .required(true) - // .value_parser(value_parser!(PathBuf)), - // ) - // .arg( - // Arg::new("bundle") - // .help("Path to the bundle to inject the file into.") - // .required(true) - // .value_parser(value_parser!(PathBuf)), - // ), - // ) } -#[tracing::instrument] -async fn compile_file( - path: impl AsRef + std::fmt::Debug, - name: impl Into + std::fmt::Debug, - file_type: BundleFileType, -) -> Result { - let path = path.as_ref(); - - let file_data = fs::read(&path) - .await - .wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?; - let _sjson = String::from_utf8(file_data) - .wrap_err_with(|| format!("Invalid UTF8 data in '{}'", path.display()))?; - - let _root = path.parent().ok_or_eyre("File path has no parent")?; - - 
eyre::bail!( - "Compilation for type '{}' is not implemented, yet", - file_type - ) -} - -#[tracing::instrument( - skip_all, - fields( - bundle_path = tracing::field::Empty, - in_file_path = tracing::field::Empty, - output_path = tracing::field::Empty, - target_name = tracing::field::Empty, - file_type = tracing::field::Empty, - raw = tracing::field::Empty, - ) -)] +#[tracing::instrument(skip_all)] pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - let Some((op, sub_matches)) = matches.subcommand() else { - unreachable!("clap is configured to require a subcommand, and they're all handled above"); - }; - - let bundle_path = sub_matches + let bundle_path = matches .get_one::("bundle") .expect("required parameter not found"); - let in_file_path = sub_matches - .get_one::("new-file") + let file_path = matches + .get_one::("file") .expect("required parameter not found"); - let patch_number = matches - .get_one::("patch") - .map(|num| format!("{:03}", num)); + tracing::trace!(bundle_path = %bundle_path.display(), file_path = %file_path.display()); - let output_path = matches - .get_one::("output") - .cloned() - .unwrap_or_else(|| { - let mut output_path = bundle_path.clone(); - - if let Some(patch_number) = patch_number.as_ref() { - output_path.set_extension(format!("patch_{:03}", patch_number)); - } - - output_path - }); - - let target_name = if op == "replace" { - sub_matches - .get_one::("bundle-file") - .map(|name| match u64::from_str_radix(name, 16) { - Ok(id) => IdString64::from(id), - Err(_) => IdString64::String(name.clone()), - }) - .expect("argument is required") - } else { - let mut path = PathBuf::from(in_file_path); - path.set_extension(""); - IdString64::from(path.to_slash_lossy().to_string()) - }; - - let file_type = if let Some(forced_type) = matches.get_one::("type") { - BundleFileType::from_str(forced_type.as_str()).wrap_err("Unknown file type")? 
- } else { - in_file_path - .extension() - .and_then(|s| s.to_str()) - .ok_or_eyre("File extension missing") - .and_then(BundleFileType::from_str) - .wrap_err("Unknown file type") - .with_suggestion(|| "Use '--type TYPE' to specify the file type")? - }; - - { - let span = tracing::Span::current(); - if !span.is_disabled() { - span.record("bundle_path", bundle_path.display().to_string()); - span.record("in_file_path", in_file_path.display().to_string()); - span.record("output_path", output_path.display().to_string()); - span.record("raw", sub_matches.get_flag("raw")); - span.record("target_name", target_name.display().to_string()); - span.record("file_type", format!("{:?}", file_type)); - } - } - - let bundle_name = Bundle::get_name_from_path(&ctx, bundle_path); let mut bundle = { let binary = fs::read(bundle_path).await?; - Bundle::from_binary(&ctx, bundle_name.clone(), binary) - .wrap_err_with(|| format!("Failed to open bundle '{}'", bundle_path.display()))? + let name = Bundle::get_name_from_path(&ctx, bundle_path); + Bundle::from_binary(&ctx, name, binary).wrap_err("Failed to open bundle file")? 
}; - if op == "copy" { - unimplemented!("Implement copying a file from one bundle to the other."); - } + if let Some(_name) = matches.get_one::("replace") { + let mut file = File::open(&file_path) + .await + .wrap_err_with(|| format!("failed to open '{}'", file_path.display()))?; - let output_bundle = match op { - "replace" => { - let Some(file) = bundle - .files_mut() - .find(|file| *file.base_name() == target_name) - else { - let err = eyre::eyre!( - "No file with name '{}' in bundle '{}'", - target_name.display(), - bundle_path.display() - ); - - return Err(err).with_suggestion(|| { + if let Some(variant) = bundle + .files_mut() + .filter(|file| file.matches_name(_name)) + // TODO: Handle file variants + .find_map(|file| file.variants_mut().next()) + { + let mut data = Vec::new(); + file.read_to_end(&mut data) + .await + .wrap_err("failed to read input file")?; + variant.set_data(data); + } else { + let err = eyre::eyre!("No file '{}' in this bundle.", _name) + .with_suggestion(|| { format!( - "Run '{} bundle list \"{}\"' to list the files in this bundle.", + "Run '{} bundle list {}' to list the files in this bundle.", clap::crate_name!(), bundle_path.display() ) + }) + .with_suggestion(|| { + format!( + "Use '{} bundle inject --add {} {} {}' to add it as a new file", + clap::crate_name!(), + _name, + bundle_path.display(), + file_path.display() + ) }); - }; - if sub_matches.get_flag("raw") { - let variant_index = sub_matches - .get_one::("variant") - .expect("argument with default missing"); - - let Some(variant) = file.variants_mut().nth(*variant_index as usize) else { - let err = eyre::eyre!( - "Variant index '{}' does not exist in '{}'", - variant_index, - target_name.display() - ); - - return Err(err).with_suggestion(|| { - format!( - "See '{} bundle inject add --help' if you want to add it as a new file", - clap::crate_name!(), - ) - }); - }; - - let data = tokio::fs::read(&in_file_path).await.wrap_err_with(|| { - format!("Failed to read file '{}'", 
in_file_path.display()) - })?; - variant.set_data(data); - file.set_modded(true); - bundle - } else { - let mut bundle_file = compile_file(in_file_path, target_name.clone(), file_type) - .await - .wrap_err("Failed to compile")?; - - bundle_file.set_modded(true); - - if patch_number.is_some() { - let mut output_bundle = Bundle::new(bundle_name); - output_bundle.add_file(bundle_file); - output_bundle - } else { - *file = bundle_file; - - dbg!(&file); - bundle - } - } + return Err(err); } - "add" => { - unimplemented!("Implement adding a new file to the bundle."); - } - _ => unreachable!("no other operations exist"), - }; - let data = output_bundle - .to_binary() - .wrap_err("Failed to write changed bundle to output")?; + let out_path = matches.get_one::("output").unwrap_or(bundle_path); + let data = bundle + .to_binary(&ctx) + .wrap_err("failed to write changed bundle to output")?; - fs::write(&output_path, &data) - .await - .wrap_err_with(|| format!("Failed to write data to '{}'", output_path.display()))?; + fs::write(out_path, &data) + .await + .wrap_err("failed to write data to output file")?; - tracing::info!("Modified bundle written to '{}'", output_path.display()); - - Ok(()) + Ok(()) + } else { + eyre::bail!("Currently, only the '--replace' operation is supported."); + } } diff --git a/crates/dtmt/src/cmd/bundle/list.rs b/crates/dtmt/src/cmd/bundle/list.rs index 558126b..ec869ba 100644 --- a/crates/dtmt/src/cmd/bundle/list.rs +++ b/crates/dtmt/src/cmd/bundle/list.rs @@ -36,18 +36,6 @@ enum OutputFormat { Text, } -fn format_byte_size(size: usize) -> String { - if size < 1024 { - format!("{} Bytes", size) - } else if size < 1024 * 1024 { - format!("{} kB", size / 1024) - } else if size < 1024 * 1024 * 1024 { - format!("{} MB", size / (1024 * 1024)) - } else { - format!("{} GB", size / (1024 * 1024 * 1024)) - } -} - #[tracing::instrument(skip(ctx))] async fn print_bundle_contents

(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()> where @@ -62,27 +50,22 @@ where match fmt { OutputFormat::Text => { - println!( - "Bundle: {} ({:016x})", - bundle.name().display(), - bundle.name() - ); + println!("Bundle: {}", bundle.name()); for f in bundle.files().iter() { if f.variants().len() != 1 { let err = eyre::eyre!("Expected exactly one version for this file.") .with_section(|| f.variants().len().to_string().header("Bundle:")) - .with_section(|| bundle.name().display().header("Bundle:")); + .with_section(|| bundle.name().clone().header("Bundle:")); tracing::error!("{:#}", err); } let v = &f.variants()[0]; println!( - "\t{}.{}: {} ({})", - f.base_name().display(), + "\t{}.{}: {} bytes", + f.base_name(), f.file_type().ext_name(), - format_byte_size(v.size()), v.size() ); } @@ -115,7 +98,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { async move { if let Err(err) = print_bundle_contents(&ctx, &p, fmt) .await - .wrap_err_with(|| format!("Failed to list contents of bundle {}", p.display())) + .wrap_err_with(|| format!("failed to list contents of bundle {}", p.display())) { tracing::error!("{err:?}"); } diff --git a/crates/dtmt/src/cmd/bundle/mod.rs b/crates/dtmt/src/cmd/bundle/mod.rs index c5145e4..17f7a8c 100644 --- a/crates/dtmt/src/cmd/bundle/mod.rs +++ b/crates/dtmt/src/cmd/bundle/mod.rs @@ -1,7 +1,7 @@ -use clap::{ArgMatches, Command}; +use clap::{Arg, ArgMatches, Command}; use color_eyre::eyre::Result; +use sdk::Oodle; -mod db; mod decompress; mod extract; mod inject; @@ -11,21 +11,35 @@ pub(crate) fn command_definition() -> Command { Command::new("bundle") .subcommand_required(true) .about("Manipulate the game's bundle files") + .arg( + Arg::new("oodle") + .long("oodle") + .default_value(super::OODLE_LIB_NAME) + .help( + "The oodle library to load. 
This may either be:\n\ + - A library name that will be searched for in the system's default paths.\n\ + - A file path relative to the current working directory.\n\ + - An absolute file path.", + ), + ) .subcommand(decompress::command_definition()) .subcommand(extract::command_definition()) .subcommand(inject::command_definition()) .subcommand(list::command_definition()) - .subcommand(db::command_definition()) } #[tracing::instrument(skip_all)] -pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { +pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { + if let Some(name) = matches.get_one::("oodle") { + let oodle = Oodle::new(name)?; + ctx.oodle = Some(oodle); + } + match matches.subcommand() { Some(("decompress", sub_matches)) => decompress::run(ctx, sub_matches).await, Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await, Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await, Some(("list", sub_matches)) => list::run(ctx, sub_matches).await, - Some(("db", sub_matches)) => db::run(ctx, sub_matches).await, _ => unreachable!( "clap is configured to require a subcommand, and they're all handled above" ), diff --git a/crates/dtmt/src/cmd/dictionary.rs b/crates/dtmt/src/cmd/dictionary.rs index 4c54c34..22a225b 100644 --- a/crates/dtmt/src/cmd/dictionary.rs +++ b/crates/dtmt/src/cmd/dictionary.rs @@ -1,10 +1,8 @@ use std::path::PathBuf; use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum}; -use cli_table::{print_stdout, WithTitle}; use color_eyre::eyre::{Context, Result}; use color_eyre::{Help, SectionExt}; -use sdk::murmur::{IdString64, Murmur32, Murmur64}; use tokio::fs::File; use tokio::io::{AsyncBufReadExt, BufReader}; use tokio_stream::wrappers::LinesStream; @@ -29,40 +27,6 @@ impl From for sdk::murmur::HashGroup { } } -impl std::fmt::Display for HashGroup { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - 
HashGroup::Filename => write!(f, "filename"), - HashGroup::Filetype => write!(f, "filetype"), - HashGroup::Strings => write!(f, "strings"), - HashGroup::Other => write!(f, "other"), - } - } -} - -#[derive(cli_table::Table)] -struct TableRow { - #[table(title = "Value")] - value: String, - #[table(title = "Murmur64")] - long: Murmur64, - #[table(title = "Murmur32")] - short: Murmur32, - #[table(title = "Group")] - group: sdk::murmur::HashGroup, -} - -impl From<&sdk::murmur::Entry> for TableRow { - fn from(entry: &sdk::murmur::Entry) -> Self { - Self { - value: entry.value().clone(), - long: entry.long(), - short: entry.short(), - group: entry.group(), - } - } -} - pub(crate) fn command_definition() -> Command { Command::new("dictionary") .about("Manipulate a hash dictionary file.") @@ -79,8 +43,7 @@ pub(crate) fn command_definition() -> Command { .short('g') .long("group") .action(ArgAction::Append) - .value_parser(value_parser!(HashGroup)) - .default_values(["other", "filename", "filetype", "strings"]), + .value_parser(value_parser!(HashGroup)), ), ) .subcommand( @@ -104,7 +67,6 @@ pub(crate) fn command_definition() -> Command { .value_parser(value_parser!(PathBuf)), ), ) - .subcommand(Command::new("show").about("Show the contents of the dictionary")) .subcommand(Command::new("save").about( "Save back the currently loaded dictionary, with hashes pre-computed. \ Pre-computing hashes speeds up loading large dictionaries, as they would \ @@ -116,23 +78,17 @@ pub(crate) fn command_definition() -> Command { pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { match matches.subcommand() { Some(("lookup", sub_matches)) => { - let hash = { - let s = sub_matches - .get_one::("hash") - .expect("required argument not found"); - - u64::from_str_radix(s, 16) - .wrap_err("Failed to parse argument as hexadecimal string")? 
- }; + let hash = sub_matches + .get_one::("hash") + .expect("required argument not found"); let groups = sub_matches .get_many::("group") .unwrap_or_default(); for group in groups { - if let IdString64::String(value) = ctx.lookup_hash(hash, (*group).into()) { - println!("{group}: {value}"); - } + let value = ctx.lookup_hash(*hash, (*group).into()); + println!("{value}"); } Ok(()) @@ -145,10 +101,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<( .get_one::("group") .expect("required argument not found"); - let r: BufReader> = if let Some(name) = - path.file_name() - && name == "-" - { + let r: BufReader> = if let Some(name) = path.file_name() && name == "-" { let f = tokio::io::stdin(); BufReader::new(Box::new(f)) } else { @@ -223,17 +176,6 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<( .await .wrap_err("Failed to write dictionary to disk") } - Some(("show", _)) => { - let lookup = &ctx.lookup; - let rows: Vec<_> = lookup.entries().iter().map(TableRow::from).collect(); - - match print_stdout(rows.with_title()) { - Ok(_) => Ok(()), - // Closing stdout prematurely is normal behavior with things like piping into `head` - Err(err) if err.kind() == std::io::ErrorKind::BrokenPipe => Ok(()), - Err(err) => Err(err.into()), - } - } _ => unreachable!( "clap is configured to require a subcommand, and they're all handled above" ), diff --git a/crates/dtmt/src/cmd/migrate.rs b/crates/dtmt/src/cmd/migrate.rs deleted file mode 100644 index d7bfa19..0000000 --- a/crates/dtmt/src/cmd/migrate.rs +++ /dev/null @@ -1,407 +0,0 @@ -use std::collections::HashMap; -use std::ffi::{CStr, CString}; -use std::path::{Path, PathBuf}; - -use clap::{value_parser, Arg, ArgMatches, Command}; -use color_eyre::eyre::{self, Context}; -use color_eyre::{Help, Report, Result}; -use dtmt_shared::{ModConfig, ModConfigResources, ModDependency}; -use futures::FutureExt; -use luajit2_sys as lua; -use tokio::fs; -use 
tokio_stream::wrappers::ReadDirStream; -use tokio_stream::StreamExt; - -pub(crate) fn command_definition() -> Command { - Command::new("migrate") - .about("Migrate a mod project from the loose file structure to DTMT.") - .arg( - Arg::new("mod-file") - .required(true) - .value_parser(value_parser!(PathBuf)) - .help("The path to the mod's '.mod' file."), - ) - .arg( - Arg::new("directory") - .required(true) - .value_parser(value_parser!(PathBuf)) - .help( - "The directory to create the mod in. Within this directory, \ - DTMT will create a new folder named after the mod ID and migrate files \ - into that folder.", - ), - ) -} - -#[derive(Clone, Debug)] -struct ModFile { - id: String, - init: PathBuf, - data: Option, - localization: Option, -} - -// This piece of Lua code stubs DMF functions and runs a mod's `.mod` file to extract -// the contained information. -static MOD_FILE_RUNNER: &str = r#" -_DATA = {} - -function fassert() end - -function new_mod(id, options) - _DATA.id = id - _DATA.init = options.mod_script - _DATA.data = options.mod_data - _DATA.localization = options.mod_localization -end - -dmf = { - dofile = function(self, file) - _DATA.init = file - end -} - -_MOD().run() -"#; - -#[tracing::instrument] -async fn evaluate_mod_file(path: impl AsRef + std::fmt::Debug) -> Result { - let path = path.as_ref(); - let code = fs::read(path) - .await - .wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?; - - tokio::task::spawn_blocking(move || unsafe { - let state = lua::luaL_newstate(); - lua::luaL_openlibs(state); - - let code = CString::new(code).expect("Cannot build CString"); - let name = CString::new("_MOD").expect("Cannot build CString"); - - match lua::luaL_loadstring(state, code.as_ptr()) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRSYNTAX => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Invalid syntax: {}", err); - } - lua::LUA_ERRMEM 
=> { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory") - } - _ => unreachable!(), - } - - tracing::trace!("Loaded '.mod' code"); - - lua::lua_setglobal(state, name.as_ptr()); - - let code = CString::new(MOD_FILE_RUNNER).expect("Cannot build CString"); - match lua::luaL_loadstring(state, code.as_ptr()) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRSYNTAX => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Invalid syntax: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory") - } - _ => unreachable!(), - } - - match lua::lua_pcall(state, 0, 1, 0) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRRUN => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Failed to evaluate '.mod' file: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory") - } - // We don't use an error handler function, so this should be unreachable - lua::LUA_ERRERR => unreachable!(), - _ => unreachable!(), - } - - tracing::trace!("Loaded file runner code"); - - let name = CString::new("_DATA").expect("Cannot build CString"); - lua::lua_getglobal(state, name.as_ptr()); - - let id = { - let name = CString::new("id").expect("Cannot build CString"); - lua::lua_getfield(state, -1, name.as_ptr()); - let val = { - let ptr = lua::lua_tostring(state, -1); - let str = CStr::from_ptr(ptr); - str.to_str() - .expect("ID value is not a valid string") - .to_string() - }; - lua::lua_pop(state, 1); - val - }; - - let path_prefix = format!("{id}/"); - - let init = { - let name = CString::new("init").expect("Cannot build CString"); - lua::lua_getfield(state, -1, name.as_ptr()); - let val = { - let ptr = lua::lua_tostring(state, -1); - let str = CStr::from_ptr(ptr); - 
str.to_str().expect("ID value is not a valid string") - }; - lua::lua_pop(state, 1); - PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val)) - }; - - let data = { - let name = CString::new("data").expect("Cannot build CString"); - lua::lua_getfield(state, -1, name.as_ptr()); - - if lua::lua_isnil(state, -1) > 0 { - None - } else { - let val = { - let ptr = lua::lua_tostring(state, -1); - let str = CStr::from_ptr(ptr); - str.to_str().expect("ID value is not a valid string") - }; - lua::lua_pop(state, 1); - Some(PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val))) - } - }; - - let localization = { - let name = CString::new("localization").expect("Cannot build CString"); - lua::lua_getfield(state, -1, name.as_ptr()); - - if lua::lua_isnil(state, -1) > 0 { - None - } else { - let val = { - let ptr = lua::lua_tostring(state, -1); - let str = CStr::from_ptr(ptr); - str.to_str().expect("ID value is not a valid string") - }; - lua::lua_pop(state, 1); - Some(PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val))) - } - }; - - lua::lua_close(state); - - let mod_file = ModFile { - id, - init, - data, - localization, - }; - - tracing::trace!(?mod_file); - - Ok(mod_file) - }) - .await - .map_err(Report::new) - .flatten() - .wrap_err("Failed to run mod file handler") -} - -#[async_recursion::async_recursion] -#[tracing::instrument] -async fn process_directory(path: P1, prefix: P2) -> Result<()> -where - P1: AsRef + std::fmt::Debug + std::marker::Send, - P2: AsRef + std::fmt::Debug + std::marker::Send, -{ - let path = path.as_ref(); - let prefix = prefix.as_ref(); - - let read_dir = fs::read_dir(&path) - .await - .wrap_err_with(|| format!("Failed to read directory '{}'", path.display()))?; - - let stream = ReadDirStream::new(read_dir).map(|res| res.wrap_err("Failed to read dir entry")); - tokio::pin!(stream); - - while let Some(res) = stream.next().await { - let entry = res?; - let in_path = entry.path(); - let out_path = prefix.join(entry.file_name()); - - 
let t = entry.file_type().await?; - - if t.is_dir() { - process_directory(in_path, out_path).await?; - } else { - tracing::trace!( - "Copying file '{}' -> '{}'", - in_path.display(), - out_path.display() - ); - let res = fs::create_dir_all(prefix) - .then(|_| fs::copy(&in_path, &out_path)) - .await - .wrap_err_with(|| { - format!( - "Failed to copy '{}' -> '{}'", - in_path.display(), - out_path.display() - ) - }); - if let Err(err) = res { - tracing::error!("{:?}", err); - } - } - } - - Ok(()) -} - -#[tracing::instrument(skip_all)] -pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - let (mod_file, in_dir) = { - let path = matches - .get_one::("mod-file") - .expect("Parameter is required"); - - let mod_file = evaluate_mod_file(&path) - .await - .wrap_err("Failed to evaluate '.mod' file")?; - - ( - mod_file, - path.parent().expect("A file path always has a parent"), - ) - }; - - let out_dir = matches - .get_one::("directory") - .expect("Parameter is required"); - - { - let is_dir = fs::metadata(out_dir) - .await - .map(|meta| meta.is_dir()) - .unwrap_or(false); - - if !is_dir { - let err = eyre::eyre!("Invalid output directory '{}'", out_dir.display()); - return Err(err) - .with_suggestion(|| "Make sure the directory exists and is writable.".to_string()); - } - } - - let out_dir = out_dir.join(&mod_file.id); - - fs::create_dir(&out_dir) - .await - .wrap_err_with(|| format!("Failed to create mod directory '{}'", out_dir.display()))?; - - tracing::info!("Created mod directory '{}'", out_dir.display()); - - println!( - "Enter additional information about your mod '{}'!", - &mod_file.id - ); - - let name = promptly::prompt_default("Display name", mod_file.id.clone()) - .map(|s: String| s.trim().to_string())?; - let summary = promptly::prompt("Short summary").map(|s: String| s.trim().to_string())?; - let author = - promptly::prompt_opt("Author").map(|opt| opt.map(|s: String| s.trim().to_string()))?; - let version = 
promptly::prompt_default("Version", String::from("0.1.0")) - .map(|s: String| s.trim().to_string())?; - let categories = promptly::prompt("Categories (comma separated list)") - .map(|s: String| s.trim().to_string()) - .map(|s: String| s.split(',').map(|s| s.trim().to_string()).collect())?; - - let packages = vec![PathBuf::from("packages/mods").join(&mod_file.id)]; - - let dtmt_cfg = ModConfig { - dir: out_dir, - id: mod_file.id, - name, - summary, - author, - version, - description: None, - image: None, - categories, - packages, - resources: ModConfigResources { - init: mod_file.init, - data: mod_file.data, - localization: mod_file.localization, - }, - depends: vec![ModDependency::ID(String::from("DMF"))], - bundled: true, - name_overrides: HashMap::new(), - }; - - tracing::debug!(?dtmt_cfg); - - { - let path = dtmt_cfg.dir.join("dtmt.cfg"); - let data = serde_sjson::to_string(&dtmt_cfg).wrap_err("Failed to serialize dtmt.cfg")?; - fs::write(&path, &data) - .await - .wrap_err_with(|| format!("Failed to write '{}'", path.display()))?; - - tracing::info!("Created mod configuration at '{}'", path.display()); - } - - { - let path = dtmt_cfg - .dir - .join(&dtmt_cfg.packages[0]) - .with_extension("package"); - - let data = { - let mut map = HashMap::new(); - map.insert("lua", vec![format!("scripts/mods/{}/*", dtmt_cfg.id)]); - map - }; - let data = serde_sjson::to_string(&data).wrap_err("Failed to serialize package file")?; - - fs::create_dir_all(path.parent().unwrap()) - .then(|_| fs::write(&path, &data)) - .await - .wrap_err_with(|| format!("Failed to write '{}'", path.display()))?; - - tracing::info!("Created package file at '{}'", path.display()); - } - - { - let path = in_dir.join("scripts"); - let scripts_dir = dtmt_cfg.dir.join("scripts"); - process_directory(&path, &scripts_dir) - .await - .wrap_err_with(|| { - format!( - "Failed to copy files from '{}' to '{}'", - path.display(), - scripts_dir.display() - ) - })?; - - tracing::info!("Copied script files to 
'{}'", scripts_dir.display()); - } - - Ok(()) -} diff --git a/crates/dtmt/src/cmd/new.rs b/crates/dtmt/src/cmd/new.rs index 571b0cb..a7a66ca 100644 --- a/crates/dtmt/src/cmd/new.rs +++ b/crates/dtmt/src/cmd/new.rs @@ -1,102 +1,74 @@ +use std::collections::HashMap; use std::path::PathBuf; use clap::{Arg, ArgMatches, Command}; use color_eyre::eyre::{self, Context, Result}; use color_eyre::Help; use futures::{StreamExt, TryStreamExt}; -use minijinja::Environment; +use string_template::Template; use tokio::fs::{self, DirBuilder}; -const TEMPLATES: [(&str, &str); 5] = [ +const TEMPLATES: [(&str, &str); 6] = [ ( "dtmt.cfg", - r#"// -// This is your mod's main configuration file. It tells DTMT how to build the mod, -// and DTMM what to display to your users. -// Certain files have been pre-filled by the template, the ones commented out (`//`) -// are optional. -// -// A unique identifier (preferably lower case, alphanumeric) -id = "{{id}}" -// The display name that your users will see. -// This doesn't have to be unique, but you still want to avoid being confused with other -// mods. -name = "{{name}}" -// It's good practice to increase this number whenever you publish changes. -// It's up to you if you use SemVer or something simpler like `1970-12-24`. It should sort and -// compare well, though. + r#"name = "{{name}}" +description = "An elaborate description of my cool game mod!" version = "0.1.0" -// author = "" -// A one- or two-line short description. -summary = "This is my new mod '{{name}}'!" -// description = "" -// image = "assets/logo.png" - -// Can contain arbitrary strings. But to keep things consistent and useful, -// capitalize names and check existing mods for matching categories. -categories = [ - Misc - // UI - // QoL - // Tools -] - -// A list of mod IDs that this mod depends on. You can find -// those IDs by downloading the mod and extracting their `dtmt.cfg`. 
-// To make your fellow modders' lives easier, publish your own mods' IDs -// somewhere visible, such as the Nexusmods page. -depends = [ - DMF -] - -// The primary resources that serve as the entry point to your -// mod's code. Unless for very specific use cases, the generated -// values shouldn't be changed. -resources = { - init = "scripts/mods/{{id}}/init" - data = "scripts/mods/{{id}}/data" - localization = "scripts/mods/{{id}}/localization" -} - -// The list of packages, or bundles, to build. -// Each one corresponds to a package definition in the named folder. -// For mods that contain only code and/or a few small assets, a single -// package will suffice. packages = [ - "packages/mods/{{id}}" + "packages/{{name}}" +] + +depends = [ + "dmf" ] "#, ), ( - "packages/mods/{{id}}.package", + "{{name}}.mod", + r#"return { + run = function() + fassert(rawget(_G, "new_mod"), "`{{title}}` encountered an error loading the Darktide Mod Framework.") + + new_mod("{{name}}", { + mod_script = "scripts/mods/{{name}}/{{name}}", + mod_data = "scripts/mods/{{name}}/{{name}}_data", + mod_localization = "scripts/mods/{{name}}/{{name}}_localization", + }) + end, + packages = {}, +}"#, + ), + ( + "packages/{{name}}.package", r#"lua = [ - "scripts/mods/{{id}}/*" + "scripts/mods/{{name}}/*" ] "#, ), ( - "scripts/mods/{{id}}/init.lua", - r#"local mod = get_mod("{{id}}") + "scripts/mods/{{name}}/{{name}}.lua", + r#"local mod = get_mod("{{name}}") -- Your mod code goes here. 
+-- https://vmf-docs.verminti.de "#, ), ( - "scripts/mods/{{id}}/data.lua", - r#"local mod = get_mod("{{id}}") + "scripts/mods/{{name}}/{{name}}_data.lua", + r#"local mod = get_mod("{{name}}") return { - name = "{{name}}", + name = "{{title}}", description = mod:localize("mod_description"), is_togglable = true, }"#, ), ( - "scripts/mods/{{id}}/localization.lua", + "scripts/mods/{{name}}/{{name}}_localization.lua", r#"return { mod_description = { - en = "This is my new mod '{{name}}'!", + en = "An elaborate description of my cool game mod!", }, }"#, ), @@ -106,8 +78,8 @@ pub(crate) fn command_definition() -> Command { Command::new("new") .about("Create a new project") .arg( - Arg::new("name") - .long("name") + Arg::new("title") + .long("title") .help("The display name of the new mod."), ) .arg(Arg::new("root").help( @@ -122,7 +94,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> let root = if let Some(dir) = matches.get_one::("root") { if dir == "." { std::env::current_dir() - .wrap_err("The current working dir is invalid") + .wrap_err("the current working dir is invalid") .with_suggestion(|| "Change to a different directory.")? } else { PathBuf::from(dir) @@ -135,14 +107,14 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> } }; - let name = if let Some(name) = matches.get_one::("name") { - name.clone() + let title = if let Some(title) = matches.get_one::("title") { + title.clone() } else { - promptly::prompt("The display name")? + promptly::prompt("The mod display name")? }; - let id = { - let default = name + let name = { + let default = title .chars() .map(|c| { if c.is_ascii_alphanumeric() { @@ -152,50 +124,40 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> } }) .collect::(); - promptly::prompt_default("The unique mod ID", default)? + promptly::prompt_default("The mod identifier name", default)? 
}; - tracing::debug!(root = %root.display(), name, id); + tracing::debug!(root = %root.display()); + tracing::debug!(title, name); - let render_ctx = minijinja::context!(name => name.as_str(), id => id.as_str()); - let env = Environment::new(); + let mut data = HashMap::new(); + data.insert("name", name.as_str()); + data.insert("title", title.as_str()); let templates = TEMPLATES .iter() .map(|(path_tmpl, content_tmpl)| { - env.render_str(path_tmpl, &render_ctx) - .wrap_err_with(|| format!("Failed to render template: {}", path_tmpl)) - .and_then(|path| { - env.render_named_str(&path, content_tmpl, &render_ctx) - .wrap_err_with(|| format!("Failed to render template '{}'", &path)) - .map(|content| (root.join(path), content)) - }) + let path = Template::new(path_tmpl).render(&data); + let content = Template::new(content_tmpl).render(&data); + + (root.join(path), content) }) - .map(|res| async move { - match res { - Ok((path, content)) => { - let dir = path - .parent() - .ok_or_else(|| eyre::eyre!("invalid root path"))?; + .map(|(path, content)| async move { + let dir = path + .parent() + .ok_or_else(|| eyre::eyre!("invalid root path"))?; - DirBuilder::new() - .recursive(true) - .create(&dir) - .await - .wrap_err_with(|| { - format!("Failed to create directory {}", dir.display()) - })?; + DirBuilder::new() + .recursive(true) + .create(&dir) + .await + .wrap_err_with(|| format!("failed to create directory {}", dir.display()))?; - tracing::trace!("Writing file {}", path.display()); + tracing::trace!("Writing file {}", path.display()); - fs::write(&path, content.as_bytes()) - .await - .wrap_err_with(|| { - format!("Failed to write content to path {}", path.display()) - }) - } - Err(e) => Err(e), - } + fs::write(&path, content.as_bytes()) + .await + .wrap_err_with(|| format!("failed to write content to path {}", path.display())) }); futures::stream::iter(templates) @@ -206,7 +168,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> 
tracing::info!( "Created {} files for mod '{}' in '{}'.", TEMPLATES.len(), - name, + title, root.display() ); diff --git a/crates/dtmt/src/cmd/package.rs b/crates/dtmt/src/cmd/package.rs deleted file mode 100644 index 5ded885..0000000 --- a/crates/dtmt/src/cmd/package.rs +++ /dev/null @@ -1,147 +0,0 @@ -use std::io::{Cursor, Write}; -use std::path::{Path, PathBuf}; - -use clap::{value_parser, Arg, ArgMatches, Command}; -use color_eyre::eyre::{Context, Result}; -use color_eyre::Help; -use dtmt_shared::ModConfig; -use path_slash::{PathBufExt, PathExt}; -use tokio::fs; -use tokio_stream::wrappers::ReadDirStream; -use tokio_stream::StreamExt; -use zip::write::SimpleFileOptions; -use zip::ZipWriter; - -use crate::cmd::build::read_project_config; - -pub(crate) fn command_definition() -> Command { - Command::new("package") - .about("Package compiled bundles for distribution") - .arg( - Arg::new("project") - .required(false) - .value_parser(value_parser!(PathBuf)) - .help( - "The path to the project to build. \ - If omitted, dtmt will search from the current working directory upward.", - ), - ) - .arg( - Arg::new("directory") - .long("directory") - .short('d') - .default_value("out") - .value_parser(value_parser!(PathBuf)) - .help( - "The path to the directory were the compiled bundles were written to. \ - This is the same directory as `dtmt build -o`", - ), - ) - .arg( - Arg::new("out") - .long("out") - .short('o') - .value_parser(value_parser!(PathBuf)) - .help( - "The path to write the packaged file to. 
Will default to a file in the \ - current working directory", - ), - ) -} - -#[async_recursion::async_recursion] -async fn process_directory(zip: &mut ZipWriter, path: P1, prefix: P2) -> Result<()> -where - P1: AsRef + std::marker::Send, - P2: AsRef + std::marker::Send, - W: std::io::Write + std::io::Seek + std::marker::Send, -{ - let path = path.as_ref(); - let prefix = prefix.as_ref(); - - zip.add_directory(prefix.to_slash_lossy(), SimpleFileOptions::default())?; - - let read_dir = fs::read_dir(&path) - .await - .wrap_err_with(|| format!("Failed to read directory '{}'", path.display()))?; - - let stream = ReadDirStream::new(read_dir).map(|res| res.wrap_err("Failed to read dir entry")); - tokio::pin!(stream); - - while let Some(res) = stream.next().await { - let entry = res?; - let in_path = entry.path(); - let out_path = prefix.join(entry.file_name()); - - let t = entry.file_type().await?; - - if t.is_file() || t.is_symlink() { - let data = fs::read(&in_path) - .await - .wrap_err_with(|| format!("Failed to read '{}'", in_path.display()))?; - { - zip.start_file(out_path.to_slash_lossy(), SimpleFileOptions::default())?; - zip.write_all(&data)?; - } - } else if t.is_dir() { - process_directory(zip, in_path, out_path).await?; - } - } - - Ok(()) -} - -pub(crate) async fn package(cfg: &ModConfig, path: P1, dest: P2) -> Result<()> -where - P1: AsRef, - P2: AsRef, -{ - let path = path.as_ref(); - let dest = dest.as_ref(); - - let mut zip = ZipWriter::new(Cursor::new(Vec::with_capacity(1024))); - - process_directory(&mut zip, path, PathBuf::from(&cfg.id)) - .await - .wrap_err("Failed to add directory to archive")?; - - { - let name = PathBuf::from(&cfg.id).join("dtmt.cfg"); - let path = cfg.dir.join("dtmt.cfg"); - - let data = fs::read(&path) - .await - .wrap_err_with(|| format!("Failed to read mod config at {}", path.display()))?; - - zip.start_file(name.to_slash_lossy(), SimpleFileOptions::default())?; - zip.write_all(&data)?; - } - - let data = zip.finish()?; - - 
fs::write(dest, data.into_inner()) - .await - .wrap_err_with(|| format!("Failed to write mod archive to '{}'", dest.display())) - .with_suggestion(|| "Make sure that parent directories exist.".to_string())?; - - tracing::info!("Mod archive written to {}", dest.display()); - Ok(()) -} - -#[tracing::instrument(skip_all)] -pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - let cfg = read_project_config(matches.get_one::("project").cloned()).await?; - - let dest = matches - .get_one::("out") - .map(path_clean::clean) - .unwrap_or_else(|| PathBuf::from(format!("{}.zip", cfg.id))); - - let path = cfg.dir.join( - matches - .get_one::("directory") - .expect("parameter has default value"), - ); - - package(&cfg, path, dest).await -} diff --git a/crates/dtmt/src/cmd/util.rs b/crates/dtmt/src/cmd/util.rs index c233425..c783ed1 100644 --- a/crates/dtmt/src/cmd/util.rs +++ b/crates/dtmt/src/cmd/util.rs @@ -8,7 +8,7 @@ use tokio::fs; use tokio_stream::wrappers::ReadDirStream; #[tracing::instrument] -pub async fn process_path

(path: P) -> Vec +pub async fn foo

(path: P) -> Vec where P: AsRef + std::fmt::Debug, { @@ -98,10 +98,7 @@ where I: Iterator + std::fmt::Debug, { let tasks = paths.map(|p| async move { - // Clippy doesn't understand that the block here is required to `move` in the reference. - // The task is spawned to make sure tokio can distribute these over threads. - #[allow(clippy::redundant_async_block)] - match tokio::spawn(async move { process_path(&p).await }).await { + match tokio::spawn(async move { foo(&p).await }).await { Ok(paths) => paths, Err(err) => { tracing::error!(%err, "failed to spawn task to resolve bundle paths"); @@ -114,9 +111,6 @@ where results.into_iter().flatten().collect() } -// `tracing::instrument` generates code that triggers this warning. -// Not much we can do to prevent that. -#[allow(clippy::let_with_type_underscore)] #[tracing::instrument(skip_all)] pub fn resolve_bundle_paths(paths: I) -> impl Stream where @@ -135,12 +129,12 @@ mod tests { use tempfile::tempdir; use tokio::process::Command; - use super::process_path; + use super::foo; #[tokio::test] async fn resolve_single_file() { let path = PathBuf::from("foo"); - let paths = process_path(&path).await; + let paths = foo(&path).await; assert_eq!(paths.len(), 1); assert_eq!(paths[0], path); } @@ -148,7 +142,7 @@ mod tests { #[tokio::test] async fn resolve_empty_directory() { let dir = tempdir().expect("failed to create temporary directory"); - let paths = process_path(dir).await; + let paths = foo(dir).await; assert!(paths.is_empty()); } @@ -176,7 +170,7 @@ mod tests { .await .expect("failed to create temporary files"); - let paths = process_path(dir).await; + let paths = foo(dir).await; assert_eq!(bundle_names.len(), paths.len()); diff --git a/crates/dtmt/src/cmd/watch.rs b/crates/dtmt/src/cmd/watch.rs index 2abd0f7..508cef9 100644 --- a/crates/dtmt/src/cmd/watch.rs +++ b/crates/dtmt/src/cmd/watch.rs @@ -1,231 +1,24 @@ -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use std::time::Duration; +use std::path::PathBuf; -use 
clap::{value_parser, Arg, ArgAction, ArgMatches, Command}; -use color_eyre::eyre::{Context, Result}; -use dtmt_shared::ModConfig; -use notify::{Event, Watcher}; +use clap::{value_parser, Arg, ArgMatches, Command}; +use color_eyre::eyre::Result; -use crate::cmd::build::{build, read_project_config}; - -use super::package::package; - -pub(crate) fn command_definition() -> Command { +pub(crate) fn _command_definition() -> Command { Command::new("watch") - .about("Watch for file system changes and re-build the mod archive.") - .arg( - Arg::new("debounce") - .long("debounce") - .short('b') - .default_value("150") - .value_parser(value_parser!(u64)) - .help( - "The delay to debounce events by. This avoids continously \ - rebuilding on rapid file changes, such as version control checkouts.", - ), - ) + .about("Re-build the given directory on file changes.") .arg( Arg::new("directory") .required(false) + .default_value(".") .value_parser(value_parser!(PathBuf)) .help( "The path to the project to build. \ - If omitted, the current working directory is used.", + If omitted, the current working directory is used.", ), ) - .arg( - Arg::new("out") - .long("out") - .short('o') - .default_value("out") - .value_parser(value_parser!(PathBuf)) - .help("The directory to write output files to."), - ) - .arg( - Arg::new("deploy") - .long("deploy") - .short('d') - .value_parser(value_parser!(PathBuf)) - .help( - "If the path to the game (without the trailing '/bundle') is specified, \ - deploy the newly built bundles. \ - This will not adjust the bundle database or package files, so if files are \ - added or removed, you will have to import into DTMM and re-deploy there.", - ), - ) - .arg( - Arg::new("archive") - .long("archive") - .short('a') - .value_parser(value_parser!(PathBuf)) - .help( - "The path to write the packaged file to. 
Will default to a file in the \ - current working directory", - ), - ) - .arg( - Arg::new("ignore") - .long("ignore") - .short('i') - .value_parser(value_parser!(PathBuf)) - .action(ArgAction::Append) - .help( - "A directory or file path to ignore. May be specified multiple times. \ - The values of 'out' and 'archive' are ignored automatically.", - ), - ) -} - -#[tracing::instrument] -async fn compile( - cfg: &ModConfig, - out_path: impl AsRef + std::fmt::Debug, - archive_path: impl AsRef + std::fmt::Debug, - game_dir: Arc + std::fmt::Debug>>, -) -> Result<()> { - let out_path = out_path.as_ref(); - build(cfg, out_path, game_dir) - .await - .wrap_err("Failed to build bundles")?; - package(cfg, out_path, archive_path) - .await - .wrap_err("Failed to package bundles") } #[tracing::instrument(skip_all)] -pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - let cfg = read_project_config(matches.get_one::("directory").cloned()) - .await - .wrap_err("failed to load project config")?; - tracing::debug!(?cfg); - let cfg = Arc::new(cfg); - - let game_dir = matches - .get_one::("deploy") - .map(path_clean::clean) - .map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) }) - .map(|p| p.join("bundle")); - - let out_path = matches - .get_one::("out") - .map(path_clean::clean) - .map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) }) - .expect("parameter should have default value"); - - let archive_path = matches - .get_one::("archive") - .map(path_clean::clean) - .map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) }) - .unwrap_or_else(|| cfg.dir.join(format!("{}.zip", cfg.id))); - - let ignored = { - let mut ignored: Vec<_> = matches - .get_many::("ignore") - .unwrap_or_default() - .map(path_clean::clean) - .map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) }) - .collect(); - - ignored.push(out_path.clone()); - ignored.push(archive_path.clone()); - - ignored - }; - - if tracing::enabled!(tracing::Level::INFO) { - let list = 
ignored.iter().fold(String::new(), |mut s, p| { - s.push_str("\n - "); - s.push_str(&p.display().to_string()); - s - }); - - tracing::info!("Ignoring:{}", list); - } - - let game_dir = Arc::new(game_dir); - - let duration = - Duration::from_millis(matches.get_one::("debounce").copied().unwrap_or(150)); - let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel(); - - let mut watcher = notify::recommended_watcher(move |res: Result| { - let ignored = match &res { - Ok(evt) => evt.paths.iter().any(|p1| { - let p1 = path_clean::clean(p1); - ignored.iter().any(|p2| p1.starts_with(p2)) - }), - Err(_) => false, - }; - - tracing::trace!(?res, ignored, "Received file system event"); - - if !ignored { - if let Err(err) = tx.send(res) { - tracing::error!("Failed to send file system event: {:?}", err); - } - } - }) - .wrap_err("failed to create file system watcher")?; - - tracing::info!("Starting file watcher on '{}'", cfg.dir.display()); - - let path = cfg.dir.clone(); - watcher - .watch(&path, notify::RecursiveMode::Recursive) - .wrap_err_with(|| { - format!( - "failed to watch directory for file changes: {}", - path.display() - ) - })?; - - tracing::trace!("Starting debounce loop"); - - let mut dirty = false; - loop { - // While we could just always await on the timeout, splitting things like this - // optimizes the case when no events happen for a while. Rather than being woken every - // `duration` just to do nothing, this way we always wait for a new event first until - // we start the debounce timeouts. - if dirty { - match tokio::time::timeout(duration, rx.recv()).await { - // The error is the wanted case, as it signals that we haven't received an - // event within `duration`, which es what the debounce is supposed to wait for. 
- Err(_) => { - tracing::trace!("Received debounce timeout, running build"); - if let Err(err) = - compile(&cfg, &out_path, &archive_path, game_dir.clone()).await - { - tracing::error!("Failed to build mod archive: {:?}", err); - } - dirty = false; - } - Ok(None) => { - break; - } - // We received a value before the timeout, so we reset it - Ok(_) => { - tracing::trace!("Received value before timeout, resetting"); - } - } - } else { - match rx.recv().await { - Some(_) => { - tracing::trace!("Received event, starting debounce"); - dirty = true; - } - None => { - break; - } - } - } - } - - tracing::trace!("Event channel closed"); - if let Err(err) = compile(&cfg, &out_path, &archive_path, game_dir.clone()).await { - tracing::error!("Failed to build mod archive: {:?}", err); - } - - Ok(()) +pub(crate) async fn run(_ctx: sdk::Context, _matches: &ArgMatches) -> Result<()> { + unimplemented!() } diff --git a/crates/dtmt/src/main.rs b/crates/dtmt/src/main.rs index e41e802..22399dc 100644 --- a/crates/dtmt/src/main.rs +++ b/crates/dtmt/src/main.rs @@ -1,8 +1,5 @@ #![feature(io_error_more)] #![feature(let_chains)] -#![feature(result_flattening)] -#![feature(test)] -#![windows_subsystem = "console"] use std::path::PathBuf; use std::sync::Arc; @@ -16,19 +13,29 @@ use serde::{Deserialize, Serialize}; use tokio::fs::File; use tokio::io::BufReader; use tokio::sync::RwLock; +use tracing_error::ErrorLayer; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; mod cmd { + #[cfg(target_os = "windows")] + const OODLE_LIB_NAME: &str = "oo2core_8_win64"; + + #[cfg(target_os = "linux")] + const OODLE_LIB_NAME: &str = "liboo2corelinux64.so"; + pub mod build; pub mod bundle; pub mod dictionary; - pub mod migrate; pub mod murmur; pub mod new; - pub mod package; mod util; pub mod watch; } -mod shell_parse; + +mod mods { + pub mod archive; +} #[derive(Default, Deserialize, Serialize)] struct GlobalConfig { @@ -36,21 +43,10 @@ struct GlobalConfig { } #[tokio::main] 
-#[tracing::instrument(level = "error", fields(cmd_line = tracing::field::Empty))] +#[tracing::instrument] async fn main() -> Result<()> { color_eyre::install()?; - { - let span = tracing::Span::current(); - if !span.is_disabled() { - let cmdline: String = std::env::args_os().fold(String::new(), |mut s, arg| { - s.push_str(&arg.to_string_lossy()); - s - }); - span.record("cmd_line", cmdline); - } - } - let matches = command!() .subcommand_required(true) .arg( @@ -67,14 +63,24 @@ async fn main() -> Result<()> { .subcommand(cmd::build::command_definition()) .subcommand(cmd::bundle::command_definition()) .subcommand(cmd::dictionary::command_definition()) - .subcommand(cmd::migrate::command_definition()) .subcommand(cmd::murmur::command_definition()) .subcommand(cmd::new::command_definition()) - .subcommand(cmd::package::command_definition()) - .subcommand(cmd::watch::command_definition()) + // .subcommand(cmd::watch::command_definition()) .get_matches(); - dtmt_shared::create_tracing_subscriber(); + { + let fmt_layer = tracing_subscriber::fmt::layer().pretty(); + let filter_layer = + EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?; + + tracing_subscriber::registry() + .with(filter_layer) + .with(fmt_layer) + .with(ErrorLayer::new( + tracing_subscriber::fmt::format::Pretty::default(), + )) + .init(); + } // TODO: Move this into a `Context::init` method? 
let ctx = sdk::Context::new(); @@ -91,7 +97,7 @@ async fn main() -> Result<()> { tokio::spawn(async move { let res = File::open(&path) .await - .wrap_err_with(|| format!("Failed to open dictionary file: {}", path.display())); + .wrap_err_with(|| format!("failed to open dictionary file: {}", path.display())); let f = match res { Ok(f) => f, @@ -118,7 +124,7 @@ async fn main() -> Result<()> { tokio::spawn(async move { let conf = tokio::task::spawn_blocking(|| { confy::load::(clap::crate_name!(), None) - .wrap_err("Failed to load global configuration") + .wrap_err("failed to load global configuration") }) .await; @@ -141,14 +147,12 @@ async fn main() -> Result<()> { }; match matches.subcommand() { - Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?, Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?, - Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?, - Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?, Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?, Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?, - Some(("package", sub_matches)) => cmd::package::run(ctx, sub_matches).await?, + Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?, Some(("watch", sub_matches)) => cmd::watch::run(ctx, sub_matches).await?, + Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?, _ => unreachable!( "clap is configured to require a subcommand, and they're all handled above" ), diff --git a/crates/dtmt/src/mods/archive.rs b/crates/dtmt/src/mods/archive.rs new file mode 100644 index 0000000..5e0c268 --- /dev/null +++ b/crates/dtmt/src/mods/archive.rs @@ -0,0 +1,96 @@ +use std::collections::{HashMap, HashSet}; +use std::fs::File; +use std::io::Write; +use std::path::{Path, PathBuf}; + +use color_eyre::eyre::{self, Context}; +use color_eyre::Result; +use sdk::murmur::Murmur64; +use 
sdk::Bundle; +use zip::ZipWriter; + +pub struct Archive { + name: String, + bundles: Vec, + mod_file: Option>, +} + +impl Archive { + pub fn new(name: String) -> Self { + Self { + name, + bundles: Vec::new(), + mod_file: None, + } + } + + pub fn add_bundle(&mut self, bundle: Bundle) { + self.bundles.push(bundle) + } + + pub fn add_mod_file(&mut self, content: Vec) { + self.mod_file = Some(content); + } + + pub fn write

(&self, ctx: &sdk::Context, path: P) -> Result<()> + where + P: AsRef, + { + let mod_file = self + .mod_file + .as_ref() + .ok_or_else(|| eyre::eyre!("Mod file is missing from mod archive"))?; + + let f = File::create(path.as_ref()).wrap_err_with(|| { + format!( + "failed to open file for reading: {}", + path.as_ref().display() + ) + })?; + let mut zip = ZipWriter::new(f); + + zip.add_directory(&self.name, Default::default())?; + + let base_path = PathBuf::from(&self.name); + + { + let mut name = base_path.join(&self.name); + name.set_extension("mod"); + zip.start_file(name.to_string_lossy(), Default::default())?; + zip.write_all(mod_file)?; + } + + let mut file_map = HashMap::new(); + + for bundle in self.bundles.iter() { + let bundle_name = bundle.name().clone(); + + let map_entry: &mut HashSet<_> = file_map.entry(bundle_name).or_default(); + + for file in bundle.files() { + map_entry.insert(file.name(false, None)); + } + + let name = Murmur64::hash(bundle.name().as_bytes()); + let path = base_path.join(name.to_string().to_ascii_lowercase()); + + zip.start_file(path.to_string_lossy(), Default::default())?; + + let data = bundle.to_binary(ctx)?; + zip.write_all(&data)?; + } + + { + let data = serde_sjson::to_string(&file_map)?; + zip.start_file( + base_path.join("files.sjson").to_string_lossy(), + Default::default(), + )?; + zip.write_all(data.as_bytes())?; + } + + zip.finish()?; + + Ok(()) + } +} diff --git a/crates/dtmt/src/shell_parse.rs b/crates/dtmt/src/shell_parse.rs deleted file mode 100644 index 13b3c4d..0000000 --- a/crates/dtmt/src/shell_parse.rs +++ /dev/null @@ -1,183 +0,0 @@ -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -enum ParserState { - Start, - Word, - SingleQuote, - DoubleQuote, -} - -pub struct ShellParser<'a> { - bytes: &'a [u8], - offset: usize, - pub errored: bool, -} - -impl<'a> ShellParser<'a> { - pub fn new(bytes: &'a [u8]) -> Self { - Self { - bytes, - offset: 0, - errored: false, - } - } - - fn parse_word(&mut self) -> Option<&'a [u8]> 
{ - // The start of the current word. Certain leading characters should be ignored, - // so this might change. - let mut start = self.offset; - let mut state = ParserState::Start; - - while self.offset < self.bytes.len() { - let c = self.bytes[self.offset]; - self.offset += 1; - - match state { - ParserState::Start => match c { - // Ignore leading whitespace - b' ' | b'\t' | b'\n' => start += 1, - b'\'' => { - state = ParserState::SingleQuote; - start += 1; - } - b'"' => { - state = ParserState::DoubleQuote; - start += 1; - } - _ => { - state = ParserState::Word; - } - }, - ParserState::Word => match c { - // Unquoted whitespace ends the current word - b' ' | b'\t' | b'\n' => { - return Some(&self.bytes[start..self.offset - 1]); - } - _ => {} - }, - ParserState::SingleQuote => if c == b'\'' { - return Some(&self.bytes[start..(self.offset - 1)]); - }, - ParserState::DoubleQuote => if c == b'"' { - return Some(&self.bytes[start..(self.offset - 1)]); - }, - } - } - - match state { - ParserState::Start => None, - ParserState::Word => Some(&self.bytes[start..self.offset]), - ParserState::SingleQuote | ParserState::DoubleQuote => { - self.errored = true; - None - } - } - } -} - -impl<'a> Iterator for ShellParser<'a> { - type Item = &'a [u8]; - - fn next(&mut self) -> Option { - self.parse_word() - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_one_word() { - let mut it = ShellParser::new(b"hello"); - assert_eq!(it.next(), Some("hello".as_bytes())); - assert_eq!(it.next(), None); - } - - #[test] - fn test_one_single() { - let mut it = ShellParser::new(b"'hello'"); - assert_eq!(it.next(), Some("hello".as_bytes())); - assert_eq!(it.next(), None); - } - - #[test] - fn test_open_quote() { - let mut it = ShellParser::new(b"'hello"); - assert_eq!(it.next(), None); - assert!(it.errored) - } - - #[test] - fn test_ww2ogg() { - let mut it = ShellParser::new( - b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"", - ); - 
assert_eq!(it.next(), Some("ww2ogg.exe".as_bytes())); - assert_eq!(it.next(), Some("--pcb".as_bytes())); - assert_eq!( - it.next(), - Some("/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin".as_bytes()) - ); - assert_eq!(it.next(), None); - } -} - -#[cfg(test)] -mod bench { - extern crate test; - - use super::*; - #[cfg(feature = "shlex-bench")] - use shlex::bytes::Shlex; - use test::Bencher; - - mod ww2ogg { - use super::*; - - #[bench] - fn custom(b: &mut Bencher) { - let val = test::black_box( - b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"", - ); - b.iter(|| { - let it = ShellParser::new(val); - let _: Vec<_> = test::black_box(it.collect()); - }) - } - - #[cfg(feature = "shlex-bench")] - #[bench] - fn shlex(b: &mut Bencher) { - let val = test::black_box( - b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"", - ); - b.iter(|| { - let it = Shlex::new(val); - let _: Vec<_> = test::black_box(it.collect()); - }) - } - } - - mod one_single { - use super::*; - - #[bench] - fn custom(b: &mut Bencher) { - let val = test::black_box(b"'hello'"); - b.iter(|| { - let it = ShellParser::new(val); - let _: Vec<_> = test::black_box(it.collect()); - }) - } - - #[cfg(feature = "shlex-bench")] - #[bench] - fn shlex(b: &mut Bencher) { - let val = test::black_box(b"'hello'"); - b.iter(|| { - let it = Shlex::new(val); - let _: Vec<_> = test::black_box(it.collect()); - }) - } - } -} diff --git a/docs/screenshots/dtmm.png b/docs/screenshots/dtmm.png deleted file mode 100644 index d51c253..0000000 --- a/docs/screenshots/dtmm.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:afeb671fc5f1d683a805a0c31236fd55fd1fc34267142f2f3446cbe780e11801 -size 58994 diff --git a/lib/ansi-parser b/lib/ansi-parser deleted file mode 160000 index 27beb4b..0000000 --- a/lib/ansi-parser +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 27beb4bc1ffd2865a432e13f0588b5351ff419bf diff --git a/lib/color-eyre b/lib/color-eyre deleted 
file mode 160000 index 228b8ca..0000000 --- a/lib/color-eyre +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751 diff --git a/lib/druid-widget-extra b/lib/druid-widget-extra new file mode 160000 index 0000000..fd069cc --- /dev/null +++ b/lib/druid-widget-extra @@ -0,0 +1 @@ +Subproject commit fd069ccf62af95bbbe204283e79cab1eeb83ecf1 diff --git a/lib/dtmt-shared/Cargo.toml b/lib/dtmt-shared/Cargo.toml deleted file mode 100644 index 26e1b6a..0000000 --- a/lib/dtmt-shared/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "dtmt-shared" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -ansi_term = { workspace = true } -color-eyre = { workspace = true } -serde = { workspace = true } -steamlocate = { workspace = true } -time = { workspace = true } -tracing = { workspace = true } -tracing-error = { workspace = true } -tracing-subscriber = { workspace = true } diff --git a/lib/dtmt-shared/README.adoc b/lib/dtmt-shared/README.adoc deleted file mode 100644 index 01b26ec..0000000 --- a/lib/dtmt-shared/README.adoc +++ /dev/null @@ -1,13 +0,0 @@ -= dtmt-shared -:idprefix: -:idseparator: -:toc: macro -:toclevels: 1 -:!toc-title: -:caution-caption: :fire: -:important-caption: :exclamtion: -:note-caption: :paperclip: -:tip-caption: :bulb: -:warning-caption: :warning: - -A set of types and functions shared between multiple crates within _Darktide Mod Tools_ that don't fit into the engine SDK. 
diff --git a/lib/dtmt-shared/src/lib.rs b/lib/dtmt-shared/src/lib.rs deleted file mode 100644 index db11579..0000000 --- a/lib/dtmt-shared/src/lib.rs +++ /dev/null @@ -1,102 +0,0 @@ -use std::collections::HashMap; -use std::path::PathBuf; - -use color_eyre::eyre::{OptionExt as _, WrapErr as _}; -use color_eyre::Result; -use serde::{Deserialize, Serialize}; -use steamlocate::SteamDir; -use time::OffsetDateTime; - -pub use log::*; - -mod log; - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct ModConfigResources { - pub init: PathBuf, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub data: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub localization: Option, -} - -#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] -#[serde(rename_all = "snake_case")] -pub enum ModOrder { - Before, - After, -} - -#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] -#[serde(untagged)] -pub enum ModDependency { - ID(String), - Config { id: String, order: ModOrder }, -} - -// A bit dumb, but serde doesn't support literal values with the -// `default` attribute, only paths. 
-fn default_true() -> bool { - true -} - -// Similarly dumb, as the `skip_serializing_if` attribute needs a function -fn is_true(val: &bool) -> bool { - *val -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct ModConfig { - #[serde(skip)] - pub dir: PathBuf, - pub id: String, - pub name: String, - pub summary: String, - pub version: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub author: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub image: Option, - #[serde(default)] - pub categories: Vec, - #[serde(default)] - pub packages: Vec, - pub resources: ModConfigResources, - #[serde(default)] - pub depends: Vec, - #[serde(default = "default_true", skip_serializing_if = "is_true")] - pub bundled: bool, - #[serde(default)] - pub name_overrides: HashMap, -} - -pub const STEAMAPP_ID: u32 = 1361210; - -#[derive(Debug)] -pub struct GameInfo { - pub path: PathBuf, - pub last_updated: OffsetDateTime, -} - -pub fn collect_game_info() -> Result> { - let dir = SteamDir::locate().wrap_err("Failed to locate Steam installation")?; - - let found = dir - .find_app(STEAMAPP_ID) - .wrap_err("Failed to look up game by Steam app ID")?; - - let Some((app, library)) = found else { - return Ok(None); - }; - - let last_updated = app - .last_updated - .ok_or_eyre("Missing field 'last_updated'")?; - - Ok(Some(GameInfo { - path: library.path().join(app.install_dir), - last_updated: last_updated.into(), - })) -} diff --git a/lib/dtmt-shared/src/log.rs b/lib/dtmt-shared/src/log.rs deleted file mode 100644 index 9c95c63..0000000 --- a/lib/dtmt-shared/src/log.rs +++ /dev/null @@ -1,110 +0,0 @@ -use std::fmt::Result; - -use ansi_term::Color; -use time::format_description::FormatItem; -use time::macros::format_description; -use time::OffsetDateTime; -use tracing::field::Field; -use tracing::{Event, Level, Metadata, Subscriber}; -use tracing_error::ErrorLayer; -use 
tracing_subscriber::filter::FilterFn; -use tracing_subscriber::fmt::format::{debug_fn, Writer}; -use tracing_subscriber::fmt::{self, FmtContext, FormatEvent, FormatFields}; -use tracing_subscriber::layer::SubscriberExt; -use tracing_subscriber::prelude::*; -use tracing_subscriber::registry::LookupSpan; -use tracing_subscriber::EnvFilter; - -pub const TIME_FORMAT: &[FormatItem] = format_description!("[hour]:[minute]:[second]"); - -pub fn format_fields(w: &mut Writer<'_>, field: &Field, val: &dyn std::fmt::Debug) -> Result { - if field.name() == "message" { - write!(w, "{:?}", val) - } else { - Ok(()) - } -} - -pub fn filter_fields(metadata: &Metadata<'_>) -> bool { - metadata - .fields() - .iter() - .any(|field| field.name() == "message") -} - -pub struct Formatter; - -impl FormatEvent for Formatter -where - S: Subscriber + for<'a> LookupSpan<'a>, - N: for<'a> FormatFields<'a> + 'static, -{ - fn format_event( - &self, - ctx: &FmtContext<'_, S, N>, - mut writer: Writer<'_>, - event: &Event<'_>, - ) -> Result { - let meta = event.metadata(); - - let time = OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()); - let time = time.format(TIME_FORMAT).map_err(|_| std::fmt::Error)?; - - let level = meta.level(); - // Sadly, tracing's `Level` is a struct, not an enum, so we can't properly `match` it. 
- let color = if *level == Level::TRACE { - Color::Purple - } else if *level == Level::DEBUG { - Color::Blue - } else if *level == Level::INFO { - Color::Green - } else if *level == Level::WARN { - Color::Yellow - } else if *level == Level::ERROR { - Color::Red - } else { - unreachable!() - }; - - write!( - writer, - "[{}] [{:>5}] ", - time, - color.bold().paint(format!("{}", level)) - )?; - - ctx.field_format().format_fields(writer.by_ref(), event)?; - - writeln!(writer) - } -} - -pub fn create_tracing_subscriber() { - let env_layer = - EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap()); - - let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) { - let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr); - (Some(fmt_layer), None, None) - } else { - // Creates a layer that - // - only prints events that contain a message - // - does not print fields - // - does not print spans/targets - // - only prints time, not date - let fmt_layer = fmt::layer() - .with_writer(std::io::stderr) - .event_format(Formatter) - .fmt_fields(debug_fn(format_fields)); - - (None, Some(fmt_layer), Some(FilterFn::new(filter_fields))) - }; - - tracing_subscriber::registry() - .with(filter_layer) - .with(env_layer) - .with(dev_stdout_layer) - .with(prod_stdout_layer) - .with(ErrorLayer::new(fmt::format::Pretty::default())) - .init(); -} diff --git a/lib/luajit2-sys b/lib/luajit2-sys deleted file mode 160000 index 6d94a4d..0000000 --- a/lib/luajit2-sys +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241 diff --git a/lib/nexusmods/Cargo.toml b/lib/nexusmods/Cargo.toml deleted file mode 100644 index b9cc879..0000000 --- a/lib/nexusmods/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "nexusmods" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -futures = "0.3.26" 
-lazy_static = "1.4.0" -regex = "1.7.1" -reqwest = { version = "0.12.4" } -serde = { version = "1.0.152", features = ["derive"] } -serde_json = "1.0.94" -thiserror = "2.0.0" -time = { version = "0.3.20", features = ["serde"] } -tracing = "0.1.37" -url = { version = "2.3.1", features = ["serde"] } - -[dev-dependencies] -tokio = { version = "1.26.0", features = ["rt", "macros"] } diff --git a/lib/nexusmods/src/lib.rs b/lib/nexusmods/src/lib.rs deleted file mode 100644 index cddf6a0..0000000 --- a/lib/nexusmods/src/lib.rs +++ /dev/null @@ -1,339 +0,0 @@ -use std::collections::HashMap; -use std::convert::Infallible; - -use lazy_static::lazy_static; -use regex::Regex; -use reqwest::header::{HeaderMap, HeaderValue, InvalidHeaderValue}; -use reqwest::{Client, IntoUrl, RequestBuilder, Url}; -use serde::Deserialize; -use thiserror::Error; - -mod types; -use time::OffsetDateTime; -pub use types::*; - -// TODO: Add OS information -const USER_AGENT: &str = concat!("DTMM/", env!("CARGO_PKG_VERSION")); -const GAME_ID: &str = "warhammer40kdarktide"; - -lazy_static! 
{ - static ref BASE_URL: Url = Url::parse("https://api.nexusmods.com/v1/").unwrap(); - static ref BASE_URL_GAME: Url = - Url::parse("https://api.nexusmods.com/v1/games/warhammer40kdarktide/").unwrap(); -} - -#[derive(Error, Debug)] -pub enum Error { - #[error("HTTP error: {0:?}")] - HTTP(#[from] reqwest::Error), - #[error("invalid URL: {0:?}")] - URLParseError(#[from] url::ParseError), - #[error("failed to deserialize due to {error}: {json}")] - Deserialize { - json: String, - error: serde_json::Error, - }, - #[error(transparent)] - InvalidHeaderValue(#[from] InvalidHeaderValue), - #[error("this error cannot happen")] - Infallible(#[from] Infallible), - #[error("invalid NXM URL '{url}': {0}", url = .1.as_str())] - InvalidNXM(&'static str, Url), - #[error("{0}")] - Custom(String), -} - -pub type Result = std::result::Result; - -pub struct Nxm { - pub mod_id: u64, - pub file_id: u64, - pub user_id: u64, - pub key: String, - pub expires: OffsetDateTime, -} - -pub struct Api { - client: Client, -} - -impl Api { - pub fn new(key: String) -> Result { - let mut headers = HeaderMap::new(); - headers.insert("accept", HeaderValue::from_static("application/json")); - headers.insert("apikey", HeaderValue::from_str(&key)?); - - let client = Client::builder() - .user_agent(USER_AGENT) - .default_headers(headers) - .build()?; - - Ok(Self { client }) - } - - #[tracing::instrument(skip(self))] - async fn send(&self, req: RequestBuilder) -> Result - where - T: for<'a> Deserialize<'a>, - { - let res = req.send().await?.error_for_status()?; - tracing::trace!(?res); - - let json = res.text().await?; - serde_json::from_str(&json).map_err(|error| Error::Deserialize { json, error }) - } - - #[tracing::instrument(skip(self))] - pub async fn user_validate(&self) -> Result { - let url = BASE_URL.join("users/validate.json")?; - let req = self.client.get(url); - self.send(req).await - } - - #[tracing::instrument(skip(self))] - pub async fn mods_updated(&self, period: UpdatePeriod) -> Result> { 
- let url = BASE_URL_GAME.join("mods/updated.json")?; - let req = self.client.get(url).query(&[period]); - self.send(req).await - } - - #[tracing::instrument(skip(self))] - pub async fn mods_id(&self, id: u64) -> Result { - let url = BASE_URL_GAME.join(&format!("mods/{}.json", id))?; - let req = self.client.get(url); - self.send(req).await - } - - #[tracing::instrument(skip(self))] - pub async fn file_version(&self, id: u64, timestamp: T) -> Result - where - T: std::fmt::Debug, - OffsetDateTime: PartialEq, - { - let url = BASE_URL_GAME.join(&format!("mods/{id}/files.json"))?; - let req = self.client.get(url); - let files: FileList = self.send(req).await?; - - let Some(file) = files - .files - .into_iter() - .find(|file| file.uploaded_timestamp == timestamp) - else { - let err = Error::Custom("Timestamp does not match any file".into()); - return Err(err); - }; - - Ok(file.version) - } - - #[tracing::instrument(skip(self))] - pub async fn picture(&self, url: impl IntoUrl + std::fmt::Debug) -> Result> { - let res = self.client.get(url).send().await?.error_for_status()?; - - res.bytes() - .await - .map(|bytes| bytes.to_vec()) - .map_err(From::from) - } - - #[tracing::instrument(skip(self))] - pub async fn get_file_by_id(&self, mod_id: u64, file_id: u64) -> Result { - let url = BASE_URL_GAME.join(&format!("mods/{mod_id}/files/{file_id}.json"))?; - let req = self.client.get(url); - self.send(req).await - } - - pub fn parse_file_name>( - name: S, - ) -> Option<(String, u64, String, OffsetDateTime)> { - lazy_static! 
{ - static ref RE: Regex = Regex::new(r#"^(?P.+?)-(?P[1-9]\d*)-(?P.+?)-(?P[1-9]\d*)(?:\.\w+)?$"#).unwrap(); - } - - RE.captures(name.as_ref()).and_then(|cap| { - let name = cap.name("name").map(|s| s.as_str().to_string())?; - let mod_id = cap.name("mod_id").and_then(|s| s.as_str().parse().ok())?; - let version = cap.name("version").map(|s| s.as_str().replace('-', "."))?; - let updated = cap - .name("updated") - .and_then(|s| s.as_str().parse().ok()) - .and_then(|s| OffsetDateTime::from_unix_timestamp(s).ok())?; - - Some((name, mod_id, version, updated)) - }) - } - - #[tracing::instrument(skip(self))] - pub async fn mods_download_link( - &self, - mod_id: u64, - file_id: u64, - key: String, - expires: OffsetDateTime, - ) -> Result> { - let url = - BASE_URL_GAME.join(&format!("mods/{mod_id}/files/{file_id}/download_link.json"))?; - let req = self - .client - .get(url) - .query(&[("key", key)]) - .query(&[("expires", expires.unix_timestamp())]); - self.send(req).await - } - - pub async fn handle_nxm(&self, url: Url) -> Result<(Mod, File, Vec)> { - let nxm = Self::parse_nxm(url.clone())?; - - let user = self.user_validate().await?; - - if nxm.user_id != user.user_id { - return Err(Error::InvalidNXM("user_id mismtach", url)); - } - - let (mod_data, file_info, download_info) = futures::try_join!( - self.mods_id(nxm.mod_id), - self.get_file_by_id(nxm.mod_id, nxm.file_id), - self.mods_download_link(nxm.mod_id, nxm.file_id, nxm.key, nxm.expires) - )?; - - let Some(download_url) = download_info.first().map(|i| i.uri.clone()) else { - return Err(Error::InvalidNXM("no download link", url)); - }; - - let req = self.client.get(download_url); - let data = req.send().await?.bytes().await?; - - Ok((mod_data, file_info, data.to_vec())) - } - - pub fn parse_nxm(nxm: Url) -> Result { - if nxm.scheme() != "nxm" { - return Err(Error::InvalidNXM("Invalid scheme", nxm)); - } - - // Now it makes sense, why Nexus calls this field `game_domain_name`, when it's just - // another path segment 
in the regular API calls. - if nxm.host_str() != Some(GAME_ID) { - return Err(Error::InvalidNXM("Invalid game domain name", nxm)); - } - - let Some(mut segments) = nxm.path_segments() else { - return Err(Error::InvalidNXM("Missing path segments", nxm)); - }; - - if segments.next() != Some("mods") { - return Err(Error::InvalidNXM( - "Unexpected path segment, expected 'mods'", - nxm, - )); - } - - let Some(mod_id) = segments.next().and_then(|id| id.parse().ok()) else { - return Err(Error::InvalidNXM("Invalid mod ID", nxm)); - }; - - if segments.next() != Some("files") { - return Err(Error::InvalidNXM( - "Unexpected path segment, expected 'files'", - nxm, - )); - } - - let Some(file_id) = segments.next().and_then(|id| id.parse().ok()) else { - return Err(Error::InvalidNXM("Invalid file ID", nxm)); - }; - - let mut query = HashMap::new(); - let pairs = nxm.query_pairs(); - - for (key, val) in pairs { - query.insert(key, val); - } - - let Some(key) = query.get("key") else { - return Err(Error::InvalidNXM("Missing query field 'key'", nxm)); - }; - - let expires = query - .get("expires") - .and_then(|expires| expires.parse().ok()) - .and_then(|expires| OffsetDateTime::from_unix_timestamp(expires).ok()); - let Some(expires) = expires else { - return Err(Error::InvalidNXM("Missing query field 'expires'", nxm)); - }; - - let user_id = query.get("user_id").and_then(|id| id.parse().ok()); - let Some(user_id) = user_id else { - return Err(Error::InvalidNXM("Missing query field 'user_id'", nxm)); - }; - - Ok(Nxm { - mod_id, - file_id, - key: key.to_string(), - expires, - user_id, - }) - } -} - -#[cfg(test)] -mod test { - use reqwest::Url; - use time::OffsetDateTime; - - use crate::Api; - - fn make_api() -> Api { - let key = std::env::var("NEXUSMODS_API_KEY").expect("'NEXUSMODS_API_KEY' env var missing"); - Api::new(key).expect("failed to build API client") - } - - #[tokio::test] - async fn mods_updated() { - let client = make_api(); - client - .mods_updated(Default::default()) - 
.await - .expect("failed to query 'mods_updated'"); - } - - #[tokio::test] - async fn user_validate() { - let client = make_api(); - client - .user_validate() - .await - .expect("failed to query 'user_validate'"); - } - - #[tokio::test] - async fn mods_id() { - let client = make_api(); - let dmf_id = 8; - client - .mods_id(dmf_id) - .await - .expect("failed to query 'mods_id'"); - } - - #[test] - fn parse_file_name() { - let file = "Darktide Mod Framework-8-23-3-04-1677966575.zip"; - let (name, mod_id, version, updated) = Api::parse_file_name(file).unwrap(); - - assert_eq!(name, String::from("Darktide Mod Framework")); - assert_eq!(mod_id, 8); - assert_eq!(version, String::from("23-3-04")); - assert_eq!( - updated, - OffsetDateTime::from_unix_timestamp(1677966575).unwrap() - ); - } - - #[test] - fn parse_nxm() { - let nxm = Url::parse("nxm://warhammer40kdarktide/mods/8/files/1000172397?key=VZ86Guj_LosPvtkD90-ZQg&expires=1678359882&user_id=1234567").expect("invalid NXM example"); - Api::parse_nxm(nxm).expect("failed to parse nxm link"); - } -} diff --git a/lib/nexusmods/src/types.rs b/lib/nexusmods/src/types.rs deleted file mode 100644 index db0f624..0000000 --- a/lib/nexusmods/src/types.rs +++ /dev/null @@ -1,140 +0,0 @@ -use reqwest::Url; -use serde::ser::SerializeTuple; -use serde::{Deserialize, Serialize}; -use time::OffsetDateTime; - -#[derive(Debug, Deserialize)] -pub struct User { - pub user_id: u64, - pub name: String, - pub profile_url: Url, - // pub is_premium: bool, - // pub is_supporter: bool, - // pub email: String, -} - -#[derive(Copy, Clone, Debug, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum ModStatus { - Published, -} - -#[derive(Copy, Clone, Debug, Deserialize)] -pub enum EndorseStatus { - Endorsed, - Undecided, -} - -#[derive(Debug, Deserialize)] -pub struct ModEndorsement { - pub endorse_status: EndorseStatus, - #[serde(with = "time::serde::timestamp::option")] - pub timestamp: Option, - pub version: Option, -} - -#[derive(Debug, 
Deserialize)] -pub struct Mod { - pub name: String, - pub description: String, - pub summary: String, - pub picture_url: Url, - pub uid: u64, - pub mod_id: u64, - pub category_id: u64, - pub version: String, - #[serde(with = "time::serde::timestamp")] - pub created_timestamp: OffsetDateTime, - // created_time: OffsetDateTime, - #[serde(with = "time::serde::timestamp")] - pub updated_timestamp: OffsetDateTime, - // updated_time: OffsetDateTime, - pub author: String, - pub uploaded_by: String, - pub uploaded_users_profile_url: Url, - pub status: ModStatus, - pub available: bool, - pub endorsement: ModEndorsement, - // pub mod_downloads: u64, - // pub mod_unique_downloads: u64, - // pub game_id: u64, - // pub allow_rating: bool, - // pub domain_name: String, - // pub endorsement_count: u64, - // pub contains_adult_content: bool, -} - -#[derive(Debug, Deserialize)] -pub struct File { - pub id: Vec, - pub uid: u64, - pub file_id: u64, - pub name: String, - pub version: String, - pub category_id: u64, - pub category_name: String, - pub is_primary: bool, - pub size: u64, - pub file_name: String, - #[serde(with = "time::serde::timestamp")] - pub uploaded_timestamp: OffsetDateTime, - pub mod_version: String, - pub external_virus_scan_url: String, - pub description: String, - pub size_kb: u64, - pub size_in_bytes: u64, - pub changelog_html: Option, - pub content_preview_link: String, -} - -#[derive(Debug, Deserialize)] -pub struct FileList { - pub files: Vec, - // pub file_updates: Vec, -} - -#[derive(Debug, Deserialize)] -pub struct DownloadLink { - pub name: String, - pub short_name: String, - #[serde(alias = "URI")] - pub uri: Url, -} - -#[derive(Debug, Deserialize)] -pub struct UpdateInfo { - pub mod_id: u64, - #[serde(with = "time::serde::timestamp")] - pub latest_file_update: OffsetDateTime, - #[serde(with = "time::serde::timestamp")] - pub latest_mod_activity: OffsetDateTime, -} - -#[derive(Copy, Clone, Debug)] -pub enum UpdatePeriod { - Day, - Week, - Month, -} - 
-impl Default for UpdatePeriod { - fn default() -> Self { - Self::Week - } -} - -impl Serialize for UpdatePeriod { - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - let mut tup = serializer.serialize_tuple(2)?; - tup.serialize_element("period")?; - tup.serialize_element(match self { - Self::Day => "1d", - Self::Week => "1w", - Self::Month => "1m", - })?; - tup.end() - } -} diff --git a/lib/oodle/Cargo.toml b/lib/oodle/Cargo.toml deleted file mode 100644 index 4a6fe2f..0000000 --- a/lib/oodle/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "oodle" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -color-eyre = { workspace = true } -tracing = { workspace = true } - -[build-dependencies] -bindgen = "0.72.0" diff --git a/lib/oodle/build.rs b/lib/oodle/build.rs deleted file mode 100644 index 1a1d4e9..0000000 --- a/lib/oodle/build.rs +++ /dev/null @@ -1,45 +0,0 @@ -use std::env; -use std::path::PathBuf; - -fn main() { - let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").expect("No CARGO_MANIFEST_DIR"); - println!("cargo:rustc-link-search=native={}", &manifest_dir); - - if std::env::var("CARGO_CFG_TARGET_FAMILY") == Ok(String::from("windows")) { - let lib_name = if cfg!(debug_assertions) { - "oo2core_win64_debug" - } else { - "oo2core_win64" - }; - println!("cargo:rustc-link-lib=static={}", lib_name); - } else { - println!("cargo:rustc-link-lib=static=oo2corelinux64"); - println!("cargo:rustc-link-lib=stdc++"); - } - - println!("cargo:rerun-if-changed=oodle2.h"); - - // The bindgen::Builder is the main entry point - // to bindgen, and lets you build up options for - // the resulting bindings. - let bindings = bindgen::Builder::default() - // The input header we would like to generate - // bindings for. 
- .header("oodle2base.h") - .header("oodle2.h") - .blocklist_file("stdint.h") - .blocklist_file("stdlib.h") - // Tell cargo to invalidate the built crate whenever any of the - // included header files changed. - .parse_callbacks(Box::new(bindgen::CargoCallbacks::new())) - // Finish the builder and generate the bindings. - .generate() - // Unwrap the Result and panic on failure. - .expect("Unable to generate bindings"); - - // Write the bindings to the $OUT_DIR/bindings.rs file. - let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); - bindings - .write_to_file(out_path.join("bindings.rs")) - .expect("Couldn't write bindings!"); -} diff --git a/lib/oodle/oodle2.h b/lib/oodle/oodle2.h deleted file mode 100644 index ffe4152..0000000 --- a/lib/oodle/oodle2.h +++ /dev/null @@ -1,1643 +0,0 @@ - -//=================================================== -// Oodle2 Core header -// (C) Copyright 1994-2021 Epic Games Tools LLC -//=================================================== - -#ifndef __OODLE2_H_INCLUDED__ -#define __OODLE2_H_INCLUDED__ - -#ifndef OODLE2_PUBLIC_HEADER -#define OODLE2_PUBLIC_HEADER 1 -#endif - -#ifndef __OODLE2BASE_H_INCLUDED__ -#include "oodle2base.h" -#endif - -#ifdef _MSC_VER -#pragma pack(push, Oodle, 8) - -#pragma warning(push) -#pragma warning(disable : 4127) // conditional is constant -#endif - -// header version : -// the DLL is incompatible when MAJOR is bumped -// MINOR is for internal revs and bug fixes that don't affect API compatibility -#define OODLE2_VERSION_MAJOR 9 -#define OODLE2_VERSION_MINOR 5 - -// OodleVersion string is 1 . MAJOR . MINOR -// don't make it from macros cuz the doc tool has to parse the string literal - -#define OodleVersion "2.9.5" /* -*/ - -//----------------------------------------------------- -// OodleLZ - -#if 0 -#define OODLE_ALLOW_DEPRECATED_COMPRESSORS /* If you need to encode with the deprecated compressors, define this before including oodle2.h - - You may still decode with them without defining this. 
-*/ -#endif - -// Default verbosity selection of 0 will not even log when it sees corruption -typedef enum OodleLZ_Verbosity -{ - OodleLZ_Verbosity_None = 0, - OodleLZ_Verbosity_Minimal = 1, - OodleLZ_Verbosity_Some = 2, - OodleLZ_Verbosity_Lots = 3, - OodleLZ_Verbosity_Force32 = 0x40000000 -} OodleLZ_Verbosity; -/* Verbosity of LZ functions - LZ functions print information to the function set by $OodleCore_Plugins_SetPrintf - or $OodleXLog_Printf if using OodleX. -*/ - -OO_COMPILER_ASSERT( sizeof(OodleLZ_Verbosity) == 4 ); - -typedef enum OodleLZ_Compressor -{ - OodleLZ_Compressor_Invalid = -1, - OodleLZ_Compressor_None = 3, // None = memcpy, pass through uncompressed bytes - - // NEW COMPRESSORS : - OodleLZ_Compressor_Kraken = 8, // Fast decompression and high compression ratios, amazing! - OodleLZ_Compressor_Leviathan = 13,// Leviathan = Kraken's big brother with higher compression, slightly slower decompression. - OodleLZ_Compressor_Mermaid = 9, // Mermaid is between Kraken & Selkie - crazy fast, still decent compression. - OodleLZ_Compressor_Selkie = 11, // Selkie is a super-fast relative of Mermaid. For maximum decode speed. 
- OodleLZ_Compressor_Hydra = 12, // Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra) - -#ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS - OodleLZ_Compressor_BitKnit = 10, // no longer supported as of Oodle 2.9.0 - OodleLZ_Compressor_LZB16 = 4, // DEPRECATED but still supported - OodleLZ_Compressor_LZNA = 7, // no longer supported as of Oodle 2.9.0 - OodleLZ_Compressor_LZH = 0, // no longer supported as of Oodle 2.9.0 - OodleLZ_Compressor_LZHLW = 1, // no longer supported as of Oodle 2.9.0 - OodleLZ_Compressor_LZNIB = 2, // no longer supported as of Oodle 2.9.0 - OodleLZ_Compressor_LZBLW = 5, // no longer supported as of Oodle 2.9.0 - OodleLZ_Compressor_LZA = 6, // no longer supported as of Oodle 2.9.0 -#endif - - OodleLZ_Compressor_Count = 14, - OodleLZ_Compressor_Force32 = 0x40000000 -} OodleLZ_Compressor; -/* Selection of compression algorithm. - - Each compressor provides a different balance of speed vs compression ratio. - - New Oodle users should only use the new sea monster family of compressors. - - The OODLE_ALLOW_DEPRECATED_COMPRESSORS set of compressors is no longer supported - as of Oodle 2.9.0 ; see $Oodle_FAQ_deprecated_compressors - - The sea monsters are all fuzz safe and use whole-block quantum (not the 16k quantum) - ($OodleLZ_Compressor_UsesWholeBlockQuantum) - - If you need to encode the deprecated compressors, define $OODLE_ALLOW_DEPRECATED_COMPRESSORS before - including oodle2.h - - See $Oodle_FAQ_WhichLZ for a quick FAQ on which compressor to use - - See $OodleLZ_About for discussion of how to choose a compressor. 
-*/ - -OO_COMPILER_ASSERT( sizeof(OodleLZ_Compressor) == 4 ); - -typedef enum OodleLZ_PackedRawOverlap -{ - OodleLZ_PackedRawOverlap_No = 0, - OodleLZ_PackedRawOverlap_Yes = 1, - OodleLZ_PackedRawOverlap_Force32 = 0x40000000 -} OodleLZ_PackedRawOverlap; -/* Bool enum -*/ - -typedef enum OodleLZ_CheckCRC -{ - OodleLZ_CheckCRC_No = 0, - OodleLZ_CheckCRC_Yes = 1, - OodleLZ_CheckCRC_Force32 = 0x40000000 -} OodleLZ_CheckCRC; -/* Bool enum for the LZ decoder - should it check CRC before decoding or not? - - NOTE : the CRC's in the LZH decompress checks are the CRC's of the *compressed* bytes. This allows checking the CRc - prior to decompression, so corrupted data cannot be fed to the compressor. - - To use OodleLZ_CheckCRC_Yes, the compressed data must have been made with $(OodleLZ_CompressOptions:sendQuantumCRCs) set to true. - - If you want a CRC of the raw bytes, there is one optionally stored in the $OodleLZ_SeekTable and can be confirmed with - $OodleLZ_CheckSeekTableCRCs -*/ - - -typedef enum OodleLZ_Profile -{ - OodleLZ_Profile_Main=0, // Main profile (all current features allowed) - OodleLZ_Profile_Reduced=1, // Reduced profile (Kraken only, limited feature set) - OodleLZ_Profile_Force32 = 0x40000000 -} OodleLZ_Profile; -/* Decode profile to target */ - -// Not flagged for idoc and done using a #define since it's internal (testing) use only -#define OodleLZ_Profile_Internal_Custom ((OodleLZ_Profile)100) - -OO_COMPILER_ASSERT( sizeof(OodleLZ_Profile) == 4 ); - -typedef enum OodleDecompressCallbackRet -{ - OodleDecompressCallbackRet_Continue=0, - OodleDecompressCallbackRet_Cancel=1, - OodleDecompressCallbackRet_Invalid=2, - OodleDecompressCallbackRet_Force32 = 0x40000000 -} OodleDecompressCallbackRet; -/* Return value for $OodleDecompressCallback - return OodleDecompressCallbackRet_Cancel to abort the in-progress decompression -*/ - -OODEFFUNC typedef OodleDecompressCallbackRet (OODLE_CALLBACK OodleDecompressCallback)(void * userdata, const OO_U8 * rawBuf,OO_SINTa 
rawLen,const OO_U8 * compBuf,OO_SINTa compBufferSize , OO_SINTa rawDone, OO_SINTa compUsed); -/* User-provided callback for decompression - - $:userdata the data you passed for _pcbData_ - $:rawBuf the decompressed buffer - $:rawLen the total decompressed length - $:compBuf the compressed buffer - $:compBufferSize the total compressed length - $:rawDone number of bytes in rawBuf decompressed so far - $:compUsed number of bytes in compBuf consumed so far - - OodleDecompressCallback is called incrementally during decompression. -*/ - -typedef enum OodleLZ_CompressionLevel -{ - OodleLZ_CompressionLevel_None=0, // don't compress, just copy raw bytes - OodleLZ_CompressionLevel_SuperFast=1, // super fast mode, lower compression ratio - OodleLZ_CompressionLevel_VeryFast=2, // fastest LZ mode with still decent compression ratio - OodleLZ_CompressionLevel_Fast=3, // fast - good for daily use - OodleLZ_CompressionLevel_Normal=4, // standard medium speed LZ mode - - OodleLZ_CompressionLevel_Optimal1=5, // optimal parse level 1 (faster optimal encoder) - OodleLZ_CompressionLevel_Optimal2=6, // optimal parse level 2 (recommended baseline optimal encoder) - OodleLZ_CompressionLevel_Optimal3=7, // optimal parse level 3 (slower optimal encoder) - OodleLZ_CompressionLevel_Optimal4=8, // optimal parse level 4 (very slow optimal encoder) - OodleLZ_CompressionLevel_Optimal5=9, // optimal parse level 5 (don't care about encode speed, maximum compression) - - OodleLZ_CompressionLevel_HyperFast1=-1, // faster than SuperFast, less compression - OodleLZ_CompressionLevel_HyperFast2=-2, // faster than HyperFast1, less compression - OodleLZ_CompressionLevel_HyperFast3=-3, // faster than HyperFast2, less compression - OodleLZ_CompressionLevel_HyperFast4=-4, // fastest, less compression - - // aliases : - OodleLZ_CompressionLevel_HyperFast=OodleLZ_CompressionLevel_HyperFast1, // alias hyperfast base level - OodleLZ_CompressionLevel_Optimal = OodleLZ_CompressionLevel_Optimal2, // alias optimal 
standard level - OodleLZ_CompressionLevel_Max = OodleLZ_CompressionLevel_Optimal5, // maximum compression level - OodleLZ_CompressionLevel_Min = OodleLZ_CompressionLevel_HyperFast4, // fastest compression level - - OodleLZ_CompressionLevel_Force32 = 0x40000000, - OodleLZ_CompressionLevel_Invalid = OodleLZ_CompressionLevel_Force32 -} OodleLZ_CompressionLevel; -/* Selection of compression encoder complexity - - Higher numerical value of CompressionLevel = slower compression, but smaller compressed data. - - The compressed stream is always decodable with the same decompressors. - CompressionLevel controls the amount of work the encoder does to find the best compressed bit stream. - CompressionLevel does not primary affect decode speed, it trades off encode speed for compressed bit stream quality. - - I recommend starting with OodleLZ_CompressionLevel_Normal, then try up or down if you want - faster encoding or smaller output files. - - The Optimal levels are good for distribution when you compress rarely and decompress often; - they provide very high compression ratios but are slow to encode. Optimal2 is the recommended level - to start with of the optimal levels. - Optimal4 and 5 are not recommended for common use, they are very slow and provide the maximum compression ratio, - but the gain over Optimal3 is usually small. - - The HyperFast levels have negative numeric CompressionLevel values. - They are faster than SuperFast for when you're encoder CPU time constrained or want - something closer to symmetric compression vs. decompression time. - The HyperFast levels are currently only available in Kraken, Mermaid & Selkie. - Higher levels of HyperFast are faster to encode, eg. HyperFast4 is the fastest. - - The CompressionLevel does not affect decode speed much. Higher compression level does not mean - slower to decode. 
To trade off decode speed vs ratio, use _spaceSpeedTradeoffBytes_ in $OodleLZ_CompressOptions - -*/ - -OO_COMPILER_ASSERT( sizeof(OodleLZ_CompressionLevel) == 4 ); - -typedef enum OodleLZ_Jobify -{ - OodleLZ_Jobify_Default=0, // Use compressor default for level of internal job usage - OodleLZ_Jobify_Disable=1, // Don't use jobs at all - OodleLZ_Jobify_Normal=2, // Try to balance parallelism with increased memory usage - OodleLZ_Jobify_Aggressive=3, // Maximize parallelism even when doing so requires large amounts of memory - OodleLZ_Jobify_Count=4, - - OodleLZ_Jobify_Force32 = 0x40000000, -} OodleLZ_Jobify; -/* Controls the amount of internal threading in $OodleLZ_Compress calls - - Once you install a pluggable job system via $OodleCore_Plugins_SetJobSystem, Oodle can internally break - heavy-weight compression tasks into smaller jobs that can run in parallel. This can speed up - compression of large blocks of data at Optimal1 and higher levels substantially. - - The trade-off is that running more jobs concurrently rather than sequentially can greatly increase - memory requirements when there are multiple outstanding memory-intensive jobs. - - OodleLZ_Jobify_Default lets the compressor decide; typically compressors will default to "Normal" - when a pluggable job system has been installed, and "Disable" otherwise. - - OodleLZ_Jobify_Disable disables use of internal jobs entirely; all compression work is done on - the calling thread. This minimizes the amount of memory used, and is also appropriate when you're - getting parallelism in other ways, e.g. by running OodleLZ_Compress on many threads yourself. - - OodleLZ_Jobify_Normal uses jobs to increase compressor parallelism and speeds up compression of - large blocks of data, but avoids handing out many concurrent jobs for tasks that are memory-intensive. - - OodleLZ_Jobify_Aggressive will use concurrent jobs even for highly memory-intensive tasks. 
This - can speed up things further, but at a potentially significant increase in the amount of memory used - by Oodle. - -*/ - -#define OODLELZ_LOCALDICTIONARYSIZE_MAX (1<<30) /* Maximum value of maxLocalDictionarySize in OodleLZ_CompressOptions -*/ - -#define OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT (256) /* Default value of spaceSpeedTradeoffBytes in OodleLZ_CompressOptions - Changes how the encoder makes decisions in the bit stream - Higher spaceSpeedTradeoffBytes favors decode speed more (larger compressed files) - Lower spaceSpeedTradeoffBytes favors smaller compressed files (slower decoder) - Goes in a power of 2 scale; so try 64,128 and 512,1024 - (OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT/2) or (OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT*2) -*/ - - -typedef OOSTRUCT OodleLZ_CompressOptions -{ - OO_U32 unused_was_verbosity; // unused ; was verbosity (set to zero) - OO_S32 minMatchLen; // minimum match length ; cannot be used to reduce a compressor's default MML, but can be higher. On some types of data, a large MML (6 or 8) is a space-speed win. - OO_BOOL seekChunkReset; // whether chunks should be independent, for seeking and parallelism - OO_S32 seekChunkLen; // length of independent seek chunks (if seekChunkReset) ; must be a power of 2 and >= $OODLELZ_BLOCK_LEN ; you can use $OodleLZ_MakeSeekChunkLen - OodleLZ_Profile profile; // decoder profile to target (set to zero) - OO_S32 dictionarySize; // sets a maximum offset for matches, if lower than the maximum the format supports. <= 0 means infinite (use whole buffer). Often power of 2 but doesn't have to be. 
- OO_S32 spaceSpeedTradeoffBytes; // this is a number of bytes; I must gain at least this many bytes of compressed size to accept a speed-decreasing decision - OO_S32 unused_was_maxHuffmansPerChunk; // unused ; was maxHuffmansPerChunk - OO_BOOL sendQuantumCRCs; // should the encoder send a CRC of each compressed quantum, for integrity checks; this is necessary if you want to use OodleLZ_CheckCRC_Yes on decode - OO_S32 maxLocalDictionarySize; // (Optimals) size of local dictionary before needing a long range matcher. This does not set a window size for the decoder; it's useful to limit memory use and time taken in the encoder. maxLocalDictionarySize must be a power of 2. Must be <= OODLELZ_LOCALDICTIONARYSIZE_MAX - OO_BOOL makeLongRangeMatcher; // (Optimals) should the encoder find matches beyond maxLocalDictionarySize using an LRM - OO_S32 matchTableSizeLog2; //(non-Optimals) when variable, sets the size of the match finder structure (often a hash table) ; use 0 for the compressor's default - - OodleLZ_Jobify jobify; // controls internal job usage by compressors - void * jobifyUserPtr; // user pointer passed through to RunJob and WaitJob callbacks - - OO_S32 farMatchMinLen; // far matches must be at least this len - OO_S32 farMatchOffsetLog2; // if not zero, the log2 of an offset that must meet farMatchMinLen - - OO_U32 reserved[4]; // reserved space for adding more options; zero these! -} OodleLZ_CompressOptions; -/* Options for the compressor - - Typically filled by calling $OodleLZ_CompressOptions_GetDefault , then individual options may be modified, like : - - OodleLZ_CompressOptions my_options = *OodleLZ_CompressOptions_GetDefault() - - To ensure you have set up the options correctly, call $OodleLZ_CompressOptions_Validate. - - _unused_was_verbosity_ : place holder, set to zero - - _minMatchLen_ : rarely useful. Default value of 0 means let the compressor decide. 
On some types of data, - bumping this up to 4,6, or 8 can improve decode speed with little effect on compression ratio. Most of the - Oodle compressors use a default MML of 4 at levels below 7, and MML 3 at levels >= 7. If you want to keep MML 4 - at the higher levels, set _minMatchLen_ here to 4. _minMatchLen_ cannot be used to reduce the base MML of the compressor, only to increase it. - - _seekChunkReset_ must be true if you want the decode to be able to run "Wide", with pieces that can be - decoded independently (not keeping previous pieces in memory for match references). - - _seekChunkLen_ : length of independent seek chunks (if seekChunkReset) ; must be a power of 2 and >= $OODLELZ_BLOCK_LEN ; you can use $OodleLZ_MakeSeekChunkLen - - _profile_ : tells the encoder to target alternate bitstream profile. Default value of zero for normal use. - - _dictionarySize_ : limits the encoder to partial buffer access for matches. Can be useful for decoding incrementally - without keeping the entire output buffer in memory. - - _spaceSpeedTradeoffBytes_ is a way to trade off compression ratio for decode speed. If you make it smaller, - you get more compression ratio and slower decodes. It's the number of bytes that a decision must save to - be worth a slower decode. Default is 256 (OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT). So that means the encoder must be able to save >= 256 bytes to - accept something that will slow down decoding (like adding another Huffman table). The typical range is - 64-1024. - - Lower _spaceSpeedTradeoffBytes_ = more compression, slower decode - Higher _spaceSpeedTradeoffBytes_ = less compression, faster decode - - _spaceSpeedTradeoffBytes_ is the primary parameter for controlling Hydra. The default value of 256 will make - Hydra decodes that are just a little bit faster than Kraken. You get Kraken speeds around 200, and Mermaid - speeds around 1200. 
- - At the extreme, a _spaceSpeedTradeoffBytes_ of zero would mean all you care about is compression ratio, not decode - speed, you want the encoder to make the smallest possible output. (you cannot actually set zero, as zero values - always mean "use default" in this struct; you never really want zero anyway) - Generally _spaceSpeedTradeoffBytes_ below 16 provides diminishing gains in size with pointless decode speed loss. - - _spaceSpeedTradeoffBytes_ is on sort of powers of 2 scale, so you might want to experiment with 32,64,128,256,512 - - _spaceSpeedTradeoffBytes_ outside the range [16 - 2048] is not recommended. - - _unused_was_maxHuffmansPerChunk_ : place holder, set to zero - - _sendQuantumCRCs_ : send hashes of the compressed data to verify in the decoder; not recommended, if you need data - verification, use your own system outside of Oodle. DEPRECATED, not recommended. For backwards compatibility only. - - _maxLocalDictionarySize_ : only applies to optimal parsers at level >= Optimal2. This limits the encoder memory use. - Making it larger = more compression, higher memory use. Matches within maxLocalDictionarySize are found exactly, - outside the maxLocalDictionarySize window an approximate long range matcher is used. - - _makeLongRangeMatcher_ : whether an LRM should be used to find matches outside the _maxLocalDictionarySize_ window - (Optimal levels only) - - _matchTableSizeLog2_ : for non-optimal levels (level <= Normal), controls the hash table size. Making this very - small can sometimes boost encoder speed. For the very fastest encoding, use the SuperFast level and change - _matchTableSizeLog2_ to 12 or 13. - - _matchTableSizeLog2_ should usually be left zero to use the encoder's default - - _matchTableSizeLog2_ allows you to limit memory use of the non-Optimal encoder levels. Memory use is roughly - ( 1 MB + 4 << matchTableSizeLog2 ) - - _jobify_ tells compressors how to use internal jobs for compression tasks. 
Jobs can be run in parallel using the - job system plugins set with $OodleCore_Plugins_SetJobSystem. Not all compressors or compression level support - jobs, but the slower ones generally do. The default value of jobify is to use a thread system if one is installed. - - _farMatchMinLen_ and _farMatchOffsetLog2_ can be used to tune the encoded stream for a known cache size on the - decoding hardware. If set, then offsets with log2 greater or each to _farMatchOffsetLog2_ must have a minimum - length of _farMatchMinLen_. For example to target a machine with a 2 MB cache, set _farMatchOffsetLog2_ to 21, - and _farMatchMinLen_ to something large, like 16 or 20. - - Without _farMatchMinLen_ and _farMatchOffsetLog2_ set, the Oodle encoders tune for a blend of cache sizes that works - well on most machines. _dictionarySize_ can also be used to tune for cache size, but cuts off all matches - beyond a certain distance. That may be more appropriate when you don't want to go out of cache at all. - _farMatchMinLen_ can only be used to make the standard blend target more restrictive; it can reduce the target cache size - but can't make it larger (or it can raise min match len outside cache but can't make it shorter). - - For help on setting up OodleLZ_CompressOptions contact support at oodle@radgametools.com - - NOTE : fields you do not set should always be zero initialized. In particular the _reserved_ fields should be zeroed. - Zero always means "use default" and is a future-portable initialization value. - - If you set fields to zero to mean "use default" you can call $OodleLZ_CompressOptions_Validate to change them - to default values. This is done automatically internally if you don't do it explicitly. 
- -*/ - -typedef enum OodleLZ_Decode_ThreadPhase -{ - OodleLZ_Decode_ThreadPhase1 = 1, - OodleLZ_Decode_ThreadPhase2 = 2, - OodleLZ_Decode_ThreadPhaseAll = 3, - OodleLZ_Decode_Unthreaded = OodleLZ_Decode_ThreadPhaseAll -} OodleLZ_Decode_ThreadPhase; -/* ThreadPhase for threaded Oodle decode - - Check $OodleLZ_Compressor_CanDecodeThreadPhased - (currently only used by Kraken) - - See $OodleLZ_About_ThreadPhasedDecode - -*/ - -typedef enum OodleLZ_FuzzSafe -{ - OodleLZ_FuzzSafe_No = 0, - OodleLZ_FuzzSafe_Yes = 1 -} OodleLZ_FuzzSafe; -/* OodleLZ_FuzzSafe (deprecated) - - About fuzz safety: - - Fuzz Safe decodes will not crash on corrupt data. They may or may not return failure, and produce garbage output. - - Fuzz safe decodes will not read out of bounds. They won't put data on the stack or previously in memory - into the output buffer. - - As of Oodle 2.9.0 all compressors supported are fuzzsafe, so OodleLZ_FuzzSafe_Yes should always be used and this - enum is deprecated. - -*/ - -#define OODLELZ_BLOCK_LEN (1<<18) /* The number of raw bytes per "seek chunk" - Seek chunks can be decompressed independently if $(OodleLZ_CompressOptions:seekChunkReset) is set. -*/ - -#define OODLELZ_BLOCK_MAXIMUM_EXPANSION (2) -#define OODLELZ_BLOCK_MAX_COMPLEN (OODLELZ_BLOCK_LEN+OODLELZ_BLOCK_MAXIMUM_EXPANSION) /* Maximum expansion per $OODLELZ_BLOCK_LEN is 1 byte. - Note that the compressed buffer must be allocated bigger than this (use $OodleLZ_GetCompressedBufferSizeNeeded) -*/ - -#define OODLELZ_QUANTUM_LEN (1<<14) /* Minimum decompression quantum (for old legacy codecs only) - - Deprecated. - - The new sea monster family of compressors use a whole block quantum (OODLELZ_BLOCK_LEN). 
- Check $OodleLZ_Compressor_UsesWholeBlockQuantum -*/ - -// 5 byte expansion per-quantum with CRC's -#define OODLELZ_QUANTUM_MAXIMUM_EXPANSION (5) - -#define OODLELZ_QUANTUM_MAX_COMPLEN (OODLELZ_QUANTUM_LEN+OODLELZ_QUANTUM_MAXIMUM_EXPANSION) - -#define OODLELZ_SEEKCHUNKLEN_MIN OODLELZ_BLOCK_LEN -#define OODLELZ_SEEKCHUNKLEN_MAX (1<<29) // half GB - -typedef OOSTRUCT OodleLZ_DecodeSome_Out -{ - OO_S32 decodedCount; // number of uncompressed bytes decoded - OO_S32 compBufUsed; // number of compressed bytes consumed - - - OO_S32 curQuantumRawLen; // tells you the current quantum size. you must have at least this much room available in the output buffer to be able to decode anything. - OO_S32 curQuantumCompLen; // if you didn't pass in enough data, nothing will decode (decodedCount will be 0), and this will tell you how much is needed -} OodleLZ_DecodeSome_Out; -/* Output value of $OodleLZDecoder_DecodeSome -*/ - -//--------------------------------------------- - -//======================================================= - -typedef OOSTRUCT OodleLZ_SeekTable -{ - OodleLZ_Compressor compressor; // which compressor was used - OO_BOOL seekChunksIndependent; // are the seek chunks independent, or must they be decompressed in sequence - - OO_S64 totalRawLen; // total uncompressed data lenth - OO_S64 totalCompLen; // sum of seekChunkCompLens - - OO_S32 numSeekChunks; // derived from rawLen & seekChunkLen - OO_S32 seekChunkLen; // multiple of OODLELZ_BLOCK_LEN - - OO_U32 * seekChunkCompLens; // array of compressed lengths of seek chunks - OO_U32 * rawCRCs; // crc of the raw bytes of the chunk (optional; NULL unless $OodleLZSeekTable_Flags_MakeRawCRCs was specified) -} OodleLZ_SeekTable; - -typedef enum OodleLZSeekTable_Flags -{ - OodleLZSeekTable_Flags_None = 0, // default - OodleLZSeekTable_Flags_MakeRawCRCs = 1, // make the _rawCRCs_ member of $OodleLZ_SeekTable - OodleLZSeekTable_Flags_Force32 = 0x40000000 -} OodleLZSeekTable_Flags; - 
-//===================================================== - - -typedef OOSTRUCT OodleConfigValues -{ - OO_S32 m_OodleLZ_LW_LRM_step; // LZHLW LRM : bytes between LRM entries - OO_S32 m_OodleLZ_LW_LRM_hashLength; // LZHLW LRM : bytes hashed for each LRM entries - OO_S32 m_OodleLZ_LW_LRM_jumpbits; // LZHLW LRM : bits of hash used for jump table - - OO_S32 m_OodleLZ_Decoder_Max_Stack_Size; // if OodleLZ_Decompress needs to allocator a Decoder object, and it's smaller than this size, it's put on the stack instead of the heap - OO_S32 m_OodleLZ_Small_Buffer_LZ_Fallback_Size_Unused; // deprecated - OO_S32 m_OodleLZ_BackwardsCompatible_MajorVersion; // if you need to encode streams that can be read with an older version of Oodle, set this to the Oodle2 MAJOR version number that you need compatibility with. eg to be compatible with oodle 2.7.3 you would put 7 here - - OO_U32 m_oodle_header_version; // = OODLE_HEADER_VERSION - -} OodleConfigValues; -/* OodleConfigValues - - Struct of user-settable low level config values. See $Oodle_SetConfigValues. - - May have different defaults per platform. -*/ - -OOFUNC1 void OOFUNC2 Oodle_GetConfigValues(OodleConfigValues * ptr); -/* Get $OodleConfigValues - - $:ptr filled with OodleConfigValues - - Gets the current $OodleConfigValues. - - May be different per platform. -*/ - -OOFUNC1 void OOFUNC2 Oodle_SetConfigValues(const OodleConfigValues * ptr); -/* Set $OodleConfigValues - - $:ptr your desired OodleConfigValues - - Sets the global $OodleConfigValues from your struct. - - You should call $Oodle_GetConfigValues to fill the struct, then change the values you - want to change, then call $Oodle_SetConfigValues. - - This should generally be done before doing anything with Oodle (eg. even before OodleX_Init). - Changing OodleConfigValues after Oodle has started has undefined effects. 
-*/ - -typedef enum Oodle_UsageWarnings -{ - Oodle_UsageWarnings_Enabled = 0, - Oodle_UsageWarnings_Disabled = 1, - Oodle_UsageWarnings_Force32 = 0x40000000 -} Oodle_UsageWarnings; -/* Whether Oodle usage warnings are enable or disabled. */ - -OOFUNC1 void OOFUNC2 Oodle_SetUsageWarnings(Oodle_UsageWarnings state); -/* Enables or disables Oodle usage warnings. - - $:state whether usage warnings should be enabled or disabled. - - Usage warnings are enabled by default and try to be low-noise, but in case you want to - disable them, this is how. - - This should generally be done once at startup. Setting this state while there are Oodle - calls running on other threads has undefined results. -*/ - -// function pointers to mallocs needed : - -OODEFFUNC typedef void * (OODLE_CALLBACK t_fp_OodleCore_Plugin_MallocAligned)( OO_SINTa bytes, OO_S32 alignment); -/* Function pointer type for OodleMallocAligned - - $:bytes number of bytes to allocate - $:alignment required alignment of returned pointer - $:return pointer to memory allocated (must not be NULL) - - _alignment_ will always be a power of two - - _alignment_ will always be >= $OODLE_MALLOC_MINIMUM_ALIGNMENT - -*/ - -OODEFFUNC typedef void (OODLE_CALLBACK t_fp_OodleCore_Plugin_Free)( void * ptr ); -/* Function pointer type for OodleFree - - $:return pointer to memory to free - -*/ - -OOFUNC1 void OOFUNC2 OodleCore_Plugins_SetAllocators( - t_fp_OodleCore_Plugin_MallocAligned * fp_OodleMallocAligned, - t_fp_OodleCore_Plugin_Free * fp_OodleFree); -/* Set the function pointers for allocation needed by Oodle2 Core - - If these are not set, the default implementation on most platforms uses the C stdlib. - On Microsoft platforms the default implementation uses HeapAlloc. - - These must not be changed once they are set! Set them once then don't change them. - - NOTE: if you are using Oodle Ext, do NOT call this. OodleX_Init will install an allocator for Oodle Core. Do not mix your own allocator with the OodleX allocator. 
See $OodleXAPI_Malloc. - - If you want to ensure that Oodle is not doing any allocations, you can call OodleCore_Plugins_SetAllocators(NULL,NULL); - If you do that, then any time Oodle needs to allocate memory internally, it will stop the process. - It is STRONGLY not recommended that you ship that way. You can verify that Oodle is not allocating, but then leave some - fallback allocator installed when you actually ship just in case. - - Also note that on many consoles the standard allocation practices may not - leave much heap memory for the C stdlib malloc. In this case Oodle may fail to allocate. - -*/ - -OODEFFUNC typedef OO_U64 (OODLE_CALLBACK t_fp_OodleCore_Plugin_RunJob)( t_fp_Oodle_Job * fp_job, void * job_data , OO_U64 * dependencies, int num_dependencies, void * user_ptr ); -/* Function pointer type for OodleCore_Plugins_SetJobSystem - - $:dependencies array of handles of other pending jobs. All guaranteed to be nonzero. - $:num_dependencies number of dependencies. Guaranteed to be no more than OODLE_JOB_MAX_DEPENDENCIES. - $:user_ptr is passed through from the OodleLZ_CompressOptions. - $:return handle to the async job, or 0 if it was run synchronously - - RunJob will call fp_job(job_data) - - it may be done on a thread, or it may run the function synchronously and return 0, indicating the job is already done. - The returned OO_U64 is a handle passed to WaitJob, unless it is 0, in which case WaitJob won't get called. - - fp_job should not run until all the dependencies are done. This function should not delete the dependencies. - - RunJob must be callable from within an Oodle Job, i.e. jobs may spawn their own sub-jobs directly. - However, the matching WaitJob calls will only ever occur on the thread that called the - internally threaded Oodle API function. 
- - See $Oodle_About_Job_Threading_Plugins -*/ - -OODEFFUNC typedef void (OODLE_CALLBACK t_fp_OodleCore_Plugin_WaitJob)( OO_U64 job_handle, void * user_ptr ); -/* Function pointer type for OodleCore_Plugins_SetJobSystem - - $:job_handle a job handle returned from RunJob. Never 0. - $:user_ptr is passed through from the OodleLZ_CompressOptions. - - Waits until the job specified by job_handle is done and cleans up any associated resources. Oodle - will call WaitJob exactly once for every RunJob call that didn't return 0. - - If job_handle was already completed, this should clean it up without waiting. - - A handle value should not be reused by another RunJob until WaitJob has been done with that value. - - WaitJob will not be called from running jobs. It will be only be called from the original thread that - invoked Oodle. If you are running Oodle from a worker thread, ensure that that thread is allowed to wait - on other job threads. - - See $Oodle_About_Job_Threading_Plugins -*/ - -OOFUNC1 void OOFUNC2 OodleCore_Plugins_SetJobSystem( - t_fp_OodleCore_Plugin_RunJob * fp_RunJob, - t_fp_OodleCore_Plugin_WaitJob * fp_WaitJob); -/* DEPRECATED use OodleCore_Plugins_SetJobSystemAndCount instead - - See $OodleCore_Plugins_SetJobSystemAndCount -*/ - - -OOFUNC1 void OOFUNC2 OodleCore_Plugins_SetJobSystemAndCount( - t_fp_OodleCore_Plugin_RunJob * fp_RunJob, - t_fp_OodleCore_Plugin_WaitJob * fp_WaitJob, - int target_parallelism); -/* Set the function pointers for async job system needed by Oodle2 Core - - $:fp_RunJob pointer to RunJob function - $:fp_WaitJob pointer to WaitJob function - $:target_parallelism goal of number of jobs to run simultaneously - - If these are not set, the default implementation runs jobs synchronously on the calling thread. - - These must not be changed once they are set! Set them once then don't change them. - - _target_parallelism_ allows you to tell Oodle how many Jobs it should try to keep in flight at once. 
- Depending on the operation it may not be able to split work into this many jobs (so fewer will be used), - but it will not exceed this count. - - For Oodle Data LZ work, typically _target_parallelism_ is usually best at the number of hardware cores - not including hyper threads). - - For Oodle Texture BCN encoding work, _target_parallelism_ is usually best as the full number of hyper cores. - - In some cases you may wish to reduce _target_parallelism_ by 1 or 2 cores to leave some of the CPU free for - other work. - - For example on a CPU with 16 cores and 32 hardware threads, for LZ work you might set _target_parallelism_ to 15 - when calling OodleCorePlugins. For BC7 encoding you might set _target_parallelism_ to 30 when calling OodleTexPlugins. - - NOTE : if you are using Oodle Ext, do NOT call this. OodleX_Init will install a job system for Oodle Core. - Note OodleX only installs automatically to Oodle Core, not Net or Tex. See example_jobify.cpp for manual - plugin. - - Replaces deprecated $OodleCore_Plugins_SetJobSystem - - See $Oodle_About_Job_Threading_Plugins -*/ - -// the main func pointer for log : -OODEFFUNC typedef void (OODLE_CALLBACK t_fp_OodleCore_Plugin_Printf)(int verboseLevel,const char * file,int line,const char * fmt,...); -/* Function pointer to Oodle Core printf - - $:verboseLevel verbosity of the message; 0-2 ; lower = more important - $:file C file that sent the message - $:line C line that sent the message - $:fmt vararg printf format string - - The logging function installed here must parse varargs like printf. - - _verboseLevel_ may be used to omit verbose messages. 
-*/ - -OOFUNC1 t_fp_OodleCore_Plugin_Printf * OOFUNC2 OodleCore_Plugins_SetPrintf(t_fp_OodleCore_Plugin_Printf * fp_rrRawPrintf); -/* Install the callback used by Oodle Core for logging - - $:fp_rrRawPrintf function pointer to your log function; may be NULL to disable all logging - $:return returns the previous function pointer - - Use this function to install your own printf for Oodle Core. - - The default implementation in debug builds, if you install nothing, uses the C stdio printf for logging. - On Microsoft platforms, it uses OutputDebugString and not stdio. - - To disable all logging, call OodleCore_Plugins_SetPrintf(NULL) - - WARNING : this function is NOT thread safe! It should be done only once and done in a place where the caller can guarantee thread safety. - - In the debug build of Oodle, you can install OodleCore_Plugin_Printf_Verbose to get more verbose logging - -*/ - -OODEFFUNC typedef OO_BOOL (OODLE_CALLBACK t_fp_OodleCore_Plugin_DisplayAssertion)(const char * file,const int line,const char * function,const char * message); -/* Function pointer to Oodle Core assert callback - - $:file C file that triggered the assert - $:line C line that triggered the assert - $:function C function that triggered the assert (may be NULL) - $:message assert message - $:return true to break execution at the assertion site, false to continue - - This callback is called by Oodle Core when it detects an assertion condition. - - This will only happen in debug builds. - - -*/ - -OOFUNC1 t_fp_OodleCore_Plugin_DisplayAssertion * OOFUNC2 OodleCore_Plugins_SetAssertion(t_fp_OodleCore_Plugin_DisplayAssertion * fp_rrDisplayAssertion); -/* Install the callback used by Oodle Core for asserts - - $:fp_rrDisplayAssertion function pointer to your assert display function - $:return returns the previous function pointer - - Use this function to install your own display for Oodle Core assertions. - This will only happen in debug builds. 
- - The default implementation in debug builds, if you install nothing, uses the C stderr printf for logging, - except on Microsoft platforms where it uses OutputDebugString. - - WARNING : this function is NOT thread safe! It should be done only once and done in a place where the caller can guarantee thread safety. - -*/ - -//============================================================= - - -OOFUNC1 void * OOFUNC2 OodleCore_Plugin_MallocAligned_Default(OO_SINTa size,OO_S32 alignment); -OOFUNC1 void OOFUNC2 OodleCore_Plugin_Free_Default(void * ptr); -OOFUNC1 void OOFUNC2 OodleCore_Plugin_Printf_Default(int verboseLevel,const char * file,int line,const char * fmt,...); -OOFUNC1 void OOFUNC2 OodleCore_Plugin_Printf_Verbose(int verboseLevel,const char * file,int line,const char * fmt,...); -OOFUNC1 OO_BOOL OOFUNC2 OodleCore_Plugin_DisplayAssertion_Default(const char * file,const int line,const char * function,const char * message); -OOFUNC1 OO_U64 OOFUNC2 OodleCore_Plugin_RunJob_Default( t_fp_Oodle_Job * fp_job, void * job_data, OO_U64 * dependencies, int num_dependencies, void * user_ptr ); -OOFUNC1 void OOFUNC2 OodleCore_Plugin_WaitJob_Default( OO_U64 job_handle, void * user_ptr ); - -//============================================================= - -//---------------------------------------------- -// OodleLZ - -#define OODLELZ_FAILED (0) /* Return value of OodleLZ_Decompress on failure -*/ - -//======================================================= - -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_Compress(OodleLZ_Compressor compressor, - const void * rawBuf,OO_SINTa rawLen,void * compBuf, - OodleLZ_CompressionLevel level, - const OodleLZ_CompressOptions * pOptions OODEFAULT(NULL), - const void * dictionaryBase OODEFAULT(NULL), - const void * lrm OODEFAULT(NULL), - void * scratchMem OODEFAULT(NULL), - OO_SINTa scratchSize OODEFAULT(0) ); -/* Compress some data from memory to memory, synchronously, with OodleLZ - - $:compressor which OodleLZ variant to use in compression - 
$:rawBuf raw data to compress - $:rawLen number of bytes in rawBuf to compress - $:compBuf pointer to write compressed data to ; should be at least $OodleLZ_GetCompressedBufferSizeNeeded - $:level OodleLZ_CompressionLevel controls how much CPU effort is put into maximizing compression - $:pOptions (optional) options; if NULL, $OodleLZ_CompressOptions_GetDefault is used - $:dictionaryBase (optional) if not NULL, provides preceding data to prime the dictionary; must be contiguous with rawBuf, the data between the pointers _dictionaryBase_ and _rawBuf_ is used as the preconditioning data. The exact same precondition must be passed to encoder and decoder. - $:lrm (optional) long range matcher - $:scratchMem (optional) pointer to scratch memory - $:scratchSize (optional) size of scratch memory (see $OodleLZ_GetCompressScratchMemBound) - $:return size of compressed data written, or $OODLELZ_FAILED for failure - - Performs synchronous memory to memory LZ compression. - - In tools, you should generally use $OodleXLZ_Compress_AsyncAndWait instead to get parallelism. (in the Oodle2 Ext lib) - - You can compress a large buffer in several calls by setting _dictionaryBase_ to the start - of the buffer, and then making _rawBuf_ and _rawLen_ select portions of that buffer. As long - as _rawLen_ is a multiple of $OODLELZ_BLOCK_LEN , the compressed chunks can simply be - concatenated together. - - If _scratchMem_ is provided, it will be used for the compressor's scratch memory needs before OodleMalloc is - called. If the scratch is big enough, no malloc will be done. If the scratch is not big enough, the compress - will not fail, instead OodleMalloc will be used. OodleMalloc should not return null. There is currently no way - to make compress fail cleanly due to using too much memory, it must either succeed or abort the process. - - If _scratchSize_ is at least $OodleLZ_GetCompressScratchMemBound , additional allocations will not be needed. 
- - See $OodleLZ_About for tips on setting the compression options. - - If _dictionaryBase_ is provided, the backup distance from _rawBuf_ must be a multiple of $OODLELZ_BLOCK_LEN - - If $(OodleLZ_CompressOptions:seekChunkReset) is enabled, and _dictionaryBase_ is not NULL or _rawBuf_ , then the - seek chunk boundaries are relative to _dictionaryBase_, not to _rawBuf_. - -*/ - -// Decompress returns raw (decompressed) len received -// Decompress returns 0 (OODLELZ_FAILED) if it detects corruption -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_Decompress(const void * compBuf,OO_SINTa compBufSize,void * rawBuf,OO_SINTa rawLen, - OodleLZ_FuzzSafe fuzzSafe OODEFAULT(OodleLZ_FuzzSafe_Yes), - OodleLZ_CheckCRC checkCRC OODEFAULT(OodleLZ_CheckCRC_No), - OodleLZ_Verbosity verbosity OODEFAULT(OodleLZ_Verbosity_None), - void * decBufBase OODEFAULT(NULL), - OO_SINTa decBufSize OODEFAULT(0), - OodleDecompressCallback * fpCallback OODEFAULT(NULL), - void * callbackUserData OODEFAULT(NULL), - void * decoderMemory OODEFAULT(NULL), - OO_SINTa decoderMemorySize OODEFAULT(0), - OodleLZ_Decode_ThreadPhase threadPhase OODEFAULT(OodleLZ_Decode_Unthreaded) - ); -/* Decompress a some data from memory to memory, synchronously. - - $:compBuf pointer to compressed data - $:compBufSize number of compressed bytes available (must be greater or equal to the number consumed) - $:rawBuf pointer to output uncompressed data into - $:rawLen number of uncompressed bytes to output - $:fuzzSafe (optional) should the decode fail if it contains non-fuzz safe codecs? - $:checkCRC (optional) if data could be corrupted and you want to know about it, pass OodleLZ_CheckCRC_Yes - $:verbosity (optional) if not OodleLZ_Verbosity_None, logs some info - $:decBufBase (optional) if not NULL, provides preceding data to prime the dictionary; must be contiguous with rawBuf, the data between the pointers _dictionaryBase_ and _rawBuf_ is used as the preconditioning data. 
The exact same precondition must be passed to encoder and decoder. The decBufBase must be a reset point. - $:decBufSize (optional) size of decode buffer starting at decBufBase, if 0, _rawLen_ is assumed - $:fpCallback (optional) OodleDecompressCallback to call incrementally as decode proceeds - $:callbackUserData (optional) passed as userData to fpCallback - $:decoderMemory (optional) pre-allocated memory for the Decoder, of size _decoderMemorySize_ - $:decoderMemorySize (optional) size of the buffer at _decoderMemory_; must be at least $OodleLZDecoder_MemorySizeNeeded bytes to be used - $:threadPhase (optional) for threaded decode; see $OodleLZ_About_ThreadPhasedDecode (default OodleLZ_Decode_Unthreaded) - $:return the number of decompressed bytes output, $OODLELZ_FAILED (0) if none can be decompressed - - Decodes data encoded with any $OodleLZ_Compressor. - - Note : _rawLen_ must be the actual number of bytes to output, the same as the number that were encoded with the corresponding - OodleLZ_Compress size. You must store this somewhere in your own header and pass it in to this call. _compBufSize_ does NOT - need to be the exact number of compressed bytes, is the number of bytes available in the buffer, it must be greater or equal to - the actual compressed length. - - Note that the new compressors (Kraken,Mermaid,Selkie,BitKnit) are all fuzz safe and you can use OodleLZ_FuzzSafe_Yes - with them and no padding of the decode target buffer. - - If checkCRC is OodleLZ_CheckCRC_Yes, then corrupt data will be detected and the decode aborted. - If checkCRC is OodleLZ_CheckCRC_No, then corruption might result in invalid data, but no detection of any error (garbage in, garbage out). - - If corruption is possible, _fuzzSafe_ is No and _checkCRC_ is OodleLZ_CheckCRC_No, $OodleLZ_GetDecodeBufferSize must be used to allocate - _rawBuf_ large enough to prevent overrun. 
- - $OodleLZ_GetDecodeBufferSize should always be used to ensure _rawBuf_ is large enough, even when corruption is not - possible (when fuzzSafe is No). - - _compBuf_ and _rawBuf_ are allowed to overlap for "in place" decoding, but then _rawBuf_ must be allocated to - the size given by $OodleLZ_GetInPlaceDecodeBufferSize , and the compressed data must be at the end of that buffer. - - An easy way to take the next step to parallel decoding is with $OodleXLZ_Decompress_MakeSeekTable_Wide_Async (in the Oodle2 Ext lib) - - NOTE : the return value is the *total* number of decompressed bytes output so far. If rawBuf is > decBufBase, that means - the initial inset of (rawBuf - decBufBase) is included! (eg. you won't just get _rawLen_) - - If _decBufBase_ is provided, the backup distance from _rawBuf_ must be a multiple of $OODLELZ_BLOCK_LEN - - About fuzz safety: - - OodleLZ_Decompress is guaranteed not to crash even if the data is corrupted when _fuzzSafe_ is set to OodleLZ_FuzzSafe_Yes. - When _fuzzSafe_ is Yes, the target buffer (_rawBuf_ and _rawLen_) will never be overrun. Note that corrupted data might not - be detected (the return value might indicate success). - - Fuzz Safe decodes will not crash on corrupt data. They may or may not return failure, and produce garbage output. - - Fuzz safe decodes will not read out of bounds. They won't put data on the stack or previously in memory - into the output buffer. - - Fuzz safe decodes will not output more than the uncompressed size. (eg. the output buffer does not need to - be padded like OodleLZ_GetDecodeBufferSize) - - If you ask for a fuzz safe decode and the compressor doesn't satisfy OodleLZ_Compressor_CanDecodeFuzzSafe - then it will return failure. - - The _fuzzSafe_ argument should always be OodleLZ_FuzzSafe_Yes as of Oodle 2.9.0 ; older compressors did not - support fuzz safety but they now all do. - - Use of OodleLZ_FuzzSafe_No is deprecated. 
- -*/ - - -//------------------------------------------- -// Incremental Decoder functions : - -struct _OodleLZDecoder; -typedef struct _OodleLZDecoder OodleLZDecoder; -/* Opaque type for OodleLZDecoder - - See $OodleLZDecoder_Create -*/ - - -OOFUNC1 OodleLZDecoder * OOFUNC2 OodleLZDecoder_Create(OodleLZ_Compressor compressor,OO_S64 rawLen,void * memory, OO_SINTa memorySize); -/* Create a OodleLZDecoder - - $:compressor the type of data you will decode; use $OodleLZ_Compressor_Invalid if unknown - $:rawLen total raw bytes of the decode - $:memory (optional) provide memory for the OodleLZDecoder object (not the window) - $:memorySize (optional) if memory is provided, this is its size in bytes - $:return the OodleLZDecoder - - If memory is provided, it must be of size $OodleLZDecoder_MemorySizeNeeded. If it is NULL it will be - allocated with the malloc specified by $OodleAPI_OodleCore_Plugins. - - Free with $OodleLZDecoder_Destroy. You should Destroy even if you passed in the memory. - - Providing _compressor_ lets the OodleLZDecoder be the minimum size needed for that type of data. - If you pass $OodleLZ_Compressor_Invalid, then any type of data may be decoded, and the Decoder is allocated - large enought to handle any of them. - - If you are going to pass rawLen to OodleLZDecoder_Reset , then you can pass 0 to rawLen here. - - See $OodleLZDecoder_DecodeSome for more. -*/ - -OOFUNC1 OO_S32 OOFUNC2 OodleLZDecoder_MemorySizeNeeded(OodleLZ_Compressor compressor OODEFAULT(OodleLZ_Compressor_Invalid), OO_SINTa rawLen OODEFAULT(-1)); -/* If you want to provide the memory needed by $OodleLZDecoder_Create , this tells you how big it must be. - - $:compressor the type of data you will decode; use $OodleLZ_Compressor_Invalid if unknown - $:rawLen should almost always be -1, which supports any size of raw data decompression - $:return bytes to allocate or reserve, 0 for failure - - NOTE : using $OodleLZ_Compressor_Invalid lets you decode any time of compressed data. 
- It requests as much memory as the largest compressor. This may be a *lot* more than your data needs; - try to use the correct compressor type. - - If _rawLen_ is -1 (default) then the Decoder object created can be used on any length of raw data - decompression. If _rawLen_ is specified here, then you can only use it to decode data shorter than - the length you specified here. This use case is very rare, contact support for details. -*/ - -OOFUNC1 OO_S32 OOFUNC2 OodleLZ_ThreadPhased_BlockDecoderMemorySizeNeeded(void); -/* Returns the size of the decoder needed for ThreadPhased decode - - For use with $OodleLZ_Decode_ThreadPhase - See $OodleLZ_About_ThreadPhasedDecode -*/ - -OOFUNC1 void OOFUNC2 OodleLZDecoder_Destroy(OodleLZDecoder * decoder); -/* Pairs with $OodleLZDecoder_Create - - You should always call Destroy even if you provided the memory for $OodleLZDecoder_Create -*/ - -// Reset decoder - can reset to the start of any OODLELZ_BLOCK_LEN chunk -OOFUNC1 OO_BOOL OOFUNC2 OodleLZDecoder_Reset(OodleLZDecoder * decoder, OO_SINTa decPos, OO_SINTa decLen OODEFAULT(0)); -/* Reset an OodleLZDecoder to restart at given pos - - $:decoder the OodleLZDecoder, made by $OodleLZDecoder_Create - $:decPos position to reset to; must be a multiple of OODLELZ_BLOCK_LEN - $:decLen (optional) if not zero, change the length of the data we expect to decode - $:return true for success - - If you are seeking in a packed stream, you must seek to a seek chunk reset point, as was made at compress time. - - That is, $(OodleLZ_CompressOptions:seekChunkReset) must have been true, and - _decPos_ must be a multiple of $(OodleLZ_CompressOptions:seekChunkLen) that was used at compress time. - - You can use $OodleLZ_GetChunkCompressor to verify that you are at a valid - independent chunk start point. 
- -*/ - -// returns false if corruption detected -OOFUNC1 OO_BOOL OOFUNC2 OodleLZDecoder_DecodeSome( - OodleLZDecoder * decoder, - OodleLZ_DecodeSome_Out * out, - - // the decode sliding window : we output here & read from this for matches - void * decBuf, - OO_SINTa decBufPos, - OO_SINTa decBufferSize, // decBufferSize should be the result of OodleLZDecoder_MakeDecodeBufferSize() - OO_SINTa decBufAvail, // usually Size - Pos, but maybe less if you have pending IO flushes - - // compressed data : - const void * compPtr, - OO_SINTa compAvail, - - OodleLZ_FuzzSafe fuzzSafe OODEFAULT(OodleLZ_FuzzSafe_No), - OodleLZ_CheckCRC checkCRC OODEFAULT(OodleLZ_CheckCRC_No), - OodleLZ_Verbosity verbosity OODEFAULT(OodleLZ_Verbosity_None), - OodleLZ_Decode_ThreadPhase threadPhase OODEFAULT(OodleLZ_Decode_Unthreaded) - - ); -/* Incremental decode some LZ compressed data - - $:decoder the OodleLZDecoder, made by $OodleLZDecoder_Create - $:out filled with results - $:decBuf the decode buffer (window) - $:decBufPos the current position in the buffer - $:decBufferSize size of decBuf ; this must be either equal to the total decompressed size (_rawLen_ passed to $OodleLZDecoder_Create) or the result of $OodleLZDecoder_MakeValidCircularWindowSize - $:decBufAvail the number of bytes available after decBufPos in decBuf ; usually (decBufferSize - decBufPos), but can be less - $:compPtr pointer to compressed data to read - $:compAvail number of compressed bytes available at compPtr - $:fuzzSafe (optional) should the decode be fuzz safe - $:checkCRC (optional) if data could be corrupted and you want to know about it, pass OodleLZ_CheckCRC_Yes - $:verbosity (optional) if not OodleLZ_Verbosity_None, logs some info - $:threadPhase (optional) for threaded decode; see $OodleLZ_About_ThreadPhasedDecode (default OodleLZ_Decode_Unthreaded) - $:return true if success, false if invalid arguments or data is encountered - - Decodes data encoded with an OodleLZ compressor. 
- - Decodes an integer number of quanta; quanta are $OODLELZ_QUANTUM_LEN uncompressed bytes. - - _decBuf_ can either be a circular window or the whole _rawLen_ array. - In either case, _decBufPos_ should be in the range [0,_decBufferSize_). - If _decBuf_ is a circular window, then _decBufferSize_ should come from $OodleLZDecoder_MakeValidCircularWindowSize. - - (circular windows are deprecated as of 2.9.0) - - NOTE : all the new LZ codecs (Kraken, etc.) do not do circular windows. They can do sliding windows, see lz_test_11 in $example_lz. - They should always have decBufferSize = total raw size, even if the decode buffer is smaller than that. - - NOTE : insufficient data provided (with _compAvail_ > 0 but not enough to decode a quantum) is a *success* case - (return value of true), even though nothing is decoded. A return of false always indicates a non-recoverable error. - - If _decBufAvail_ or _compAvail_ is insufficient for any decompression, the "curQuantum" fields of $OodleLZ_DecodeSome_Out - will tell you how much you must provide to proceed. That is, if enough compressed bytes are provided to get a quantum header, but not enough to decode a quantum, this - function returns true and fills out the $OodleLZ_DecodeSome_Out structure with the size of the quantum. - - See $OodleLZ_Decompress about fuzz safety. - - NOTE : DecodeSome expect to decode either one full quantum (of len $OODLELZ_QUANTUM_LEN) or up to the length of the total buffer specified in the -call to $OodleLZDecoder_Create or $OodleLZDecoder_Reset. That total buffer length -must match what was use during compression (or be a seek-chunk portion thereof). -That is, you cannot decompress partial streams in intervals smaller than -$OODLELZ_QUANTUM_LEN except for the final partial quantum at the end of the stream. 
- -*/ - -// pass in how much you want to alloc and it will tell you a valid size as close that as possible -// the main use is just to call OodleLZDecoder_MakeDecodeBufferSize(0) to get the min size; the min size is a good size -OOFUNC1 OO_S32 OOFUNC2 OodleLZDecoder_MakeValidCircularWindowSize(OodleLZ_Compressor compressor,OO_S32 minWindowSize OODEFAULT(0)); -/* Get a valid "Window" size for an LZ - - $:compressor which compressor you will be decoding - $:minWindowSize (optional) minimum size of the window - - NOTE: circular windows are deprecated as of 2.9.0 - - Most common usage is OodleLZDecoder_MakeValidCircularWindowSize(0) to get the minimum window size. - - Only compressors which pass $OodleLZ_Compressor_CanDecodeInCircularWindow can be decoded in a circular window. - - WARNING : this is NOT the size to malloc the window! you need to call $OodleLZ_GetDecodeBufferSize() and - pass in the window size to get the malloc size. -*/ - -//======================================================= - -//======================================================= -// remember if you want to IO the SeekEntries you need to make them endian-independent -// see WriteOOZHeader for example - -#define OODLELZ_SEEKPOINTCOUNT_DEFAULT 16 - -OOFUNC1 OO_S32 OOFUNC2 OodleLZ_MakeSeekChunkLen(OO_S64 rawLen, OO_S32 desiredSeekPointCount); -/* Compute a valid seekChunkLen - - $:rawLen total length of uncompressed data - $:desiredSeekPointCount desired number of seek chunks - $:return a valid seekChunkLen for use in $OodleLZ_CreateSeekTable - - Returns a seekChunkLen which is close to (rawLen/desiredSeekPointCount) but is a power of two multiple of $OODLELZ_BLOCK_LEN - - _desiredSeekPointCount_ = 16 is good for parallel decompression. 
- (OODLELZ_SEEKPOINTCOUNT_DEFAULT) -*/ - -OOFUNC1 OO_S32 OOFUNC2 OodleLZ_GetNumSeekChunks(OO_S64 rawLen, OO_S32 seekChunkLen); -/* Compute the number of seek chunks - - $:rawLen total length of uncompressed data - $:seekChunkLen the length of a seek chunk (eg from $OodleLZ_MakeSeekChunkLen) - $:return the number of seek chunks - - returns (rawLen+seekChunkLen-1)/seekChunkLen -*/ - -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetSeekTableMemorySizeNeeded(OO_S32 numSeekChunks,OodleLZSeekTable_Flags flags); -/* Tells you the size in bytes to allocate the seekTable before calling $OodleLZ_FillSeekTable - - $:numSeekChunks number of seek chunks (eg from $OodleLZ_GetNumSeekChunks) - $:flags options that will be passed to $OodleLZ_CreateSeekTable - $:return size in bytes of memory needed for seek table - - If you wish to provide the memory for the seek table yourself, you may call this to get the required size, - allocate the memory, and then simply point a $OodleLZ_SeekTable at your memory. - Then use $OodleLZ_FillSeekTable to fill it out. - - Do NOT use sizeof(OodleLZ_SeekTable) ! -*/ - -OOFUNC1 OO_BOOL OOFUNC2 OodleLZ_FillSeekTable(OodleLZ_SeekTable * pTable,OodleLZSeekTable_Flags flags,OO_S32 seekChunkLen,const void * rawBuf, OO_SINTa rawLen,const void * compBuf,OO_SINTa compLen); -/* scan compressed LZ stream to fill the seek table - - $:pTable pointer to table to be filled - $:flags options - $:seekChunkLen the length of a seek chunk (eg from $OodleLZ_MakeSeekChunkLen) - $:rawBuf (optional) uncompressed buffer; used to compute the _rawCRCs_ member of $OodleLZ_SeekTable - $:rawLen size of rawBuf - $:compBuf compressed buffer - $:compLen size of compBuf - $:return true for success - - _pTable_ must be able to hold at least $OodleLZ_GetSeekTableMemorySizeNeeded - - _seekChunkLen_ must be a multiple of $OODLELZ_BLOCK_LEN. - _seekChunkLen_ must match what was in CompressOptions when the buffer was made, or any integer multiple thereof. 
-*/ - - -OOFUNC1 OodleLZ_SeekTable * OOFUNC2 OodleLZ_CreateSeekTable(OodleLZSeekTable_Flags flags,OO_S32 seekChunkLen,const void * rawBuf, OO_SINTa rawLen,const void * compBuf,OO_SINTa compLen); -/* allocate a table, then scan compressed LZ stream to fill the seek table - - $:flags options - $:seekChunkLen the length of a seek chunk (eg from $OodleLZ_MakeSeekChunkLen) - $:rawBuf (optional) uncompressed buffer; used to compute the _rawCRCs_ member of $OodleLZ_SeekTable - $:rawLen size of rawBuf - $:compBuf compressed buffer - $:compLen size of compBuf - $:return pointer to table if succeeded, null if failed - - Same as $OodleLZ_FillSeekTable , but allocates the memory for you. Use $OodleLZ_FreeSeekTable to free. - - _seekChunkLen_ must be a multiple of $OODLELZ_BLOCK_LEN. - _seekChunkLen_ must match what was in CompressOptions when the buffer was made, or any integer multiple thereof. - -*/ - -OOFUNC1 void OOFUNC2 OodleLZ_FreeSeekTable(OodleLZ_SeekTable * pTable); -/* Frees a table allocated by $OodleLZ_CreateSeekTable -*/ - -OOFUNC1 OO_BOOL OOFUNC2 OodleLZ_CheckSeekTableCRCs(const void * rawBuf,OO_SINTa rawLen, const OodleLZ_SeekTable * seekTable); -/* Check the CRC's in seekTable vs rawBuf - - $:rawBuf uncompressed buffer - $:rawLen size of rawBuf - $:seekTable result of $OodleLZ_CreateSeekTable - $:return true if the CRC's check out - - Note that $OodleLZ_Decompress option of $OodleLZ_CheckCRC checks the CRC of *compressed* data, - this call checks the CRC of the *raw* (uncompressed) data. - - OodleLZ data contains a CRC of the compressed data if it was made with $(OodleLZ_CompressOptions:sendQuantumCRCs). - The SeekTable contains a CRC of the raw data if it was made with $OodleLZSeekTable_Flags_MakeRawCRCs. - - Checking the CRC of compressed data is faster, but does not verify that the decompress succeeded. 
-*/ - -OOFUNC1 OO_S32 OOFUNC2 OodleLZ_FindSeekEntry( OO_S64 rawPos, const OodleLZ_SeekTable * seekTable); -/* Find the seek entry that contains a raw position - - $:rawPos uncompressed position to look for - $:seekTable result of $OodleLZ_CreateSeekTable - $:return a seek entry index - - returns the index of the chunk that contains _rawPos_ -*/ - -OOFUNC1 OO_S64 OOFUNC2 OodleLZ_GetSeekEntryPackedPos( OO_S32 seekI , const OodleLZ_SeekTable * seekTable ); -/* Get the compressed position of a seek entry - - $:seekI seek entry index , in [0,numSeekEntries) - $:seekTable result of $OodleLZ_CreateSeekTable - $:return compressed buffer position of the start of this seek entry - - -*/ - -//============================================================= - -OOFUNC1 const char * OOFUNC2 OodleLZ_CompressionLevel_GetName(OodleLZ_CompressionLevel compressSelect); -/* Provides a string naming a $OodleLZ_CompressionLevel compressSelect -*/ - -OOFUNC1 const char * OOFUNC2 OodleLZ_Compressor_GetName(OodleLZ_Compressor compressor); -/* Provides a string naming a $OodleLZ_Compressor compressor -*/ - -OOFUNC1 const char * OOFUNC2 OodleLZ_Jobify_GetName(OodleLZ_Jobify jobify); -/* Provides a string naming a $OodleLZ_Jobify enum -*/ - -OOFUNC1 const OodleLZ_CompressOptions * OOFUNC2 OodleLZ_CompressOptions_GetDefault( - OodleLZ_Compressor compressor OODEFAULT(OodleLZ_Compressor_Invalid), - OodleLZ_CompressionLevel lzLevel OODEFAULT(OodleLZ_CompressionLevel_Normal)); -/* Provides a pointer to default compression options - - $:compressor deprecated, ignored - $:lzLevel deprecated, ignored - - Use to fill your own $OodleLZ_CompressOptions then change individual fields. 
- -*/ - -// after you fiddle with options, call this to ensure they are allowed -OOFUNC1 void OOFUNC2 OodleLZ_CompressOptions_Validate(OodleLZ_CompressOptions * pOptions); -/* Clamps the values in _pOptions_ to be in valid range - -*/ - -// inline functions for compressor property queries -OODEFSTART - -OO_BOOL OodleLZ_Compressor_UsesWholeBlockQuantum(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor is "whole block quantum" ; must decode in steps of - $OODLELZ_BLOCK_LEN , not $OODLELZ_QUANTUM_LEN like others. -*/ -OO_BOOL OodleLZ_Compressor_UsesLargeWindow(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor is "LargeWindow" or not, meaning it can benefit from - a Long-Range-Matcher and windows larger than $OODLELZ_BLOCK_LEN -*/ -OO_BOOL OodleLZ_Compressor_CanDecodeInCircularWindow(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor can be decoded using a fixed size circular window. - deprecated as of 2.9.0 -*/ -OO_BOOL OodleLZ_Compressor_CanDecodeThreadPhased(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor can be used with the $OodleLZ_Decode_ThreadPhase. - - See $OodleLZ_About_ThreadPhasedDecode -*/ -OO_BOOL OodleLZ_Compressor_CanDecodeInPlace(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor can be used with "in-place" decoding. - - This is now always true (all compressors support in-place decoding). The function is left - for backward compatibility. - - All compressors in the future will support in-place, you don't need to check this property. - -*/ -OO_BOOL OodleLZ_Compressor_MustDecodeWithoutResets(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor must decode contiguous ranges of buffer with the same Decoder. 
- - That is, most of the compressors can be Reset and restart on any block, not just seek blocks, - as long as the correct window data is provided. That is, if this returns false then the only - state required across a non-reset block is the dictionary of previously decoded data. - - But if OodleLZ_Compressor_MustDecodeWithoutResets returns true, then you cannot do that, - because the Decoder object must carry state across blocks (except reset blocks). - - This does not apply to seek points - you can always reset and restart decompression at a seek point. -*/ -OO_BOOL OodleLZ_Compressor_CanDecodeFuzzSafe(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor is "fuzz safe" which means it can accept corrupted data - and won't crash or overrun any buffers. -*/ - -OO_BOOL OodleLZ_Compressor_RespectsDictionarySize(OodleLZ_Compressor compressor); -/* OodleLZ_Compressor properties helper. - - Tells you if this compressor obeys $(OodleLZ_CompressOptions:dictionarySize) which limits - match references to a finite bound. (eg. for sliding window decompression). - - All the new codecs do (Kraken,Mermaid,Selkie,Leviathan). Some old codecs don't. 
-*/ -//===================================================================== - -#define OODLELZ_COMPRESSOR_MASK(c) (((OO_U32)1)<<((OO_S32)(c))) -// OODLELZ_COMPRESSOR_BOOLBIT : extract a value of 1 or 0 so it maps to "bool" -#define OODLELZ_COMPRESSOR_BOOLBIT(s,c) (((s)>>(OO_S32)(c))&1) - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_IsNewLZFamily(OodleLZ_Compressor compressor) -{ - const OO_U32 set = - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Kraken) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Leviathan) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Mermaid) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Selkie) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Hydra); - return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeFuzzSafe(OodleLZ_Compressor compressor) -{ - #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS - const OO_U32 set = - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_None) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Kraken) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Leviathan) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Mermaid) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Selkie) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Hydra) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_BitKnit) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZB16); - return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); - #else - // all new compressors are fuzz safe - return compressor != OodleLZ_Compressor_Invalid; - #endif -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_RespectsDictionarySize(OodleLZ_Compressor compressor) -{ - #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS - const OO_U32 set = - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_None) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Kraken) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Leviathan) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Mermaid) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Selkie) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Hydra) | - 
OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZNA) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_BitKnit); - return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); - #else - // all new compressors respect dictionarySize - return compressor != OodleLZ_Compressor_Invalid; - #endif -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_UsesWholeBlockQuantum(OodleLZ_Compressor compressor) -{ - return OodleLZ_Compressor_IsNewLZFamily(compressor); -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeThreadPhased(OodleLZ_Compressor compressor) -{ - return OodleLZ_Compressor_IsNewLZFamily(compressor); -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeInPlace(OodleLZ_Compressor compressor) -{ - // all compressors can now decode in place : - return compressor != OodleLZ_Compressor_Invalid; -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeInCircularWindow(OodleLZ_Compressor compressor) -{ - #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS - const OO_U32 set = - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZH) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZB16); - #else - const OO_U32 set = 0; - #endif - - return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_UsesLargeWindow(OodleLZ_Compressor compressor) -{ - // all but LZH and LZB16 now are large window - return ! OodleLZ_Compressor_CanDecodeInCircularWindow(compressor); -} - -OOINLINEFUNC OO_BOOL OodleLZ_Compressor_MustDecodeWithoutResets(OodleLZ_Compressor compressor) -{ - #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS - const OO_U32 set = - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_BitKnit) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZA) | - OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZNA); - #else - const OO_U32 set = 0; - #endif - - return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); -} - -OODEFEND - -//======================================================= - - -#define OODLELZ_SCRATCH_MEM_NO_BOUND (-1) /* Scratch mem size when bound is unknown. 
- Installed allocator may be used no matter how much scratch mem you provide. -*/ - -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetCompressScratchMemBound( - OodleLZ_Compressor compressor, - OodleLZ_CompressionLevel level, - OO_SINTa rawLen, - const OodleLZ_CompressOptions * pOptions OODEFAULT(NULL) - ); -/* Return the maximum amount of scratch mem that will be needed by OodleLZ_Compress - - $:compressor which OodleLZ variant to use in compression - $:level OodleLZ_CompressionLevel controls how much CPU effort is put into maximizing compression - $:rawLen maximum number of bytes you will compress (plus dictionary backup) - $:pOptions (optional) options; if NULL, $OodleLZ_CompressOptions_GetDefault is used - - If you pass scratch mem to $OodleLZ_Compress of this size, it is gauranteed to do no allocations. - (normally if it runs out of scratch mem, it falls back to the installed allocator) - - For _rawLen_ pass at least the maximum size you will ever encode. If your data is divided into chunks, - pass the chunk size. If you will encode full buffers of unbounded size, pass -1. - - The options must be the same as when you call $OodleLZ_Compress - - Some options and levels may not have simple finite bounds. Then $OODLELZ_SCRATCH_MEM_NO_BOUND is returned - and the call to $OodleLZ_Compress may use the allocator even if infinite scratch memory is provided. - Currently this applies to all the Optimal levels. - - When OODLELZ_SCRATCH_MEM_NO_BOUND is returned, you can still pass in scratch mem which will be used before - going to the plugin allocator. 
- -*/ - -// get maximum expanded size for compBuf alloc : -// (note this is actually larger than the maximum compressed stream, it includes trash padding) -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetCompressedBufferSizeNeeded(OodleLZ_Compressor compressor,OO_SINTa rawSize); -/* Return the size you must malloc the compressed buffer - - $:compressor compressor used; OodleLZ_Compressor_Invalid to make it enough for any compressor - $:rawSize uncompressed size you will compress into this buffer - - The _compBuf_ passed to $OodleLZ_Compress must be allocated at least this big. - - note this is actually larger than the maximum size of a compressed stream, it includes overrun padding. - -*/ - -// decBuf needs to be a little larger than rawLen, -// this will tell you exactly how much : -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetDecodeBufferSize(OodleLZ_Compressor compressor,OO_SINTa rawSize,OO_BOOL corruptionPossible); -/* Get the size you must malloc the decode (raw) buffer - - $:compressor compressor used; OodleLZ_Compressor_Invalid to make it enough for any compressor - $:rawSize uncompressed (raw) size without padding - $:corruptionPossible true if it is possible for the decoder to get corrupted data - $:return size of buffer to malloc; slightly larger than rawSize if padding is needed - - As of Oodle 2.9.0 this function is deprecated. For all new codecs you can just use the size of the - uncompressed data for the decode buffer size (_rawSize_), no padding is needed. - - Note that LZB16 is still supported in 2.9.0 but does require padding when used in a circular - window (which is deprecated). - - This padding is necessary for the older compressors when FuzzSafe_No is used. The old compressors - and FuzzSafe_No are no longer supported. - - If _corruptionPossible_ is true, a slightly larger buffer size is returned. 
- - If _corruptionPossible_ is false, then you must ensure that the decoder does not get corrupted data, - either by passing $OodleLZ_CheckCRC_Yes , or by your own mechanism. - - Note about possible overrun in LZ decoding (applies to the old non-fuzz-safe compressors) : - as long as the compresseddata is not corrupted, - and you decode either the entire compressed buffer, or an integer number of "seek chunks" ($OODLELZ_BLOCK_LEN), - then there will be no overrun. So you can decode LZ data in place and it won't stomp any following bytes. - If those conditions are not true (eg. decoding only part of a larger compressed stream, decoding - around a circular window, decoding data that may be corrupted), then there may be some limited amount of - overrun on decode, as returned by $OodleLZ_GetDecodeBufferSize. - - -*/ - -// OodleLZ_GetInPlaceDecodeBufferSize : -// after compressing, ask how big the in-place buffer needs to be -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetInPlaceDecodeBufferSize(OodleLZ_Compressor compressor,OO_SINTa compLen, OO_SINTa rawLen); -/* Get the size of buffer needed for "in place" decode - - $:compressor compressor used; OodleLZ_Compressor_Invalid to make it enough for any compressor - $:compLen compressed data length - $:rawLen decompressed data length - $:return size of buffer needed for "in place" decode ; slighly larger than rawLen - - To do an "in place" decode, allocate a buffer of this size (or larger). Read the compressed data into the end of - the buffer, and decompress to the front of the buffer. The size returned here guarantees that the writes to the - front of the buffer don't conflict with the reads from the end. - - If _compressor_ is one of the new codecs (Kraken,Mermaid,Selkie,Leviathan), the padding for in place decodes can be - very small indeed. It is assumed you will be passing FuzzSafe_Yes to the decompress call. 
- - If _compLen_ is unknown, you want an in place buffer size that can accomodate any compressed data, then - pass compLen = 0. - - See $OodleLZ_Decompress for more. -*/ - -// GetCompressedStepForRawStep is at OODLELZ_QUANTUM_LEN granularity -// returns how many packed bytes to step to get the desired raw count step -OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetCompressedStepForRawStep( - const void * compPtr, OO_SINTa compAvail, - OO_SINTa startRawPos, OO_SINTa rawSeekBytes, - OO_SINTa * pEndRawPos OODEFAULT(NULL), - OO_BOOL * pIndependent OODEFAULT(NULL) ); -/* How many bytes to step a compressed pointer to advance a certain uncompressed amount - - $:compPtr current compressed pointer - $:compAvail compressed bytes available at compPtr - $:startRawPos initial raw pos (corresponding to compPtr) - $:rawSeekBytes the desired step in raw bytes, must be a multiple of $OODLELZ_QUANTUM_LEN or $OODLELZ_BLOCK_LEN - $:pEndRawPos (optional) filled with the end raw pos actually reached - $:pIndependent (optional) filled with a bool that is true if the current chunk is independent from previous - $:return the number of compressed bytes to step - - You should try to use GetCompressedStepForRawStep only at block granularity - both _startRawPos_ and - _rawSeekBytes_ should be multiples of OODLELZ_BLOCK_LEN (except at the end of the stream). As long as you - do that, then *pEndRawPos will = startRawPos + rawSeekBytes. - - You can use it at quantum granularity (OODLELZ_QUANTUM_LEN), but there are some caveats. You cannot step - quanta inside uncompressed blocks, only in normal LZ blocks. If you try to seek quanta inside an uncompressed - block, you will get *pEndRawPos = the end of the block. - - You can only resume seeking from *pEndRawPos . - - returns 0 for valid not-enough-data case - returns -1 for error - - If _compAvail_ is not the whole compressed buffer, then the returned step may be less than the amount you requested. - eg. 
if the compressed data in _compAvail_ does not contain enough data to make a step of _rawSeekBytes_ a smaller - step will be taken. - NOTE : *can* return comp step > comp avail ! - - -*/ - -OOFUNC1 OodleLZ_Compressor OOFUNC2 OodleLZ_GetAllChunksCompressor(const void * compBuf,OO_SINTa compBufSize, - OO_SINTa rawLen); -/* ask who compressed all chunks in this buf chunk - - $:compBuf pointer to compressed data; must be the start of compressed buffer, or a step of $OODLELZ_BLOCK_LEN raw bytes - $:compBufSize size of _compBuf_ - $:rawLen rawlen of data in _compBuf_ - $:return the $OodleLZ_Compressor used to encode this chunk - - returns a simple compressor (for example OodleLZ_Compressor_Kraken) if that was used on all chunks - - returns OodleLZ_Compressor_Hydra if different NewLZ encoders were used (for example Kraken+Mermaid) - - returns OodleLZ_Compressor_Count if a heterogenous mix of compressors was used (not just NewLZ) - - returns OodleLZ_Compressor_Invalid on error - - note this is only for this chunk - later chunks may have different compressors (eg. with Hydra) - if you compressed all chunks the same it's up to you to store that info in your header - - returns OodleLZ_Compressor_Invalid if _compBufSize_ is too small or any chunk is corrupt -*/ - -OOFUNC1 OodleLZ_Compressor OOFUNC2 OodleLZ_GetFirstChunkCompressor(const void * compChunkPtr, - OO_SINTa compBufAvail, - OO_BOOL * pIndependent); -/* ask who compressed this chunk - - $:compChunkPtr pointer to compressed data; must be the start of compressed buffer, or a step of $OODLELZ_BLOCK_LEN raw bytes - $:compBufAvail number of bytes at _compChunkPtr_ available to read - $:pIndependent (optional) filled with a bool for whether this chunk is independent of predecessors - $:return the $OodleLZ_Compressor used to encode this chunk - - note this is only for this chunk - later chunks may have different compressors (eg. 
with Hydra) - if you compressed all chunks the same it's up to you to store that info in your header - - Use $OodleLZ_GetAllChunksCompressor for data that might be mixed compressors. - - This replaces the deprecated function $OodleLZ_GetChunkCompressor - - returns OodleLZ_Compressor_Invalid if _compBufAvail_ is too small or the chunk is corrupt -*/ - -OOFUNC1 OodleLZ_Compressor OOFUNC2 OodleLZ_GetChunkCompressor(const void * compChunkPtr, - OO_SINTa compBufAvail, - OO_BOOL * pIndependent); -/* Deprecated entry point for backwards compatibility - - Use $OodleLZ_GetFirstChunkCompressor or $OodleLZ_GetAllChunksCompressor - -*/ - -//======================================================= - -#define OODLE_HEADER_VERSION ((46<<24)|(OODLE2_VERSION_MAJOR<<16)|(OODLE2_VERSION_MINOR<<8)|(OO_U32)sizeof(OodleLZ_SeekTable)) /* OODLE_HEADER_VERSION is used to ensure the Oodle header matches the lib. Don't copy the value of this macro, it will change when - the header is rev'ed. - - This is what you pass to $OodleX_Init or $Oodle_CheckVersion -*/ - -OOFUNC1 OO_BOOL OOFUNC2 Oodle_CheckVersion(OO_U32 oodle_header_version, OO_U32 * pOodleLibVersion OODEFAULT(NULL)); -/* Check the Oodle lib version against the header you are compiling with - - $:oodle_header_version pass $OODLE_HEADER_VERSION here - $:pOodleLibVersion (optional) filled with the Oodle lib version - $:return false if $OODLE_HEADER_VERSION is not compatible with this lib - - If you use the Oodle2 Ext lib,, $OodleX_Init does it for you. But if you want to check that you have a - compatible lib before trying to Init, then use this. 
-*/ - -OOFUNC1 void OOFUNC2 Oodle_LogHeader(void); -/* Log the Oodle version & copyright - - Uses the log set with $OodleCore_Plugins_SetPrintf -*/ - -// define old names so they still compile : -#define OODLECORE_PLUGIN_JOB_MAX_DEPENDENCIES OODLE_JOB_MAX_DEPENDENCIES -#define t_fp_OodleCore_Plugin_Job t_fp_Oodle_Job - -#ifdef _MSC_VER -#pragma warning(pop) -#pragma pack(pop, Oodle) -#endif - -#endif // __OODLE2_H_INCLUDED__ diff --git a/lib/oodle/oodle2base.h b/lib/oodle/oodle2base.h deleted file mode 100644 index 05f73f3..0000000 --- a/lib/oodle/oodle2base.h +++ /dev/null @@ -1,167 +0,0 @@ - -//=================================================== -// Oodle2 Base header -// (C) Copyright 1994-2021 Epic Games Tools LLC -//=================================================== - -#ifndef __OODLE2BASE_H_INCLUDED__ -#define __OODLE2BASE_H_INCLUDED__ - -#ifndef OODLE2BASE_PUBLIC_HEADER -#define OODLE2BASE_PUBLIC_HEADER 1 -#endif - -#ifdef _MSC_VER -#pragma pack(push, Oodle, 8) - -#pragma warning(push) -#pragma warning(disable : 4127) // conditional is constant -#endif - -#ifndef OODLE_BASE_TYPES_H -#define OODLE_BASE_TYPES_H - -#include - -#define OOCOPYRIGHT "Copyright (C) 1994-2021, Epic Games Tools LLC" - -// Typedefs -typedef int8_t OO_S8; -typedef uint8_t OO_U8; -typedef int16_t OO_S16; -typedef uint16_t OO_U16; -typedef int32_t OO_S32; -typedef uint32_t OO_U32; -typedef int64_t OO_S64; -typedef uint64_t OO_U64; -typedef float OO_F32; -typedef double OO_F64; -typedef intptr_t OO_SINTa; -typedef uintptr_t OO_UINTa; -typedef int32_t OO_BOOL; - -// Struct packing handling and inlining -#if defined(__GNUC__) || defined(__clang__) - #define OOSTRUCT struct __attribute__((__packed__)) - #define OOINLINEFUNC inline -#elif defined(_MSC_VER) - // on VC++, we use pragmas for the struct packing - #define OOSTRUCT struct - #define OOINLINEFUNC __inline -#endif - -// Linkage stuff -#if defined(_WIN32) - #define OOLINK __stdcall - #define OOEXPLINK __stdcall -#else - #define OOLINK 
- #define OOEXPLINK -#endif - -// C++ name demangaling -#ifdef __cplusplus - #define OODEFFUNC extern "C" - #define OODEFSTART extern "C" { - #define OODEFEND } - #define OODEFAULT( val ) =val -#else - #define OODEFFUNC - #define OODEFSTART - #define OODEFEND - #define OODEFAULT( val ) -#endif - -// ======================================================== -// Exported function declarations -#define OOEXPFUNC OODEFFUNC - -//=========================================================================== -// OO_STRING_JOIN joins strings in the preprocessor and works with LINESTRING -#define OO_STRING_JOIN(arg1, arg2) OO_STRING_JOIN_DELAY(arg1, arg2) -#define OO_STRING_JOIN_DELAY(arg1, arg2) OO_STRING_JOIN_IMMEDIATE(arg1, arg2) -#define OO_STRING_JOIN_IMMEDIATE(arg1, arg2) arg1 ## arg2 - -//=========================================================================== -// OO_NUMBERNAME is a macro to make a name unique, so that you can use it to declare -// variable names and they won't conflict with each other -// using __LINE__ is broken in MSVC with /ZI , but __COUNTER__ is an MSVC extension that works - -#ifdef _MSC_VER - #define OO_NUMBERNAME(name) OO_STRING_JOIN(name,__COUNTER__) -#else - #define OO_NUMBERNAME(name) OO_STRING_JOIN(name,__LINE__) -#endif - -//=================================================================== -// simple compiler assert -// this happens at declaration time, so if it's inside a function in a C file, drop {} around it -#ifndef OO_COMPILER_ASSERT - #if defined(__clang__) - #define OO_COMPILER_ASSERT_UNUSED __attribute__((unused)) // hides warnings when compiler_asserts are in a local scope - #else - #define OO_COMPILER_ASSERT_UNUSED - #endif - - #define OO_COMPILER_ASSERT(exp) typedef char OO_NUMBERNAME(_dummy_array) [ (exp) ? 
1 : -1 ] OO_COMPILER_ASSERT_UNUSED -#endif - - -#endif - - - -// Oodle2 base header - -#ifndef OODLE2_PUBLIC_CORE_DEFINES -#define OODLE2_PUBLIC_CORE_DEFINES 1 - -#define OOFUNC1 OOEXPFUNC -#define OOFUNC2 OOEXPLINK -#define OOFUNCSTART -#define OODLE_CALLBACK OOLINK - -// Check build flags - #if defined(OODLE_BUILDING_LIB) || defined(OODLE_BUILDING_DLL) - #error Should not see OODLE_BUILDING set for users of oodle.h - #endif - -#ifndef NULL -#define NULL (0) -#endif - -// OODLE_MALLOC_MINIMUM_ALIGNMENT is 8 in 32-bit, 16 in 64-bit -#define OODLE_MALLOC_MINIMUM_ALIGNMENT ((OO_SINTa)(2*sizeof(void *))) - -typedef void (OODLE_CALLBACK t_OodleFPVoidVoid)(void); -/* void-void callback func pointer - takes void, returns void -*/ - -typedef void (OODLE_CALLBACK t_OodleFPVoidVoidStar)(void *); -/* void-void-star callback func pointer - takes void pointer, returns void -*/ - -#define OODLE_JOB_MAX_DEPENDENCIES (4) /* Maximum number of dependencies Oodle will ever pass to a RunJob callback -*/ - -#define OODLE_JOB_NULL_HANDLE (0) /* Value 0 of Jobify handles is reserved to mean none -* Wait(OODLE_JOB_NULL_HANDLE) is a nop -* if RunJob returns OODLE_JOB_NULL_HANDLE it means the job -* was run synchronously and no wait is required -*/ - -#define t_fp_Oodle_Job t_OodleFPVoidVoidStar /* Job function pointer for Plugin Jobify system - - takes void pointer returns void -*/ - -#endif // OODLE2_PUBLIC_CORE_DEFINES - -#ifdef _MSC_VER -#pragma warning(pop) -#pragma pack(pop, Oodle) -#endif - -#endif // __OODLE2BASE_H_INCLUDED__ diff --git a/lib/oodle/src/lib.rs b/lib/oodle/src/lib.rs deleted file mode 100644 index 871daab..0000000 --- a/lib/oodle/src/lib.rs +++ /dev/null @@ -1,146 +0,0 @@ -#![allow(non_upper_case_globals)] -#![allow(non_camel_case_types)] -#![allow(non_snake_case)] - -use std::ptr; - -use color_eyre::{eyre, Result}; - -#[allow(dead_code)] -#[allow(clippy::identity_op)] -mod bindings { - include!(concat!(env!("OUT_DIR"), "/bindings.rs")); -} - -// Hardcoded chunk size 
of Bitsquid's bundle compression -pub const CHUNK_SIZE: usize = 512 * 1024; -pub const COMPRESSOR: bindings::OodleLZ_Compressor = - bindings::OodleLZ_Compressor_OodleLZ_Compressor_Kraken; -pub const LEVEL: bindings::OodleLZ_CompressionLevel = - bindings::OodleLZ_CompressionLevel_OodleLZ_CompressionLevel_Optimal2; - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum OodleLZ_FuzzSafe { - Yes, - No, -} - -impl From for bindings::OodleLZ_FuzzSafe { - fn from(value: OodleLZ_FuzzSafe) -> Self { - match value { - OodleLZ_FuzzSafe::Yes => bindings::OodleLZ_FuzzSafe_OodleLZ_FuzzSafe_Yes, - OodleLZ_FuzzSafe::No => bindings::OodleLZ_FuzzSafe_OodleLZ_FuzzSafe_No, - } - } -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum OodleLZ_CheckCRC { - Yes, - No, -} - -impl From for bindings::OodleLZ_CheckCRC { - fn from(value: OodleLZ_CheckCRC) -> Self { - match value { - OodleLZ_CheckCRC::Yes => bindings::OodleLZ_CheckCRC_OodleLZ_CheckCRC_Yes, - OodleLZ_CheckCRC::No => bindings::OodleLZ_CheckCRC_OodleLZ_CheckCRC_No, - } - } -} - -#[tracing::instrument(skip(data))] -pub fn decompress( - data: I, - fuzz_safe: OodleLZ_FuzzSafe, - check_crc: OodleLZ_CheckCRC, -) -> Result> -where - I: AsRef<[u8]>, -{ - let data = data.as_ref(); - let mut out = vec![0; CHUNK_SIZE]; - - let verbosity = if tracing::enabled!(tracing::Level::INFO) { - bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Minimal - } else if tracing::enabled!(tracing::Level::DEBUG) { - bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Some - } else if tracing::enabled!(tracing::Level::TRACE) { - bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Lots - } else { - bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_None - }; - - let ret = unsafe { - bindings::OodleLZ_Decompress( - data.as_ptr() as *const _, - data.len() as isize, - out.as_mut_ptr() as *mut _, - out.len() as isize, - fuzz_safe.into(), - check_crc.into(), - verbosity, - ptr::null_mut(), - 0, - None, - ptr::null_mut(), - ptr::null_mut(), - 0, - 
bindings::OodleLZ_Decode_ThreadPhase_OodleLZ_Decode_Unthreaded, - ) - }; - - if ret == 0 { - eyre::bail!("Decompression failed"); - } - - Ok(out) -} - -#[tracing::instrument(skip(data))] -pub fn compress(data: I) -> Result> -where - I: AsRef<[u8]>, -{ - let mut raw = Vec::from(data.as_ref()); - raw.resize(CHUNK_SIZE, 0); - - // TODO: Query oodle for buffer size - let mut out = vec![0u8; CHUNK_SIZE]; - - let ret = unsafe { - bindings::OodleLZ_Compress( - COMPRESSOR, - raw.as_ptr() as *const _, - raw.len() as isize, - out.as_mut_ptr() as *mut _, - LEVEL, - ptr::null_mut(), - ptr::null_mut(), - ptr::null_mut(), - ptr::null_mut(), - 0, - ) - }; - - tracing::debug!(compressed_size = ret, "Compressed chunk"); - - if ret == 0 { - eyre::bail!("Compression failed"); - } - - out.resize(ret as usize, 0); - - Ok(out) -} - -pub fn get_decode_buffer_size(raw_size: usize, corruption_possible: bool) -> Result { - let size = unsafe { - bindings::OodleLZ_GetDecodeBufferSize( - COMPRESSOR, - raw_size as isize, - if corruption_possible { 1 } else { 0 }, - ) - }; - Ok(size as usize) -} diff --git a/lib/sdk/Cargo.toml b/lib/sdk/Cargo.toml index 4667a1c..d1bed0e 100644 --- a/lib/sdk/Cargo.toml +++ b/lib/sdk/Cargo.toml @@ -1,26 +1,22 @@ [package] name = "sdk" -version = "0.3.0" +version = "0.2.0" edition = "2021" [dependencies] -async-recursion = { workspace = true } -bitflags = { workspace = true } -byteorder = { workspace = true } -color-eyre = { workspace = true } -csv-async = { workspace = true } -fastrand = { workspace = true } -futures = { workspace = true } -futures-util = { workspace = true } -glob = { workspace = true } -luajit2-sys = { workspace = true } -nanorand = { workspace = true } -oodle = { workspace = true } -path-slash = { workspace = true } -pin-project-lite = { workspace = true } -serde = { workspace = true } -serde_sjson = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } -tracing = { workspace = true } -tracing-error = { 
workspace = true } +byteorder = "1.4.3" +color-eyre = "0.6.2" +csv-async = { version = "1.2.4", features = ["tokio", "serde"] } +fastrand = "1.8.0" +futures = "0.3.25" +futures-util = "0.3.24" +glob = "0.3.0" +libloading = "0.7.4" +nanorand = "0.7.0" +pin-project-lite = "0.2.9" +serde = { version = "1.0.147", features = ["derive"] } +serde_sjson = { path = "../../lib/serde_sjson", version = "*" } +tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] } +tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] } +tracing = { version = "0.1.37", features = ["async-await"] } +tracing-error = "0.2.0" diff --git a/lib/sdk/src/binary.rs b/lib/sdk/src/binary.rs index 9348e1b..4782440 100644 --- a/lib/sdk/src/binary.rs +++ b/lib/sdk/src/binary.rs @@ -1,49 +1,4 @@ -use std::io::{Cursor, Read, Seek, Write}; - -use color_eyre::Result; - -use self::sync::{ReadExt, WriteExt}; - -pub trait FromBinary: Sized { - fn from_binary(r: &mut R) -> Result; -} - -pub trait ToBinary { - fn to_binary(&self) -> Result>; -} - -impl ToBinary for Vec { - fn to_binary(&self) -> Result> { - // TODO: Allocations for the vector could be optimized by first - // serializing one value, then calculating the size from that. - let mut bin = Cursor::new(Vec::new()); - bin.write_u32(self.len() as u32)?; - - for val in self.iter() { - let buf = val.to_binary()?; - bin.write_all(&buf)?; - } - - Ok(bin.into_inner()) - } -} - -impl FromBinary for Vec { - fn from_binary(r: &mut R) -> Result { - let size = r.read_u32()? 
as usize; - - let mut list = Vec::with_capacity(size); - - for _ in 0..size { - list.push(T::from_binary(r)?); - } - - Ok(list) - } -} - pub mod sync { - use std::ffi::CStr; use std::io::{self, Read, Seek, SeekFrom}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; @@ -134,23 +89,6 @@ pub mod sync { make_skip!(skip_u8, read_u8, u8); make_skip!(skip_u32, read_u32, u32); - // Implementation based on https://en.wikipedia.com/wiki/LEB128 - fn read_uleb128(&mut self) -> io::Result { - let mut result: u64 = 0; - let mut shift: u64 = 0; - - loop { - let byte = ReadExt::read_u8(self)? as u64; - result |= (byte & 0x7f) << shift; - - if byte < 0x80 { - return Ok(result); - } - - shift += 7; - } - } - fn skip_padding(&mut self) -> io::Result<()> { let pos = self.stream_position()?; let padding_size = 16 - (pos % 16); @@ -166,13 +104,25 @@ pub mod sync { } fn read_string_len(&mut self, len: usize) -> Result { - let pos = self.stream_position(); + let mut buf = vec![0; len]; + let res = self + .read_exact(&mut buf) + .map_err(Report::new) + .and_then(|_| { + String::from_utf8(buf).map_err(|err| { + let ascii = String::from_utf8_lossy(err.as_bytes()).to_string(); + let bytes = format!("{:?}", err.as_bytes()); + Report::new(err) + .with_section(move || bytes.header("Bytes:")) + .with_section(move || ascii.header("ASCII:")) + }) + }); - let res = read_string_len(self, len); if res.is_ok() { return res; } + let pos = self.stream_position(); if pos.is_ok() { res.with_section(|| { format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ") @@ -232,22 +182,4 @@ pub mod sync { Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: ")) } - - fn read_string_len(mut r: impl Read, len: usize) -> Result { - let mut buf = vec![0; len]; - r.read_exact(&mut buf) - .wrap_err_with(|| format!("Failed to read {} bytes", len))?; - - let res = match CStr::from_bytes_until_nul(&buf) { - Ok(s) => { - let s = s.to_str()?; - Ok(s.to_string()) - } - Err(_) => 
String::from_utf8(buf.clone()).map_err(Report::new), - }; - - res.wrap_err("Invalid binary for UTF8 string") - .with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCI:")) - .with_section(|| format!("{:x?}", buf).header("Bytes:")) - } } diff --git a/lib/sdk/src/bundle/database.rs b/lib/sdk/src/bundle/database.rs deleted file mode 100644 index 185c62f..0000000 --- a/lib/sdk/src/bundle/database.rs +++ /dev/null @@ -1,285 +0,0 @@ -use std::collections::HashMap; -use std::io::Cursor; -use std::io::Read; -use std::io::Seek; -use std::io::Write; - -use color_eyre::eyre; -use color_eyre::Result; - -use crate::binary::sync::*; -use crate::binary::FromBinary; -use crate::binary::ToBinary; -use crate::murmur::Murmur64; -use crate::Bundle; - -use super::filetype::BundleFileType; - -const DATABASE_VERSION: u32 = 0x6; -const FILE_VERSION: u32 = 0x4; - -pub struct BundleFile { - pub name: String, - pub stream: String, - pub platform_specific: bool, - pub file_time: u64, -} - -pub struct FileName { - pub extension: BundleFileType, - pub name: Murmur64, -} - -pub struct BundleDatabase { - stored_files: HashMap>, - resource_hashes: HashMap, - bundle_contents: HashMap>, -} - -// Implements the partial Murmur that's used by the engine to compute bundle resource hashes, -// but in a way that the loop can be done outside the function. 
-#[inline(always)] -fn add_to_resource_hash(mut k: u64, name: impl Into) -> u64 { - const M: u64 = 0xc6a4a7935bd1e995; - const R: u64 = 47; - - let mut h: u64 = name.into(); - - k = k.wrapping_mul(M); - k ^= k >> R; - k = k.wrapping_mul(M); - - h ^= k; - k = M.wrapping_mul(h); - - k -} - -impl BundleDatabase { - pub fn bundles(&self) -> &HashMap> { - &self.stored_files - } - - pub fn files(&self) -> &HashMap> { - &self.bundle_contents - } - - pub fn add_bundle(&mut self, bundle: &Bundle) { - let hash = bundle.name().to_murmur64(); - let name = hash.to_string(); - let stream = format!("{}.stream", &name); - - tracing::trace!( - "Adding bundle '{} ({:?} | {:016X})' to database. Hash exists: {}", - bundle.name().display(), - bundle.name(), - hash, - self.stored_files.contains_key(&hash) - ); - - { - let entry = self.stored_files.entry(hash).or_default(); - let existing = entry.iter().position(|f| f.name == name); - - let file = BundleFile { - name, - stream, - file_time: 0, - platform_specific: false, - }; - - entry.push(file); - - if let Some(pos) = existing { - tracing::debug!("Found bundle '{}' at {}. Replacing.", hash.to_string(), pos); - entry.swap_remove(pos); - } - } - - let mut resource_hash = 0; - - for f in bundle.files() { - let name = f.base_name().to_murmur64(); - let file_name = FileName { - extension: f.file_type(), - name, - }; - - resource_hash = add_to_resource_hash(resource_hash, name); - - // TODO: Make sure each file name only exists once. Probably best to turn - // the `Vec` into a sorted `HashSet`. 
- self.bundle_contents - .entry(hash) - .or_default() - .push(file_name); - } - - self.resource_hashes.insert(hash, resource_hash); - } -} - -impl FromBinary for BundleDatabase { - #[tracing::instrument(name = "BundleDatabase::from_binary", skip_all)] - fn from_binary(r: &mut R) -> Result { - { - let format = r.read_u32()?; - eyre::ensure!( - format == DATABASE_VERSION, - "invalid file format, expected {:#X}, got {:#X}", - DATABASE_VERSION, - format - ); - } - - let num_entries = r.read_u32()? as usize; - let mut stored_files = HashMap::with_capacity(num_entries); - - for _ in 0..num_entries { - let hash = r.read_u64().map(Murmur64::from)?; - - let num_files = r.read_u32()? as usize; - let mut files = Vec::with_capacity(num_files); - - for _ in 0..num_files { - { - let version = r.read_u32()?; - eyre::ensure!( - version == FILE_VERSION, - "invalid file version, expected {:#X}, got {:#X}", - FILE_VERSION, - version - ); - } - - let len_name = r.read_u32()? as usize; - let mut buf = vec![0; len_name]; - r.read_exact(&mut buf)?; - - let name = String::from_utf8(buf)?; - - let len_stream = r.read_u32()? as usize; - let mut buf = vec![0; len_stream]; - r.read_exact(&mut buf)?; - - let stream = String::from_utf8(buf)?; - - let platform_specific = r.read_u8()? != 0; - - // TODO: Unknown what this is. In VT2's SDK, it's simply ignored, - // and always written as `0`, but in DT, it seems to be used. - let mut buffer = [0; 20]; - r.read_exact(&mut buffer)?; - - if cfg!(debug_assertions) && buffer.iter().any(|b| *b != 0) { - tracing::warn!("Unknown value in 20-byte buffer: {:?}", buffer); - } - - let file_time = r.read_u64()?; - - let file = BundleFile { - name, - stream, - platform_specific, - file_time, - }; - - files.push(file); - } - - stored_files.insert(hash, files); - } - - let num_hashes = r.read_u32()? 
as usize; - let mut resource_hashes = HashMap::with_capacity(num_hashes); - - for _ in 0..num_hashes { - let name = r.read_u64().map(Murmur64::from)?; - let hash = r.read_u64()?; - - resource_hashes.insert(name, hash); - } - - let num_contents = r.read_u32()? as usize; - let mut bundle_contents = HashMap::with_capacity(num_contents); - - for _ in 0..num_contents { - let hash = r.read_u64().map(Murmur64::from)?; - - let num_files = r.read_u32()? as usize; - let mut files = Vec::with_capacity(num_files); - - for _ in 0..num_files { - let extension = r.read_u64().map(BundleFileType::from)?; - let name = r.read_u64().map(Murmur64::from)?; - - files.push(FileName { extension, name }); - } - - bundle_contents.insert(hash, files); - } - - Ok(Self { - stored_files, - resource_hashes, - bundle_contents, - }) - } -} - -impl ToBinary for BundleDatabase { - #[tracing::instrument(name = "BundleDatabase::to_binary", skip_all)] - fn to_binary(&self) -> Result> { - let mut binary = Vec::new(); - - { - let mut w = Cursor::new(&mut binary); - - w.write_u32(DATABASE_VERSION)?; - - w.write_u32(self.stored_files.len() as u32)?; - - for (hash, files) in self.stored_files.iter() { - w.write_u64((*hash).into())?; - w.write_u32(files.len() as u32)?; - - for f in files.iter() { - w.write_u32(FILE_VERSION)?; - w.write_u32(f.name.len() as u32)?; - w.write_all(f.name.as_bytes())?; - w.write_u32(f.stream.len() as u32)?; - w.write_all(f.stream.as_bytes())?; - - w.write_u8(if f.platform_specific { 1 } else { 0 })?; - - // TODO: Don't know what goes here - let buffer = [0; 20]; - w.write_all(&buffer)?; - - w.write_u64(f.file_time)?; - } - } - - w.write_u32(self.resource_hashes.len() as u32)?; - - for (name, hash) in self.resource_hashes.iter() { - w.write_u64((*name).into())?; - w.write_u64(*hash)?; - } - - w.write_u32(self.bundle_contents.len() as u32)?; - - for (hash, contents) in self.bundle_contents.iter() { - w.write_u64((*hash).into())?; - w.write_u32(contents.len() as u32)?; - - for 
FileName { extension, name } in contents.iter() { - w.write_u64((*extension).into())?; - w.write_u64((*name).into())?; - } - } - } - - Ok(binary) - } -} diff --git a/lib/sdk/src/bundle/file.rs b/lib/sdk/src/bundle/file.rs index 6d49821..d5f0c58 100644 --- a/lib/sdk/src/bundle/file.rs +++ b/lib/sdk/src/bundle/file.rs @@ -1,32 +1,423 @@ use std::io::{Cursor, Read, Seek, Write}; use std::path::Path; -use bitflags::bitflags; use color_eyre::eyre::Context; use color_eyre::{eyre, Result}; use futures::future::join_all; +use serde::Serialize; use crate::binary::sync::*; use crate::filetype::*; -use crate::murmur::{HashGroup, IdString64, Murmur64}; +use crate::murmur::{HashGroup, Murmur64}; -use super::filetype::BundleFileType; +use super::EntryHeader; + +#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] +pub enum BundleFileType { + Animation, + AnimationCurves, + Apb, + BakedLighting, + Bik, + BlendSet, + Bones, + Chroma, + CommonPackage, + Config, + Crypto, + Data, + Entity, + Flow, + Font, + Ies, + Ini, + Input, + Ivf, + Keys, + Level, + Lua, + Material, + Mod, + MouseCursor, + NavData, + NetworkConfig, + OddleNet, + Package, + Particles, + PhysicsProperties, + RenderConfig, + RtPipeline, + Scene, + Shader, + ShaderLibrary, + ShaderLibraryGroup, + ShadingEnvionmentMapping, + ShadingEnvironment, + Slug, + SlugAlbum, + SoundEnvironment, + SpuJob, + StateMachine, + StaticPVS, + Strings, + SurfaceProperties, + Texture, + TimpaniBank, + TimpaniMaster, + Tome, + Ugg, + Unit, + Upb, + VectorField, + Wav, + WwiseBank, + WwiseDep, + WwiseEvent, + WwiseMetadata, + WwiseStream, + Xml, + + Unknown(Murmur64), +} + +impl BundleFileType { + pub fn ext_name(&self) -> String { + match self { + BundleFileType::AnimationCurves => String::from("animation_curves"), + BundleFileType::Animation => String::from("animation"), + BundleFileType::Apb => String::from("apb"), + BundleFileType::BakedLighting => String::from("baked_lighting"), + BundleFileType::Bik => String::from("bik"), + 
BundleFileType::BlendSet => String::from("blend_set"), + BundleFileType::Bones => String::from("bones"), + BundleFileType::Chroma => String::from("chroma"), + BundleFileType::CommonPackage => String::from("common_package"), + BundleFileType::Config => String::from("config"), + BundleFileType::Crypto => String::from("crypto"), + BundleFileType::Data => String::from("data"), + BundleFileType::Entity => String::from("entity"), + BundleFileType::Flow => String::from("flow"), + BundleFileType::Font => String::from("font"), + BundleFileType::Ies => String::from("ies"), + BundleFileType::Ini => String::from("ini"), + BundleFileType::Input => String::from("input"), + BundleFileType::Ivf => String::from("ivf"), + BundleFileType::Keys => String::from("keys"), + BundleFileType::Level => String::from("level"), + BundleFileType::Lua => String::from("lua"), + BundleFileType::Material => String::from("material"), + BundleFileType::Mod => String::from("mod"), + BundleFileType::MouseCursor => String::from("mouse_cursor"), + BundleFileType::NavData => String::from("nav_data"), + BundleFileType::NetworkConfig => String::from("network_config"), + BundleFileType::OddleNet => String::from("oodle_net"), + BundleFileType::Package => String::from("package"), + BundleFileType::Particles => String::from("particles"), + BundleFileType::PhysicsProperties => String::from("physics_properties"), + BundleFileType::RenderConfig => String::from("render_config"), + BundleFileType::RtPipeline => String::from("rt_pipeline"), + BundleFileType::Scene => String::from("scene"), + BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"), + BundleFileType::ShaderLibrary => String::from("shader_library"), + BundleFileType::Shader => String::from("shader"), + BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"), + BundleFileType::ShadingEnvironment => String::from("shading_environment"), + BundleFileType::SlugAlbum => String::from("slug_album"), + 
BundleFileType::Slug => String::from("slug"), + BundleFileType::SoundEnvironment => String::from("sound_environment"), + BundleFileType::SpuJob => String::from("spu_job"), + BundleFileType::StateMachine => String::from("state_machine"), + BundleFileType::StaticPVS => String::from("static_pvs"), + BundleFileType::Strings => String::from("strings"), + BundleFileType::SurfaceProperties => String::from("surface_properties"), + BundleFileType::Texture => String::from("texture"), + BundleFileType::TimpaniBank => String::from("timpani_bank"), + BundleFileType::TimpaniMaster => String::from("timpani_master"), + BundleFileType::Tome => String::from("tome"), + BundleFileType::Ugg => String::from("ugg"), + BundleFileType::Unit => String::from("unit"), + BundleFileType::Upb => String::from("upb"), + BundleFileType::VectorField => String::from("vector_field"), + BundleFileType::Wav => String::from("wav"), + BundleFileType::WwiseBank => String::from("wwise_bank"), + BundleFileType::WwiseDep => String::from("wwise_dep"), + BundleFileType::WwiseEvent => String::from("wwise_event"), + BundleFileType::WwiseMetadata => String::from("wwise_metadata"), + BundleFileType::WwiseStream => String::from("wwise_stream"), + BundleFileType::Xml => String::from("xml"), + + BundleFileType::Unknown(s) => format!("{s:016X}"), + } + } + + pub fn decompiled_ext_name(&self) -> String { + match self { + BundleFileType::Texture => String::from("dds"), + BundleFileType::WwiseBank => String::from("bnk"), + BundleFileType::WwiseStream => String::from("ogg"), + _ => self.ext_name(), + } + } + + pub fn hash(&self) -> Murmur64 { + Murmur64::from(*self) + } +} + +impl std::str::FromStr for BundleFileType { + type Err = color_eyre::Report; + + fn from_str(s: &str) -> Result { + let val = match s { + "animation_curves" => BundleFileType::AnimationCurves, + "animation" => BundleFileType::Animation, + "apb" => BundleFileType::Apb, + "baked_lighting" => BundleFileType::BakedLighting, + "bik" => BundleFileType::Bik, 
+ "blend_set" => BundleFileType::BlendSet, + "bones" => BundleFileType::Bones, + "chroma" => BundleFileType::Chroma, + "common_package" => BundleFileType::CommonPackage, + "config" => BundleFileType::Config, + "crypto" => BundleFileType::Crypto, + "data" => BundleFileType::Data, + "entity" => BundleFileType::Entity, + "flow" => BundleFileType::Flow, + "font" => BundleFileType::Font, + "ies" => BundleFileType::Ies, + "ini" => BundleFileType::Ini, + "input" => BundleFileType::Input, + "ivf" => BundleFileType::Ivf, + "keys" => BundleFileType::Keys, + "level" => BundleFileType::Level, + "lua" => BundleFileType::Lua, + "material" => BundleFileType::Material, + "mod" => BundleFileType::Mod, + "mouse_cursor" => BundleFileType::MouseCursor, + "nav_data" => BundleFileType::NavData, + "network_config" => BundleFileType::NetworkConfig, + "oodle_net" => BundleFileType::OddleNet, + "package" => BundleFileType::Package, + "particles" => BundleFileType::Particles, + "physics_properties" => BundleFileType::PhysicsProperties, + "render_config" => BundleFileType::RenderConfig, + "rt_pipeline" => BundleFileType::RtPipeline, + "scene" => BundleFileType::Scene, + "shader_library_group" => BundleFileType::ShaderLibraryGroup, + "shader_library" => BundleFileType::ShaderLibrary, + "shader" => BundleFileType::Shader, + "shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping, + "shading_environment" => BundleFileType::ShadingEnvironment, + "slug_album" => BundleFileType::SlugAlbum, + "slug" => BundleFileType::Slug, + "sound_environment" => BundleFileType::SoundEnvironment, + "spu_job" => BundleFileType::SpuJob, + "state_machine" => BundleFileType::StateMachine, + "static_pvs" => BundleFileType::StaticPVS, + "strings" => BundleFileType::Strings, + "surface_properties" => BundleFileType::SurfaceProperties, + "texture" => BundleFileType::Texture, + "timpani_bank" => BundleFileType::TimpaniBank, + "timpani_master" => BundleFileType::TimpaniMaster, + "tome" => 
BundleFileType::Tome, + "ugg" => BundleFileType::Ugg, + "unit" => BundleFileType::Unit, + "upb" => BundleFileType::Upb, + "vector_field" => BundleFileType::VectorField, + "wav" => BundleFileType::Wav, + "wwise_bank" => BundleFileType::WwiseBank, + "wwise_dep" => BundleFileType::WwiseDep, + "wwise_event" => BundleFileType::WwiseEvent, + "wwise_metadata" => BundleFileType::WwiseMetadata, + "wwise_stream" => BundleFileType::WwiseStream, + "xml" => BundleFileType::Xml, + s => eyre::bail!("Unknown type string '{}'", s), + }; + + Ok(val) + } +} + +impl Serialize for BundleFileType { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let value = self.ext_name(); + value.serialize(serializer) + } +} + +impl From for BundleFileType { + fn from(value: Murmur64) -> Self { + Self::from(Into::::into(value)) + } +} + +impl From for BundleFileType { + fn from(hash: u64) -> BundleFileType { + match hash { + 0x931e336d7646cc26 => BundleFileType::Animation, + 0xdcfb9e18fff13984 => BundleFileType::AnimationCurves, + 0x3eed05ba83af5090 => BundleFileType::Apb, + 0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting, + 0xaa5965f03029fa18 => BundleFileType::Bik, + 0xe301e8af94e3b5a3 => BundleFileType::BlendSet, + 0x18dead01056b72e9 => BundleFileType::Bones, + 0xb7893adf7567506a => BundleFileType::Chroma, + 0xfe9754bd19814a47 => BundleFileType::CommonPackage, + 0x82645835e6b73232 => BundleFileType::Config, + 0x69108ded1e3e634b => BundleFileType::Crypto, + 0x8fd0d44d20650b68 => BundleFileType::Data, + 0x9831ca893b0d087d => BundleFileType::Entity, + 0x92d3ee038eeb610d => BundleFileType::Flow, + 0x9efe0a916aae7880 => BundleFileType::Font, + 0x8f7d5a2c0f967655 => BundleFileType::Ies, + 0xd526a27da14f1dc5 => BundleFileType::Ini, + 0x2bbcabe5074ade9e => BundleFileType::Input, + 0xfa4a8e091a91201e => BundleFileType::Ivf, + 0xa62f9297dc969e85 => BundleFileType::Keys, + 0x2a690fd348fe9ac5 => BundleFileType::Level, + 0xa14e8dfa2cd117e2 => BundleFileType::Lua, + 
0xeac0b497876adedf => BundleFileType::Material, + 0x3fcdd69156a46417 => BundleFileType::Mod, + 0xb277b11fe4a61d37 => BundleFileType::MouseCursor, + 0x169de9566953d264 => BundleFileType::NavData, + 0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig, + 0xb0f2c12eb107f4d8 => BundleFileType::OddleNet, + 0xad9c6d9ed1e5e77a => BundleFileType::Package, + 0xa8193123526fad64 => BundleFileType::Particles, + 0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties, + 0x27862fe24795319c => BundleFileType::RenderConfig, + 0x9ca183c2d0e76dee => BundleFileType::RtPipeline, + 0x9d0a795bfe818d19 => BundleFileType::Scene, + 0xcce8d5b5f5ae333f => BundleFileType::Shader, + 0xe5ee32a477239a93 => BundleFileType::ShaderLibrary, + 0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup, + 0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping, + 0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment, + 0xa27b4d04a9ba6f9e => BundleFileType::Slug, + 0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum, + 0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment, + 0xf97af9983c05b950 => BundleFileType::SpuJob, + 0xa486d4045106165c => BundleFileType::StateMachine, + 0xe3f0baa17d620321 => BundleFileType::StaticPVS, + 0x0d972bab10b40fd3 => BundleFileType::Strings, + 0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties, + 0xcd4238c6a0c69e32 => BundleFileType::Texture, + 0x99736be1fff739a4 => BundleFileType::TimpaniBank, + 0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster, + 0x19c792357c99f49b => BundleFileType::Tome, + 0x712d6e3dd1024c9c => BundleFileType::Ugg, + 0xe0a48d0be9a7453f => BundleFileType::Unit, + 0xa99510c6e86dd3c2 => BundleFileType::Upb, + 0xf7505933166d6755 => BundleFileType::VectorField, + 0x786f65c00a816b19 => BundleFileType::Wav, + 0x535a7bd3e650d799 => BundleFileType::WwiseBank, + 0xaf32095c82f2b070 => BundleFileType::WwiseDep, + 0xaabdd317b58dfc8a => BundleFileType::WwiseEvent, + 0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata, + 0x504b55235d21440e => 
BundleFileType::WwiseStream, + 0x76015845a6003765 => BundleFileType::Xml, + + _ => BundleFileType::Unknown(Murmur64::from(hash)), + } + } +} + +impl From for u64 { + fn from(t: BundleFileType) -> u64 { + match t { + BundleFileType::Animation => 0x931e336d7646cc26, + BundleFileType::AnimationCurves => 0xdcfb9e18fff13984, + BundleFileType::Apb => 0x3eed05ba83af5090, + BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1, + BundleFileType::Bik => 0xaa5965f03029fa18, + BundleFileType::BlendSet => 0xe301e8af94e3b5a3, + BundleFileType::Bones => 0x18dead01056b72e9, + BundleFileType::Chroma => 0xb7893adf7567506a, + BundleFileType::CommonPackage => 0xfe9754bd19814a47, + BundleFileType::Config => 0x82645835e6b73232, + BundleFileType::Crypto => 0x69108ded1e3e634b, + BundleFileType::Data => 0x8fd0d44d20650b68, + BundleFileType::Entity => 0x9831ca893b0d087d, + BundleFileType::Flow => 0x92d3ee038eeb610d, + BundleFileType::Font => 0x9efe0a916aae7880, + BundleFileType::Ies => 0x8f7d5a2c0f967655, + BundleFileType::Ini => 0xd526a27da14f1dc5, + BundleFileType::Input => 0x2bbcabe5074ade9e, + BundleFileType::Ivf => 0xfa4a8e091a91201e, + BundleFileType::Keys => 0xa62f9297dc969e85, + BundleFileType::Level => 0x2a690fd348fe9ac5, + BundleFileType::Lua => 0xa14e8dfa2cd117e2, + BundleFileType::Material => 0xeac0b497876adedf, + BundleFileType::Mod => 0x3fcdd69156a46417, + BundleFileType::MouseCursor => 0xb277b11fe4a61d37, + BundleFileType::NavData => 0x169de9566953d264, + BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374, + BundleFileType::OddleNet => 0xb0f2c12eb107f4d8, + BundleFileType::Package => 0xad9c6d9ed1e5e77a, + BundleFileType::Particles => 0xa8193123526fad64, + BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1, + BundleFileType::RenderConfig => 0x27862fe24795319c, + BundleFileType::RtPipeline => 0x9ca183c2d0e76dee, + BundleFileType::Scene => 0x9d0a795bfe818d19, + BundleFileType::Shader => 0xcce8d5b5f5ae333f, + BundleFileType::ShaderLibrary => 0xe5ee32a477239a93, + 
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5, + BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8, + BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5, + BundleFileType::Slug => 0xa27b4d04a9ba6f9e, + BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0, + BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7, + BundleFileType::SpuJob => 0xf97af9983c05b950, + BundleFileType::StateMachine => 0xa486d4045106165c, + BundleFileType::StaticPVS => 0xe3f0baa17d620321, + BundleFileType::Strings => 0x0d972bab10b40fd3, + BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394, + BundleFileType::Texture => 0xcd4238c6a0c69e32, + BundleFileType::TimpaniBank => 0x99736be1fff739a4, + BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c, + BundleFileType::Tome => 0x19c792357c99f49b, + BundleFileType::Ugg => 0x712d6e3dd1024c9c, + BundleFileType::Unit => 0xe0a48d0be9a7453f, + BundleFileType::Upb => 0xa99510c6e86dd3c2, + BundleFileType::VectorField => 0xf7505933166d6755, + BundleFileType::Wav => 0x786f65c00a816b19, + BundleFileType::WwiseBank => 0x535a7bd3e650d799, + BundleFileType::WwiseDep => 0xaf32095c82f2b070, + BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a, + BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110, + BundleFileType::WwiseStream => 0x504b55235d21440e, + BundleFileType::Xml => 0x76015845a6003765, + + BundleFileType::Unknown(hash) => hash.into(), + } + } +} +impl From<BundleFileType> for Murmur64 { + fn from(t: BundleFileType) -> Murmur64 { + // NOTE(review): plain `t.into()` resolved back into this very impl and + // recursed forever; convert via `u64` explicitly, as the removed + // `filetype.rs` version did. + let hash: u64 = t.into(); + Murmur64::from(hash) + } +} + +impl std::fmt::Display for BundleFileType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.ext_name()) + } +} #[derive(Debug)] struct BundleFileHeader { variant: u32, - unknown_1: u8, size: usize, len_data_file_name: usize, } -#[derive(Clone, Debug)] pub struct BundleFileVariant { property: u32, data: Vec, data_file_name: Option, - // Seems to be related to whether there is a data path. 
- unknown_1: u8, } impl BundleFileVariant { @@ -39,7 +430,6 @@ impl BundleFileVariant { property: 0, data: Vec::new(), data_file_name: None, - unknown_1: 0, } } @@ -69,67 +459,47 @@ impl BundleFileVariant { R: Read + Seek, { let variant = r.read_u32()?; - let unknown_1 = r.read_u8()?; + r.skip_u8(0)?; let size = r.read_u32()? as usize; r.skip_u8(1)?; let len_data_file_name = r.read_u32()? as usize; Ok(BundleFileHeader { size, - unknown_1, variant, len_data_file_name, }) } #[tracing::instrument(skip_all)] - fn write_header(&self, w: &mut W, props: Properties) -> Result<()> + fn write_header(&self, w: &mut W) -> Result<()> where W: Write + Seek, { w.write_u32(self.property)?; - w.write_u8(self.unknown_1)?; + w.write_u8(0)?; + w.write_u32(self.data.len() as u32)?; + w.write_u8(1)?; let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0); - - if props.contains(Properties::DATA) { - w.write_u32(len_data_file_name as u32)?; - w.write_u8(1)?; - w.write_u32(0)?; - } else { - w.write_u32(self.data.len() as u32)?; - w.write_u8(1)?; - w.write_u32(len_data_file_name as u32)?; - } + w.write_u32(len_data_file_name as u32)?; Ok(()) } } -bitflags! { - #[derive(Default, Clone, Copy, Debug)] - pub struct Properties: u32 { - const DATA = 0b100; - // A custom flag used by DTMT to signify a file altered by mods. 
- const MODDED = 1 << 31; - } -} - -#[derive(Clone, Debug)] pub struct BundleFile { file_type: BundleFileType, - name: IdString64, + name: String, variants: Vec, - props: Properties, } impl BundleFile { - pub fn new(name: impl Into, file_type: BundleFileType) -> Self { + pub fn new(name: String, file_type: BundleFileType) -> Self { Self { file_type, - name: name.into(), + name, variants: Vec::new(), - props: Properties::empty(), } } @@ -137,20 +507,12 @@ impl BundleFile { self.variants.push(variant) } - pub fn set_variants(&mut self, variants: Vec) { - self.variants = variants; - } - - pub fn set_props(&mut self, props: Properties) { - self.props = props; - } - - pub fn set_modded(&mut self, is_modded: bool) { - self.props.set(Properties::MODDED, is_modded); - } - - #[tracing::instrument(name = "File::read", skip(ctx, r))] - pub fn from_reader(ctx: &crate::Context, r: &mut R, props: Properties) -> Result + #[tracing::instrument( + name = "File::read", + skip_all, + fields(name = %meta.name_hash, ext = %meta.extension_hash, flags = meta.flags) + )] + pub fn from_reader(ctx: &crate::Context, r: &mut R, meta: &EntryHeader) -> Result where R: Read + Seek, { @@ -159,64 +521,36 @@ impl BundleFile { let name = ctx.lookup_hash(hash, HashGroup::Filename); let header_count = r.read_u32()? 
as usize; - tracing::trace!(header_count); let mut headers = Vec::with_capacity(header_count); r.skip_u32(0)?; - for i in 0..header_count { - let span = tracing::debug_span!("Read file header", i); - let _enter = span.enter(); - - let header = BundleFileVariant::read_header(r) - .wrap_err_with(|| format!("Failed to read header {i}"))?; - - // TODO: Figure out how `header.unknown_1` correlates to `properties::DATA` - // if props.contains(Properties::DATA) { - // tracing::debug!("props: {props:?} | unknown_1: {}", header.unknown_1) - // } - + for _ in 0..header_count { + let header = BundleFileVariant::read_header(r)?; headers.push(header); } let mut variants = Vec::with_capacity(header_count); for (i, header) in headers.into_iter().enumerate() { - let span = tracing::debug_span!( - "Read file data {}", - i, - size = header.size, - len_data_file_name = header.len_data_file_name - ); + let span = tracing::info_span!("Read file header {}", i, size = header.size); let _enter = span.enter(); - let (data, data_file_name) = if props.contains(Properties::DATA) { - let data = vec![]; + let mut data = vec![0; header.size]; + r.read_exact(&mut data) + .wrap_err_with(|| format!("failed to read header {i}"))?; + + let data_file_name = if header.len_data_file_name > 0 { let s = r - .read_string_len(header.size) - .wrap_err("Failed to read data file name")?; - - (data, Some(s)) + .read_string_len(header.len_data_file_name) + .wrap_err("failed to read data file name")?; + Some(s) } else { - let mut data = vec![0; header.size]; - r.read_exact(&mut data) - .wrap_err_with(|| format!("Failed to read file {i}"))?; - - let data_file_name = if header.len_data_file_name > 0 { - let s = r - .read_string_len(header.len_data_file_name) - .wrap_err("Failed to read data file name")?; - Some(s) - } else { - None - }; - - (data, data_file_name) + None }; let variant = BundleFileVariant { property: header.variant, data, data_file_name, - unknown_1: header.unknown_1, }; variants.push(variant); @@ 
-226,7 +560,6 @@ impl BundleFile { variants, file_type, name, - props, }) } @@ -235,7 +568,7 @@ impl BundleFile { let mut w = Cursor::new(Vec::new()); w.write_u64(self.file_type.hash().into())?; - w.write_u64(self.name.to_murmur64().into())?; + w.write_u64(Murmur64::hash(self.name.as_bytes()).into())?; w.write_u32(self.variants.len() as u32)?; // TODO: Figure out what this is @@ -243,40 +576,34 @@ impl BundleFile { for variant in self.variants.iter() { w.write_u32(variant.property())?; - w.write_u8(variant.unknown_1)?; + w.write_u8(0)?; + w.write_u32(variant.size() as u32)?; + w.write_u8(1)?; let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0); - - if self.props.contains(Properties::DATA) { - w.write_u32(len_data_file_name as u32)?; - w.write_u8(1)?; - w.write_u32(0)?; - } else { - w.write_u32(variant.size() as u32)?; - w.write_u8(1)?; - w.write_u32(len_data_file_name as u32)?; - } + w.write_u32(len_data_file_name as u32)?; } for variant in self.variants.iter() { w.write_all(&variant.data)?; - if let Some(s) = &variant.data_file_name { - w.write_all(s.as_bytes())?; - } } Ok(w.into_inner()) } - #[tracing::instrument("File::from_sjson", skip(sjson, name), fields(name = %name.display()))] - pub async fn from_sjson( - name: IdString64, + #[tracing::instrument(name = "File::from_sjson", skip(sjson))] + pub async fn from_sjson( + name: String, file_type: BundleFileType, - sjson: impl AsRef, - root: impl AsRef + std::fmt::Debug, - ) -> Result { + sjson: S, + root: P, + ) -> Result + where + P: AsRef + std::fmt::Debug, + S: AsRef, + { match file_type { - BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"), + BundleFileType::Lua => lua::compile(name, sjson).await, BundleFileType::Unknown(_) => { eyre::bail!("Unknown file type. 
Cannot compile from SJSON"); } @@ -289,16 +616,12 @@ impl BundleFile { } } - pub fn props(&self) -> Properties { - self.props - } - - pub fn base_name(&self) -> &IdString64 { + pub fn base_name(&self) -> &String { &self.name } pub fn name(&self, decompiled: bool, variant: Option) -> String { - let mut s = self.name.display().to_string(); + let mut s = self.name.clone(); s.push('.'); if let Some(variant) = variant { @@ -315,16 +638,12 @@ impl BundleFile { s } - pub fn matches_name(&self, name: &IdString64) -> bool { - if self.name == *name { - return true; - } - - if let IdString64::String(name) = name { - self.name(false, None) == *name || self.name(true, None) == *name - } else { - false - } + pub fn matches_name(&self, name: S) -> bool + where + S: AsRef, + { + let name = name.as_ref(); + self.name == name || self.name(false, None) == name || self.name(true, None) == name } pub fn file_type(&self) -> BundleFileType { @@ -392,7 +711,7 @@ impl BundleFile { } }; - let res = res.wrap_err_with(|| format!("Failed to decompile file {name}")); + let res = res.wrap_err_with(|| format!("failed to decompile file {name}")); match res { Ok(files) => files, Err(err) => { @@ -408,12 +727,6 @@ impl BundleFile { } } -impl PartialEq for BundleFile { - fn eq(&self, other: &Self) -> bool { - self.name == other.name && self.file_type == other.file_type - } -} - pub struct UserFile { // TODO: Might be able to avoid some allocations with a Cow here data: Vec, diff --git a/lib/sdk/src/bundle/filetype.rs b/lib/sdk/src/bundle/filetype.rs deleted file mode 100644 index 68ff6b5..0000000 --- a/lib/sdk/src/bundle/filetype.rs +++ /dev/null @@ -1,174 +0,0 @@ -use color_eyre::{eyre, Result}; -use serde::Serialize; - -use crate::murmur::Murmur64; - -macro_rules! make_enum { - ( - $( $variant:ident, $hash:expr, $ext:expr $(, $decompiled:expr)? 
; )+ - ) => { - #[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] - pub enum BundleFileType { - $( - $variant, - )+ - Unknown(Murmur64), - } - - impl BundleFileType { - pub fn ext_name(&self) -> String { - match self { - $( - Self::$variant => String::from($ext), - )+ - Self::Unknown(s) => format!("{s:016X}"), - } - } - - pub fn decompiled_ext_name(&self) -> String { - match self { - $( - $( Self::$variant => String::from($decompiled), )? - )+ - _ => self.ext_name(), - } - } - } - - impl std::str::FromStr for BundleFileType { - type Err = color_eyre::Report; - fn from_str(s: &str) -> Result { - match s { - $( - $ext => Ok(Self::$variant), - )+ - s => eyre::bail!("Unknown type string '{}'", s), - } - } - } - - impl From for BundleFileType { - fn from(h: u64) -> Self { - match h { - $( - $hash => Self::$variant, - )+ - hash => Self::Unknown(hash.into()), - } - } - } - - impl From for u64 { - fn from(t: BundleFileType) -> u64 { - match t { - $( - BundleFileType::$variant => $hash, - )+ - BundleFileType::Unknown(hash) => hash.into(), - } - } - } - } -} - -make_enum! 
{ - AnimationCurves, 0xdcfb9e18fff13984, "animation_curves"; - Animation, 0x931e336d7646cc26, "animation"; - Apb, 0x3eed05ba83af5090, "apb"; - BakedLighting, 0x7ffdb779b04e4ed1, "baked_lighting"; - Bik, 0xaa5965f03029fa18, "bik"; - BlendSet, 0xe301e8af94e3b5a3, "blend_set"; - Bones, 0x18dead01056b72e9, "bones"; - Chroma, 0xb7893adf7567506a, "chroma"; - CommonPackage, 0xfe9754bd19814a47, "common_package"; - Config, 0x82645835e6b73232, "config"; - Crypto, 0x69108ded1e3e634b, "crypto"; - Data, 0x8fd0d44d20650b68, "data"; - Entity, 0x9831ca893b0d087d, "entity"; - Flow, 0x92d3ee038eeb610d, "flow"; - Font, 0x9efe0a916aae7880, "font"; - Ies, 0x8f7d5a2c0f967655, "ies"; - Ini, 0xd526a27da14f1dc5, "ini"; - Input, 0x2bbcabe5074ade9e, "input"; - Ivf, 0xfa4a8e091a91201e, "ivf"; - Keys, 0xa62f9297dc969e85, "keys"; - Level, 0x2a690fd348fe9ac5, "level"; - Lua, 0xa14e8dfa2cd117e2, "lua"; - Material, 0xeac0b497876adedf, "material"; - Mod, 0x3fcdd69156a46417, "mod"; - MouseCursor, 0xb277b11fe4a61d37, "mouse_cursor"; - NavData, 0x169de9566953d264, "nav_data"; - NetworkConfig, 0x3b1fa9e8f6bac374, "network_config"; - OddleNet, 0xb0f2c12eb107f4d8, "oodle_net"; - Package, 0xad9c6d9ed1e5e77a, "package"; - Particles, 0xa8193123526fad64, "particles"; - PhysicsProperties, 0xbf21403a3ab0bbb1, "physics_properties"; - RenderConfig, 0x27862fe24795319c, "render_config"; - RtPipeline, 0x9ca183c2d0e76dee, "rt_pipeline"; - Scene, 0x9d0a795bfe818d19, "scene"; - Shader, 0xcce8d5b5f5ae333f, "shader"; - ShaderLibrary, 0xe5ee32a477239a93, "shader_library"; - ShaderLibraryGroup, 0x9e5c3cc74575aeb5, "shader_library_group"; - ShadingEnvionmentMapping, 0x250e0a11ac8e26f8, "shading_envionment_mapping"; - ShadingEnvironment, 0xfe73c7dcff8a7ca5, "shading_environment"; - Slug, 0xa27b4d04a9ba6f9e, "slug"; - SlugAlbum, 0xe9fc9ea7042e5ec0, "slug_album"; - SoundEnvironment, 0xd8b27864a97ffdd7, "sound_environment"; - SpuJob, 0xf97af9983c05b950, "spu_job"; - StateMachine, 0xa486d4045106165c, "state_machine"; - 
StaticPVS, 0xe3f0baa17d620321, "static_pvs"; - Strings, 0x0d972bab10b40fd3, "strings"; - SurfaceProperties, 0xad2d3fa30d9ab394, "surface_properties"; - Texture, 0xcd4238c6a0c69e32, "texture", "dds"; - TimpaniBank, 0x99736be1fff739a4, "timpani_bank"; - TimpaniMaster, 0x00a3e6c59a2b9c6c, "timpani_master"; - Tome, 0x19c792357c99f49b, "tome"; - Ugg, 0x712d6e3dd1024c9c, "ugg"; - Unit, 0xe0a48d0be9a7453f, "unit"; - Upb, 0xa99510c6e86dd3c2, "upb"; - VectorField, 0xf7505933166d6755, "vector_field"; - Wav, 0x786f65c00a816b19, "wav"; - WwiseBank, 0x535a7bd3e650d799, "wwise_bank", "bnk"; - WwiseDep, 0xaf32095c82f2b070, "wwise_dep"; - WwiseEvent, 0xaabdd317b58dfc8a, "wwise_event"; - WwiseMetadata, 0xd50a8b7e1c82b110, "wwise_metadata"; - WwiseStream, 0x504b55235d21440e, "wwise_stream", "ogg"; - Xml, 0x76015845a6003765, "xml"; - Theme, 0x38BB9442048A7FBD, "theme"; - MissionThemes, 0x80F2DE893657F83A, "mission_themes"; -} - -impl BundleFileType { - pub fn hash(&self) -> Murmur64 { - Murmur64::from(*self) - } -} - -impl Serialize for BundleFileType { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let value = self.ext_name(); - value.serialize(serializer) - } -} - -impl From for BundleFileType { - fn from(value: Murmur64) -> Self { - Self::from(Into::::into(value)) - } -} - -impl From for Murmur64 { - fn from(t: BundleFileType) -> Murmur64 { - let hash: u64 = t.into(); - Murmur64::from(hash) - } -} - -impl std::fmt::Display for BundleFileType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.ext_name()) - } -} diff --git a/lib/sdk/src/bundle/mod.rs b/lib/sdk/src/bundle/mod.rs index edb71bb..5d48281 100644 --- a/lib/sdk/src/bundle/mod.rs +++ b/lib/sdk/src/bundle/mod.rs @@ -1,20 +1,17 @@ use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write}; -use std::mem::size_of; use std::path::Path; use color_eyre::eyre::{self, Context, Result}; use color_eyre::{Help, Report, SectionExt}; -use 
oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE}; use crate::binary::sync::*; -use crate::murmur::{HashGroup, IdString64, Murmur64}; +use crate::murmur::{HashGroup, Murmur64}; +use crate::oodle::types::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe}; +use crate::oodle::CHUNK_SIZE; -pub(crate) mod database; pub(crate) mod file; -pub(crate) mod filetype; -pub use file::{BundleFile, BundleFileVariant, Properties}; -pub use filetype::BundleFileType; +pub use file::{BundleFile, BundleFileType}; #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] enum BundleFormat { @@ -43,24 +40,72 @@ impl From for u32 { } } +pub struct EntryHeader { + name_hash: Murmur64, + extension_hash: Murmur64, + flags: u32, +} + +impl EntryHeader { + #[tracing::instrument(name = "EntryHeader::from_reader", skip_all)] + fn from_reader(r: &mut R) -> Result + where + R: Read + Seek, + { + let extension_hash = Murmur64::from(r.read_u64()?); + let name_hash = Murmur64::from(r.read_u64()?); + let flags = r.read_u32()?; + + // NOTE: Known values so far: + // - 0x0: seems to be the default + // - 0x4: seems to be used for files that point to something in `data/` + // seems to correspond to a change in value in the header's 'unknown_3' + if flags != 0x0 { + tracing::debug!( + flags, + "Unexpected meta flags for file {name_hash:016X}.{extension_hash:016X}", + ); + } + + Ok(Self { + name_hash, + extension_hash, + flags, + }) + } + + #[tracing::instrument(name = "EntryHeader::to_writer", skip_all)] + fn to_writer(&self, w: &mut W) -> Result<()> + where + W: Write + Seek, + { + w.write_u64(self.extension_hash.into())?; + w.write_u64(self.name_hash.into())?; + w.write_u32(self.flags)?; + Ok(()) + } +} + pub struct Bundle { format: BundleFormat, properties: [Murmur64; 32], + headers: Vec, files: Vec, - name: IdString64, + name: String, } impl Bundle { - pub fn new>(name: S) -> Self { + pub fn new(name: String) -> Self { Self { - name: name.into(), + name, format: BundleFormat::F8, properties: [0.into(); 32], + 
headers: Vec::new(), files: Vec::new(), } } - pub fn get_name_from_path<P>(ctx: &crate::Context, path: P) -> IdString64 + pub fn get_name_from_path<P>
(ctx: &crate::Context, path: P) -> String where P: AsRef, { @@ -69,31 +114,28 @@ impl Bundle { .and_then(|name| name.to_str()) .and_then(|name| Murmur64::try_from(name).ok()) .map(|hash| ctx.lookup_hash(hash, HashGroup::Filename)) - .unwrap_or_else(|| path.display().to_string().into()) + .unwrap_or_else(|| path.display().to_string()) } pub fn add_file(&mut self, file: BundleFile) { tracing::trace!("Adding file {}", file.name(false, None)); - let existing_index = self - .files - .iter() - .enumerate() - .find(|(_, f)| **f == file) - .map(|val| val.0); + let header = EntryHeader { + extension_hash: file.file_type().into(), + name_hash: Murmur64::hash(file.base_name().as_bytes()), + // TODO: Hard coded until we know what this is + flags: 0x0, + }; self.files.push(file); - - if let Some(i) = existing_index { - self.files.swap_remove(i); - } + self.headers.push(header); } #[tracing::instrument(skip(ctx, binary), fields(len_binary = binary.as_ref().len()))] - pub fn from_binary(ctx: &crate::Context, name: S, binary: B) -> Result + pub fn from_binary(ctx: &crate::Context, name: String, binary: B) -> Result where B: AsRef<[u8]>, - S: Into + std::fmt::Debug, { + let bundle_name = name; let mut r = BufReader::new(Cursor::new(binary)); let format = r.read_u32().and_then(BundleFormat::try_from)?; @@ -112,13 +154,9 @@ impl Bundle { *prop = Murmur64::from(r.read_u64()?); } - let mut file_props = Vec::with_capacity(num_entries); + let mut headers = Vec::with_capacity(num_entries); for _ in 0..num_entries { - // Skip two u64 that contain the extension hash and file name hash. - // We don't need them here, since we're reading the whole bundle into memory - // anyways. - r.seek(SeekFrom::Current((2 * size_of::()) as i64))?; - file_props.push(Properties::from_bits_truncate(r.read_u32()?)); + headers.push(EntryHeader::from_reader(&mut r)?); } let num_chunks = r.read_u32()? 
as usize; @@ -160,12 +198,14 @@ impl Bundle { decompressed.append(&mut compressed_buffer); } else { // TODO: Optimize to not reallocate? - let mut raw_buffer = oodle::decompress( - &compressed_buffer, - OodleLZ_FuzzSafe::No, - OodleLZ_CheckCRC::No, - ) - .wrap_err_with(|| format!("Failed to decompress chunk {chunk_index}"))?; + let oodle_lib = ctx.oodle.as_ref().unwrap(); + let mut raw_buffer = oodle_lib + .decompress( + &compressed_buffer, + OodleLZ_FuzzSafe::No, + OodleLZ_CheckCRC::No, + ) + .wrap_err_with(|| format!("failed to decompress chunk {chunk_index}"))?; if unpacked_size_tracked < CHUNK_SIZE { raw_buffer.resize(unpacked_size_tracked, 0); @@ -173,6 +213,8 @@ impl Bundle { unpacked_size_tracked -= CHUNK_SIZE; } + tracing::trace!(raw_size = raw_buffer.len()); + decompressed.append(&mut raw_buffer); } } @@ -187,26 +229,24 @@ impl Bundle { let mut r = Cursor::new(decompressed); let mut files = Vec::with_capacity(num_entries); - tracing::trace!(num_files = num_entries); - for (i, props) in file_props.iter().enumerate() { - let span = tracing::debug_span!("Read file {}", i); - let _enter = span.enter(); - - let file = BundleFile::from_reader(ctx, &mut r, *props) - .wrap_err_with(|| format!("Failed to read file {i}"))?; + for i in 0..num_entries { + let meta = headers.get(i).unwrap(); + let file = BundleFile::from_reader(ctx, &mut r, meta) + .wrap_err_with(|| format!("failed to read file {i}"))?; files.push(file); } Ok(Self { - name: name.into(), + name: bundle_name, format, + headers, files, properties, }) } #[tracing::instrument(skip_all)] - pub fn to_binary(&self) -> Result> { + pub fn to_binary(&self, ctx: &crate::Context) -> Result> { let mut w = Cursor::new(Vec::new()); w.write_u32(self.format.into())?; // TODO: Find out what this is. 
@@ -217,10 +257,8 @@ impl Bundle { w.write_u64((*prop).into())?; } - for file in self.files.iter() { - w.write_u64(file.file_type().into())?; - w.write_u64(file.base_name().to_murmur64().into())?; - w.write_u32(file.props().bits())?; + for meta in self.headers.iter() { + meta.to_writer(&mut w)?; } let unpacked_data = { @@ -228,15 +266,18 @@ impl Bundle { let _enter = span.enter(); tracing::trace!(num_files = self.files.len()); - self.files.iter().try_fold(Vec::new(), |mut data, file| { - data.append(&mut file.to_binary()?); - Ok::<_, Report>(data) - })? + self.files + .iter() + .fold(Ok::, Report>(Vec::new()), |data, file| { + let mut data = data?; + data.append(&mut file.to_binary()?); + Ok(data) + })? }; // Ceiling division (or division toward infinity) to calculate // the number of chunks required to fit the unpacked data. - let num_chunks = unpacked_data.len().div_ceil(CHUNK_SIZE); + let num_chunks = (unpacked_data.len() + CHUNK_SIZE - 1) / CHUNK_SIZE; tracing::trace!(num_chunks); w.write_u32(num_chunks as u32)?; @@ -252,10 +293,12 @@ impl Bundle { w.write_u32(0)?; let chunks = unpacked_data.chunks(CHUNK_SIZE); + + let oodle_lib = ctx.oodle.as_ref().expect("oodle library not defined"); let mut chunk_sizes = Vec::with_capacity(num_chunks); for chunk in chunks { - let compressed = oodle::compress(chunk)?; + let compressed = oodle_lib.compress(chunk)?; tracing::trace!( raw_chunk_size = chunk.len(), compressed_chunk_size = compressed.len() @@ -275,7 +318,7 @@ impl Bundle { Ok(w.into_inner()) } - pub fn name(&self) -> &IdString64 { + pub fn name(&self) -> &String { &self.name } @@ -292,7 +335,7 @@ impl Bundle { /// This is mainly useful for debugging purposes or /// to manullay inspect the raw data. 
#[tracing::instrument(skip_all)] -pub fn decompress(_ctx: &crate::Context, binary: B) -> Result> +pub fn decompress(ctx: &crate::Context, binary: B) -> Result> where B: AsRef<[u8]>, { @@ -356,8 +399,9 @@ where let mut compressed_buffer = vec![0u8; chunk_size]; r.read_exact(&mut compressed_buffer)?; + let oodle_lib = ctx.oodle.as_ref().unwrap(); // TODO: Optimize to not reallocate? - let mut raw_buffer = oodle::decompress( + let mut raw_buffer = oodle_lib.decompress( &compressed_buffer, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No, diff --git a/lib/sdk/src/context.rs b/lib/sdk/src/context.rs index 1500290..81a99f7 100644 --- a/lib/sdk/src/context.rs +++ b/lib/sdk/src/context.rs @@ -1,59 +1,12 @@ -use std::process::Command; -use std::{ffi::OsString, path::PathBuf}; +use std::path::PathBuf; -use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64}; - -pub struct CmdLine { - cmd: OsString, - args: Vec, -} - -impl CmdLine { - pub fn new(cmd: impl Into) -> Self { - Self { - cmd: cmd.into(), - args: vec![], - } - } - - pub fn arg(&mut self, arg: impl Into) -> &mut Self { - self.args.push(arg.into()); - self - } -} - -impl std::fmt::Debug for CmdLine { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("CmdLine") - .field("cmd", &self.cmd) - .field("args", &self.args) - .finish() - } -} - -impl std::fmt::Display for CmdLine { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "\"{}\"", self.cmd.to_string_lossy())?; - - for arg in &self.args { - write!(f, " \"{}\"", arg.to_string_lossy())?; - } - - Ok(()) - } -} - -impl From<&CmdLine> for Command { - fn from(value: &CmdLine) -> Self { - let mut cmd = Command::new(&value.cmd); - cmd.args(&value.args); - cmd - } -} +use crate::murmur::{Dictionary, HashGroup, Murmur32, Murmur64}; +use crate::oodle::Oodle; pub struct Context { pub lookup: Dictionary, - pub ljd: Option, + pub oodle: Option, + pub ljd: Option, pub revorb: Option, pub ww2ogg: 
Option, pub game_dir: Option, @@ -63,6 +16,7 @@ impl Context { pub fn new() -> Self { Self { lookup: Dictionary::new(), + oodle: None, ljd: None, revorb: None, ww2ogg: None, @@ -70,17 +24,17 @@ impl Context { } } - pub fn lookup_hash(&self, hash: M, group: HashGroup) -> IdString64 + pub fn lookup_hash(&self, hash: M, group: HashGroup) -> String where M: Into, { let hash = hash.into(); if let Some(s) = self.lookup.lookup(hash, group) { tracing::debug!(%hash, string = s, "Murmur64 lookup successful"); - s.to_string().into() + s.to_owned() } else { tracing::debug!(%hash, "Murmur64 lookup failed"); - hash.into() + format!("{hash:016X}") } } diff --git a/lib/sdk/src/filetype/lua.rs b/lib/sdk/src/filetype/lua.rs index dd0494e..87ac629 100644 --- a/lib/sdk/src/filetype/lua.rs +++ b/lib/sdk/src/filetype/lua.rs @@ -1,222 +1,85 @@ -use std::env; -use std::ffi::CStr; -use std::ffi::CString; -use std::io::Cursor; -use std::io::Read; -use std::io::Write; -use std::process::Command; +use std::io::{Cursor, Write}; -use color_eyre::eyre; -use color_eyre::eyre::Context; -use color_eyre::Result; -use luajit2_sys as lua; -use tokio::fs; +use color_eyre::{eyre::Context, Result}; +use tokio::{fs, process::Command}; -use crate::binary::sync::ReadExt; -use crate::binary::sync::WriteExt; -use crate::bundle::file::{BundleFileVariant, UserFile}; -use crate::murmur::IdString64; -use crate::{BundleFile, BundleFileType}; - -const BITSQUID_LUAJIT_HEADER: u32 = 0x8253461B; +use crate::{ + binary::sync::WriteExt, + bundle::file::{BundleFileVariant, UserFile}, + BundleFile, BundleFileType, +}; #[tracing::instrument(skip_all, fields(buf_len = data.as_ref().len()))] -pub(crate) async fn decompile(ctx: &crate::Context, data: T) -> Result> +pub(crate) async fn decompile(_ctx: &crate::Context, data: T) -> Result> where T: AsRef<[u8]>, { - let data = data.as_ref(); - let length = { - let mut r = Cursor::new(data); - r.read_u32()? 
as usize - }; - - // This skips the unknown bytes 5..12 - let content = &data[12..]; - eyre::ensure!( - content.len() == length, - "Content length doesn't match. Expected {}, got {}", - length, - content.len() - ); - - let name = { - let mut r = Cursor::new(content); - - eyre::ensure!( - r.read_u32()? == BITSQUID_LUAJIT_HEADER, - "Invalid magic bytes" - ); - - // Skip additional header bytes - let _ = r.read_uleb128()?; - let length = r.read_uleb128()? as usize; - - let mut buf = vec![0u8; length]; - r.read_exact(&mut buf)?; - let mut s = - String::from_utf8(buf).wrap_err("Invalid byte sequence for LuaJIT bytecode name")?; - // Remove the leading `@` - s.remove(0); - s - }; - - let mut temp = env::temp_dir(); - // Using the actual file name and keeping it in case of an error makes debugging easier. - // But to avoid creating a bunch of folders, we flatten the name. - temp.push(name.replace('/', "_")); - temp.set_extension("luao"); - - tracing::debug!( - "Writing temporary LuaJIT bytecode file to '{}'", - temp.display() - ); - - fs::write(&temp, content) - .await - .wrap_err_with(|| format!("Failed to write LuaJIT bytecode to '{}'", temp.display()))?; - - let mut cmd = ctx - .ljd - .as_ref() - .map(|c| c.into()) - .unwrap_or_else(|| Command::new("ljd")); - - cmd.arg("--catch_asserts") - .args(["--function_def_sugar", "false"]) - .args(["--function_def_self_arg", "true"]) - .args(["--unsafe", "false"]) - .arg("-f") - .arg(&temp); - - tracing::debug!("Executing command: '{:?}'", cmd); - - let output = cmd.output().wrap_err("Failed to run ljd")?; - - if !output.status.success() { - let err = eyre::eyre!( - "LJD exited with code {:?}:\n{}", - output.status.code(), - String::from_utf8_lossy(&output.stderr) - ); - tracing::error!("Failed to decompile '{}':\n{:?}", name, err); - } - - let content = output.stdout; - - // No need to wait for this, so we move it to a separate task. 
- tokio::spawn(async move { - if let Err(err) = fs::remove_file(&temp) - .await - .wrap_err_with(|| format!("Failed to remove temporary file '{}'", temp.display())) - { - tracing::warn!("{:?}", err); - } - }); - - Ok(vec![UserFile::with_name(content, name)]) + let mut _r = Cursor::new(data.as_ref()); + todo!(); } #[tracing::instrument(skip_all)] -pub fn compile(name: impl Into, code: impl AsRef) -> Result { - let name = name.into(); - let code = code.as_ref(); +pub(crate) async fn compile(name: String, code: S) -> Result +where + S: AsRef, +{ + let in_file_path = { + let mut path = std::env::temp_dir(); + let name: String = std::iter::repeat_with(fastrand::alphanumeric) + .take(10) + .collect(); + path.push(name + "-dtmt.lua"); - tracing::trace!( - "Compiling '{}', {} bytes of code", - name.display(), - code.len() - ); - - let bytecode = unsafe { - let state = lua::luaL_newstate(); - lua::luaL_openlibs(state); - - let name = CString::new(format!("@{}", name.display()).into_bytes()) - .wrap_err_with(|| format!("Cannot convert name into CString: {}", name.display()))?; - match lua::luaL_loadbuffer( - state, - code.as_ptr() as _, - code.len() as _, - name.as_ptr() as _, - ) as u32 - { - lua::LUA_OK => {} - lua::LUA_ERRSYNTAX => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Invalid syntax: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode") - } - _ => unreachable!(), - } - lua::lua_setglobal(state, c"fn".as_ptr()); - - let run = c"return string.dump(fn, false)"; - match lua::luaL_loadstring(state, run.as_ptr()) as u32 { - lua::LUA_OK => {} - lua::LUA_ERRSYNTAX => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Invalid syntax: {}", err); - } - lua::LUA_ERRMEM => { - 
lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode") - } - _ => unreachable!(), - } - - match lua::lua_pcall(state, 0, 1, 0) as u32 { - lua::LUA_OK => { - // The binary data is pretty much guaranteed to contain NUL bytes, - // so we can't rely on `lua_tostring` and `CStr` here. Instead we have to - // explicitely query the string length and build our vector from that. - // However, on the bright side, we don't have to go through any string types anymore, - // and can instead treat it as raw bytes immediately. - let mut len = 0; - let data = lua::lua_tolstring(state, -1, &mut len) as *const u8; - let data = std::slice::from_raw_parts(data, len).to_vec(); - - lua::lua_close(state); - - data - } - lua::LUA_ERRRUN => { - let err = lua::lua_tostring(state, -1); - let err = CStr::from_ptr(err).to_string_lossy().to_string(); - - lua::lua_close(state); - - eyre::bail!("Failed to compile LuaJIT bytecode: {}", err); - } - lua::LUA_ERRMEM => { - lua::lua_close(state); - eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode") - } - // We don't use an error handler function, so this should be unreachable - lua::LUA_ERRERR => unreachable!(), - _ => unreachable!(), - } + path + }; + + let out_file_path = { + let mut path = std::env::temp_dir(); + + let name: String = std::iter::repeat_with(fastrand::alphanumeric) + .take(10) + .collect(); + path.push(name + "-dtmt.luab"); + + path + }; + + fs::write(&in_file_path, code.as_ref().as_bytes()) + .await + .wrap_err_with(|| format!("failed to write file {}", in_file_path.display()))?; + + // TODO: Make executable name configurable + Command::new("luajit") + .arg("-bg") + .arg("-F") + .arg(name.clone() + ".lua") + .arg("-o") + .arg("Windows") + .arg(&in_file_path) + .arg(&out_file_path) + .status() + .await + .wrap_err("failed to compile to LuaJIT byte code")?; + + let mut data = Cursor::new(Vec::new()); + + let bytecode = { + let mut data = 
fs::read(&out_file_path) + .await + .wrap_err_with(|| format!("failed to read file {}", out_file_path.display()))?; + + // Add Fatshark's custom magic bytes + data[1] = 0x46; + data[2] = 0x53; + data[3] = 0x82; + + data }; - let mut data = Cursor::new(Vec::with_capacity(bytecode.len() + 12)); data.write_u32(bytecode.len() as u32)?; - // TODO: Figure out what these two values are - data.write_u32(0x2)?; - data.write_u32(0x0)?; - // Use Fatshark's custom magic bytes - data.write_all(&[0x1b, 0x46, 0x53, 0x82])?; - data.write_all(&bytecode[4..])?; + // I believe this is supposed to be a uleb128, but it seems to be always 0x2 in binary. + data.write_u64(0x2)?; + data.write_all(&bytecode)?; let mut file = BundleFile::new(name, BundleFileType::Lua); let mut variant = BundleFileVariant::new(); diff --git a/lib/sdk/src/filetype/package.rs b/lib/sdk/src/filetype/package.rs index 758f79f..00b7185 100644 --- a/lib/sdk/src/filetype/package.rs +++ b/lib/sdk/src/filetype/package.rs @@ -4,35 +4,23 @@ use std::ops::{Deref, DerefMut}; use std::path::{Path, PathBuf}; use std::str::FromStr; -use async_recursion::async_recursion; use color_eyre::eyre::{self, Context}; use color_eyre::Result; use tokio::fs; use crate::binary::sync::{ReadExt, WriteExt}; -use crate::bundle::file::UserFile; -use crate::bundle::filetype::BundleFileType; -use crate::murmur::{HashGroup, IdString64, Murmur64}; +use crate::bundle::file::{BundleFileType, UserFile}; +use crate::murmur::{HashGroup, Murmur64}; -/// Resolves a relative path that might contain wildcards into a list of -/// paths that exist on disk and match that wildcard. -/// This is similar to globbing in Unix shells, but with much less features. -/// -/// The only wilcard character allowed is `*`, and only at the end of the string, -/// where it matches all files recursively in that directory. -/// -/// `t` is an optional extension name, that may be used to force a wildcard -/// path to only match that file type `t`. 
#[tracing::instrument] -#[async_recursion] async fn resolve_wildcard( wildcard: P1, root: P2, t: Option, ) -> Result> where - P1: AsRef + std::fmt::Debug + std::marker::Send, - P2: AsRef + std::fmt::Debug + std::marker::Send + std::marker::Copy, + P1: AsRef + std::fmt::Debug, + P2: AsRef + std::fmt::Debug, { let wildcard = wildcard.as_ref(); @@ -68,46 +56,37 @@ where path.to_path_buf() }; - let meta = entry.metadata().await?; - if meta.is_dir() { - let wildcard = file_path.join("*"); - let inner_paths = resolve_wildcard(wildcard, root, t).await?; - paths.extend_from_slice(&inner_paths); - } else { - // Skip file if there is a desired extension `t`, but the file's - // extension name doesn't match - if t.is_some() { - let ext = file_path - .extension() - .and_then(|ext| ext.to_str()) - .and_then(|ext| BundleFileType::from_str(ext).ok()); + // Skip file if there is a desired extension `t`, but the file's + // extension name doesn't match + if t.is_some() { + let ext = file_path + .extension() + .and_then(|ext| ext.to_str()) + .and_then(|ext| BundleFileType::from_str(ext).ok()); - if ext != t { - tracing::warn!( - "Skipping wildcard result with invalid extension: {}", - file_path.display(), - ); - continue; - } + if ext != t { + tracing::debug!( + "Skipping wildcard result with invalid extension: {}", + file_path.display(), + ); + continue; } - - tracing::debug!("Found file {}", file_path.display()); - paths.push(file_path); } + + paths.push(file_path); } Ok(paths) } -type PackageType = HashMap>; +type PackageType = HashMap>; type PackageDefinition = HashMap>; #[derive(Default)] pub struct Package { - _name: IdString64, - _root: PathBuf, + name: String, + root: PathBuf, inner: PackageType, - flags: u8, } impl Deref for Package { @@ -125,35 +104,21 @@ impl DerefMut for Package { } impl Package { - pub fn new(name: impl Into, root: PathBuf) -> Self { - Self { - _name: name.into(), - _root: root, - inner: Default::default(), - flags: 1, - } - } - fn len(&self) -> usize { 
self.values().fold(0, |total, files| total + files.len()) } - pub fn add_file(&mut self, file_type: BundleFileType, name: impl Into) { + pub fn add_file>(&mut self, file_type: BundleFileType, name: P) { self.inner.entry(file_type).or_default().insert(name.into()); } #[tracing::instrument("Package::from_sjson", skip(sjson), fields(sjson_len = sjson.as_ref().len()))] - pub async fn from_sjson( - sjson: S, - name: impl Into + std::fmt::Debug, - root: P, - ) -> Result + pub async fn from_sjson(sjson: S, name: String, root: P) -> Result where P: AsRef + std::fmt::Debug, S: AsRef, { let root = root.as_ref(); - let name = name.into(); let definition: PackageDefinition = serde_sjson::from_str(sjson.as_ref())?; let mut inner: PackageType = Default::default(); @@ -162,7 +127,7 @@ impl Package { None } else { let t = BundleFileType::from_str(ty) - .wrap_err("Invalid file type in package definition")?; + .wrap_err("invalid file type in package definition")?; Some(t) }; @@ -187,20 +152,15 @@ impl Package { continue; }; - tracing::debug!("Adding file {}", path.display()); - inner - .entry(t) - .or_default() - .insert(path.display().to_string()); + inner.entry(t).or_default().insert(path); } } } let pkg = Self { inner, - _name: name, - _root: root.to_path_buf(), - flags: 1, + name, + root: root.to_path_buf(), }; Ok(pkg) @@ -210,13 +170,15 @@ impl Package { pub fn to_sjson(&self) -> Result { let mut map: PackageDefinition = Default::default(); - for (t, names) in self.iter() { - for name in names.iter() { - map.entry(t.ext_name()).or_default().insert(name.clone()); + for (t, paths) in self.iter() { + for path in paths.iter() { + map.entry(t.ext_name()) + .or_default() + .insert(path.display().to_string()); } } - serde_sjson::to_string(&map).wrap_err("Failed to serialize Package to SJSON") + serde_sjson::to_string(&map).wrap_err("failed to serialize Package to SJSON") } #[tracing::instrument("Package::from_binary", skip(binary, ctx), fields(binary_len = binary.as_ref().len()))] @@ 
-238,26 +200,14 @@ impl Package { for _ in 0..file_count { let t = BundleFileType::from(r.read_u64()?); let hash = Murmur64::from(r.read_u64()?); - let name = ctx.lookup_hash(hash, HashGroup::Filename); - inner - .entry(t) - .or_default() - .insert(name.display().to_string()); - } - - let flags = r.read_u8()?; - - if cfg!(debug_assertions) && flags != 1 { - tracing::warn!("Unexpected value for package flags: {:0x}", flags); - } else if (flags & 0xFE) >= 2 { - tracing::warn!("Resource Package has common packages. Ignoring."); + let path = ctx.lookup_hash(hash, HashGroup::Filename); + inner.entry(t).or_default().insert(PathBuf::from(path)); } let pkg = Self { inner, - _name: name.into(), - _root: PathBuf::new(), - flags, + name, + root: PathBuf::new(), }; Ok(pkg) @@ -271,15 +221,15 @@ impl Package { w.write_u32(0x2b)?; w.write_u32(self.values().flatten().count() as u32)?; - for (t, names) in self.iter() { - for name in names.iter() { + for (t, paths) in self.iter() { + for path in paths.iter() { w.write_u64(t.hash().into())?; - w.write_u64(Murmur64::hash(name.as_bytes()).into())?; + + let hash = Murmur64::hash(path.to_string_lossy().as_bytes()); + w.write_u64(hash.into())?; } } - w.write_u8(self.flags)?; - Ok(w.into_inner()) } } @@ -294,11 +244,17 @@ where Ok(vec![UserFile::new(s.into_bytes())]) } +// #[tracing::instrument(skip_all)] +// pub fn compile(_ctx: &crate::Context, data: String) -> Result> { +// let pkg = Package::from_sjson(data)?; +// pkg.to_binary() +// } + #[cfg(test)] mod test { use std::path::PathBuf; - use crate::bundle::filetype::BundleFileType; + use crate::BundleFileType; use super::resolve_wildcard; use super::Package; diff --git a/lib/sdk/src/filetype/strings.rs b/lib/sdk/src/filetype/strings.rs index 8643266..ca2ed8c 100644 --- a/lib/sdk/src/filetype/strings.rs +++ b/lib/sdk/src/filetype/strings.rs @@ -28,14 +28,10 @@ impl Language { #[derive(serde::Serialize)] pub struct Strings(HashMap>); -#[inline(always)] fn read_string(r: R) -> Result 
where R: Read, { - // We can safely ignore the warning here, as all data is already in memory, and no additional - // `BufReader` should be needed. - #[allow(clippy::unbuffered_bytes)] r.bytes() .take_while(|b| b.as_ref().map(|b| *b != 0).unwrap_or(false)) .map(|b| b.map_err(Report::new)) @@ -45,7 +41,7 @@ where impl Strings { #[tracing::instrument(skip_all, fields(languages = variants.len()))] - pub fn from_variants(ctx: &crate::Context, variants: &[BundleFileVariant]) -> Result { + pub fn from_variants(ctx: &crate::Context, variants: &Vec) -> Result { let mut map: HashMap> = HashMap::new(); for (i, variant) in variants.iter().enumerate() { @@ -80,7 +76,7 @@ impl Strings { } #[tracing::instrument(skip_all)] -pub fn decompile(ctx: &crate::Context, variants: &[BundleFileVariant]) -> Result> { +pub fn decompile(ctx: &crate::Context, variants: &Vec) -> Result> { let strings = Strings::from_variants(ctx, variants)?; let content = strings.to_sjson()?; diff --git a/lib/sdk/src/lib.rs b/lib/sdk/src/lib.rs index 9b1806b..2ecbfd7 100644 --- a/lib/sdk/src/lib.rs +++ b/lib/sdk/src/lib.rs @@ -1,13 +1,13 @@ -#![feature(test)] +#![feature(c_size_t)] mod binary; mod bundle; mod context; pub mod filetype; pub mod murmur; +mod oodle; -pub use binary::{FromBinary, ToBinary}; -pub use bundle::database::BundleDatabase; pub use bundle::decompress; -pub use bundle::{Bundle, BundleFile, BundleFileType, BundleFileVariant, Properties}; -pub use context::{CmdLine, Context}; +pub use bundle::{Bundle, BundleFile, BundleFileType}; +pub use context::Context; +pub use oodle::Oodle; diff --git a/lib/sdk/src/murmur/dictionary.rs b/lib/sdk/src/murmur/dictionary.rs index 267f0a4..322dded 100644 --- a/lib/sdk/src/murmur/dictionary.rs +++ b/lib/sdk/src/murmur/dictionary.rs @@ -55,24 +55,6 @@ pub struct Entry { group: HashGroup, } -impl Entry { - pub fn value(&self) -> &String { - &self.value - } - - pub fn long(&self) -> Murmur64 { - self.long - } - - pub fn short(&self) -> Murmur32 { - self.short - } 
- - pub fn group(&self) -> HashGroup { - self.group - } -} - pub struct Dictionary { entries: Vec, } @@ -147,14 +129,14 @@ impl Dictionary { Ok(()) } - pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) { - let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64)); - let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED)); + pub fn add(&mut self, value: String, group: HashGroup) { + let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64)); + let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED)); let entry = Entry { long, short, - value: String::from_utf8_lossy(value.as_ref()).to_string(), + value, group, }; @@ -190,8 +172,4 @@ impl Dictionary { pub fn is_empty(&self) -> bool { self.entries.is_empty() } - - pub fn entries(&self) -> &Vec { - &self.entries - } } diff --git a/lib/sdk/src/murmur/idstring32.rs b/lib/sdk/src/murmur/idstring32.rs deleted file mode 100644 index 99ea7aa..0000000 --- a/lib/sdk/src/murmur/idstring32.rs +++ /dev/null @@ -1,162 +0,0 @@ -use std::fmt; - -use serde::{Deserializer, Serializer}; - -use super::Murmur32; - -// This type encodes the fact that when reading in a bundle, we don't always have a dictionary -// entry for every hash in there. So we do want to have the real string available when needed, -// but at the same time retain the original hash information for when we don't. -// This is especially important when wanting to write back the read bundle, as the hashes need to -// stay the same. -// The previous system of always turning hashes into strings worked well for the purpose of -// displaying hashes, but would have made it very hard to turn a stringyfied hash back into -// an actual hash. 
-#[derive(Clone, Debug, Eq)] -pub enum IdString32 { - Hash(Murmur32), - String(String), -} - -impl IdString32 { - pub fn to_murmur32(&self) -> Murmur32 { - match self { - Self::Hash(hash) => *hash, - Self::String(s) => Murmur32::hash(s.as_bytes()), - } - } - - pub fn display(&self) -> IdString32Display { - let s = match self { - IdString32::Hash(hash) => hash.to_string(), - IdString32::String(s) => s.clone(), - }; - - IdString32Display(s) - } - - pub fn is_string(&self) -> bool { - match self { - IdString32::Hash(_) => false, - IdString32::String(_) => true, - } - } - - pub fn is_hash(&self) -> bool { - match self { - IdString32::Hash(_) => true, - IdString32::String(_) => false, - } - } -} - -impl From for IdString32 { - fn from(value: String) -> Self { - Self::String(value) - } -} - -impl From for IdString32 { - fn from(value: u32) -> Self { - Self::Hash(value.into()) - } -} - -impl From for u32 { - fn from(value: IdString32) -> Self { - value.to_murmur32().into() - } -} - -impl From for IdString32 { - fn from(value: Murmur32) -> Self { - Self::Hash(value) - } -} - -impl From for Murmur32 { - fn from(value: IdString32) -> Self { - value.to_murmur32() - } -} - -impl PartialEq for IdString32 { - fn eq(&self, other: &Self) -> bool { - self.to_murmur32() == other.to_murmur32() - } -} - -impl std::hash::Hash for IdString32 { - fn hash(&self, state: &mut H) { - state.write_u32(self.to_murmur32().into()); - } -} - -impl serde::Serialize for IdString32 { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_u32(self.to_murmur32().into()) - } -} - -struct IdString32Visitor; - -impl<'de> serde::de::Visitor<'de> for IdString32Visitor { - type Value = IdString32; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("an u32 or a string") - } - - fn visit_u32(self, value: u32) -> Result - where - E: serde::de::Error, - { - Ok(IdString32::Hash(value.into())) - } - - fn visit_str(self, v: 
&str) -> Result - where - E: serde::de::Error, - { - Ok(IdString32::String(v.to_string())) - } - - fn visit_string(self, v: String) -> Result - where - E: serde::de::Error, - { - Ok(IdString32::String(v)) - } -} - -impl<'de> serde::Deserialize<'de> for IdString32 { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_u32(IdString32Visitor) - } -} - -pub struct IdString32Display(String); - -impl std::fmt::Display for IdString32Display { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -impl std::fmt::UpperHex for IdString32 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - std::fmt::UpperHex::fmt(&self.to_murmur32(), f) - } -} - -impl std::fmt::LowerHex for IdString32 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - std::fmt::LowerHex::fmt(&self.to_murmur32(), f) - } -} diff --git a/lib/sdk/src/murmur/idstring64.rs b/lib/sdk/src/murmur/idstring64.rs deleted file mode 100644 index 781a0cd..0000000 --- a/lib/sdk/src/murmur/idstring64.rs +++ /dev/null @@ -1,175 +0,0 @@ -use std::{fmt, path::Path}; - -use path_slash::PathExt as _; -use serde::{Deserializer, Serializer}; - -use super::Murmur64; - -// This type encodes the fact that when reading in a bundle, we don't always have a dictionary -// entry for every hash in there. So we do want to have the real string available when needed, -// but at the same time retain the original hash information for when we don't. -// This is especially important when wanting to write back the read bundle, as the hashes need to -// stay the same. -// The previous system of always turning hashes into strings worked well for the purpose of -// displaying hashes, but would have made it very hard to turn a stringyfied hash back into -// an actual hash. 
-#[derive(Clone, Debug, Eq)] -pub enum IdString64 { - Hash(Murmur64), - String(String), -} - -impl IdString64 { - pub fn to_murmur64(&self) -> Murmur64 { - match self { - Self::Hash(hash) => *hash, - Self::String(s) => Murmur64::hash(s.as_bytes()), - } - } - - pub fn display(&self) -> IdString64Display { - let s = match self { - IdString64::Hash(hash) => hash.to_string(), - IdString64::String(s) => s.clone(), - }; - - IdString64Display(s) - } - - pub fn is_string(&self) -> bool { - match self { - IdString64::Hash(_) => false, - IdString64::String(_) => true, - } - } - - pub fn is_hash(&self) -> bool { - match self { - IdString64::Hash(_) => true, - IdString64::String(_) => false, - } - } - - // Would love to have this as a proper `impl From`, but - // rustc will complain that it overlaps with the `impl From>`. - pub fn from_path(p: impl AsRef) -> Self { - Self::String(p.as_ref().to_slash_lossy().to_string()) - } -} - -impl From for IdString64 { - fn from(value: String) -> Self { - Self::String(value) - } -} - -impl From for IdString64 { - fn from(value: u64) -> Self { - Self::Hash(value.into()) - } -} - -impl From for IdString64 { - fn from(value: Murmur64) -> Self { - Self::Hash(value) - } -} - -impl From for Murmur64 { - fn from(value: IdString64) -> Self { - value.to_murmur64() - } -} - -impl From for u64 { - fn from(value: IdString64) -> Self { - value.to_murmur64().into() - } -} - -impl Default for IdString64 { - fn default() -> Self { - Self::Hash(0.into()) - } -} - -impl PartialEq for IdString64 { - fn eq(&self, other: &Self) -> bool { - self.to_murmur64() == other.to_murmur64() - } -} - -impl std::hash::Hash for IdString64 { - fn hash(&self, state: &mut H) { - state.write_u64(self.to_murmur64().into()); - } -} - -impl serde::Serialize for IdString64 { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_u64(self.to_murmur64().into()) - } -} - -struct IdString64Visitor; - -impl<'de> serde::de::Visitor<'de> for 
IdString64Visitor { - type Value = IdString64; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("an u64 or a string") - } - - fn visit_u64(self, value: u64) -> Result - where - E: serde::de::Error, - { - Ok(IdString64::Hash(value.into())) - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - Ok(IdString64::String(v.to_string())) - } - - fn visit_string(self, v: String) -> Result - where - E: serde::de::Error, - { - Ok(IdString64::String(v)) - } -} - -impl<'de> serde::Deserialize<'de> for IdString64 { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_u64(IdString64Visitor) - } -} - -pub struct IdString64Display(String); - -impl std::fmt::Display for IdString64Display { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -impl std::fmt::UpperHex for IdString64 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - std::fmt::UpperHex::fmt(&self.to_murmur64(), f) - } -} - -impl std::fmt::LowerHex for IdString64 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - std::fmt::LowerHex::fmt(&self.to_murmur64(), f) - } -} diff --git a/lib/sdk/src/murmur/mod.rs b/lib/sdk/src/murmur/mod.rs index 6449d38..95e66fa 100644 --- a/lib/sdk/src/murmur/mod.rs +++ b/lib/sdk/src/murmur/mod.rs @@ -1,26 +1,239 @@ use std::fmt; use color_eyre::eyre::Context; -use color_eyre::{Report, Result}; +use color_eyre::Report; use serde::de::Visitor; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use serde::{Deserialize, Serialize}; +use serde::{Deserializer, Serializer}; mod dictionary; // Currently unused // mod murmurhash32; -mod idstring32; -mod idstring64; mod murmurhash64; -mod types; -mod util; pub const SEED: u32 = 0; -pub use dictionary::{Dictionary, Entry, HashGroup}; -pub use idstring32::*; -pub use idstring64::*; +pub use dictionary::Dictionary; +pub use 
dictionary::HashGroup; pub use murmurhash64::hash; pub use murmurhash64::hash32; pub use murmurhash64::hash_inverse as inverse; -pub use types::*; +fn _swap_bytes_u32(value: u32) -> u32 { + u32::from_le_bytes(value.to_be_bytes()) +} + +fn _swap_bytes_u64(value: u64) -> u64 { + u64::from_le_bytes(value.to_be_bytes()) +} + +#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)] +pub struct Murmur64(u64); + +impl Murmur64 { + pub fn hash(s: B) -> Self + where + B: AsRef<[u8]>, + { + hash(s.as_ref(), SEED as u64).into() + } +} + +impl From for Murmur64 { + fn from(value: u64) -> Self { + Self(value) + } +} + +impl From for u64 { + fn from(value: Murmur64) -> Self { + value.0 + } +} + +impl TryFrom<&str> for Murmur64 { + type Error = Report; + + fn try_from(value: &str) -> Result { + u64::from_str_radix(value, 16) + .map(Self) + .wrap_err_with(|| format!("failed to convert value to Murmur64: {value}")) + } +} + +impl fmt::UpperHex for Murmur64 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::UpperHex::fmt(&self.0, f) + } +} + +impl fmt::Display for Murmur64 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::UpperHex::fmt(&self.0, f) + } +} + +impl<'de> Visitor<'de> for Murmur64 { + type Value = Self; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str( + "an usigned 64 bit integer \ + or a string in hexadecimal format encoding such an integer", + ) + } + + fn visit_f64(self, value: f64) -> Result + where + E: serde::de::Error, + { + let bytes = value.to_le_bytes(); + Ok(Self::from(u64::from_le_bytes(bytes))) + } + + fn visit_u64(self, value: u64) -> Result + where + E: serde::de::Error, + { + Ok(Self::from(value)) + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + match Murmur64::try_from(value) { + Ok(hash) => Ok(hash), + Err(err) => Err(E::custom(format!( + "failed to convert '{value}' to Murmur64: {err}" + ))), + } + } +} + +impl<'de> Deserialize<'de> 
for Murmur64 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_any(Self(0)) + } +} + +impl Serialize for Murmur64 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&format!("{self:016X}")) + } +} + +#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)] +pub struct Murmur32(u32); + +impl Murmur32 { + pub fn hash(s: B) -> Self + where + B: AsRef<[u8]>, + { + hash32(s.as_ref(), SEED).into() + } +} + +impl From for Murmur32 { + fn from(value: u32) -> Self { + Self(value) + } +} + +impl From for u32 { + fn from(value: Murmur32) -> Self { + value.0 + } +} + +impl TryFrom<&str> for Murmur32 { + type Error = Report; + + fn try_from(value: &str) -> Result { + u32::from_str_radix(value, 16) + .map(Self) + .wrap_err_with(|| format!("failed to convert value to Murmur32: {value}")) + } +} + +impl fmt::UpperHex for Murmur32 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::UpperHex::fmt(&self.0, f) + } +} + +impl fmt::Display for Murmur32 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::UpperHex::fmt(&self.0, f) + } +} + +impl Serialize for Murmur32 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&format!("{self:08X}")) + } +} + +impl<'de> Visitor<'de> for Murmur32 { + type Value = Self; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str( + "an usigned 32 bit integer \ + or a string in hexadecimal format encoding such an integer", + ) + } + + fn visit_f64(self, value: f64) -> Result + where + E: serde::de::Error, + { + let bytes = value.to_le_bytes(); + self.visit_u32(u64::from_le_bytes(bytes) as u32) + } + + fn visit_u64(self, value: u64) -> Result + where + E: serde::de::Error, + { + self.visit_u32(value as u32) + } + + fn visit_u32(self, value: u32) -> Result + where + E: serde::de::Error, + { + Ok(Self::from(value)) + } + + fn 
visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + match Murmur32::try_from(value) { + Ok(hash) => Ok(hash), + Err(err) => Err(E::custom(format!( + "failed to convert '{value}' to Murmur32: {err}" + ))), + } + } +} + +impl<'de> Deserialize<'de> for Murmur32 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_any(Self(0)) + } +} diff --git a/lib/sdk/src/murmur/murmurhash64.rs b/lib/sdk/src/murmur/murmurhash64.rs index ca69852..f15248c 100644 --- a/lib/sdk/src/murmur/murmurhash64.rs +++ b/lib/sdk/src/murmur/murmurhash64.rs @@ -119,9 +119,4 @@ fn test_hash() { } #[test] -fn test_inverse() { - let h = hash("lua".as_bytes(), crate::murmur::SEED as u64); - let inv = hash_inverse(h, crate::murmur::SEED as u64); - assert_eq!(h, hash(&inv.to_le_bytes(), crate::murmur::SEED as u64)); - assert_ne!(h, hash(&inv.to_be_bytes(), crate::murmur::SEED as u64)); -} +fn test_inverse() {} diff --git a/lib/sdk/src/murmur/types.rs b/lib/sdk/src/murmur/types.rs deleted file mode 100644 index c66e2cf..0000000 --- a/lib/sdk/src/murmur/types.rs +++ /dev/null @@ -1,226 +0,0 @@ -use self::util::{parse_hex32, parse_hex64}; - -use super::*; - -#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)] -pub struct Murmur64(u64); - -impl Murmur64 { - pub fn hash(s: B) -> Self - where - B: AsRef<[u8]>, - { - hash(s.as_ref(), SEED as u64).into() - } -} - -impl From for Murmur64 { - fn from(value: u64) -> Self { - Self(value) - } -} - -impl From for u64 { - fn from(value: Murmur64) -> Self { - value.0 - } -} - -impl TryFrom<&str> for Murmur64 { - type Error = Report; - - fn try_from(value: &str) -> Result { - parse_hex64(value) - .map(Self) - .wrap_err_with(|| format!("Failed to convert value to Murmur64: {value}")) - } -} - -impl fmt::UpperHex for Murmur64 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::UpperHex::fmt(&self.0, f) - } -} - -impl fmt::LowerHex for Murmur64 { - fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { - fmt::LowerHex::fmt(&self.0, f) - } -} - -impl fmt::Display for Murmur64 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:016X}", self) - } -} - -impl<'de> Visitor<'de> for Murmur64 { - type Value = Self; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str( - "an usigned 64 bit integer \ - or a string in hexadecimal format encoding such an integer", - ) - } - - fn visit_f64(self, value: f64) -> Result - where - E: serde::de::Error, - { - let bytes = value.to_le_bytes(); - Ok(Self::from(u64::from_le_bytes(bytes))) - } - - fn visit_u64(self, value: u64) -> Result - where - E: serde::de::Error, - { - Ok(Self::from(value)) - } - - fn visit_str(self, value: &str) -> Result - where - E: serde::de::Error, - { - match Murmur64::try_from(value) { - Ok(hash) => Ok(hash), - Err(err) => Err(E::custom(format!( - "failed to convert '{value}' to Murmur64: {err}" - ))), - } - } -} - -impl<'de> Deserialize<'de> for Murmur64 { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_any(Self(0)) - } -} - -impl Serialize for Murmur64 { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&format!("{self:016X}")) - } -} - -#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)] -pub struct Murmur32(u32); - -impl Murmur32 { - pub fn hash(s: B) -> Self - where - B: AsRef<[u8]>, - { - hash32(s.as_ref(), SEED).into() - } -} - -impl From for Murmur32 { - fn from(value: u32) -> Self { - Self(value) - } -} - -impl From for u32 { - fn from(value: Murmur32) -> Self { - value.0 - } -} - -impl TryFrom<&str> for Murmur32 { - type Error = Report; - - fn try_from(value: &str) -> Result { - parse_hex32(value) - .map(Self) - .wrap_err_with(|| format!("Failed to convert value to Murmur32: {value}")) - } -} - -impl fmt::UpperHex for Murmur32 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result 
{ - fmt::UpperHex::fmt(&self.0, f) - } -} - -impl fmt::LowerHex for Murmur32 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::LowerHex::fmt(&self.0, f) - } -} - -impl fmt::Display for Murmur32 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:08X}", self) - } -} - -impl Serialize for Murmur32 { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&format!("{self:08X}")) - } -} - -impl<'de> Visitor<'de> for Murmur32 { - type Value = Self; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str( - "an usigned 32 bit integer \ - or a string in hexadecimal format encoding such an integer", - ) - } - - fn visit_f64(self, value: f64) -> Result - where - E: serde::de::Error, - { - let bytes = value.to_le_bytes(); - self.visit_u32(u64::from_le_bytes(bytes) as u32) - } - - fn visit_u64(self, value: u64) -> Result - where - E: serde::de::Error, - { - self.visit_u32(value as u32) - } - - fn visit_u32(self, value: u32) -> Result - where - E: serde::de::Error, - { - Ok(Self::from(value)) - } - - fn visit_str(self, value: &str) -> Result - where - E: serde::de::Error, - { - match Murmur32::try_from(value) { - Ok(hash) => Ok(hash), - Err(err) => Err(E::custom(format!( - "failed to convert '{value}' to Murmur32: {err}" - ))), - } - } -} - -impl<'de> Deserialize<'de> for Murmur32 { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_any(Self(0)) - } -} diff --git a/lib/sdk/src/murmur/util.rs b/lib/sdk/src/murmur/util.rs deleted file mode 100644 index 134c4e7..0000000 --- a/lib/sdk/src/murmur/util.rs +++ /dev/null @@ -1,132 +0,0 @@ -use color_eyre::eyre::bail; -use color_eyre::Result; - -// Generates tables similar to these: -// https://github.com/zbjornson/fast-hex/blob/a3487bca95127634a61bfeae8f8bfc8f0e5baa3f/src/hex.cc#L20-L89 -// `upper` determines upper vs. 
lower bits (first character is `upper`). -const fn generate_byte_map(upper: bool) -> [u8; 256] { - let mut out = [0u8; 256]; - let factor = if upper { 16 } else { 1 }; - - let mut i = 0; - - while i < 256 { - match i { - 0x30..=0x39 => out[i] = factor * (i as u8 - 0x30), - 0x41..=0x46 => out[i] = factor * (9 + i as u8 - 0x40), - 0x61..=0x66 => out[i] = factor * (9 + i as u8 - 0x60), - _ => out[i] = u8::MAX, - } - i += 1; - } - - out -} - -const BYTE_MAP_UPPER: [u8; 256] = generate_byte_map(true); -const BYTE_MAP_LOWER: [u8; 256] = generate_byte_map(false); - -macro_rules! make_parse_hex { - ($name:ident, $ty:ty, $len:expr) => { - #[inline] - pub fn $name(s: impl AsRef) -> Result<$ty> { - // For the string to be valid hex characters, it needs to be ASCII. - // So we can simply treat it as a byte stream. - let s = s.as_ref().as_bytes(); - - if s.len() != $len { - bail!( - "String length doesn't match. Expected {}, got {}", - $len, - s.len() - ); - } - - let n = $len / 2; - let mut out: $ty = 0; - let mut i = 0; - - while i < n { - let j = i * 2; - - let c1 = BYTE_MAP_UPPER[s[j] as usize]; - if c1 == u8::MAX { - bail!("Invalid character '{:?}' ({})", char::from(c1), c1); - } - - let c2 = BYTE_MAP_LOWER[s[j + 1] as usize]; - if c2 == u8::MAX { - bail!("Invalid character '{:?}' ({})", char::from(c2), c2); - } - - out |= ((c1 + c2) as $ty) << (n - i - 1) * 8; - - i += 1; - } - - Ok(out) - } - }; -} - -make_parse_hex!(parse_hex64, u64, 16); -make_parse_hex!(parse_hex32, u32, 8); - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn parse_32() { - let hash = "A14E8DFA"; - assert_eq!(parse_hex32(hash).unwrap(), 0xA14E8DFA); - } - - #[test] - fn parse_64() { - let hash = "A14E8DFA2CD117E2"; - assert_eq!(parse_hex64(hash).unwrap(), 0xA14E8DFA2CD117E2); - } - - #[test] - fn std_from_radix_32() { - let hash = "A14E8DFA"; - assert_eq!(u32::from_str_radix(hash, 16).unwrap(), 0xA14E8DFA); - } - - #[test] - fn std_from_radix_64() { - let hash = "A14E8DFA2CD117E2"; - 
assert_eq!(u64::from_str_radix(hash, 16).unwrap(), 0xA14E8DFA2CD117E2); - } -} - -#[cfg(test)] -mod bench { - use super::{parse_hex32, parse_hex64}; - - extern crate test; - - const HASH32: &str = "A14E8DFA"; - const HASH64: &str = "A14E8DFA2CD117E2"; - - #[bench] - fn custom_32(b: &mut test::Bencher) { - b.iter(|| test::black_box(parse_hex32(test::black_box(HASH32)))) - } - - #[bench] - fn std_32(b: &mut test::Bencher) { - b.iter(|| test::black_box(u32::from_str_radix(test::black_box(HASH32), 16))) - } - - #[bench] - fn custom_64(b: &mut test::Bencher) { - b.iter(|| test::black_box(parse_hex64(test::black_box(HASH64)))) - } - - #[bench] - fn std_64(b: &mut test::Bencher) { - b.iter(|| test::black_box(u64::from_str_radix(test::black_box(HASH64), 16))) - } -} diff --git a/lib/sdk/src/oodle/mod.rs b/lib/sdk/src/oodle/mod.rs new file mode 100644 index 0000000..0dbb114 --- /dev/null +++ b/lib/sdk/src/oodle/mod.rs @@ -0,0 +1,143 @@ +use std::ffi::OsStr; +use std::ops::Deref; +use std::ptr; + +use color_eyre::eyre; +use color_eyre::Result; +use libloading::{Library, Symbol}; + +pub mod types; +use types::*; + +// Hardcoded chunk size of Bitsquid's bundle compression +pub const CHUNK_SIZE: usize = 512 * 1024; +pub const COMPRESSOR: OodleLZ_Compressor = OodleLZ_Compressor::Kraken; +pub const LEVEL: OodleLZ_CompressionLevel = OodleLZ_CompressionLevel::Optimal2; + +pub struct Oodle { + lib: Library, +} + +impl Oodle { + pub fn new

(lib: P) -> Result + where + P: AsRef, + { + let lib = unsafe { Library::new(lib)? }; + + unsafe { + let fun: Symbol = + lib.get(b"OodleCore_Plugins_SetPrintf\0")?; + let printf: Symbol = + lib.get(b"OodleCore_Plugin_Printf_Verbose\0")?; + + fun(*printf.deref()); + } + + Ok(Self { lib }) + } + + #[tracing::instrument(name = "Oodle::decompress", skip(self, data))] + pub fn decompress( + &self, + data: I, + fuzz_safe: OodleLZ_FuzzSafe, + check_crc: OodleLZ_CheckCRC, + ) -> Result> + where + I: AsRef<[u8]>, + { + let data = data.as_ref(); + let mut out = vec![0; CHUNK_SIZE]; + + let verbosity = if tracing::enabled!(tracing::Level::INFO) { + OodleLZ_Verbosity::Minimal + } else if tracing::enabled!(tracing::Level::DEBUG) { + OodleLZ_Verbosity::Some + } else if tracing::enabled!(tracing::Level::TRACE) { + OodleLZ_Verbosity::Lots + } else { + OodleLZ_Verbosity::None + }; + + let ret = unsafe { + let decompress: Symbol = self.lib.get(b"OodleLZ_Decompress\0")?; + + decompress( + data.as_ptr() as *const _, + data.len(), + out.as_mut_ptr() as *mut _, + out.len(), + fuzz_safe, + check_crc, + verbosity, + ptr::null_mut(), + 0, + ptr::null_mut(), + ptr::null_mut(), + ptr::null_mut(), + 0, + OodleLZ_Decode_ThreadPhase::UNTHREADED, + ) + }; + + if ret == 0 { + eyre::bail!("Decompression failed."); + } + + Ok(out) + } + + #[tracing::instrument(name = "Oodle::compress", skip(self, data))] + pub fn compress(&self, data: I) -> Result> + where + I: AsRef<[u8]>, + { + let mut raw = Vec::from(data.as_ref()); + raw.resize(CHUNK_SIZE, 0); + + // TODO: Query oodle for buffer size + let mut out = vec![0u8; CHUNK_SIZE]; + + let ret = unsafe { + let compress: Symbol = self.lib.get(b"OodleLZ_Compress\0")?; + + compress( + COMPRESSOR, + raw.as_ptr() as *const _, + raw.len(), + out.as_mut_ptr() as *mut _, + LEVEL, + ptr::null_mut(), + 0, + ptr::null_mut(), + ptr::null_mut(), + 0, + ) + }; + + tracing::debug!(compressed_size = ret, "Compressed chunk"); + + if ret == 0 { + eyre::bail!("Compression 
failed."); + } + + out.resize(ret as usize, 0); + + Ok(out) + } + + pub fn get_decode_buffer_size( + &self, + raw_size: usize, + corruption_possible: bool, + ) -> Result { + unsafe { + let f: Symbol = + self.lib.get(b"OodleLZ_GetDecodeBufferSize\0")?; + + let size = f(COMPRESSOR, raw_size, corruption_possible); + Ok(size) + } + } +} diff --git a/lib/sdk/src/oodle/types.rs b/lib/sdk/src/oodle/types.rs new file mode 100644 index 0000000..5d306f8 --- /dev/null +++ b/lib/sdk/src/oodle/types.rs @@ -0,0 +1,197 @@ +#![allow(dead_code)] +use core::ffi::{c_char, c_int, c_size_t, c_ulonglong, c_void}; + +// Type definitions taken from Unreal Engine's `oodle2.h` + +#[repr(C)] +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Debug)] +pub enum OodleLZ_FuzzSafe { + No = 0, + Yes = 1, +} + +impl From for OodleLZ_FuzzSafe { + fn from(value: bool) -> Self { + if value { + Self::Yes + } else { + Self::No + } + } +} + +#[repr(C)] +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Debug)] +pub enum OodleLZ_CheckCRC { + No = 0, + Yes = 1, + Force32 = 0x40000000, +} + +impl From for OodleLZ_CheckCRC { + fn from(value: bool) -> Self { + if value { + Self::Yes + } else { + Self::No + } + } +} + +#[repr(C)] +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Debug)] +pub enum OodleLZ_Verbosity { + None = 0, + Minimal = 1, + Some = 2, + Lots = 3, + Force32 = 0x40000000, +} + +#[repr(C)] +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Debug)] +pub enum OodleLZ_Decode_ThreadPhase { + Phase1 = 1, + Phase2 = 2, + PhaseAll = 3, +} + +impl OodleLZ_Decode_ThreadPhase { + pub const UNTHREADED: Self = OodleLZ_Decode_ThreadPhase::PhaseAll; +} + +#[repr(C)] +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Debug)] +pub enum OodleLZ_Compressor { + Invalid = -1, + // None = memcpy, pass through uncompressed bytes + None = 3, + + // NEW COMPRESSORS: + // Fast decompression and high compression ratios, amazing! 
+ Kraken = 8, + // Leviathan = Kraken's big brother with higher compression, slightly slower decompression. + Leviathan = 13, + // Mermaid is between Kraken & Selkie - crazy fast, still decent compression. + Mermaid = 9, + // Selkie is a super-fast relative of Mermaid. For maximum decode speed. + Selkie = 11, + // Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra) + Hydra = 12, + BitKnit = 10, + // DEPRECATED but still supported + Lzb16 = 4, + Lzna = 7, + Lzh = 0, + Lzhlw = 1, + Lznib = 2, + Lzblw = 5, + Lza = 6, + Count = 14, + Force32 = 0x40000000, +} + +#[repr(C)] +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, Debug)] +pub enum OodleLZ_CompressionLevel { + // don't compress, just copy raw bytes + None = 0, + // super fast mode, lower compression ratio + SuperFast = 1, + // fastest LZ mode with still decent compression ratio + VeryFast = 2, + // fast - good for daily use + Fast = 3, + // standard medium speed LZ mode + Normal = 4, + // optimal parse level 1 (faster optimal encoder) + Optimal1 = 5, + // optimal parse level 2 (recommended baseline optimal encoder) + Optimal2 = 6, + // optimal parse level 3 (slower optimal encoder) + Optimal3 = 7, + // optimal parse level 4 (very slow optimal encoder) + Optimal4 = 8, + // optimal parse level 5 (don't care about encode speed, maximum compression) + Optimal5 = 9, + // faster than SuperFast, less compression + HyperFast1 = -1, + // faster than HyperFast1, less compression + HyperFast2 = -2, + // faster than HyperFast2, less compression + HyperFast3 = -3, + // fastest, less compression + HyperFast4 = -4, + Force32 = 0x40000000, +} + +impl OodleLZ_CompressionLevel { + // alias hyperfast base level + pub const HYPERFAST: Self = OodleLZ_CompressionLevel::HyperFast1; + // alias optimal standard level + pub const OPTIMAL: Self = OodleLZ_CompressionLevel::Optimal2; + // maximum compression level + pub const MAX: Self = OodleLZ_CompressionLevel::Optimal5; + // fastest 
compression level + pub const MIN: Self = OodleLZ_CompressionLevel::HyperFast4; + pub const INVALID: Self = OodleLZ_CompressionLevel::Force32; +} + +#[allow(non_camel_case_types)] +pub type t_fp_OodleCore_Plugin_Printf = + extern "C" fn(level: c_int, file: *const c_char, line: c_int, fmt: *const c_char); + +#[allow(non_camel_case_types)] +pub type OodleLZ_Decompress = extern "C" fn( + compressed_buffer: *const c_void, + compressed_length: c_size_t, + raw_buffer: *mut c_void, + raw_length: c_size_t, + fuzz_safe: OodleLZ_FuzzSafe, + check_crc: OodleLZ_CheckCRC, + verbosity: OodleLZ_Verbosity, + decBufBase: *mut c_void, + decBufSize: c_size_t, + callback: *const c_void, + callback_user_data: *const c_void, + decoder_memory: *mut c_void, + decoder_memory_size: c_size_t, + thread_phase: OodleLZ_Decode_ThreadPhase, +) -> c_ulonglong; + +#[allow(non_camel_case_types)] +pub type OodleLZ_Compress = extern "C" fn( + compressor: OodleLZ_Compressor, + raw_buffer: *const c_void, + raw_len: c_size_t, + compressed_buffer: *mut c_void, + level: OodleLZ_CompressionLevel, + options: *const c_void, + dictionary_base: c_size_t, + lrm: *const c_void, + scratch_memory: *mut c_void, + scratch_size: c_size_t, +) -> c_ulonglong; + +#[allow(non_camel_case_types)] +pub type OodleLZ_GetDecodeBufferSize = extern "C" fn( + compressor: OodleLZ_Compressor, + raw_size: c_size_t, + corruption_possible: bool, +) -> c_size_t; + +#[allow(non_camel_case_types)] +pub type OodleCore_Plugins_SetPrintf = + extern "C" fn(f: t_fp_OodleCore_Plugin_Printf) -> t_fp_OodleCore_Plugin_Printf; + +#[allow(non_camel_case_types)] +pub type OodleCore_Plugin_Printf_Verbose = t_fp_OodleCore_Plugin_Printf; + +#[allow(non_camel_case_types)] +pub type OodleCore_Plugin_Printf_Default = t_fp_OodleCore_Plugin_Printf; diff --git a/lib/serde_sjson b/lib/serde_sjson new file mode 160000 index 0000000..39486e8 --- /dev/null +++ b/lib/serde_sjson @@ -0,0 +1 @@ +Subproject commit 39486e8503488a92b3e84af7cbf93fb67988e13f