Compare commits


No commits in common. "master" and "dtmm-v1.0.0-rc0" have entirely different histories.

106 changed files with 3171 additions and 8186 deletions

@@ -1,7 +0,0 @@
FROM dtmt-ci-base-linux
COPY . /src/dtmt
COPY --from=dtmt-ci-base-linux /src/*.lib /src/*.so /src/dtmt/lib/oodle/
RUN --mount=type=cache,id=cargo-registry,target=/cargo/registry \
--mount=type=cache,id=cargo-target,target=/src/dtmt/target \
cargo build --release --locked
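As a usage note: `RUN --mount=type=cache` requires BuildKit, so building this file locally (mirroring the `build-image-linux` recipe in the Justfile further down) would look roughly like this sketch:

    # BuildKit is needed for the cache mounts above
    DOCKER_BUILDKIT=1 docker build -f .ci/Dockerfile.linux .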

@@ -1,35 +0,0 @@
FROM dtmt-ci-base-msvc
# Create dummy crates and copy their Cargo.toml, so that dependencies can be cached
RUN set -e; \
cargo new --bin crates/dtmt; \
cargo new --bin crates/dtmm; \
cargo new --lib lib/dtmt-shared; \
cargo new --lib lib/nexusmods; \
cargo new --lib lib/sdk; \
cargo new --lib lib/serde_sjson; \
cargo new --lib lib/ansi-parser
COPY Cargo.toml Cargo.lock /src/dtmt/
COPY crates/dtmt/Cargo.toml /src/dtmt/crates/dtmt/
COPY crates/dtmm/Cargo.toml /src/dtmt/crates/dtmm/
COPY lib/dtmt-shared/Cargo.toml /src/dtmt/lib/dtmt-shared/
COPY lib/nexusmods/Cargo.toml /src/dtmt/lib/nexusmods/
COPY lib/sdk/Cargo.toml /src/dtmt/lib/sdk/
COPY lib/serde_sjson/Cargo.toml /src/dtmt/lib/serde_sjson/
COPY lib/ansi-parser/Cargo.toml /src/dtmt/lib/ansi-parser/
# Crates with build scripts cannot be split that way, but they shouldn't change too often
COPY lib/luajit2-sys /src/dtmt/lib/luajit2-sys
COPY lib/oodle /src/dtmt/lib/oodle
# color-eyre needs to be copied too, as it's used by `oodle`
COPY lib/color-eyre /src/dtmt/lib/color-eyre
COPY --from=dtmt-ci-base-msvc /src/*.lib /src/dtmt/lib/oodle/
RUN cargo build --release --target x86_64-pc-windows-msvc --locked -Zbuild-std
RUN rm -r crates lib
COPY . /src/dtmt
COPY --from=dtmt-ci-base-msvc /src/*.lib /src/dtmt/lib/oodle/
RUN cargo build --release --target x86_64-pc-windows-msvc --frozen -Zbuild-std

@@ -1,138 +0,0 @@
# https://jake-shadle.github.io/xwin/
FROM debian:bullseye-slim as xwin
ARG XWIN_VERSION=0.5.2
ARG XWIN_PREFIX="xwin-$XWIN_VERSION-x86_64-unknown-linux-musl"
ADD https://github.com/Jake-Shadle/xwin/releases/download/$XWIN_VERSION/$XWIN_PREFIX.tar.gz /root/$XWIN_PREFIX.tar.gz
RUN set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
tar \
; \
# Install xwin to /usr/bin from the GitHub release. Note that you could also just use `cargo install xwin`.
tar -xzv -f /root/$XWIN_PREFIX.tar.gz -C /usr/bin --strip-components=1 $XWIN_PREFIX/xwin; \
apt-get remove -y --auto-remove; \
rm -rf \
/var/lib/apt/lists/* \
/root/$XWIN_PREFIX.tar.gz;
RUN set -eux; \
# Splat the CRT and SDK files to /xwin/crt and /xwin/sdk respectively
xwin \
--log-level debug \
--cache-dir /root/.xwin-cache \
--manifest-version 16 \
--accept-license \
splat \
--output /xwin; \
# Even though this build stage only exists temporarily to copy the
# final data out of, it still generates a cache entry on the Docker host.
# To keep that to a minimum, we delete the files we don't need.
rm -rf /root/.xwin-cache;
FROM rust:slim-bullseye as linux
RUN set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
build-essential \
cmake \
curl \
git \
gpg \
jq \
libatk1.0-dev \
libclang-13-dev \
libglib2.0-dev \
libgtk-3-dev \
libpango1.0-dev \
libssl-dev \
libzstd-dev \
pkg-config; \
apt-get remove -y --auto-remove; \
rm -rf /var/lib/apt/lists/*; \
rustup default nightly
WORKDIR /src/dtmt
COPY lib/oodle/*.so lib/oodle/*.a /src/
FROM linux as msvc
ARG LLVM_VERSION=18
ENV KEYRINGS /usr/local/share/keyrings
ADD https://apt.llvm.org/llvm-snapshot.gpg.key /root/llvm-snapshot.gpg.key
ADD https://dl.winehq.org/wine-builds/winehq.key /root/winehq.key
RUN set -eux; \
mkdir -p $KEYRINGS; \
# clang/lld/llvm
gpg --dearmor > $KEYRINGS/llvm.gpg < /root/llvm-snapshot.gpg.key; \
# wine
gpg --dearmor > $KEYRINGS/winehq.gpg < /root/winehq.key; \
echo "deb [signed-by=$KEYRINGS/llvm.gpg] http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.list; \
echo "deb [signed-by=$KEYRINGS/winehq.gpg] https://dl.winehq.org/wine-builds/debian/ bullseye main" > /etc/apt/sources.list.d/winehq.list; \
dpkg --add-architecture i386; \
apt-get update; \
apt-get install --no-install-recommends -y \
libclang-${LLVM_VERSION}-dev \
gcc-mingw-w64-x86-64 \
clang-${LLVM_VERSION} \
llvm-${LLVM_VERSION} \
lld-${LLVM_VERSION} \
winehq-staging \
; \
# ensure that clang/clang++ are callable directly
ln -s clang-${LLVM_VERSION} /usr/bin/clang && ln -s clang /usr/bin/clang++ && ln -s lld-${LLVM_VERSION} /usr/bin/ld.lld; \
# We also need to set up symlinks ourselves for the MSVC shims because they aren't in the Debian packages
ln -s clang-${LLVM_VERSION} /usr/bin/clang-cl && ln -s llvm-ar-${LLVM_VERSION} /usr/bin/llvm-lib && ln -s lld-link-${LLVM_VERSION} /usr/bin/lld-link; \
# Verify the symlinks are correct
clang++ -v; \
ld.lld -v; \
# Doesn't have an actual -v/--version flag, but it still exits with 0
llvm-lib -v; \
clang-cl -v; \
lld-link --version; \
# Use clang instead of gcc when compiling and linking binaries targeting the host (e.g. proc macros, build files)
update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100; \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 100; \
update-alternatives --install /usr/bin/ld ld /usr/bin/ld.lld 100; \
rustup target add x86_64-pc-windows-msvc; \
rustup component add rust-src; \
# Remove unneeded files to reduce image size
apt-get remove -y --auto-remove; \
rm -rf \
/var/lib/apt/lists/* \
/root/*.key;
COPY lib/oodle/*.lib /src
COPY --from=xwin /xwin /xwin
# Note that we're using the full target triple for each variable instead of the
# simple CC/CXX/AR shorthands to avoid issues when compiling any C/C++ code for
# build dependencies that need to compile and execute in the host environment
ENV CC_x86_64_pc_windows_msvc="clang-cl" \
CXX_x86_64_pc_windows_msvc="clang-cl" \
AR_x86_64_pc_windows_msvc="llvm-lib" \
# wine can be quite spammy with log messages and they're generally uninteresting
WINEDEBUG="-all" \
# Use wine to run test executables
CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUNNER="wine" \
# Note that we only disable unused-command-line-argument here since clang-cl
# doesn't implement all of the options supported by cl, but the ones it doesn't
# are _generally_ not interesting.
CL_FLAGS="-Wno-unused-command-line-argument -fuse-ld=lld-link /imsvc/xwin/crt/include /imsvc/xwin/sdk/include/ucrt /imsvc/xwin/sdk/include/um /imsvc/xwin/sdk/include/shared" \
# Let cargo know what linker to invoke if you haven't already specified it
# in a .cargo/config.toml file
CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER="lld-link" \
CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUSTFLAGS="-Lnative=/xwin/crt/lib/x86_64 -Lnative=/xwin/sdk/lib/um/x86_64 -Lnative=/xwin/sdk/lib/ucrt/x86_64"
# These are in a separate ENV block since docker/podman won't expand environment variables defined in the same block
ENV CFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS" \
CXXFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS"
# Run wineboot just to setup the default WINEPREFIX so we don't do it every
# container run
RUN wine wineboot --init
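Since `CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUNNER` is set to `wine` and the prefix is initialized above, test binaries cross-compiled in this image can be executed directly; a rough sketch (the exact flags besides the target are assumptions):

    # cargo invokes the configured runner (wine) for the produced test executables
    cargo test --target x86_64-pc-windows-msvc -Zbuild-std --locked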

@@ -1,230 +0,0 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config
---
# The base CI pipeline: builds the default branch and sets up the per-PR pipelines
resource_types:
- name: gitea-package
type: registry-image
source:
repository: registry.local:5000/gitea-package
- name: gitea-status
type: registry-image
source:
repository: registry.local:5000/gitea-status
- name: gitea-pr
type: registry-image
source:
repository: registry.local:5000/gitea-pr
resources:
- name: repo
type: git
source:
uri: http://forgejo:3000/bitsquid_dt/dtmt
branch: master
- name: repo-pr
type: gitea-pr
source:
access_token: ((gitea_api_key))
owner: ((owner))
repo: ((repo))
url: https://git.sclu1034.dev
- name: gitea-package
type: gitea-package
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
type: generic
name: dtmt
- name: status-build-msvc
type: gitea-status
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
repo: dtmt
context: build/msvc
description: "Build for the target platform: msvc"
- name: status-build-linux
type: gitea-status
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
repo: dtmt
context: build/linux
description: "Build for the target platform: linux"
jobs:
- name: set-pipelines
plan:
- in_parallel:
- get: repo-pr
trigger: true
- get: repo
- load_var: prs
file: repo-pr/prs.json
- across:
- var: pr
values: ((.:prs))
set_pipeline: dtmt-pr
file: repo/.ci/pipelines/pr.yml
vars:
pr: ((.:pr))
gitea_api_key: ((gitea_api_key))
instance_vars:
number: ((.:pr.number))
- name: build-msvc
on_success:
put: state-success
resource: status-build-msvc
no_get: true
params:
state: success
sha: ((.:git_sha))
on_failure:
put: state-failure
resource: status-build-msvc
no_get: true
params:
state: failure
sha: ((.:git_sha))
plan:
- get: repo
trigger: true
- load_var: git_sha
file: repo/.git/ref
- put: state-pending
resource: status-build-msvc
no_get: true
params:
state: pending
sha: ((.:git_sha))
- task: build
file: repo/.ci/tasks/build.yml
vars:
pr: ""
target: msvc
- load_var: version_number
reveal: true
file: artifact/version
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: ((.:version_number))
fail_fast: true
override: true
globs:
- artifact/*.exe
- artifact/*.exe.sha256
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: master
fail_fast: true
override: true
globs:
- artifact/*.exe
- artifact/*.exe.sha256
- name: build-linux
on_success:
put: state-success
resource: status-build-linux
no_get: true
params:
state: success
sha: ((.:git_sha))
on_failure:
put: state-failure
resource: status-build-linux
no_get: true
params:
state: failure
sha: ((.:git_sha))
plan:
- get: repo
trigger: true
- load_var: git_sha
file: repo/.git/ref
- put: state-pending
resource: status-build-linux
no_get: true
params:
state: pending
sha: ((.:git_sha))
- task: build
file: repo/.ci/tasks/build.yml
vars:
pr: ""
target: linux
gitea_url: http://forgejo:3000
gitea_api_key: ((gitea_api_key))
- load_var: version_number
reveal: true
file: artifact/version
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: ((.:version_number))
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/dtmm.sha256
- artifact/dtmt.sha256
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: master
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/dtmm.sha256
- artifact/dtmt.sha256

@@ -1,28 +0,0 @@
---
# The actual CI pipeline that is run per branch
resources:
- name: repo
type: git
source:
uri: https://git.sclu1034.dev/bitsquid_dt/dtmt
branch: ((branch))
jobs:
- name: build-msvc
plan:
- get: repo
trigger: true
- task: build
file: repo/.ci/tasks/build.yml
vars:
target: msvc
- name: build-linux
plan:
- get: repo
trigger: true
- task: build
file: repo/.ci/tasks/build.yml
vars:
target: linux

@@ -1,217 +0,0 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config
---
# The actual CI pipeline that is run per pull request
resource_types:
- name: gitea-package
type: registry-image
source:
repository: registry.local:5000/gitea-package
- name: gitea-status
type: registry-image
source:
repository: registry.local:5000/gitea-status
resources:
- name: repo
type: git
source:
uri: http://forgejo:3000/bitsquid_dt/dtmt
branch: ((pr.head.ref))
- name: gitea-package
type: gitea-package
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
type: generic
name: dtmt
- name: pr-status-lint-clippy
type: gitea-status
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
repo: dtmt
context: lint/clippy
description: Checking for common mistakes and opportunities for code improvement
- name: pr-status-build-msvc
type: gitea-status
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
repo: dtmt
context: build/msvc
description: "Build for the target platform: msvc"
- name: pr-status-build-linux
type: gitea-status
source:
access_token: ((gitea_api_key))
url: http://forgejo:3000
owner: bitsquid_dt
repo: dtmt
context: build/linux
description: "Build for the target platform: linux"
jobs:
- name: clippy
on_success:
put: state-success
resource: pr-status-lint-clippy
no_get: true
params:
state: success
sha: ((.:git_sha))
on_failure:
put: state-failure
resource: pr-status-lint-clippy
no_get: true
params:
state: failure
sha: ((.:git_sha))
plan:
- get: repo
trigger: true
- load_var: git_sha
file: repo/.git/ref
- put: state-pending
resource: pr-status-lint-clippy
no_get: true
params:
state: pending
sha: ((.:git_sha))
- task: check
file: repo/.ci/tasks/clippy.yml
vars:
gitea_api_key: ((gitea_api_key))
- name: build-msvc
on_success:
put: state-success
resource: pr-status-build-msvc
no_get: true
params:
state: success
sha: ((.:git_sha))
on_failure:
put: state-failure
resource: pr-status-build-msvc
no_get: true
params:
state: failure
sha: ((.:git_sha))
plan:
- get: repo
trigger: true
- load_var: git_sha
file: repo/.git/ref
- put: state-pending
resource: pr-status-build-msvc
no_get: true
params:
state: pending
sha: ((.:git_sha))
- task: build
file: repo/.ci/tasks/build.yml
vars:
target: msvc
pr: ((pr))
gitea_url: http://forgejo:3000
gitea_api_key: ((gitea_api_key))
- load_var: version_number
reveal: true
file: artifact/version
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: ((.:version_number))
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/*.exe
- artifact/*.sha256
- name: build-linux
on_success:
put: state-success
resource: pr-status-build-linux
no_get: true
params:
state: success
sha: ((.:git_sha))
on_failure:
put: state-failure
resource: pr-status-build-linux
no_get: true
params:
state: failure
sha: ((.:git_sha))
plan:
- get: repo
trigger: true
- load_var: git_sha
file: repo/.git/ref
- put: state-pending
resource: pr-status-build-linux
no_get: true
params:
state: pending
sha: ((.:git_sha))
- task: build
file: repo/.ci/tasks/build.yml
vars:
target: linux
pr: ((pr))
gitea_url: http://forgejo:3000
gitea_api_key: ((gitea_api_key))
- load_var: version_number
reveal: true
file: artifact/version
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: ((.:version_number))
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/*.exe
- artifact/*.sha256

@@ -1,62 +0,0 @@
#!/bin/bash
set -eu
if [ -n "$OUTPUT" ]; then
OUTPUT="$PWD/$OUTPUT"
else
OUTPUT=$(mktemp -d)
fi
title() {
printf "\033[1m%s\033[0m\n" "$1"
}
install_artifact() {
install -v -t "$OUTPUT/" "$1"
sha256sum "$1" | cut -d' ' -f1 > "$OUTPUT/$(basename "$1").sha256"
}
cd "repo"
PR=${PR:-}
if [ -n "$PR" ]; then
title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$(cat .git/ref || echo "HEAD")" 2>/dev/null || echo 'manual')"
elif [ -f ".git/branch" ]; then
ref="$(cat .git/branch)-$(git rev-parse --short HEAD)"
else
ref=$(git rev-parse --short "$(cat .git/ref || echo "HEAD")")
fi
title "Version: '$ref'"
echo "$ref" > "$OUTPUT/version"
case "$TARGET" in
msvc)
cp /src/*.lib ./lib/oodle/
title "Building project for target $TARGET"
cargo build --color always --locked --release --target x86_64-pc-windows-msvc -Zbuild-std
title "Install artifacts"
install_artifact target/x86_64-pc-windows-msvc/release/dtmt.exe
install_artifact target/x86_64-pc-windows-msvc/release/dtmm.exe
;;
linux)
cp /src/*.a ./lib/oodle/
title "Building project for target $TARGET"
cargo build --color always --locked --profile release-lto
title "Installing artifacts"
install_artifact target/release-lto/dtmt
install_artifact target/release-lto/dtmm
;;
*)
echo -e "\033[31;1mEnv var 'TARGET' must either be 'msvc' or 'linux'. Got '$TARGET'.\033[0m" >&2
exit 1
esac
title "Done"

@@ -1,29 +0,0 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig
---
platform: linux
image_resource:
name: dtmt-ci-base-((target))
type: registry-image
source:
repository: registry.local:5000/dtmt-ci-base-((target))
tag: latest
inputs:
- name: repo
outputs:
- name: artifact
caches:
- path: repo/target
- path: /usr/local/cargo/registry
params:
CI: "true"
TARGET: ((target))
PR: ((pr))
OUTPUT: artifact
run:
path: repo/.ci/tasks/build.sh

@@ -1,15 +0,0 @@
#!/bin/sh
set -eu
title() {
printf "\033[1m%s\033[0m\n" "$1"
}
title "Install clippy"
rustup component add clippy
title "Run clippy"
cargo clippy --color always --no-deps
title "Done"

@@ -1,26 +0,0 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig
---
platform: linux
image_resource:
name: dtmt-ci-base-linux
type: registry-image
source:
repository: registry.local:5000/dtmt-ci-base-linux
tag: latest
inputs:
- name: repo
caches:
- path: repo/target
- path: /usr/local/cargo/registry
params:
CI: "true"
GITEA_API_KEY: ((gitea_api_key))
run:
path: .ci/tasks/clippy.sh
dir: repo

@@ -1,51 +0,0 @@
#!/bin/sh
set -ux
script="$1"
context="$2"
desc="$3"
if [ -z "$script" ]; then
echo "No script to run" >&2
exit 1
fi
if [ -z "$context" ]; then
echo "Missing 'context' for CI status report" >&2
exit 1
fi
if [ -z "$REF" ]; then
echo "Environment variable 'REF' must be set to a valid Git ref." >&2
exit 1
fi
if [ -z "$GITEA_API_KEY" ]; then
echo "Environment variable 'GITEA_API_KEY' must be set." >&2
exit 1
fi
notify() {
curl -X 'POST' \
-H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H "Authorization: token $GITEA_API_KEY" \
"https://git.sclu1034.dev/api/v1/repos/bitsquid_dt/dtmt/statuses/$REF" \
--data @- <<EOF
{
"context": "$2",
"description": "$3",
"state": "$1"
}
EOF
}
notify 'pending' "$context" "$desc"
if sh "$script"; then
notify 'success' "$context" "$desc"
else
notify 'failure' "$context" "$desc"
exit 1
fi
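For reference, the wrapper takes the script to run plus the status context and description as positional arguments, and reads the commit and token from the environment; a hypothetical invocation (the wrapper's file name is not shown here and is assumed):

    # report 'pending', run the script, then report 'success' or 'failure'
    REF="$(cat repo/.git/ref)" GITEA_API_KEY="$GITEA_API_KEY" \
        sh notify.sh repo/.ci/tasks/clippy.sh lint/clippy 'Checking for common mistakes and opportunities for code improvement'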

@@ -1,9 +0,0 @@
target/
docs/
data/
.git/
README.adoc
CHANGELOG.adoc
LICENSE
dictionary.csv
Justfile

@@ -1,5 +1,3 @@
/target
*.a
*.so
*.dll
*.lib
liboo2corelinux64.so
oo2core_8_win64.dll

.gitattributes (6 changed lines)
@@ -1,6 +0,0 @@
* text=auto
*.xcf filter=lfs diff=lfs merge=lfs -text
*.ico filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text

.gitignore (1 changed line)
@@ -1,7 +1,6 @@
/target
/data
.envrc
*.a
*.so
*.dll
*.lib

.gitmodules (19 changed lines)
@@ -1,14 +1,9 @@
[submodule "lib/serde_sjson"]
path = lib/serde_sjson
url = https://git.sclu1034.dev/lucas/serde_sjson.git
[submodule "lib/luajit2-sys"]
path = lib/luajit2-sys
url = https://github.com/sclu1034/luajit2-sys.git
[submodule "lib/color-eyre"]
path = lib/color-eyre
url = https://github.com/sclu1034/color-eyre.git
branch = "fork"
[submodule "lib/ansi-parser"]
path = lib/ansi-parser
url = https://gitlab.com/lschwiderski/ansi-parser.git
branch = "issue/outdated-nom"
url = git@git.sclu1034.dev:lucas/serde_sjson.git
[submodule "lib/steamlocate-rs"]
path = lib/steamlocate-rs
url = git@github.com:sclu1034/steamlocate-rs.git
[submodule "crates/dtmm/assets/icons"]
path = crates/dtmm/assets/icons
url = https://github.com/tabler/tabler-icons

@@ -1,15 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
":combinePatchMinorReleases",
":enableVulnerabilityAlerts",
":rebaseStalePrs"
],
"prConcurrentLimit": 10,
"branchPrefix": "renovate/",
"baseBranches": [
"$default",
"/^release\\/.*/"
]
}

@@ -13,21 +13,11 @@
- dtmm: check mod order before deployment
- dtmt: add mod dependencies to config
- dtmm: match mods to Nexus and check for updates
- dtmt: add utility to migrate mod projects
- dtmm: reset dtkit-patch installations
- sdk: implement decompiling Lua files
- dtmm: fetch cover image for Nexus mods
- dtmm: fetch file version for Nexus mods
- dtmm: handle `nxm://` URIs via IPC and import the corresponding mod
- dtmm: Add button to open mod on nexusmods.com
- dtmt: Implement commands to list bundles and contents
- dtmt: Implement command to search for files
=== Fixed
- all: force unix path separators for engine values
- dtmt: fix extracting files with non-flattened file names
- oodle: fix static linking
== 2023-03-01

Cargo.lock (generated, 2435 changed lines): file diff suppressed because it is too large.

@@ -7,73 +7,9 @@ members = [
"lib/oodle",
"lib/sdk",
"lib/serde_sjson",
"lib/luajit2-sys",
"lib/color-eyre",
"lib/steamlocate-rs",
]
exclude = ["lib/color-eyre"]
[workspace.dependencies]
ansi-parser = "0.9.1"
ansi_term = "0.12.1"
async-recursion = "1.0.5"
bincode = "1.3.3"
bitflags = "2.5.0"
byteorder = "1.4.3"
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
color-eyre = { path = "lib/color-eyre" }
colors-transform = "0.2.11"
confy = "0.6.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
druid-widget-nursery = "0.1"
dtmt-shared = { path = "lib/dtmt-shared" }
fastrand = "2.1.0"
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
interprocess = "2.1.0"
lazy_static = "1.4.0"
luajit2-sys = { path = "lib/luajit2-sys" }
minijinja = { version = "2.0.1", default-features = false }
nanorand = "0.7.0"
nexusmods = { path = "lib/nexusmods" }
notify = "8.0.0"
oodle = { path = "lib/oodle" }
open = "5.0.1"
path-clean = "1.0.1"
path-slash = "0.2.1"
pin-project-lite = "0.2.9"
promptly = "0.3.1"
sdk = { path = "lib/sdk" }
serde = { version = "1.0.152", features = ["derive", "rc"] }
serde_sjson = { path = "lib/serde_sjson" }
steamlocate = "2.0.0-beta.2"
strip-ansi-escapes = "0.2.0"
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
usvg = "0.25.0"
zip = { version = "2.1.3", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
[profile.dev.package.backtrace]
opt-level = 3
[profile.release]
strip = "debuginfo"
# The MSVC toolchain cannot handle LTO properly. Some symbol related to
# panic unwind would always be missing.
# So we use a separate profile for when we can compile with LTO.
[profile.release-lto]
inherits = "release"
lto = true
[profile.perf]
inherits = "release"
strip = false
lto = true
debug = "line-tables-only"
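Because LTO only works outside the MSVC toolchain, the Linux CI builds opt into the extra profile explicitly (the Justfile and build script elsewhere in this diff do the same); as a sketch:

    # LTO-enabled build, only used for the non-MSVC targets
    cargo build --profile release-lto --locked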

@@ -1,59 +0,0 @@
set positional-arguments
fly_target := "main"
build-perf-dtmt:
cargo build --profile perf --bin dtmt
perf-dtmt *args='': build-perf-dtmt
perf record --call-graph dwarf ./target/perf/dtmt "$@"
ci-build: ci-build-msvc ci-build-linux
ci-build-msvc:
docker run --rm -ti --user $(id -u) -v ./:/src/dtmt dtmt-ci-base-msvc cargo --color always build --release --target x86_64-pc-windows-msvc --locked -Zbuild-std
ci-build-linux:
docker run --rm -ti --user $(id -u) -v ./:/src/dtmt dtmt-ci-base-linux cargo --color always build --profile release-lto --locked
build-image: build-image-msvc build-image-linux
build-image-msvc:
docker build -f .ci/Dockerfile.msvc .
build-image-linux:
docker build -f .ci/Dockerfile.linux .
ci-image:
# The MSVC image depends on the Linux image. So by building that first,
# we actually build both, and cache them, so that "building" the
# Linux image afterwards merely needs to pull the cache.
docker build --target msvc -t dtmt-ci-base-msvc -f .ci/image/Dockerfile .
docker build --target linux -t dtmt-ci-base-linux -f .ci/image/Dockerfile .
docker tag dtmt-ci-base-msvc registry.sclu1034.dev/dtmt-ci-base-msvc
docker tag dtmt-ci-base-linux registry.sclu1034.dev/dtmt-ci-base-linux
docker push registry.sclu1034.dev/dtmt-ci-base-msvc
docker push registry.sclu1034.dev/dtmt-ci-base-linux
set-base-pipeline:
fly -t {{fly_target}} set-pipeline \
--pipeline dtmt \
--config .ci/pipelines/base.yml \
-v gitea_api_key=${GITEA_API_KEY} \
-v owner=bitsquid_dt \
-v repo=dtmt
set-pr-pipeline pr:
curl \
-H "Authorization: ${GITEA_API_KEY}" \
-H 'Accept: application/json' \
'https://git.sclu1034.dev/api/v1/repos/bitsquid_dt/dtmt/pulls/{{pr}}' \
| yq -y '.' - > 'pr-{{pr}}.yaml'
fly -t main set-pipeline \
--pipeline dtmt-pr \
--config .ci/pipelines/pr.yml \
-v gitea_api_key=${GITEA_API_KEY} \
-i number={{pr}} \
-y branch="$(yq -y '.head.ref' 'pr-{{pr}}.yaml')" \
-y pr="$(cat 'pr-{{pr}}.yaml')"
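As a usage sketch, assuming `fly` is logged in to the target named by `fly_target` and `GITEA_API_KEY` is exported (the PR number 42 is just an example):

    just set-base-pipeline
    just set-pr-pipeline 42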

@@ -2,48 +2,32 @@
name = "dtmm"
version = "0.1.0"
edition = "2021"
authors = ["Lucas Schwiderski <lucas@lschwiderski.de>"]
description = "DTMM is a GUI application to install and manage mods for the game."
documentation = "https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki"
repository = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
homepage = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
license-file = "LICENSE"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ansi-parser = { workspace = true }
async-recursion = { workspace = true }
bincode = { workspace = true }
bitflags = { workspace = true }
clap = { workspace = true }
color-eyre = { workspace = true }
colors-transform = { workspace = true }
confy = { workspace = true }
druid = { workspace = true }
druid-widget-nursery = { workspace = true }
dtmt-shared = { workspace = true }
futures = { workspace = true }
interprocess = { workspace = true }
lazy_static = { workspace = true }
luajit2-sys = { workspace = true }
minijinja = { workspace = true }
nexusmods = { workspace = true }
oodle = { workspace = true }
open = { workspace = true }
path-slash = { workspace = true }
sdk = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
strip-ansi-escapes = { workspace = true }
time = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true }
usvg = { workspace = true }
zip = { workspace = true }
[build-dependencies]
winres = "0.1.12"
bitflags = "1.3.2"
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
color-eyre = "0.6.2"
confy = "0.5.1"
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
futures = "0.3.25"
oodle = { path = "../../lib/oodle", version = "*" }
sdk = { path = "../../lib/sdk", version = "*" }
nexusmods = { path = "../../lib/nexusmods", version = "*" }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
serde = { version = "1.0.152", features = ["derive", "rc"] }
tokio = { version = "1.23.0", features = ["rt", "fs", "tracing", "sync"] }
tracing = "0.1.37"
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
zip = "0.6.4"
tokio-stream = { version = "0.1.12", features = ["fs"] }
path-slash = "0.2.1"
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset"] }
strip-ansi-escapes = "0.1.1"
lazy_static = "1.4.0"
colors-transform = "0.2.11"
usvg = "0.25.0"
druid-widget-nursery = "0.1"

Binary files changed (stored with Git LFS, contents not shown):
crates/dtmm/assets/DTMM_logo.xcf
crates/dtmm/assets/DTMM_logo_256.png
crates/dtmm/assets/DTMM_logo_48.png
crates/dtmm/assets/DTMM_logo_64.png
crates/dtmm/assets/DTMM_logo_border.png
crates/dtmm/assets/DTMM_logo_small.png
(plus one further binary file whose name is not shown)

@@ -1,11 +0,0 @@
[Desktop Entry]
Name=DTMM
GenericName=Mod Manager
Comment=A graphical mod manager for Warhammer 40,000: Darktide
Exec=dtmm %u
Type=Application
Keywords=Mod;
StartupNotify=true
Categories=Utility;
MimeType=x-scheme-handler/nxm;
Icon=dtmm
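For the `nxm://` links advertised via `MimeType` to reach DTMM, the desktop entry has to be registered as the scheme handler; one way to do that manually, assuming the entry is installed as `dtmm.desktop`:

    # route nxm:// URIs to the DTMM desktop entry
    xdg-mime default dtmm.desktop x-scheme-handler/nxm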

Binary file changed (stored with Git LFS, contents not shown): crates/dtmm/assets/dtmm.ico

@@ -0,0 +1 @@
Subproject commit 74838ded9980b6f134bb6f7edcf916cca4a2d97f

@@ -1,70 +0,0 @@
local StateGame = require("scripts/game_states/state_game")
local StateSplash = require("scripts/game_states/game/state_splash")
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
local function hook(obj, fn_name, cb)
local orig = obj[fn_name]
obj[fn_name] = function(...)
return cb(orig, ...)
end
end
function init(mod_data, boot_gui)
local ModLoader = require("scripts/mods/mod_loader")
local mod_loader = ModLoader:new(mod_data, boot_gui)
-- The mod loader needs to remain active during game play, to
-- enable reloads
hook(StateGame, "update", function(func, dt, ...)
mod_loader:update(dt)
return func(dt, ...)
end)
-- Skip splash view
hook(StateSplash, "on_enter", function(func, self, ...)
local result = func(self, ...)
self._should_skip = true
self._continue = true
return result
end)
-- Trigger state change events
hook(GameStateMachine, "_change_state", function(func, self, ...)
local old_state = self._state
local old_state_name = old_state and self:current_state_name()
if old_state_name then
mod_loader:on_game_state_changed("exit", old_state_name, old_state)
end
local result = func(self, ...)
local new_state = self._state
local new_state_name = new_state and self:current_state_name()
if new_state_name then
mod_loader:on_game_state_changed("enter", new_state_name, new_state)
end
return result
end)
-- Trigger ending state change event
hook(GameStateMachine, "destroy", function(func, self, ...)
local old_state = self._state
local old_state_name = old_state and self:current_state_name()
if old_state_name then
mod_loader:on_game_state_changed("exit", old_state_name)
end
return func(self, ...)
end)
return mod_loader
end
return init

@@ -1,28 +0,0 @@
return {
{% for mod in mods %}
{
id = "{{ mod.id }}",
name = "{{ mod.name }}",
bundled = {{ mod.bundled }},
version = {{ mod.version }},
packages = {
{% for pkg in mod.packages %}
"{{ pkg }}",
{% endfor %}
},
run = function()
{% if mod.data is none %}
return dofile("{{ mod.init }}")
{% else %}
new_mod("{{ mod.id }}", {
mod_script = "{{ mod.init }}",
mod_data = "{{ mod.data }}",
{% if not mod.localization is none %}
mod_localization = "{{ mod.localization }}",
{% endif %}
})
{% endif %}
end,
},
{% endfor %}
}

@@ -1,412 +0,0 @@
-- Copyright on this file is owned by Fatshark.
-- It is extracted, used and modified with permission only for
-- the purpose of loading mods within Warhammer 40,000: Darktide.
local ModLoader = class("ModLoader")
local table_unpack = table.unpack or unpack
local table_pack = table.pack or pack
local ScriptGui = require("scripts/foundation/utilities/script_gui")
local FONT_MATERIAL = "content/ui/fonts/arial"
local LOG_LEVELS = {
spew = 4,
info = 3,
warning = 2,
error = 1
}
local DEFAULT_SETTINGS = {
log_level = LOG_LEVELS.error,
developer_mode = false
}
local Keyboard = Keyboard
local BUTTON_INDEX_R = Keyboard.button_index("r")
local BUTTON_INDEX_LEFT_SHIFT = Keyboard.button_index("left shift")
local BUTTON_INDEX_LEFT_CTRL = Keyboard.button_index("left ctrl")
ModLoader.init = function(self, mod_data, boot_gui)
table.dump(mod_data, nil, 5, function(...) Log.info("ModLoader", ...) end)
self._mod_data = mod_data
self._gui = boot_gui
self._settings = Application.user_setting("mod_settings") or DEFAULT_SETTINGS
self._mods = {}
self._num_mods = nil
self._chat_print_buffer = {}
self._reload_data = {}
self._ui_time = 0
self._state = "scanning"
end
ModLoader.developer_mode_enabled = function(self)
return self._settings.developer_mode
end
ModLoader.set_developer_mode = function(self, enabled)
self._settings.developer_mode = enabled
end
ModLoader._draw_state_to_gui = function(self, gui, dt)
local state = self._state
local t = self._ui_time + dt
self._ui_time = t
local status_str = "Loading mods"
if state == "scanning" then
status_str = "Scanning for mods"
elseif state == "loading" or state == "initializing" then
local mod = self._mods[self._mod_load_index]
status_str = string.format("Loading mod %q", mod.name)
end
local msg = status_str .. string.rep(".", (2 * t) % 4)
ScriptGui.text(gui, msg, FONT_MATERIAL, 25, Vector3(20, 30, 1), Color.white())
end
ModLoader.remove_gui = function(self)
self._gui = nil
end
ModLoader.mod_data = function(self, id)
-- Since this primarily exists for DMF,
-- we can optimize the search for its use case of looking for the
-- mod currently being loaded
local mod_data = self._mods[self._mod_load_index]
if mod_data.id ~= id then
mod_data = nil
for _, v in ipairs(self._mods) do
if v.id == id then
mod_data = v
end
end
end
return mod_data
end
ModLoader._check_reload = function()
return Keyboard.pressed(BUTTON_INDEX_R) and
Keyboard.button(BUTTON_INDEX_LEFT_SHIFT) +
Keyboard.button(BUTTON_INDEX_LEFT_CTRL) == 2
end
ModLoader.update = function(self, dt)
local chat_print_buffer = self._chat_print_buffer
local num_delayed_prints = #chat_print_buffer
if num_delayed_prints > 0 and Managers.chat then
for i = 1, num_delayed_prints, 1 do
-- TODO: Use new chat system
-- Managers.chat:add_local_system_message(1, chat_print_buffer[i], true)
chat_print_buffer[i] = nil
end
end
local old_state = self._state
if self._settings.developer_mode and self:_check_reload() then
self._reload_requested = true
end
if self._reload_requested and old_state == "done" then
self:_reload_mods()
end
if old_state == "done" then
self:_run_callbacks("update", dt)
elseif old_state == "scanning" then
Log.info("ModLoader", "Scanning for mods")
self:_build_mod_table()
self._state = self:_load_mod(1)
self._ui_time = 0
elseif old_state == "loading" then
local handle = self._loading_resource_handle
if ResourcePackage.has_loaded(handle) then
ResourcePackage.flush(handle)
local mod = self._mods[self._mod_load_index]
local next_index = mod.package_index + 1
local mod_data = mod.data
if next_index <= #mod_data.packages then
self:_load_package(mod, next_index)
else
self._state = "initializing"
end
end
elseif old_state == "initializing" then
local mod = self._mods[self._mod_load_index]
local mod_data = mod.data
Log.info("ModLoader", "Initializing mod %q", mod.name)
mod.state = "running"
local ok, object = xpcall(mod_data.run, function(err)
if type(err) == "string" then
return err .. "\n" .. Script.callstack()
else
return err
end
end)
if not ok then
if object.error then
object = string.format(
"%s\n<<Lua Stack>>\n%s\n<</Lua Stack>>\n<<Lua Locals>>\n%s\n<</Lua Locals>>\n<<Lua Self>>\n%s\n<</Lua Self>>",
object.error, object.traceback, object.locals, object.self)
end
Log.error("ModLoader", "Failed 'run' for %q: %s", mod.name, object)
end
mod.object = object or {}
self:_run_callback(mod, "init", self._reload_data[mod.id])
Log.info("ModLoader", "Finished loading %q", mod.name)
self._state = self:_load_mod(self._mod_load_index + 1)
end
local gui = self._gui
if gui then
self:_draw_state_to_gui(gui, dt)
end
if old_state ~= self._state then
Log.info("ModLoader", "%s -> %s", old_state, self._state)
end
end
ModLoader.all_mods_loaded = function(self)
return self._state == "done"
end
ModLoader.destroy = function(self)
self:_run_callbacks("on_destroy")
self:unload_all_mods()
end
ModLoader._run_callbacks = function(self, callback_name, ...)
for i = 1, self._num_mods, 1 do
local mod = self._mods[i]
if mod and not mod.callbacks_disabled then
self:_run_callback(mod, callback_name, ...)
end
end
end
ModLoader._run_callback = function(self, mod, callback_name, ...)
local object = mod.object
local cb = object[callback_name]
if not cb then
return
end
local args = table_pack(...)
local success, val = xpcall(
function() return cb(object, table_unpack(args)) end,
function(err)
if type(err) == "string" then
return err .. "\n" .. Script.callstack()
else
return err
end
end
)
if success then
return val
else
Log.error("ModLoader", "Failed to run callback %q for mod %q with id %q. Disabling callbacks until reload.",
callback_name, mod.name, mod.id)
if val.error then
Log.error("ModLoader",
"Error: %s\n<<Lua Stack>>\n%s<</Lua Stack>>\n<<Lua Locals>>\n%s<</Lua Locals>>\n<<Lua Self>>\n%s<</Lua Self>>",
val.error, val.traceback, val.locals, val.self)
else
Log.error("ModLoader", "Error: %s", val or "[unknown error]")
end
mod.callbacks_disabled = true
end
end
ModLoader._start_scan = function(self)
Log.info("ModLoader", "Starting mod scan")
self._state = "scanning"
end
ModLoader._build_mod_table = function(self)
fassert(table.is_empty(self._mods), "Trying to add mods to non-empty mod table")
for i, mod_data in ipairs(self._mod_data) do
Log.info(
"ModLoader",
"mods[%d] = id=%q | name=%q | version=%q | bundled=%s",
i,
mod_data.id,
mod_data.name,
mod_data.version,
tostring(mod_data.bundled)
)
self._mods[i] = {
id = mod_data.id,
state = "not_loaded",
callbacks_disabled = false,
name = mod_data.name,
loaded_packages = {},
packages = mod_data.packages,
data = mod_data,
bundled = mod_data.bundled or false,
}
end
self._num_mods = #self._mods
Log.info("ModLoader", "Found %i mods", self._num_mods)
end
ModLoader._load_mod = function(self, index)
self._ui_time = 0
local mods = self._mods
local mod = mods[index]
if not mod then
table.clear(self._reload_data)
return "done"
end
Log.info("ModLoader", "Loading mod %q", mod.id)
mod.state = "loading"
Crashify.print_property(string.format("Mod:%s", mod.name), true)
self._mod_load_index = index
if mod.bundled and mod.packages[1] then
self:_load_package(mod, 1)
return "loading"
else
return "initializing"
end
end
ModLoader._load_package = function(self, mod, index)
mod.package_index = index
local package_name = mod.packages[index]
if not package_name then
return
end
Log.info("ModLoader", "Loading package %q", package_name)
local resource_handle = Application.resource_package(package_name)
self._loading_resource_handle = resource_handle
ResourcePackage.load(resource_handle)
table.insert(mod.loaded_packages, resource_handle)
end
ModLoader.unload_all_mods = function(self)
if self._state ~= "done" then
Log.error("ModLoader", "Mods can't be unloaded, mod state is not \"done\". current: %q", self._state)
return
end
Log.info("ModLoader", "Unload all mod packages")
for i = self._num_mods, 1, -1 do
local mod = self._mods[i]
if mod then
self:unload_mod(i)
end
self._mods[i] = nil
end
self._num_mods = nil
self._state = "unloaded"
end
ModLoader.unload_mod = function(self, index)
local mod = self._mods[index]
if mod then
Log.info("ModLoader", "Unloading %q.", mod.name)
for _, handle in ipairs(mod.loaded_packages) do
ResourcePackage.unload(handle)
Application.release_resource_package(handle)
end
mod.state = "not_loaded"
else
Log.error("ModLoader", "Mod index %i can't be unloaded, has not been loaded", index)
end
end
ModLoader._reload_mods = function(self)
Log.info("ModLoader", "reloading mods")
for i = 1, self._num_mods, 1 do
local mod = self._mods[i]
if mod and mod.state == "running" then
Log.info("ModLoader", "reloading %s", mod.name)
self._reload_data[mod.id] = self:_run_callback(mod, "on_reload")
else
Log.info("ModLoader", "not reloading mod, state: %s", mod.state)
end
end
self:unload_all_mods()
self:_start_scan()
self._reload_requested = false
end
ModLoader.on_game_state_changed = function(self, status, state_name, state_object)
if self._state == "done" then
self:_run_callbacks("on_game_state_changed", status, state_name, state_object)
else
Log.warning("ModLoader", "Ignored on_game_state_changed call due to being in state %q", self._state)
end
end
ModLoader.print = function(self, level, str, ...)
local f = Log[level]
if f then
f("ModLoader", str, ...)
else
local message = string.format("[ModLoader][" .. level .. "] " .. str, ...)
local log_level = LOG_LEVELS[level] or 99
if log_level <= 2 then
print(message)
end
end
end
return ModLoader

@@ -0,0 +1,192 @@
local _G = _G
local rawget = rawget
local rawset = rawset
local log = function(category, format, ...)
local Log = rawget(_G, "Log")
if Log then
Log.info(category, format, ...)
else
print(string.format("[%s] %s", category or "", string.format(format or "", ...)))
end
end
-- Patch `GameStateMachine.init` to add our own state for loading mods.
-- In the future, Fatshark might provide us with a dedicated way to do this.
local function patch_mod_loading_state()
local StateBootSubStateBase = require("scripts/game_states/boot/state_boot_sub_state_base")
-- A necessary override.
-- The original does not proxy `dt` to `_state_update`, but we need that.
StateBootSubStateBase.update = function (self, dt)
local done, error = self:_state_update(dt)
local params = self._params
if error then
return StateError, { error }
elseif done then
local next_index = params.sub_state_index + 1
params.sub_state_index = next_index
local next_state_data = params.states[next_index]
if next_state_data then
return next_state_data[1], self._params
else
self._parent:sub_states_done()
end
end
end
local StateBootLoadMods = class("StateBootLoadMods", "StateBootSubStateBase")
StateBootLoadMods.on_enter = function (self, parent, params)
log("StateBootLoadMods", "Entered")
StateBootLoadMods.super.on_enter(self, parent, params)
local state_params = self:_state_params()
local package_manager = state_params.package_manager
self._state = "load_package"
self._package_manager = package_manager
self._package_handles = {
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadMods", nil),
["packages/dml"] = package_manager:load("packages/dml", "StateBootLoadMods", nil),
}
end
StateBootLoadMods._state_update = function (self, dt)
local state = self._state
local package_manager = self._package_manager
if state == "load_package" and package_manager:update() then
log("StateBootLoadMods", "Packages loaded, loading mods")
self._state = "load_mods"
local mod_loader = require("scripts/mods/dml/init")
self._mod_loader = mod_loader
local mod_data = require("scripts/mods/mod_data")
mod_loader:init(mod_data, self._parent:gui())
elseif state == "load_mods" and self._mod_loader:update(dt) then
log("StateBootLoadMods", "Mods loaded, exiting")
return true, false
end
return false, false
end
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
local patched = false
local GameStateMachine_init = GameStateMachine.init
GameStateMachine.init = function(self, parent, start_state, params, ...)
if not patched then
log("mod_main", "Injecting mod loading state")
patched = true
-- Hardcoded position after `StateRequireScripts`.
-- We do want to wait until then, so that most of the game's core
-- systems are at least loaded and can be hooked, even if they aren't
-- running, yet.
local pos = 4
table.insert(params.states, pos, {
StateBootLoadMods,
{
package_manager = params.package_manager,
},
})
end
GameStateMachine_init(self, parent, start_state, params, ...)
end
log("mod_main", "Mod patching complete")
end
log("mod_main", "Initializing mods...")
local require_store = {}
Mods = {
-- Keep a backup of certain system libraries before
-- Fatshark's code scrubs them.
-- The loader can then decide to pass them on to mods, or ignore them
lua = setmetatable({}, {
io = io,
debug = debug,
ffi = ffi,
os = os,
load = load,
loadfile = loadfile,
loadstring = loadstring,
}),
require_store = require_store
}
local can_insert = function(filepath, new_result)
local store = require_store[filepath]
if not store or #store then
return true
end
if store[#store] ~= new_result then
return true
end
end
local original_require = require
require = function(filepath, ...)
local result = original_require(filepath, ...)
if result and type(result) == "table" then
if can_insert(filepath, result) then
require_store[filepath] = require_store[filepath] or {}
local store = require_store[filepath]
table.insert(store, result)
if Mods.hook then
Mods.hook.enable_by_file(filepath, #store)
end
end
end
return result
end
require("scripts/boot_init")
require("scripts/foundation/utilities/class")
-- The `__index` metamethod maps a proper identifier `CLASS.MyClassName` to the
-- stringified version of the key: `"MyClassName"`.
-- This allows using LuaCheck for the stringified class names in hook parameters.
_G.CLASS = setmetatable({}, {
__index = function(_, key)
return key
end
})
local original_class = class
class = function(class_name, super_name, ...)
local result = original_class(class_name, super_name, ...)
if not rawget(_G, class_name) then
rawset(_G, class_name, result)
end
if not rawget(_G.CLASS, class_name) then
rawset(_G.CLASS, class_name, result)
end
return result
end
require("scripts/main")
log("mod_main", "'scripts/main' loaded")
-- Override `init` to run our injection
function init()
patch_mod_loading_state()
-- As requested by Fatshark
local StateRequireScripts = require("scripts/game_states/boot/state_require_scripts")
StateRequireScripts._get_is_modded = function() return true end
Main:init()
end

@@ -1,216 +0,0 @@
local _G = _G
local rawget = rawget
local rawset = rawset
local log = function(category, format, ...)
local Log = rawget(_G, "Log")
if Log then
Log.info(category, format, ...)
else
print(string.format("[%s] %s", category or "", string.format(format or "", ...)))
end
end
log("mod_main", "Initializing mods...")
log("mod_main", "[DTMM] Deployment data:\n{{ deployment_info }}")
local require_store = {}
-- This token is treated as a string template and filled by DTMM during deployment.
-- This allows hiding unsafe I/O functions behind a setting.
-- When not replaced, it's also a valid table definition, thereby degrading gracefully.
local is_io_enabled = {{ is_io_enabled }} -- luacheck: ignore 113
local lua_libs = {
debug = debug,
os = {
date = os.date,
time = os.time,
clock = os.clock,
getenv = os.getenv,
difftime = os.difftime,
},
load = load,
loadfile = loadfile,
loadstring = loadstring,
}
if is_io_enabled then
lua_libs.io = io
lua_libs.os = os
lua_libs.ffi = require("ffi")
end
Mods = {
-- Keep a backup of certain system libraries before
-- Fatshark's code scrubs them.
-- The loader can then decide to pass them on to mods, or ignore them
lua = setmetatable({}, { __index = lua_libs }),
require_store = require_store,
original_require = require,
}
local can_insert = function(filepath, new_result)
local store = require_store[filepath]
if not store or #store then
return true
end
if store[#store] ~= new_result then
return true
end
end
local original_require = require
require = function(filepath, ...)
local result = original_require(filepath, ...)
if result and type(result) == "table" then
if can_insert(filepath, result) then
require_store[filepath] = require_store[filepath] or {}
local store = require_store[filepath]
table.insert(store, result)
if Mods.hook then
Mods.hook.enable_by_file(filepath, #store)
end
end
end
return result
end
require("scripts/boot_init")
require("scripts/foundation/utilities/class")
-- The `__index` metamethod maps a proper identifier `CLASS.MyClassName` to the
-- stringified version of the key: `"MyClassName"`.
-- This allows using LuaCheck for the stringified class names in hook parameters.
_G.CLASS = setmetatable({}, {
__index = function(_, key)
return key
end
})
local original_class = class
class = function(class_name, super_name, ...)
local result = original_class(class_name, super_name, ...)
if not rawget(_G, class_name) then
rawset(_G, class_name, result)
end
if not rawget(_G.CLASS, class_name) then
rawset(_G.CLASS, class_name, result)
end
return result
end
require("scripts/main")
log("mod_main", "'scripts/main' loaded")
-- We need to inject two states into two different state machines:
-- First, we inject one into the `"Main"` state machine at a specific location, so that we're
-- still early in the process, but right after `StateRequireScripts` where most game files
-- are already available to `require` and hook.
-- This is where the `ModLoader` is created initially.
-- Then, we inject into the very first position of the `"Game"` state machine. This runs right
-- after `StateGame._init_managers`, at which point all the parts needed for DMF and other mods
-- have been initialized.
-- This is where `ModLoader` will finally start loading mods.
local function patch_mod_loading_state()
local StateBootLoadDML = class("StateBootLoadDML", "StateBootSubStateBase")
local StateGameLoadMods = class("StateGameLoadMods")
StateBootLoadDML.on_enter = function(self, parent, params)
log("StateBootLoadDML", "Entered")
StateBootLoadDML.super.on_enter(self, parent, params)
local state_params = self:_state_params()
local package_manager = state_params.package_manager
self._package_manager = package_manager
self._package_handles = {
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadDML", nil),
}
end
StateBootLoadDML._state_update = function(self, _)
local package_manager = self._package_manager
if package_manager:update() then
local mod_data = require("scripts/mods/mod_data")
local create_mod_loader = require("scripts/mods/init")
local mod_loader = create_mod_loader(mod_data)
Managers.mod = mod_loader
log("StateBootLoadDML", "DML loaded, exiting")
return true, false
end
return false, false
end
function StateGameLoadMods:on_enter(_, params)
log("StateGameLoadMods", "Entered")
self._next_state = require("scripts/game_states/game/state_splash")
self._next_state_params = params
end
function StateGameLoadMods:update(_)
-- We're relying on the fact that DML internally makes sure
-- that `Managers.mod:update()` is being called appropriately.
-- The implementation as of this writing is to hook `StateGame.update`.
if Managers.mod:all_mods_loaded() then
Log.info("StateGameLoadMods", "Mods loaded, exiting")
return self._next_state, self._next_state_params
end
end
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
local GameStateMachine_init = GameStateMachine.init
GameStateMachine.init = function(self, parent, start_state, params, creation_context, state_change_callbacks, name)
if name == "Main" then
log("mod_main", "Injecting StateBootLoadDML")
-- Hardcoded position after `StateRequireScripts`.
-- We need to wait until then to even begin most of our stuff,
-- so that most of the game's core systems are at least loaded and can be hooked,
-- even if they aren't running, yet.
local pos = 4
table.insert(params.states, pos, {
StateBootLoadDML,
{
package_manager = params.package_manager,
},
})
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
elseif name == "Game" then
log("mod_main", "Injection StateGameLoadMods")
-- The second time around, we want to be the first, so we pass our own
-- 'start_state'.
-- We can't just have the state machine be initialized and then change its `_next_state`, as by the end of
-- `init`, a bunch of stuff will already be initialized.
GameStateMachine_init(self, parent, StateGameLoadMods, params, creation_context, state_change_callbacks, name)
-- And since we're done now, we can revert the function to its original
GameStateMachine.init = GameStateMachine_init
else
-- In all other cases, simply call the original
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
end
end
end
-- Override `init` to run our injection
function init()
patch_mod_loading_state()
-- As requested by Fatshark
local StateRequireScripts = require("scripts/game_states/boot/state_require_scripts")
StateRequireScripts._get_is_modded = function() return true end
Main:init()
end
-- vim: ft=lua

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2020-2023 Paweł Kuna
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -1,8 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-alert-circle" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<path d="M3 12a9 9 0 1 0 18 0a9 9 0 0 0 -18 0" />
<path d="M12 8v4" />
<path d="M12 16h.01" />
</svg>

@@ -1,8 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-alert-triangle" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<path d="M10.24 3.957l-8.422 14.06a1.989 1.989 0 0 0 1.7 2.983h16.845a1.989 1.989 0 0 0 1.7 -2.983l-8.423 -14.06a1.989 1.989 0 0 0 -3.4 0z" />
<path d="M12 9v4" />
<path d="M12 17h.01" />
</svg>

@@ -1,8 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-cloud-download" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<path d="M19 18a3.5 3.5 0 0 0 0 -7h-1a5 4.5 0 0 0 -11 -2a4.6 4.4 0 0 0 -2.1 8.4" />
<path d="M12 13l0 9" />
<path d="M9 19l3 3l3 -3" />
</svg>

@@ -1,7 +0,0 @@
fn main() {
if cfg!(target_os = "windows") {
let mut res = winres::WindowsResource::new();
res.set_icon("assets/dtmm.ico");
res.compile().unwrap();
}
}

@@ -1,17 +1,18 @@
use std::collections::HashMap;
use std::io::ErrorKind;
use std::io::{Cursor, ErrorKind, Read};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use color_eyre::eyre::{self, Context};
use color_eyre::{Help, Report, Result};
use druid::im::Vector;
use druid::ImageBuf;
use druid::{FileInfo, ImageBuf};
use dtmt_shared::ModConfig;
use nexusmods::Api as NexusApi;
use tokio::fs::{self, DirEntry, File};
use tokio::fs::{self, DirEntry};
use tokio_stream::wrappers::ReadDirStream;
use tokio_stream::StreamExt;
use zip::ZipArchive;
use crate::state::{ActionState, InitialLoadResult, ModInfo, ModOrder, NexusInfo, PackageInfo};
use crate::util;
@@ -19,6 +20,161 @@ use crate::util::config::{ConfigSerialize, LoadOrderEntry};
use super::read_sjson_file;
#[tracing::instrument(skip(state))]
pub(crate) async fn import_mod(state: ActionState, info: FileInfo) -> Result<ModInfo> {
let data = fs::read(&info.path)
.await
.wrap_err_with(|| format!("Failed to read file {}", info.path.display()))?;
let data = Cursor::new(data);
let nexus = if let Some((_, id, _, _)) = info
.path
.file_name()
.and_then(|s| s.to_str())
.and_then(NexusApi::parse_file_name)
{
if !state.nexus_api_key.is_empty() {
let api = NexusApi::new(state.nexus_api_key.to_string())?;
let mod_info = api
.mods_id(id)
.await
.wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?;
Some(NexusInfo::from(mod_info))
} else {
None
}
} else {
None
};
let mut archive = ZipArchive::new(data).wrap_err("Failed to open ZIP archive")?;
if tracing::enabled!(tracing::Level::DEBUG) {
let names = archive.file_names().fold(String::new(), |mut s, name| {
s.push('\n');
s.push_str(name);
s
});
tracing::debug!("Archive contents:{}", names);
}
let dir_name = {
let f = archive.by_index(0).wrap_err("Archive is empty")?;
if !f.is_dir() {
let err = eyre::eyre!("archive does not have a top-level directory");
return Err(err).with_suggestion(|| "Use 'dtmt build' to create the mod archive.");
}
let name = f.name();
// The directory name is returned with a trailing slash, which we don't want
name[..(name.len().saturating_sub(1))].to_string()
};
tracing::info!("Importing mod {}", dir_name);
let names: Vec<_> = archive.file_names().map(|s| s.to_string()).collect();
let mod_cfg: ModConfig = {
let name = names
.iter()
.find(|name| name.ends_with("dtmt.cfg"))
.ok_or_else(|| eyre::eyre!("archive does not contain mod config"))?;
let mut f = archive
.by_name(name)
.wrap_err("Failed to read mod config from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read mod config from archive")?;
let data = String::from_utf8(buf).wrap_err("Mod config is not valid UTF-8")?;
serde_sjson::from_str(&data).wrap_err("Failed to deserialize mod config")?
};
tracing::debug!(?mod_cfg);
let files: HashMap<String, Vec<String>> = {
let name = names
.iter()
.find(|name| name.ends_with("files.sjson"))
.ok_or_else(|| eyre::eyre!("archive does not contain file index"))?;
let mut f = archive
.by_name(name)
.wrap_err("Failed to read file index from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read file index from archive")?;
let data = String::from_utf8(buf).wrap_err("File index is not valid UTF-8")?;
serde_sjson::from_str(&data).wrap_err("Failed to deserialize file index")?
};
tracing::trace!(?files);
let image = if let Some(path) = &mod_cfg.image {
let name = names
.iter()
.find(|name| name.ends_with(&path.display().to_string()))
.ok_or_else(|| eyre::eyre!("archive does not contain configured image file"))?;
let mut f = archive
.by_name(name)
.wrap_err("Failed to read image file from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read file index from archive")?;
// Druid somehow doesn't return an error compatible with eyre here,
// so we have to wrap through `Display` manually.
let img = match ImageBuf::from_data(&buf) {
Ok(img) => img,
Err(err) => {
let err = Report::msg(err.to_string()).wrap_err("Invalid image data");
return Err(err).with_suggestion(|| {
"Supported formats are: PNG, JPEG, Bitmap and WebP".to_string()
});
}
};
Some(img)
} else {
None
};
let mod_dir = state.mod_dir;
tracing::trace!("Creating mods directory {}", mod_dir.display());
fs::create_dir_all(Arc::as_ref(&mod_dir))
.await
.wrap_err_with(|| format!("Failed to create data directory {}", mod_dir.display()))?;
tracing::trace!("Extracting mod archive to {}", mod_dir.display());
archive
.extract(Arc::as_ref(&mod_dir))
.wrap_err_with(|| format!("Failed to extract archive to {}", mod_dir.display()))?;
if let Some(nexus) = &nexus {
let data = serde_sjson::to_string(nexus).wrap_err("Failed to serialize Nexus info")?;
let path = mod_dir.join(&mod_cfg.id).join("nexus.sjson");
fs::write(&path, data.as_bytes())
.await
.wrap_err_with(|| format!("Failed to write Nexus info to '{}'", path.display()))?;
}
let packages = files
.into_iter()
.map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect())))
.collect();
let info = ModInfo::new(mod_cfg, packages, image, nexus);
Ok(info)
}
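For illustration, the checks above imply a specific archive shape: exactly one top-level directory, containing the `dtmt.cfg`, the `files.sjson` index, and the per-package bundle files produced by `dtmt build`. A minimal sketch of such a listing, with purely made-up names:

// Hypothetical archive listing accepted by `import_mod`; names are illustrative.
const EXPECTED_ENTRIES: &[&str] = &[
    "my_mod/",
    "my_mod/dtmt.cfg",
    "my_mod/files.sjson",
    // One bundle file per package, named by the Murmur64 hash of the package name.
    "my_mod/9ba626afa44a3aa3",
];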
#[tracing::instrument(skip(state))]
pub(crate) async fn delete_mod(state: ActionState, info: &ModInfo) -> Result<()> {
let mod_dir = state.mod_dir.join(&info.id);
@ -73,13 +229,9 @@ async fn read_mod_dir_entry(res: Result<DirEntry>) -> Result<ModInfo> {
Err(err) => return Err(err),
};
let files: HashMap<String, Vec<String>> = if cfg.bundled {
read_sjson_file(&index_path)
let files: HashMap<String, Vec<String>> = read_sjson_file(&index_path)
.await
.wrap_err_with(|| format!("Failed to read file index '{}'", index_path.display()))?
} else {
Default::default()
};
.wrap_err_with(|| format!("Failed to read file index '{}'", index_path.display()))?;
let image = if let Some(path) = &cfg.image {
let path = entry.path().join(path);
@ -161,21 +313,22 @@ where
}
pub(crate) fn check_mod_order(state: &ActionState) -> Result<()> {
if tracing::enabled!(tracing::Level::DEBUG) {
let order = state
.mods
.iter()
.enumerate()
.filter(|(_, i)| i.enabled)
.fold(String::new(), |mut s, (i, info)| {
s.push_str(&format!("{}: {} - {}\n", i, info.id, info.name));
s
});
tracing::debug!("Mod order:\n{}", order);
{
let first = state.mods.get(0);
if first.is_none() || !(first.unwrap().id == "dml" && first.unwrap().enabled) {
// TODO: Add a suggestion where to get it, once that's published
eyre::bail!("'Darktide Mod Loader' needs to be installed, enabled and at the top of the load order");
}
}
for (i, mod_info) in state.mods.iter().enumerate().filter(|(_, i)| i.enabled) {
state
.mods
.iter()
.filter(|i| i.enabled)
.enumerate()
.for_each(|(i, info)| tracing::debug!(i, ?info));
for (i, mod_info) in state.mods.iter().filter(|i| i.enabled).enumerate() {
for dep in &mod_info.depends {
let dep_info = state.mods.iter().enumerate().find(|(_, m)| m.id == dep.id);
@ -271,28 +424,15 @@ pub(crate) async fn load_initial(path: PathBuf, is_default: bool) -> Result<Init
.await
.wrap_err("Failed to read config file")?;
// Create or truncate the log file
let log_path = config.data_dir.join("dtmm.log");
tokio::spawn(async move {
let _ = File::create(&log_path).await;
tracing::debug!("Truncated log file");
});
let game_info = tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
.await
.wrap_err("Failed to spawn task to collect Steam game info")?;
let game_info = match game_info {
Ok(game_info) => game_info,
Err(err) => {
tracing::error!("Failed to collect game info: {:?}", err);
None
}
};
.wrap_err("Failed to collect Steam game info")?;
{
if config.game_dir.is_none() && game_info.is_none() {
tracing::error!("No Game Directory set. Head to the 'Settings' tab to set it manually",);
}
}
let mod_dir = config.data_dir.join("mods");
let mods = load_mods(mod_dir, config.mod_order.iter())

View file

@ -1,816 +0,0 @@
use std::io::{Cursor, ErrorKind};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Help, Report, Result};
use futures::StreamExt;
use futures::{stream, TryStreamExt};
use minijinja::Environment;
use sdk::filetype::lua;
use sdk::filetype::package::Package;
use sdk::murmur::Murmur64;
use sdk::{
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::fs::{self, DirEntry};
use tokio::io::AsyncWriteExt;
use tracing::Instrument;
use super::read_sjson_file;
use crate::controller::app::check_mod_order;
use crate::state::{ActionState, PackageInfo};
pub const MOD_BUNDLE_NAME: &str = "packages/mods";
pub const BOOT_BUNDLE_NAME: &str = "packages/boot";
pub const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
pub const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
pub const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
pub const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini";
pub const DEPLOYMENT_DATA_PATH: &str = "dtmm-deployment.sjson";
#[derive(Debug, Serialize, Deserialize)]
pub struct DeploymentData {
pub bundles: Vec<String>,
pub mod_folders: Vec<String>,
#[serde(with = "time::serde::iso8601")]
pub timestamp: OffsetDateTime,
}
#[tracing::instrument]
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
where
P: AsRef<Path> + std::fmt::Debug,
{
let path = path.as_ref();
let backup_path = {
let mut p = PathBuf::from(path);
let ext = if let Some(ext) = p.extension() {
ext.to_string_lossy().to_string() + ".bak"
} else {
String::from("bak")
};
p.set_extension(ext);
p
};
let file_name = path
.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_else(|| String::from("file"));
let bin = match fs::read(&backup_path).await {
Ok(bin) => bin,
Err(err) if err.kind() == ErrorKind::NotFound => {
// TODO: This doesn't need to be awaited here, yet.
// I only need to make sure it has finished before writing the changed bundle.
tracing::debug!(
"Backup does not exist. Backing up original {} to '{}'",
file_name,
backup_path.display()
);
fs::copy(path, &backup_path).await.wrap_err_with(|| {
format!(
"Failed to back up {} '{}' to '{}'",
file_name,
path.display(),
backup_path.display()
)
})?;
tracing::debug!("Reading {} from original '{}'", file_name, path.display());
fs::read(path).await.wrap_err_with(|| {
format!("Failed to read {} file: {}", file_name, path.display())
})?
}
Err(err) => {
return Err(err).wrap_err_with(|| {
format!(
"Failed to read {} from backup '{}'",
file_name,
backup_path.display()
)
});
}
};
Ok(bin)
}
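As a worked example of the backup naming above: an existing extension gains a `.bak` suffix, while extension-less files get a plain `bak` extension. A small, self-contained sketch (paths are illustrative):

use std::path::{Path, PathBuf};

// Mirrors the backup-path derivation in `read_file_with_backup`.
fn backup_path(path: &Path) -> PathBuf {
    let mut p = PathBuf::from(path);
    let ext = match p.extension() {
        Some(ext) => ext.to_string_lossy().to_string() + ".bak",
        None => String::from("bak"),
    };
    p.set_extension(ext);
    p
}

fn main() {
    // "bundle_database.data" keeps its extension and gains ".bak".
    assert_eq!(
        backup_path(Path::new("bundle/bundle_database.data")),
        PathBuf::from("bundle/bundle_database.data.bak")
    );
    // Extension-less bundle names simply gain ".bak".
    assert_eq!(
        backup_path(Path::new("bundle/9ba626afa44a3aa3")),
        PathBuf::from("bundle/9ba626afa44a3aa3.bak")
    );
}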
#[tracing::instrument(skip_all)]
async fn patch_game_settings(state: Arc<ActionState>) -> Result<()> {
let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH);
let settings = read_file_with_backup(&settings_path)
.await
.wrap_err("Failed to read settings.ini")?;
let settings = String::from_utf8(settings).wrap_err("Settings.ini is not valid UTF-8")?;
let mut f = fs::File::create(&settings_path)
.await
.wrap_err_with(|| format!("Failed to open {}", settings_path.display()))?;
let Some(i) = settings.find("boot_script =") else {
eyre::bail!("couldn't find 'boot_script' field");
};
f.write_all(settings[0..i].as_bytes()).await?;
f.write_all(b"boot_script = \"scripts/mod_main\"").await?;
let Some(j) = settings[i..].find('\n') else {
eyre::bail!("couldn't find end of 'boot_script' field");
};
f.write_all(settings[(i + j)..].as_bytes()).await?;
Ok(())
}
#[tracing::instrument(skip_all, fields(package = info.name))]
fn make_package(info: &PackageInfo) -> Result<Package> {
let mut pkg = Package::new(info.name.clone(), PathBuf::new());
for f in &info.files {
let mut it = f.rsplit('.');
let file_type = it
.next()
.ok_or_else(|| eyre::eyre!("missing file extension"))
.and_then(BundleFileType::from_str)
.wrap_err("Invalid file name in package info")?;
let name: String = it.collect();
pkg.add_file(file_type, name);
}
Ok(pkg)
}
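The split above treats the last dot-separated segment as the file type and everything before it as the engine path. A quick sketch with an illustrative entry from a package's file list:

fn main() {
    // Illustrative entry; real values come from the package info's `files` list.
    let f = "scripts/mods/my_mod/init.lua";
    let mut it = f.rsplit('.');
    let file_type = it.next().expect("missing file extension"); // "lua"
    let name: String = it.collect(); // "scripts/mods/my_mod/init"
    assert_eq!(file_type, "lua");
    assert_eq!(name, "scripts/mods/my_mod/init");
}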
#[tracing::instrument]
async fn copy_recursive(
from: impl Into<PathBuf> + std::fmt::Debug,
to: impl AsRef<Path> + std::fmt::Debug,
) -> Result<()> {
let to = to.as_ref();
#[tracing::instrument]
async fn handle_dir(from: PathBuf) -> Result<Vec<(bool, DirEntry)>> {
let mut dir = fs::read_dir(&from)
.await
.wrap_err("Failed to read directory")?;
let mut entries = Vec::new();
while let Some(entry) = dir.next_entry().await? {
let meta = entry.metadata().await.wrap_err_with(|| {
format!("Failed to get metadata for '{}'", entry.path().display())
})?;
entries.push((meta.is_dir(), entry));
}
Ok(entries)
}
let base = from.into();
stream::unfold(vec![base.clone()], |mut state| async {
let from = state.pop()?;
let inner = match handle_dir(from).await {
Ok(entries) => {
for (is_dir, entry) in &entries {
if *is_dir {
state.push(entry.path());
}
}
stream::iter(entries).map(Ok).left_stream()
}
Err(e) => stream::once(async { Err(e) }).right_stream(),
};
Some((inner, state))
})
.flatten()
.try_for_each(|(is_dir, entry)| {
let path = entry.path();
let dest = path
.strip_prefix(&base)
.map(|suffix| to.join(suffix))
.expect("all entries are relative to the directory we are walking");
async move {
if is_dir {
tracing::trace!("Creating directory '{}'", dest.display());
// Instead of trying to filter "already exists" errors out explicitly,
// we just ignore all. It'll fail eventually with the next copy operation.
let _ = fs::create_dir(&dest).await;
Ok(())
} else {
tracing::trace!("Copying file '{}' -> '{}'", path.display(), dest.display());
fs::copy(&path, &dest).await.map(|_| ()).wrap_err_with(|| {
format!(
"Failed to copy file '{}' -> '{}'",
path.display(),
dest.display()
)
})
}
}
})
.await
.map(|_| ())
}
#[tracing::instrument(skip(state))]
async fn copy_mod_folders(state: Arc<ActionState>) -> Result<Vec<String>> {
let game_dir = Arc::clone(&state.game_dir);
let mut tasks = Vec::new();
for mod_info in state.mods.iter().filter(|m| m.enabled && !m.bundled) {
let span = tracing::trace_span!("copying legacy mod", name = mod_info.name);
let _enter = span.enter();
let mod_id = mod_info.id.clone();
let mod_dir = Arc::clone(&state.mod_dir);
let game_dir = Arc::clone(&game_dir);
let task = async move {
let from = mod_dir.join(&mod_id);
let to = game_dir.join("mods").join(&mod_id);
tracing::debug!(from = %from.display(), to = %to.display(), "Copying legacy mod '{}'", mod_id);
let _ = fs::create_dir_all(&to).await;
copy_recursive(&from, &to).await.wrap_err_with(|| {
format!(
"Failed to copy legacy mod from '{}' to '{}'",
from.display(),
to.display()
)
})?;
Ok::<_, Report>(mod_id)
};
tasks.push(task);
}
let ids = futures::future::try_join_all(tasks).await?;
Ok(ids)
}
fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
#[derive(Serialize)]
struct TemplateDataMod {
id: String,
name: String,
bundled: bool,
version: String,
init: String,
data: Option<String>,
localization: Option<String>,
packages: Vec<String>,
}
let mut env = Environment::new();
env.set_trim_blocks(true);
env.set_lstrip_blocks(true);
env.add_template("mod_data.lua", include_str!("../../assets/mod_data.lua.j2"))
.wrap_err("Failed to compile template for `mod_data.lua`")?;
let tmpl = env
.get_template("mod_data.lua")
.wrap_err("Failed to get template `mod_data.lua`")?;
let data: Vec<TemplateDataMod> = state
.mods
.iter()
.filter_map(|m| {
if !m.enabled {
return None;
}
Some(TemplateDataMod {
id: m.id.clone(),
name: m.name.clone(),
bundled: m.bundled,
version: m.version.clone(),
init: m.resources.init.to_string_lossy().to_string(),
data: m
.resources
.data
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
localization: m
.resources
.localization
.as_ref()
.map(|p| p.to_string_lossy().to_string()),
packages: m.packages.iter().map(|p| p.name.clone()).collect(),
})
})
.collect();
let lua = tmpl
.render(minijinja::context!(mods => data))
.wrap_err("Failed to render template `mod_data.lua`")?;
tracing::debug!("mod_data.lua:\n{}", lua);
Ok(lua)
}
#[tracing::instrument(skip_all)]
async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string());
let mut tasks = Vec::new();
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let mut bundles = Vec::new();
let mut add_lua_asset = |name: &str, data: &str| {
let span = tracing::info_span!("Compiling Lua", name, data_len = data.len());
let _enter = span.enter();
let file = lua::compile(name.to_string(), data).wrap_err("Failed to compile Lua")?;
mod_bundle.add_file(file);
Ok::<_, Report>(())
};
build_mod_data_lua(state.clone())
.wrap_err("Failed to build 'mod_data.lua'")
.and_then(|data| add_lua_asset(MOD_DATA_SCRIPT, &data))?;
add_lua_asset("scripts/mods/init", include_str!("../../assets/init.lua"))?;
add_lua_asset(
"scripts/mods/mod_loader",
include_str!("../../assets/mod_loader.lua"),
)?;
tracing::trace!("Preparing tasks to deploy bundle files");
for mod_info in state.mods.iter().filter(|m| m.enabled && m.bundled) {
let span = tracing::trace_span!("building mod packages", name = mod_info.name);
let _enter = span.enter();
let mod_dir = state.mod_dir.join(&mod_info.id);
for pkg_info in &mod_info.packages {
let span = tracing::trace_span!("building package", name = pkg_info.name);
let _enter = span.enter();
tracing::trace!(
"Building package {} for mod {}",
pkg_info.name,
mod_info.name
);
let pkg = make_package(pkg_info).wrap_err("Failed to make package")?;
let mut variant = BundleFileVariant::new();
let bin = pkg
.to_binary()
.wrap_err("Failed to serialize package to binary")?;
variant.set_data(bin);
let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package);
file.add_variant(variant);
tracing::trace!(
"Compiled package {} for mod {}",
pkg_info.name,
mod_info.name
);
mod_bundle.add_file(file);
let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name));
let src = mod_dir.join(&bundle_name);
let dest = bundle_dir.join(&bundle_name);
let pkg_name = pkg_info.name.clone();
let mod_name = mod_info.name.clone();
// Explicitly drop the guard, so that we can move the span
// into the async operation
drop(_enter);
let ctx = state.ctx.clone();
let task = async move {
let bundle = {
let bin = fs::read(&src).await.wrap_err_with(|| {
format!("Failed to read bundle file '{}'", src.display())
})?;
let name = Bundle::get_name_from_path(&ctx, &src);
Bundle::from_binary(&ctx, name, bin)
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?
};
tracing::debug!(
src = %src.display(),
dest = %dest.display(),
"Copying bundle '{}' for mod '{}'",
pkg_name,
mod_name,
);
// We attempt to remove any previous file, so that the hard link can be created.
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
// may be possible despite an error here, or the error will be reported by it anyways.
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
// hashes, it's low enough for now, and the setup required to detect
// "game bundle vs mod bundle" is non-trivial.
let _ = fs::remove_file(&dest).await;
fs::copy(&src, &dest).await.wrap_err_with(|| {
format!(
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
src.display(),
dest.display()
)
})?;
Ok::<Bundle, color_eyre::Report>(bundle)
}
.instrument(span);
tasks.push(task);
}
}
tracing::debug!("Copying {} mod bundles", tasks.len());
let mut tasks = stream::iter(tasks).buffer_unordered(10);
while let Some(res) = tasks.next().await {
let bundle = res?;
bundles.push(bundle);
}
{
let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64()));
tracing::trace!("Writing mod bundle to '{}'", path.display());
fs::write(&path, mod_bundle.to_binary()?)
.await
.wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
}
bundles.push(mod_bundle);
Ok(bundles)
}
#[tracing::instrument(skip_all)]
async fn patch_boot_bundle(
state: Arc<ActionState>,
deployment_info: &String,
) -> Result<Vec<Bundle>> {
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));
let mut bundles = Vec::with_capacity(2);
let mut boot_bundle = async {
let bin = read_file_with_backup(&bundle_path)
.await
.wrap_err("Failed to read boot bundle")?;
Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin)
.wrap_err("Failed to parse boot bundle")
}
.instrument(tracing::trace_span!("read boot bundle"))
.await
.wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;
{
tracing::trace!("Adding mod package file to boot bundle");
let span = tracing::trace_span!("create mod package file");
let _enter = span.enter();
let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new());
for mod_info in &state.mods {
for pkg_info in &mod_info.packages {
pkg.add_file(BundleFileType::Package, &pkg_info.name);
}
}
pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT);
let mut variant = BundleFileVariant::new();
variant.set_data(pkg.to_binary()?);
let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package);
f.add_variant(variant);
boot_bundle.add_file(f);
}
{
let span = tracing::debug_span!("Importing mod main script");
let _enter = span.enter();
let mut env = Environment::new();
env.set_trim_blocks(true);
env.set_lstrip_blocks(true);
env.add_template("mod_main.lua", include_str!("../../assets/mod_main.lua.j2"))
.wrap_err("Failed to compile template for `mod_main.lua`")?;
let tmpl = env
.get_template("mod_main.lua")
.wrap_err("Failed to get template `mod_main.lua`")?;
let is_io_enabled = if state.is_io_enabled { "true" } else { "false" };
let deployment_info = deployment_info.replace("\"", "\\\"").replace("\n", "\\n");
let lua = tmpl
.render(minijinja::context!(is_io_enabled => is_io_enabled, deployment_info => deployment_info))
.wrap_err("Failed to render template `mod_main.lua`")?;
tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua);
let file = lua::compile(MOD_BOOT_SCRIPT.to_string(), lua)
.wrap_err("Failed to compile mod main Lua file")?;
boot_bundle.add_file(file);
}
async {
let bin = boot_bundle
.to_binary()
.wrap_err("Failed to serialize boot bundle")?;
fs::write(&bundle_path, bin)
.await
.wrap_err_with(|| format!("Failed to write main bundle: {}", bundle_path.display()))
}
.instrument(tracing::trace_span!("write boot bundle"))
.await?;
bundles.push(boot_bundle);
Ok(bundles)
}
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
async fn patch_bundle_database<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
where
B: AsRef<[Bundle]>,
{
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME);
let mut db = {
let bin = read_file_with_backup(&database_path)
.await
.wrap_err("Failed to read bundle database")?;
let mut r = Cursor::new(bin);
let db = BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?;
tracing::trace!("Finished parsing bundle database");
db
};
for bundle in bundles.as_ref() {
tracing::trace!("Adding '{}' to bundle database", bundle.name().display());
db.add_bundle(bundle);
}
{
let bin = db
.to_binary()
.wrap_err("Failed to serialize bundle database")?;
fs::write(&database_path, bin).await.wrap_err_with(|| {
format!(
"failed to write bundle database to '{}'",
database_path.display()
)
})?;
}
Ok(())
}
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
fn build_deployment_data(
bundles: impl AsRef<[Bundle]>,
mod_folders: impl AsRef<[String]>,
) -> Result<String> {
let info = DeploymentData {
timestamp: OffsetDateTime::now_utc(),
bundles: bundles
.as_ref()
.iter()
.map(|bundle| format!("{:x}", bundle.name().to_murmur64()))
.collect(),
// TODO:
mod_folders: mod_folders
.as_ref()
.iter()
.map(|folder| folder.clone())
.collect(),
};
serde_sjson::to_string(&info).wrap_err("Failed to serizalize deployment data")
}
#[tracing::instrument(skip_all, fields(
game_dir = %state.game_dir.display(),
mods = state.mods.len()
))]
pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
let state = Arc::new(state);
let bundle_dir = state.game_dir.join("bundle");
let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()));
if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999")))
.await
.is_ok()
{
let err = eyre::eyre!("Found dtkit-patch-based mod installation.");
return Err(err)
.with_suggestion(|| {
"If you're a mod author and saved projects directly in 'mods/', \
use DTMT to migrate them to the new project structure."
.to_string()
})
.with_suggestion(|| {
"Click 'Reset Game' to remove the previous mod installation.".to_string()
});
}
let (_, game_info, deployment_info) = tokio::try_join!(
async {
fs::metadata(&bundle_dir)
.await
.wrap_err("Failed to open game bundle directory")
.with_suggestion(|| "Double-check 'Game Directory' in the Settings tab.")
},
async {
tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
.await
.map_err(Report::new)
},
async {
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
match read_sjson_file::<_, DeploymentData>(&path).await {
Ok(data) => Ok(Some(data)),
Err(err) => {
if let Some(err) = err.downcast_ref::<std::io::Error>()
&& err.kind() == ErrorKind::NotFound
{
Ok(None)
} else {
Err(err).wrap_err(format!(
"Failed to read deployment data from: {}",
path.display()
))
}
}
}
}
)
.wrap_err("Failed to gather deployment information")?;
let game_info = match game_info {
Ok(game_info) => game_info,
Err(err) => {
tracing::error!("Failed to collect game info: {:#?}", err);
None
}
};
tracing::debug!(?game_info, ?deployment_info);
if let Some(game_info) = game_info {
if deployment_info
.as_ref()
.map(|i| game_info.last_updated > i.timestamp)
.unwrap_or(false)
{
tracing::warn!(
"Game was updated since last mod deployment. \
Attempting to reconcile game files."
);
tokio::try_join!(
async {
let path = bundle_dir.join(BUNDLE_DATABASE_NAME);
let backup_path = path.with_extension("data.bak");
fs::copy(&path, &backup_path)
.await
.wrap_err("Failed to re-create backup for bundle database.")
},
async {
let path = bundle_dir.join(boot_bundle_path);
let backup_path = path.with_extension("bak");
fs::copy(&path, &backup_path)
.await
.wrap_err("Failed to re-create backup for boot bundle")
}
)
.with_suggestion(|| {
"Reset the game using 'Reset Game', then verify game files.".to_string()
})?;
tracing::info!(
"Successfully re-created game file backups. \
Continuing mod deployment."
);
}
}
check_mod_order(&state)?;
tracing::info!(
"Deploying {} mods to '{}'.",
state.mods.iter().filter(|i| i.enabled).count(),
bundle_dir.display()
);
tracing::info!("Copy legacy mod folders");
let mod_folders = copy_mod_folders(state.clone())
.await
.wrap_err("Failed to copy mod folders")?;
tracing::info!("Build mod bundles");
let mut bundles = build_bundles(state.clone())
.await
.wrap_err("Failed to build mod bundles")?;
let new_deployment_info = build_deployment_data(&bundles, &mod_folders)
.wrap_err("Failed to build new deployment data")?;
tracing::info!("Patch boot bundle");
let mut boot_bundles = patch_boot_bundle(state.clone(), &new_deployment_info)
.await
.wrap_err("Failed to patch boot bundle")?;
bundles.append(&mut boot_bundles);
if let Some(info) = &deployment_info {
let bundle_dir = Arc::new(bundle_dir);
// Remove bundles from the previous deployment that don't match the current one.
// I.e. mods that used to be installed/enabled but aren't anymore.
{
let tasks = info.bundles.iter().cloned().filter_map(|file_name| {
let is_being_deployed = bundles.iter().any(|b2| {
let name = format!("{:016x}", b2.name());
file_name == name
});
if !is_being_deployed {
let bundle_dir = bundle_dir.clone();
let task = async move {
let path = bundle_dir.join(&file_name);
tracing::debug!("Removing unused bundle '{}'", file_name);
if let Err(err) = fs::remove_file(&path).await.wrap_err_with(|| {
format!("Failed to remove unused bundle '{}'", path.display())
}) {
tracing::error!("{:?}", err);
}
};
Some(task)
} else {
None
}
});
futures::future::join_all(tasks).await;
}
// Do the same thing for mod folders
{
let tasks = info.mod_folders.iter().filter_map(|mod_id| {
let is_being_deployed = mod_folders.iter().any(|id| id == mod_id);
if !is_being_deployed {
let path = bundle_dir.join("mods").join(mod_id);
tracing::debug!("Removing unused mod folder '{}'", path.display());
let task = async move {
if let Err(err) = fs::remove_dir_all(&path).await.wrap_err_with(|| {
format!("Failed to remove unused legacy mod '{}'", path.display())
}) {
tracing::error!("{:?}", err);
}
};
Some(task)
} else {
None
}
});
futures::future::join_all(tasks).await;
}
}
tracing::info!("Patch game settings");
patch_game_settings(state.clone())
.await
.wrap_err("Failed to patch game settings")?;
tracing::info!("Patching bundle database");
patch_bundle_database(state.clone(), &bundles)
.await
.wrap_err("Failed to patch bundle database")?;
tracing::info!("Writing deployment data");
{
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
fs::write(&path, &new_deployment_info)
.await
.wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
}
tracing::info!("Finished deploying mods");
Ok(())
}

View file

@ -1,19 +1,44 @@
use std::io::{self, ErrorKind};
use std::io::{Cursor, ErrorKind};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Result};
use color_eyre::{eyre, Help, Report, Result};
use futures::stream;
use futures::StreamExt;
use path_slash::PathBufExt;
use sdk::filetype::lua;
use sdk::filetype::package::Package;
use sdk::murmur::Murmur64;
use tokio::fs::{self};
use tokio::io::AsyncWriteExt;
use crate::controller::deploy::{
DeploymentData, BOOT_BUNDLE_NAME, BUNDLE_DATABASE_NAME, DEPLOYMENT_DATA_PATH,
use sdk::{
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
};
use crate::state::ActionState;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::fs;
use tokio::io::AsyncWriteExt;
use tracing::Instrument;
use super::deploy::SETTINGS_FILE_PATH;
use super::read_sjson_file;
use crate::controller::app::check_mod_order;
use crate::state::{ActionState, PackageInfo};
const MOD_BUNDLE_NAME: &str = "packages/mods";
const BOOT_BUNDLE_NAME: &str = "packages/boot";
const DML_BUNDLE_NAME: &str = "packages/dml";
const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini";
const DEPLOYMENT_DATA_PATH: &str = "dtmm-deployment.sjson";
#[derive(Debug, Serialize, Deserialize)]
struct DeploymentData {
bundles: Vec<String>,
#[serde(with = "time::serde::iso8601")]
timestamp: OffsetDateTime,
}
#[tracing::instrument]
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
@ -103,56 +128,516 @@ async fn patch_game_settings(state: Arc<ActionState>) -> Result<()> {
Ok(())
}
#[tracing::instrument(skip_all, fields(package = info.name))]
fn make_package(info: &PackageInfo) -> Result<Package> {
let mut pkg = Package::new(info.name.clone(), PathBuf::new());
for f in &info.files {
let mut it = f.rsplit('.');
let file_type = it
.next()
.ok_or_else(|| eyre::eyre!("missing file extension"))
.and_then(BundleFileType::from_str)
.wrap_err("Invalid file name in package info")?;
let name: String = it.collect();
pkg.add_file(file_type, name);
}
Ok(pkg)
}
fn build_mod_data_lua(state: Arc<ActionState>) -> String {
let mut lua = String::from("return {\n");
// DMF is handled explicitly by the loading procedures, as it actually drives most of that
// and should therefore not show up in the load order.
for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) {
lua.push_str(" {\n name = \"");
lua.push_str(&mod_info.name);
lua.push_str("\",\n id = \"");
lua.push_str(&mod_info.id);
lua.push_str("\",\n run = function()\n");
let resources = &mod_info.resources;
if resources.data.is_some() || resources.localization.is_some() {
lua.push_str(" new_mod(\"");
lua.push_str(&mod_info.id);
lua.push_str("\", {\n mod_script = \"");
lua.push_str(&resources.init.to_slash_lossy());
if let Some(data) = resources.data.as_ref() {
lua.push_str("\",\n mod_data = \"");
lua.push_str(&data.to_slash_lossy());
}
if let Some(localization) = &resources.localization {
lua.push_str("\",\n mod_localization = \"");
lua.push_str(&localization.to_slash_lossy());
}
lua.push_str("\",\n })\n");
} else {
lua.push_str(" return dofile(\"");
lua.push_str(&resources.init.to_slash_lossy());
lua.push_str("\")\n");
}
lua.push_str(" end,\n packages = {\n");
for pkg_info in &mod_info.packages {
lua.push_str(" \"");
lua.push_str(&pkg_info.name);
lua.push_str("\",\n");
}
lua.push_str(" },\n },\n");
}
lua.push('}');
tracing::debug!("mod_data_lua:\n{}", lua);
lua
}
#[tracing::instrument(skip_all)]
async fn reset_dtkit_patch(state: ActionState) -> Result<()> {
let bundle_dir = state.game_dir.join("bundle");
async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string());
let mut tasks = Vec::new();
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let mut bundles = Vec::new();
{
let path = bundle_dir.join(BUNDLE_DATABASE_NAME);
let backup_path = path.with_extension("data.bak");
fs::rename(&backup_path, &path).await.wrap_err_with(|| {
let span = tracing::debug_span!("Building mod data script");
let _enter = span.enter();
let lua = build_mod_data_lua(state.clone());
let file =
lua::compile(MOD_DATA_SCRIPT, &lua).wrap_err("Failed to compile mod data Lua file")?;
mod_bundle.add_file(file);
}
for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) {
let span = tracing::trace_span!("building mod packages", name = mod_info.name);
let _enter = span.enter();
let mod_dir = state.mod_dir.join(&mod_info.id);
for pkg_info in &mod_info.packages {
let span = tracing::trace_span!("building package", name = pkg_info.name);
let _enter = span.enter();
let pkg = make_package(pkg_info).wrap_err("Failed to make package")?;
let mut variant = BundleFileVariant::new();
let bin = pkg
.to_binary()
.wrap_err("Failed to serialize package to binary")?;
variant.set_data(bin);
let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package);
file.add_variant(variant);
mod_bundle.add_file(file);
let bundle_name = Murmur64::hash(&pkg_info.name)
.to_string()
.to_ascii_lowercase();
let src = mod_dir.join(&bundle_name);
let dest = bundle_dir.join(&bundle_name);
let pkg_name = pkg_info.name.clone();
let mod_name = mod_info.name.clone();
// Explicitly drop the guard, so that we can move the span
// into the async operation
drop(_enter);
let ctx = state.ctx.clone();
let task = async move {
let bundle = {
let bin = fs::read(&src).await.wrap_err_with(|| {
format!("Failed to read bundle file '{}'", src.display())
})?;
let name = Bundle::get_name_from_path(&ctx, &src);
Bundle::from_binary(&ctx, name, bin)
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?
};
tracing::debug!(
src = %src.display(),
dest = %dest.display(),
"Copying bundle '{}' for mod '{}'",
pkg_name,
mod_name,
);
// We attempt to remove any previous file, so that the hard link can be created.
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
// may be possible despite an error here, or the error will be reported by it anyways.
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
// hashes, it's low enough for now, and the setup required to detect
// "game bundle vs mod bundle" is non-trivial.
let _ = fs::remove_file(&dest).await;
fs::copy(&src, &dest).await.wrap_err_with(|| {
format!(
"Failed to move bundle database backup '{}' -> '{}'",
backup_path.display(),
path.display()
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
src.display(),
dest.display()
)
})?;
tracing::trace!("Reverted bundle database from backup");
}
for path in [
bundle_dir.join(format!(
"{:016x}.patch_999",
Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())
)),
state.game_dir.join("binaries/mod_loader"),
state.game_dir.join("toggle_darktide_mods.bat"),
state.game_dir.join("README.md"),
] {
match fs::remove_file(&path).await {
Ok(_) => tracing::trace!("Removed file '{}'", path.display()),
Err(err) if err.kind() != io::ErrorKind::NotFound => {
tracing::error!("Failed to remove file '{}': {}", path.display(), err)
Ok::<Bundle, color_eyre::Report>(bundle)
}
Err(_) => {}
.instrument(span);
tasks.push(task);
}
}
// We deliberately skip the `mods/` directory here.
// Many modders did their development right in there, and as people are prone to not read
// error messages and guides in full, there is bound to be someone who would have
// deleted all their source code if this removed the `mods/` folder.
for path in [state.game_dir.join("tools")] {
match fs::remove_dir_all(&path).await {
Ok(_) => tracing::trace!("Removed directory '{}'", path.display()),
Err(err) if err.kind() != io::ErrorKind::NotFound => {
tracing::error!("Failed to remove directory '{}': {}", path.display(), err)
tracing::debug!("Copying {} mod bundles", tasks.len());
let mut tasks = stream::iter(tasks).buffer_unordered(10);
while let Some(res) = tasks.next().await {
let bundle = res?;
bundles.push(bundle);
}
Err(_) => {}
{
let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64()));
tracing::trace!("Writing mod bundle to '{}'", path.display());
fs::write(&path, mod_bundle.to_binary()?)
.await
.wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
}
bundles.push(mod_bundle);
Ok(bundles)
}
#[tracing::instrument(skip_all)]
async fn patch_boot_bundle(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));
let mut bundles = Vec::with_capacity(2);
let mut boot_bundle = async {
let bin = read_file_with_backup(&bundle_path)
.await
.wrap_err("Failed to read boot bundle")?;
Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin)
.wrap_err("Failed to parse boot bundle")
}
.instrument(tracing::trace_span!("read boot bundle"))
.await
.wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;
{
tracing::trace!("Adding mod package file to boot bundle");
let span = tracing::trace_span!("create mod package file");
let _enter = span.enter();
let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new());
for mod_info in &state.mods {
for pkg_info in &mod_info.packages {
pkg.add_file(BundleFileType::Package, &pkg_info.name);
}
}
tracing::info!("Removed dtkit-patch-based mod installation.");
pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT);
let mut variant = BundleFileVariant::new();
variant.set_data(pkg.to_binary()?);
let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package);
f.add_variant(variant);
boot_bundle.add_file(f);
}
{
tracing::trace!("Handling DML packages and bundle");
let span = tracing::trace_span!("handle DML");
let _enter = span.enter();
let mut variant = BundleFileVariant::new();
let mod_info = state
.mods
.iter()
.find(|m| m.id == "dml")
.ok_or_else(|| eyre::eyre!("DML not found in mod list"))?;
let pkg_info = mod_info
.packages
.get(0)
.ok_or_else(|| eyre::eyre!("invalid mod package for DML"))
.with_suggestion(|| "Re-download and import the newest version.".to_string())?;
let bundle_name = Murmur64::hash(&pkg_info.name)
.to_string()
.to_ascii_lowercase();
let src = state.mod_dir.join(&mod_info.id).join(&bundle_name);
{
let bin = fs::read(&src)
.await
.wrap_err_with(|| format!("Failed to read bundle file '{}'", src.display()))?;
let name = Bundle::get_name_from_path(&state.ctx, &src);
let dml_bundle = Bundle::from_binary(&state.ctx, name, bin)
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?;
bundles.push(dml_bundle);
};
{
let dest = bundle_dir.join(&bundle_name);
let pkg_name = pkg_info.name.clone();
let mod_name = mod_info.name.clone();
tracing::debug!(
"Copying bundle {} for mod {}: {} -> {}",
pkg_name,
mod_name,
src.display(),
dest.display()
);
// We attempt to remove any previous file, so that the hard link can be created.
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
// may be possible despite an error here, or the error will be reported by it anyways.
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
// hashes, it's low enough for now, and the setup required to detect
// "game bundle vs mod bundle" is non-trivial.
let _ = fs::remove_file(&dest).await;
fs::copy(&src, &dest).await.wrap_err_with(|| {
format!(
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
src.display(),
dest.display()
)
})?;
}
let pkg = make_package(pkg_info).wrap_err("Failed to create package file for dml")?;
variant.set_data(pkg.to_binary()?);
let mut f = BundleFile::new(DML_BUNDLE_NAME.to_string(), BundleFileType::Package);
f.add_variant(variant);
boot_bundle.add_file(f);
}
{
let span = tracing::debug_span!("Importing mod main script");
let _enter = span.enter();
let lua = include_str!("../../assets/mod_main.lua");
let file =
lua::compile(MOD_BOOT_SCRIPT, lua).wrap_err("Failed to compile mod main Lua file")?;
boot_bundle.add_file(file);
}
async {
let bin = boot_bundle
.to_binary()
.wrap_err("Failed to serialize boot bundle")?;
fs::write(&bundle_path, bin)
.await
.wrap_err_with(|| format!("Failed to write main bundle: {}", bundle_path.display()))
}
.instrument(tracing::trace_span!("write boot bundle"))
.await?;
bundles.push(boot_bundle);
Ok(bundles)
}
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
async fn patch_bundle_database<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
where
B: AsRef<[Bundle]>,
{
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME);
let mut db = {
let bin = read_file_with_backup(&database_path)
.await
.wrap_err("Failed to read bundle database")?;
let mut r = Cursor::new(bin);
let db = BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?;
tracing::trace!("Finished parsing bundle database");
db
};
for bundle in bundles.as_ref() {
tracing::trace!("Adding '{}' to bundle database", bundle.name().display());
db.add_bundle(bundle);
}
{
let bin = db
.to_binary()
.wrap_err("Failed to serialize bundle database")?;
fs::write(&database_path, bin).await.wrap_err_with(|| {
format!(
"failed to write bundle database to '{}'",
database_path.display()
)
})?;
}
Ok(())
}
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
async fn write_deployment_data<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
where
B: AsRef<[Bundle]>,
{
let info = DeploymentData {
timestamp: OffsetDateTime::now_utc(),
bundles: bundles
.as_ref()
.iter()
.map(|bundle| format!("{:x}", bundle.name().to_murmur64()))
.collect(),
};
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
let data = serde_sjson::to_string(&info).wrap_err("Failed to serialize deployment data")?;
fs::write(&path, &data)
.await
.wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
Ok(())
}
#[tracing::instrument(skip_all, fields(
game_dir = %state.game_dir.display(),
mods = state.mods.len()
))]
pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
let state = Arc::new(state);
let (_, game_info, deployment_info) = tokio::try_join!(
async {
let path = state.game_dir.join("bundle");
fs::metadata(&path)
.await
.wrap_err("Failed to open game bundle directory")
.with_suggestion(|| "Double-check 'Game Directory' in the Settings tab.")
},
async {
tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
.await
.map_err(Report::new)
},
async {
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
match read_sjson_file::<_, DeploymentData>(path)
.await
{
Ok(data) => Ok(Some(data)),
Err(err) => {
if let Some(err) = err.downcast_ref::<std::io::Error>() && err.kind() == ErrorKind::NotFound {
Ok(None)
} else {
Err(err).wrap_err("Failed to read deployment data")
}
}
}
}
)
.wrap_err("Failed to gather deployment information")?;
tracing::debug!(?game_info, ?deployment_info);
if let Some(game_info) = game_info {
if deployment_info
.as_ref()
.map(|i| game_info.last_updated > i.timestamp)
.unwrap_or(false)
{
eyre::bail!("Game was updated since last mod deployment. Please reset first.");
}
}
check_mod_order(&state)?;
tracing::info!(
"Deploying {} mods to {}",
state.mods.iter().filter(|i| i.enabled).count(),
state.game_dir.join("bundle").display()
);
tracing::info!("Build mod bundles");
let mut bundles = build_bundles(state.clone())
.await
.wrap_err("Failed to build mod bundles")?;
tracing::info!("Patch boot bundle");
let mut more_bundles = patch_boot_bundle(state.clone())
.await
.wrap_err("Failed to patch boot bundle")?;
bundles.append(&mut more_bundles);
if let Some(info) = &deployment_info {
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let tasks = info
.bundles
.iter()
.cloned()
.map(|v| (v, bundle_dir.clone()))
.filter_map(|(file_name, bundle_dir)| {
let contains = bundles.iter().any(|b2| {
let name = b2.name().to_murmur64().to_string();
file_name == name
});
if !contains {
let task = async move {
let path = bundle_dir.join(&file_name);
tracing::debug!("Removing unused bundle '{}'", file_name);
if let Err(err) = fs::remove_file(&path).await.wrap_err_with(|| {
format!("Failed to remove unused bundle '{}'", path.display())
}) {
tracing::error!("{:?}", err);
}
};
Some(task)
} else {
None
}
});
futures::future::join_all(tasks).await;
}
tracing::info!("Patch game settings");
patch_game_settings(state.clone())
.await
.wrap_err("Failed to patch game settings")?;
tracing::info!("Patching bundle database");
patch_bundle_database(state.clone(), &bundles)
.await
.wrap_err("Failed to patch bundle database")?;
tracing::info!("Writing deployment data");
write_deployment_data(state.clone(), &bundles)
.await
.wrap_err("Failed to write deployment data")?;
tracing::info!("Finished deploying mods");
Ok(())
}
@ -164,14 +649,6 @@ pub(crate) async fn reset_mod_deployment(state: ActionState) -> Result<()> {
tracing::info!("Resetting mod deployment in {}", bundle_dir.display());
if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999")))
.await
.is_ok()
{
tracing::info!("Found dtkit-patch-based mod installation. Removing.");
return reset_dtkit_patch(state).await;
}
tracing::debug!("Reading mod deployment");
let info: DeploymentData = {

View file

@ -1,584 +0,0 @@
use std::collections::HashMap;
use std::ffi::CStr;
use std::io::{Cursor, Read, Seek, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use color_eyre::eyre::{self, Context};
use color_eyre::{Help, Report, Result};
use druid::im::Vector;
use druid::{FileInfo, ImageBuf};
use dtmt_shared::{ModConfig, ModConfigResources};
use luajit2_sys as lua;
use nexusmods::Api as NexusApi;
use tokio::fs;
use zip::ZipArchive;
use crate::state::{ActionState, ModInfo, NexusInfo, PackageInfo};
fn find_archive_file<R: Read + Seek>(
archive: &ZipArchive<R>,
name: impl AsRef<str>,
) -> Option<String> {
let path = archive
.file_names()
.find(|path| path.ends_with(name.as_ref()))
.map(|s| s.to_string());
path
}
fn image_data_to_buffer(data: impl AsRef<[u8]>) -> Result<ImageBuf> {
// Druid somehow doesn't return an error compatible with eyre here,
// so we have to wrap through `Display` manually.
ImageBuf::from_data(data.as_ref()).map_err(|err| {
Report::msg(err.to_string())
.wrap_err("Invalid image data")
.suggestion("Supported formats are: PNG, JPEG, Bitmap and WebP")
})
}
// Runs the content of a `.mod` file to extract what data we can get
// from legacy mods.
// 1. Create a global function `new_mod` that stores
// the relevant bits in global variables.
// 2. Run the `.mod` file, which will return a table.
// 3. Run the `run` function from that table.
// 4. Access the global variables from #1.
#[tracing::instrument]
fn parse_mod_id_file(data: &str) -> Result<(String, ModConfigResources)> {
tracing::debug!("Parsing mod file:\n{}", data);
let ret = unsafe {
let state = lua::luaL_newstate();
lua::luaL_openlibs(state);
let run = b"
function fassert() end
function new_mod(id, resources)
_G.id = id
_G.script = resources.mod_script
_G.data = resources.mod_data
_G.localization = resources.mod_localization
end
\0";
match lua::luaL_loadstring(state, run.as_ptr() as _) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRSYNTAX => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Invalid syntax: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to create `new_mod`")
}
_ => unreachable!(),
}
match lua::lua_pcall(state, 0, 0, 0) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRRUN => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Failed to run buffer: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to run buffer")
}
// We don't use an error handler function, so this should be unreachable
lua::LUA_ERRERR => unreachable!(),
_ => unreachable!(),
}
let name = b".mod\0";
match lua::luaL_loadbuffer(
state,
data.as_ptr() as _,
data.len() as _,
name.as_ptr() as _,
) as u32
{
lua::LUA_OK => {}
lua::LUA_ERRSYNTAX => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Invalid syntax: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to load `.mod` file buffer")
}
_ => unreachable!(),
}
match lua::lua_pcall(state, 0, 1, 0) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRRUN => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Failed to run `.mod` file: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to run `.mod` file")
}
// We don't use an error handler function, so this should be unreachable
lua::LUA_ERRERR => unreachable!(),
_ => unreachable!(),
}
let key = b"run\0";
lua::lua_pushstring(state, key.as_ptr() as _);
lua::lua_gettable(state, -2);
match lua::lua_pcall(state, 0, 0, 0) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRRUN => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Failed to run `.mod.run`: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to run `.mod.run`")
}
// We don't use an error handler function, so this should be unreachable
lua::LUA_ERRERR => unreachable!(),
_ => unreachable!(),
}
let get_global = |state, key: &[u8]| {
lua::lua_getglobal(state, key.as_ptr() as _);
if lua::lua_isnil(state, -1) != 0 {
return Ok(None);
}
let s = lua::lua_tostring(state, -1);
if s.is_null() {
eyre::bail!("Expected string, got NULL");
}
let ret = CStr::from_ptr(s).to_string_lossy().to_string();
lua::lua_pop(state, 1);
Ok(Some(ret))
};
let mod_id = get_global(state, b"id\0")
.and_then(|s| s.ok_or_else(|| eyre::eyre!("Got `nil`")))
.wrap_err("Failed to get `id`")?;
let resources = ModConfigResources {
init: get_global(state, b"script\0")
.and_then(|s| s.map(PathBuf::from).ok_or_else(|| eyre::eyre!("Got `nil`")))
.wrap_err("Failed to get `script`.")?,
data: get_global(state, b"data\0")
.wrap_err("Failed to get `data`.")?
.map(PathBuf::from),
localization: get_global(state, b"localization\0")
.wrap_err("Failed to get `localization`")?
.map(PathBuf::from),
};
lua::lua_close(state);
(mod_id, resources)
};
Ok(ret)
}
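To make the procedure described for `parse_mod_id_file` concrete, a hypothetical legacy `.mod` ID file of the shape this parser expects might look as follows (embedded here as a Rust string literal; all identifiers and paths are made up):

// Hypothetical `.mod` file: the returned table's `run` function calls
// `new_mod` with the resource paths and contains no additional logic.
const EXAMPLE_MOD_FILE: &str = r#"
return {
    run = function()
        fassert(rawget(_G, "new_mod"), "`my_mod` requires the mod framework")
        new_mod("my_mod", {
            mod_script = "my_mod/scripts/mods/my_mod/my_mod",
            mod_data = "my_mod/scripts/mods/my_mod/my_mod_data",
            mod_localization = "my_mod/scripts/mods/my_mod/my_mod_localization",
        })
    end,
    packages = {},
}
"#;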
// Extracts the mod configuration from the mod archive.
// This may either be a proper `dtmt.cfg`, or the legacy `<mod_name>.mod` ID file.
//
// It also returns the directory where this file was found, used as root path. This
// allows flexibility in what the directory structure is exactly, since many people
// still end up creating tarbombs and Nexus does its own re-packaging.
#[tracing::instrument(skip(archive))]
fn extract_mod_config<R: Read + Seek>(archive: &mut ZipArchive<R>) -> Result<(ModConfig, String)> {
let legacy_mod_data = if let Some(name) = find_archive_file(archive, ".mod") {
let (mod_id, resources) = {
let mut f = archive
.by_name(&name)
.wrap_err("Failed to read `.mod` file from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read `.mod` file from archive")?;
let data = String::from_utf8(buf).wrap_err("`.mod` file is not valid UTF-8")?;
parse_mod_id_file(&data)
.wrap_err("Invalid `.mod` file")
.note(
"The `.mod` file's `run` function may not contain any additional logic \
besides the default.",
)
.suggestion("Contact the mod author to fix this.")?
};
let root = if let Some(index) = name.rfind('/') {
name[..index].to_string()
} else {
String::new()
};
Some((mod_id, resources, root))
} else {
None
};
tracing::debug!(?legacy_mod_data);
if let Some(name) = find_archive_file(archive, "dtmt.cfg") {
let mut f = archive
.by_name(&name)
.wrap_err("Failed to read mod config from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read mod config from archive")?;
let data = String::from_utf8(buf).wrap_err("Mod config is not valid UTF-8")?;
let mut cfg: ModConfig = serde_sjson::from_str(&data)
.wrap_err("Failed to deserialize mod config")
.suggestion("Contact the mod author to fix this.")?;
if let Some((mod_id, resources, root)) = legacy_mod_data {
if cfg.id != mod_id {
let err = eyre::eyre!("Mod ID in `dtmt.cfg` does not match mod ID in `.mod` file");
return Err(err).suggestion("Contact the mod author to fix this.");
}
cfg.resources = resources;
// Enforce that packages are skipped
cfg.bundled = false;
cfg.packages = vec![];
Ok((cfg, root))
} else {
let root = name
.strip_suffix("dtmt.cfg")
.expect("String must end with that suffix")
.to_string();
Ok((cfg, root))
}
} else if let Some((mod_id, resources, root)) = legacy_mod_data {
let cfg = ModConfig {
bundled: false,
dir: PathBuf::new(),
id: mod_id.clone(),
name: mod_id,
summary: "A mod for the game Warhammer 40,000: Darktide".into(),
version: "N/A".into(),
description: None,
author: None,
image: None,
categories: Vec::new(),
packages: Vec::new(),
resources,
depends: Vec::new(),
name_overrides: Default::default(),
};
Ok((cfg, root))
} else {
eyre::bail!(
"Mod needs a config file or `.mod` file. \
Please get in touch with the author to provide a properly packaged mod."
);
}
}
#[tracing::instrument(skip(archive))]
fn extract_bundled_mod<R: Read + Seek>(
archive: &mut ZipArchive<R>,
root: String,
dest: impl AsRef<Path> + std::fmt::Debug,
) -> Result<Vector<Arc<PackageInfo>>> {
let files: HashMap<String, Vec<String>> = {
let name = archive
.file_names()
.find(|name| name.ends_with("files.sjson"))
.map(|s| s.to_string())
.ok_or_else(|| eyre::eyre!("archive does not contain file index"))?;
let mut f = archive
.by_name(&name)
.wrap_err("Failed to read file index from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read file index from archive")?;
let data = String::from_utf8(buf).wrap_err("File index is not valid UTF-8")?;
serde_sjson::from_str(&data).wrap_err("Failed to deserialize file index")?
};
tracing::trace!(?files);
let dest = dest.as_ref();
tracing::trace!("Extracting mod archive to {}", dest.display());
archive
.extract(dest)
.wrap_err_with(|| format!("Failed to extract archive to {}", dest.display()))?;
let packages = files
.into_iter()
.map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect())))
.collect();
tracing::trace!(?packages);
Ok(packages)
}
#[tracing::instrument(skip(archive))]
fn extract_legacy_mod<R: Read + Seek>(
archive: &mut ZipArchive<R>,
root: String,
dest: impl Into<PathBuf> + std::fmt::Debug,
) -> Result<()> {
let dest = dest.into();
let file_count = archive.len();
for i in 0..file_count {
let mut f = archive
.by_index(i)
.wrap_err_with(|| format!("Failed to get file at index {}", i))?;
let Some(name) = f.enclosed_name().map(|p| p.to_path_buf()) else {
let err = eyre::eyre!("File name in archive is not a safe path value.").suggestion(
"Only use well-known applications to create the ZIP archive, \
and don't create paths that point outside the archive directory.",
);
return Err(err);
};
let Ok(suffix) = name.strip_prefix(&root) else {
tracing::warn!(
"Skipping file outside of the mod root directory: {}",
name.display()
);
continue;
};
let name = dest.join(suffix);
if f.is_dir() {
// The majority of errors will actually be "X already exists".
// But rather than filter them individually, we just ignore all of them.
// If there is a legitimate error of "couldn't create X", it will eventually fail when
// we try to put a file in there.
tracing::trace!("Creating directory '{}'", name.display());
let _ = std::fs::create_dir_all(&name);
} else {
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err_with(|| format!("Failed to read file '{}'", name.display()))?;
tracing::trace!("Writing file '{}'", name.display());
let mut out = std::fs::OpenOptions::new()
.write(true)
.create(true)
.open(&name)
.wrap_err_with(|| format!("Failed to open file '{}'", name.display()))?;
out.write_all(&buf)
.wrap_err_with(|| format!("Failed to write to '{}'", name.display()))?;
}
}
Ok(())
}
#[tracing::instrument(skip(state))]
pub(crate) async fn import_from_file(state: ActionState, info: FileInfo) -> Result<ModInfo> {
let data = fs::read(&info.path)
.await
.wrap_err_with(|| format!("Failed to read file {}", info.path.display()))?;
let nexus = if let Some((_, id, version, timestamp)) = info
.path
.file_name()
.and_then(|s| s.to_str())
.and_then(NexusApi::parse_file_name)
{
if !state.nexus_api_key.is_empty() {
let api = NexusApi::new(state.nexus_api_key.to_string())?;
let mod_info = api
.mods_id(id)
.await
.wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?;
let version = match api.file_version(id, timestamp).await {
Ok(version) => version,
Err(err) => {
let err = Report::new(err);
tracing::warn!(
"Failed to fetch version for Nexus download. \
Falling back to file name:\n{:?}",
err
);
version
}
};
let info = NexusInfo::from(mod_info);
tracing::debug!(version, ?info);
Some((info, version))
} else {
None
}
} else {
None
};
tracing::trace!(?nexus);
import_mod(state, nexus, data).await
}
#[tracing::instrument(skip(state))]
pub(crate) async fn import_from_nxm(state: ActionState, uri: String) -> Result<ModInfo> {
let url = uri
.parse()
.wrap_err_with(|| format!("Invalid Uri '{}'", uri))?;
let api = NexusApi::new(state.nexus_api_key.to_string())?;
let (mod_info, file_info, data) = api
.handle_nxm(url)
.await
.wrap_err_with(|| format!("Failed to download mod from NXM uri '{}'", uri))?;
let nexus = NexusInfo::from(mod_info);
import_mod(state, Some((nexus, file_info.version)), data).await
}
#[tracing::instrument(skip(state, data), fields(data = data.len()))]
pub(crate) async fn import_mod(
state: ActionState,
nexus: Option<(NexusInfo, String)>,
data: Vec<u8>,
) -> Result<ModInfo> {
let data = Cursor::new(data);
let mut archive = ZipArchive::new(data).wrap_err("Failed to open ZIP archive")?;
if tracing::enabled!(tracing::Level::DEBUG) {
let names = archive.file_names().fold(String::new(), |mut s, name| {
s.push('\n');
s.push_str(name);
s
});
tracing::debug!("Archive contents:{}", names);
}
let (mut mod_cfg, root) =
extract_mod_config(&mut archive).wrap_err("Failed to extract mod configuration")?;
tracing::info!("Importing mod {} ({})", mod_cfg.name, mod_cfg.id);
let mod_dir = state.data_dir.join(state.mod_dir.as_ref());
let dest = mod_dir.join(&mod_cfg.id);
tracing::trace!("Creating mods directory {}", dest.display());
fs::create_dir_all(&dest)
.await
.wrap_err_with(|| format!("Failed to create data directory '{}'", dest.display()))?;
let image = if let Some(path) = &mod_cfg.image {
let name = archive
.file_names()
.find(|name| name.ends_with(&path.display().to_string()))
.map(|s| s.to_string())
.ok_or_else(|| eyre::eyre!("archive does not contain configured image file"))?;
let mut f = archive
.by_name(&name)
.wrap_err("Failed to read image file from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("Failed to read file index from archive")?;
let img = image_data_to_buffer(buf)?;
Some(img)
} else if let Some((nexus, _)) = &nexus {
let api = NexusApi::new(state.nexus_api_key.to_string())?;
let url = nexus.picture_url.as_ref();
let data = api
.picture(url)
.await
.wrap_err_with(|| format!("Failed to download Nexus image from '{}'", url))?;
let img = image_data_to_buffer(&data)?;
let name = "image.bin";
let path = dest.join(name);
match fs::write(&path, &data).await {
Ok(_) => {
mod_cfg.image = Some(name.into());
Some(img)
}
Err(err) => {
let err = Report::new(err).wrap_err(format!(
"Failed to write Nexus picture to file '{}'",
path.display()
));
tracing::error!("{:?}", err);
None
}
}
} else {
None
};
tracing::trace!(?image);
tracing::debug!(root, ?mod_cfg);
let packages = if mod_cfg.bundled {
extract_bundled_mod(&mut archive, root, &mod_dir).wrap_err("Failed to extract mod")?
} else {
extract_legacy_mod(&mut archive, root, &dest).wrap_err("Failed to extract legacy mod")?;
if let Some((_, version)) = &nexus {
// The version number stored in the `ModInfo` is what gets compared against the
// `NexusInfo` during update checks. So unlike the other fields, we can't rely on
// the Nexus data merely shadowing it; it has to be overwritten here.
mod_cfg.version = version.clone();
}
let data = serde_sjson::to_string(&mod_cfg).wrap_err("Failed to serialize mod config")?;
fs::write(dest.join("dtmt.cfg"), &data)
.await
.wrap_err("Failed to write mod config")?;
Default::default()
};
if let Some((nexus, _)) = &nexus {
let data = serde_sjson::to_string(nexus).wrap_err("Failed to serialize Nexus info")?;
let path = dest.join("nexus.sjson");
fs::write(&path, data.as_bytes())
.await
.wrap_err_with(|| format!("Failed to write Nexus info to '{}'", path.display()))?;
}
let info = ModInfo::new(mod_cfg, packages, image, nexus.map(|(info, _)| info));
Ok(info)
}
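// Sketch of the on-disk layout that `import_mod` produces, as implied by the
// code above (`<mod_id>` is `mod_cfg.id`; names in angle brackets are placeholders):
//
//   <data_dir>/<mod_dir>/<mod_id>/nexus.sjson  - only for mods imported via Nexus
//   <data_dir>/<mod_dir>/<mod_id>/image.bin    - only when the picture was downloaded from Nexus
//   <data_dir>/<mod_dir>/<mod_id>/dtmt.cfg     - only for legacy (non-bundled) mods
//   <data_dir>/<mod_dir>/<mod_id>/...          - files extracted by `extract_legacy_mod`
//   <data_dir>/<mod_dir>/...                   - bundles extracted by `extract_bundled_mod`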

View file

@ -5,9 +5,7 @@ use serde::Deserialize;
use tokio::fs;
pub mod app;
pub mod deploy;
pub mod game;
pub mod import;
pub mod worker;
#[tracing::instrument]

View file

@ -5,17 +5,12 @@ use color_eyre::Help;
use color_eyre::Report;
use color_eyre::Result;
use druid::{ExtEventSink, SingleUse, Target};
use tokio::fs::OpenOptions;
use tokio::io::AsyncWriteExt;
use tokio::runtime::Runtime;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::RwLock;
use crate::controller::app::*;
use crate::controller::deploy::deploy_mods;
use crate::controller::game::*;
use crate::controller::import::*;
use crate::state::AsyncAction;
use crate::state::ACTION_FINISH_CHECK_UPDATE;
use crate::state::ACTION_FINISH_LOAD_INITIAL;
@ -38,10 +33,6 @@ async fn handle_action(
action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
) {
while let Some(action) = action_queue.write().await.recv().await {
if cfg!(debug_assertions) && !matches!(action, AsyncAction::Log(_)) {
tracing::debug!(?action);
}
let event_sink = event_sink.clone();
match action {
AsyncAction::DeployMods(state) => tokio::spawn(async move {
@ -57,7 +48,7 @@ async fn handle_action(
.expect("failed to send command");
}),
AsyncAction::AddMod(state, info) => tokio::spawn(async move {
match import_from_file(state, info)
match import_mod(state, info)
.await
.wrap_err("Failed to import mod")
{
@ -177,44 +168,13 @@ async fn handle_action(
)
.expect("failed to send command");
}),
AsyncAction::Log((state, line)) => tokio::spawn(async move {
if let Ok(mut f) = OpenOptions::new()
.append(true)
.open(state.data_dir.join("dtmm.log"))
.await
{
let _ = f.write_all(&line).await;
}
}),
AsyncAction::NxmDownload(state, uri) => tokio::spawn(async move {
match import_from_nxm(state, uri)
.await
.wrap_err("Failed to handle NXM URI")
{
Ok(mod_info) => {
event_sink
.write()
.await
.submit_command(
ACTION_FINISH_ADD_MOD,
SingleUse::new(Arc::new(mod_info)),
Target::Auto,
)
.expect("failed to send command");
}
Err(err) => {
tracing::error!("{:?}", err);
send_error(event_sink.clone(), err).await;
}
}
}),
};
}
}
async fn handle_log(
event_sink: Arc<RwLock<ExtEventSink>>,
log_queue: Arc<RwLock<UnboundedReceiver<Vec<u8>>>>,
log_queue: Arc<RwLock<UnboundedReceiver<String>>>,
) {
while let Some(line) = log_queue.write().await.recv().await {
let event_sink = event_sink.clone();
@ -229,7 +189,7 @@ async fn handle_log(
pub(crate) fn work_thread(
event_sink: Arc<RwLock<ExtEventSink>>,
action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
log_queue: Arc<RwLock<UnboundedReceiver<Vec<u8>>>>,
log_queue: Arc<RwLock<UnboundedReceiver<String>>>,
) -> Result<()> {
let rt = Runtime::new()?;

View file

@ -1,69 +1,33 @@
#![recursion_limit = "256"]
#![feature(let_chains)]
#![feature(iterator_try_collect)]
#![feature(arc_unwrap_or_clone)]
#![windows_subsystem = "windows"]
use std::path::PathBuf;
use std::sync::Arc;
use clap::command;
use clap::parser::ValueSource;
use clap::{command, value_parser, Arg};
use color_eyre::eyre::{self, Context};
use color_eyre::{Report, Result, Section};
use clap::value_parser;
use clap::Arg;
use color_eyre::eyre;
use color_eyre::{Report, Result};
use druid::AppLauncher;
use interprocess::local_socket::{prelude::*, GenericNamespaced, ListenerOptions};
use tokio::sync::RwLock;
use crate::controller::worker::work_thread;
use crate::state::{AsyncAction, ACTION_HANDLE_NXM};
use crate::state::AsyncAction;
use crate::state::{Delegate, State};
use crate::ui::theme;
use crate::util::log::LogLevel;
mod controller;
mod state;
mod util {
pub mod ansi;
pub mod config;
pub mod log;
}
mod ui;
// As explained in https://docs.rs/interprocess/2.1.0/interprocess/local_socket/struct.Name.html
// namespaces are supported on both platforms we care about: Windows and Linux.
const IPC_ADDRESS: &str = "dtmm.sock";
#[tracing::instrument]
fn notify_nxm_download(
uri: impl AsRef<str> + std::fmt::Debug,
level: Option<LogLevel>,
) -> Result<()> {
util::log::create_tracing_subscriber(level, None);
tracing::debug!("Received Uri '{}', sending to main process.", uri.as_ref());
let mut stream = LocalSocketStream::connect(
IPC_ADDRESS
.to_ns_name::<GenericNamespaced>()
.expect("Invalid socket name"),
)
.wrap_err_with(|| format!("Failed to connect to '{}'", IPC_ADDRESS))
.suggestion("Make sure the main window is open.")?;
tracing::debug!("Connected to main process at '{}'", IPC_ADDRESS);
bincode::serialize_into(&mut stream, uri.as_ref()).wrap_err("Failed to send URI")?;
// We don't really care what the message is, we just need an acknowledgement.
let _: String = bincode::deserialize_from(&mut stream).wrap_err("Failed to receive reply")?;
tracing::info!(
"Notified DTMM with uri '{}'. Check the main window.",
uri.as_ref()
);
Ok(())
}
#[tracing::instrument]
fn main() -> Result<()> {
color_eyre::install()?;
@ -81,40 +45,45 @@ fn main() -> Result<()> {
.value_parser(value_parser!(PathBuf))
.default_value(default_config_path.to_string_lossy().to_string()),
)
.arg(
Arg::new("log-level")
.long("log-level")
.help("The maximum level of log events to print")
.value_parser(value_parser!(LogLevel))
.default_value("info"),
)
.arg(
Arg::new("nxm")
.help("An `nxm://` URI to download")
.required(false),
)
.get_matches();
let level = if matches.value_source("log-level") == Some(ValueSource::DefaultValue) {
None
} else {
matches.get_one::<LogLevel>("log-level").cloned()
};
if let Some(uri) = matches.get_one::<String>("nxm") {
return notify_nxm_download(uri, level).wrap_err("Failed to send NXM Uri to main window.");
}
let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
util::log::create_tracing_subscriber(level, Some(log_tx));
util::log::create_tracing_subscriber(log_tx);
let (action_tx, action_rx) = tokio::sync::mpsc::unbounded_channel();
// let config = util::config::read_config(&default_config_path, &matches)
// .wrap_err("Failed to read config file")?;
// let game_info = dtmt_shared::collect_game_info();
// tracing::debug!(?config, ?game_info);
// let game_dir = config.game_dir.or_else(|| game_info.map(|i| i.path));
// if game_dir.is_none() {
// let err =
// eyre::eyre!("No Game Directory set. Head to the 'Settings' tab to set it manually",);
// event_sink
// .submit_command(ACTION_SHOW_ERROR_DIALOG, SingleUse::new(err), Target::Auto)
// .expect("failed to send command");
// }
// let initial_state = {
// let mut state = State::new(
// config.path,
// game_dir.unwrap_or_default(),
// config.data_dir.unwrap_or_default(),
// config.nexus_api_key.unwrap_or_default(),
// );
// state.mods = load_mods(state.get_mod_dir(), config.mod_order.iter())
// .wrap_err("Failed to load mods")?;
// state
// };
let config_path = matches
.get_one::<PathBuf>("config")
.cloned()
.expect("argument has default value");
let is_config_default = matches.value_source("config") == Some(ValueSource::DefaultValue);
let is_config_default = matches.value_source("config") != Some(ValueSource::DefaultValue);
if action_tx
.send(AsyncAction::LoadInitial((config_path, is_config_default)))
.is_err()
@ -129,79 +98,16 @@ fn main() -> Result<()> {
let event_sink = launcher.get_external_handle();
{
let span = tracing::info_span!(IPC_ADDRESS, "nxm-socket");
let _guard = span.enter();
let event_sink = event_sink.clone();
let server = ListenerOptions::new()
.name(
IPC_ADDRESS
.to_ns_name::<GenericNamespaced>()
.expect("Invalid socket name"),
)
.create_sync()
.wrap_err("Failed to create IPC listener")?;
tracing::debug!("IPC server listening on '{}'", IPC_ADDRESS);
// Drop the guard here, so that we can re-enter the same span in the thread.
drop(_guard);
std::thread::Builder::new()
.name("nxm-socket".into())
.spawn(move || {
let _guard = span.enter();
loop {
let res = server.accept().wrap_err_with(|| {
format!("IPC server failed to listen on '{}'", IPC_ADDRESS)
});
match res {
Ok(mut stream) => {
let res = bincode::deserialize_from(&mut stream)
.wrap_err("Failed to read message")
.and_then(|uri: String| {
tracing::trace!(uri, "Received NXM uri");
event_sink
.submit_command(ACTION_HANDLE_NXM, uri, druid::Target::Auto)
.wrap_err("Failed to start NXM download")
});
match res {
Ok(()) => {
let _ = bincode::serialize_into(&mut stream, "Ok");
}
Err(err) => {
tracing::error!("{:?}", err);
let _ = bincode::serialize_into(&mut stream, "Error");
}
}
}
Err(err) => {
tracing::error!("Failed to receive client connection: {:?}", err)
}
}
}
})
.wrap_err("Failed to create thread")?;
}
std::thread::Builder::new()
.name("work-thread".into())
.spawn(move || {
std::thread::spawn(move || {
let event_sink = Arc::new(RwLock::new(event_sink));
let action_rx = Arc::new(RwLock::new(action_rx));
let log_rx = Arc::new(RwLock::new(log_rx));
loop {
if let Err(err) = work_thread(event_sink.clone(), action_rx.clone(), log_rx.clone())
{
if let Err(err) = work_thread(event_sink.clone(), action_rx.clone(), log_rx.clone()) {
tracing::error!("Work thread failed, restarting: {:?}", err);
}
}
})
.wrap_err("Failed to create thread")?;
});
launcher.launch(State::new()).map_err(Report::new)
}

View file

@ -2,7 +2,6 @@ use std::path::PathBuf;
use std::sync::Arc;
use druid::im::{HashMap, Vector};
use druid::text::RichText;
use druid::{Data, ImageBuf, Lens, WindowHandle, WindowId};
use dtmt_shared::ModConfig;
use nexusmods::Mod as NexusMod;
@ -72,40 +71,26 @@ impl From<dtmt_shared::ModDependency> for ModDependency {
#[derive(Clone, Data, Debug, Lens, serde::Serialize, serde::Deserialize)]
pub(crate) struct NexusInfo {
pub author: String,
pub category_id: u64,
pub created_timestamp: i64,
pub description: Arc<String>,
pub id: u64,
pub name: String,
pub picture_url: Arc<String>,
pub summary: Arc<String>,
pub uid: u64,
pub updated_timestamp: i64,
pub uploaded_by: String,
pub version: String,
pub author: String,
pub summary: Arc<String>,
pub description: Arc<String>,
}
impl From<NexusMod> for NexusInfo {
fn from(value: NexusMod) -> Self {
Self {
author: value.author,
category_id: value.category_id,
created_timestamp: value.created_timestamp.unix_timestamp(),
description: Arc::new(value.description),
id: value.mod_id,
name: value.name,
picture_url: Arc::new(value.picture_url.into()),
summary: Arc::new(value.summary),
uid: value.uid,
updated_timestamp: value.updated_timestamp.unix_timestamp(),
uploaded_by: value.uploaded_by,
version: value.version,
author: value.author,
summary: Arc::new(value.summary),
description: Arc::new(value.description),
}
}
}
#[derive(Clone, Data, Lens)]
#[derive(Clone, Data, Debug, Lens)]
pub(crate) struct ModInfo {
pub id: String,
pub name: String,
@ -116,51 +101,17 @@ pub(crate) struct ModInfo {
pub image: Option<ImageBuf>,
pub version: String,
pub enabled: bool,
pub depends: Vector<ModDependency>,
pub bundled: bool,
#[lens(ignore)]
#[data(ignore)]
pub packages: Vector<Arc<PackageInfo>>,
#[lens(ignore)]
#[data(ignore)]
pub resources: ModResourceInfo,
pub depends: Vector<ModDependency>,
#[data(ignore)]
pub nexus: Option<NexusInfo>,
}
impl std::fmt::Debug for ModInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ModInfo")
.field("id", &self.id)
.field("name", &self.name)
.field("summary", &self.summary)
.field(
"description",
&(match &self.description {
Some(desc) => format!("Some(String[0..{}])", desc.len()),
None => "None".to_string(),
}),
)
.field("categories", &self.categories)
.field("author", &self.author)
.field(
"image",
&(match &self.image {
Some(image) => format!("Some(ImageBuf[{}x{}])", image.width(), image.height()),
None => "None".to_string(),
}),
)
.field("version", &self.version)
.field("enabled", &self.enabled)
.field("packages", &format!("Vec[0..{}]", self.packages.len()))
.field("resources", &self.resources)
.field("depends", &self.depends)
.field("bundled", &self.bundled)
.field("nexus", &self.nexus)
.finish()
}
}
impl ModInfo {
pub fn new(
cfg: ModConfig,
@ -177,7 +128,6 @@ impl ModInfo {
version: cfg.version,
enabled: false,
packages,
bundled: cfg.bundled,
image,
categories: cfg.categories.into_iter().collect(),
resources: ModResourceInfo {
@ -202,11 +152,12 @@ pub(crate) struct State {
pub is_save_in_progress: bool,
pub is_next_save_pending: bool,
pub is_update_in_progress: bool,
pub is_io_enabled: bool,
pub game_dir: Arc<PathBuf>,
pub data_dir: Arc<PathBuf>,
pub nexus_api_key: Arc<String>,
pub log: Vector<RichText>,
#[data(ignore)]
pub log: Arc<String>,
// True, when the initial loading of configuration and mods is still in progress
pub loading: bool,
@ -239,12 +190,11 @@ impl State {
is_save_in_progress: false,
is_next_save_pending: false,
is_update_in_progress: false,
is_io_enabled: false,
config_path: Arc::new(PathBuf::new()),
game_dir: Arc::new(PathBuf::new()),
data_dir: Arc::new(PathBuf::new()),
nexus_api_key: Arc::new(String::new()),
log: Vector::new(),
log: Arc::new(String::new()),
windows: HashMap::new(),
loading: true,
}
@ -273,4 +223,9 @@ impl State {
pub fn can_move_mod_up(&self) -> bool {
self.selected_mod_index.map(|i| i > 0).unwrap_or(false)
}
pub(crate) fn add_log_line(&mut self, line: String) {
let log = Arc::make_mut(&mut self.log);
log.push_str(&line);
}
}

View file

@ -1,17 +1,13 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{path::PathBuf, sync::Arc};
use color_eyre::Report;
use druid::im::Vector;
use druid::{
AppDelegate, Command, DelegateCtx, Env, FileInfo, Handled, Selector, SingleUse, Target,
WindowHandle, WindowId,
im::Vector, AppDelegate, Command, DelegateCtx, Env, FileInfo, Handled, Selector, SingleUse,
Target, WindowHandle, WindowId,
};
use tokio::sync::mpsc::UnboundedSender;
use crate::ui::window;
use crate::util::ansi::ansi_to_rich_text;
use crate::util::config::Config;
use crate::{ui::window, util::config::Config};
use super::{ModInfo, State};
@ -32,12 +28,11 @@ pub(crate) const ACTION_START_RESET_DEPLOYMENT: Selector =
pub(crate) const ACTION_FINISH_RESET_DEPLOYMENT: Selector =
Selector::new("dtmm.action.finish-reset-deployment");
pub(crate) const ACTION_HANDLE_NXM: Selector<String> = Selector::new("dtmm.action.handle-nxm");
pub(crate) const ACTION_ADD_MOD: Selector<FileInfo> = Selector::new("dtmm.action.add-mod");
pub(crate) const ACTION_FINISH_ADD_MOD: Selector<SingleUse<Arc<ModInfo>>> =
Selector::new("dtmm.action.finish-add-mod");
pub(crate) const ACTION_LOG: Selector<SingleUse<Vec<u8>>> = Selector::new("dtmm.action.log");
pub(crate) const ACTION_LOG: Selector<SingleUse<String>> = Selector::new("dtmm.action.log");
pub(crate) const ACTION_START_SAVE_SETTINGS: Selector =
Selector::new("dtmm.action.start-save-settings");
@ -61,8 +56,6 @@ pub(crate) type InitialLoadResult = (Config, Vector<Arc<ModInfo>>);
pub(crate) const ACTION_FINISH_LOAD_INITIAL: Selector<SingleUse<Option<InitialLoadResult>>> =
Selector::new("dtmm.action.finish-load-initial");
pub(crate) const ACTION_OPEN_LINK: Selector<Arc<String>> = Selector::new("dtmm.action.open-link");
// A sub-selection of `State`'s fields that are required in `AsyncAction`s and that are
// `Send + Sync`
pub(crate) struct ActionState {
@ -73,7 +66,6 @@ pub(crate) struct ActionState {
pub config_path: Arc<PathBuf>,
pub ctx: Arc<sdk::Context>,
pub nexus_api_key: Arc<String>,
pub is_io_enabled: bool,
}
impl From<State> for ActionState {
@ -86,7 +78,6 @@ impl From<State> for ActionState {
config_path: state.config_path,
ctx: state.ctx,
nexus_api_key: state.nexus_api_key,
is_io_enabled: state.is_io_enabled,
}
}
}
@ -99,32 +90,6 @@ pub(crate) enum AsyncAction {
SaveSettings(ActionState),
CheckUpdates(ActionState),
LoadInitial((PathBuf, bool)),
Log((ActionState, Vec<u8>)),
NxmDownload(ActionState, String),
}
impl std::fmt::Debug for AsyncAction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
AsyncAction::DeployMods(_) => write!(f, "AsyncAction::DeployMods(_state)"),
AsyncAction::ResetDeployment(_) => write!(f, "AsyncAction::ResetDeployment(_state)"),
AsyncAction::AddMod(_, info) => write!(f, "AsyncAction::AddMod(_state, {:?})", info),
AsyncAction::DeleteMod(_, info) => {
write!(f, "AsyncAction::DeleteMod(_state, {:?})", info)
}
AsyncAction::SaveSettings(_) => write!(f, "AsyncAction::SaveSettings(_state)"),
AsyncAction::CheckUpdates(_) => write!(f, "AsyncAction::CheckUpdates(_state)"),
AsyncAction::LoadInitial((path, is_default)) => write!(
f,
"AsyncAction::LoadInitial(({:?}, {:?}))",
path, is_default
),
AsyncAction::Log(_) => write!(f, "AsyncAction::Log(_)"),
AsyncAction::NxmDownload(_, uri) => {
write!(f, "AsyncAction::NxmDownload(_state, {})", uri)
}
}
}
}
pub(crate) struct Delegate {
@ -257,20 +222,6 @@ impl AppDelegate<State> for Delegate {
Handled::Yes
}
cmd if cmd.is(ACTION_HANDLE_NXM) => {
let uri = cmd
.get(ACTION_HANDLE_NXM)
.expect("command type match but didn't contain the expected value");
if self
.sender
.send(AsyncAction::NxmDownload(state.clone().into(), uri.clone()))
.is_err()
{
tracing::error!("Failed to queue action to download NXM mod");
}
Handled::Yes
}
cmd if cmd.is(ACTION_ADD_MOD) => {
let info = cmd
.get(ACTION_ADD_MOD)
@ -300,22 +251,9 @@ impl AppDelegate<State> for Delegate {
let line = cmd
.get(ACTION_LOG)
.expect("command type matched but didn't contain the expected value");
if let Some(line) = line.take() {
{
let line = String::from_utf8_lossy(&line);
state.log.push_back(ansi_to_rich_text(line.trim()));
state.add_log_line(line);
}
if self
.sender
.send(AsyncAction::Log((state.clone().into(), line)))
.is_err()
{
tracing::error!("Failed to queue action to add mod");
}
}
Handled::Yes
}
cmd if cmd.is(ACTION_START_SAVE_SETTINGS) => {
@ -432,28 +370,12 @@ impl AppDelegate<State> for Delegate {
state.config_path = Arc::new(config.path);
state.data_dir = Arc::new(config.data_dir);
state.game_dir = Arc::new(config.game_dir.unwrap_or_default());
state.nexus_api_key = Arc::new(config.nexus_api_key.unwrap_or_default());
state.is_io_enabled = config.unsafe_io;
}
state.loading = false;
Handled::Yes
}
cmd if cmd.is(ACTION_OPEN_LINK) => {
let url = cmd
.get(ACTION_OPEN_LINK)
.expect("command type matched but didn't contain the expected value");
if let Err(err) = open::that_detached(Arc::as_ref(url)) {
tracing::error!(
"{:?}",
Report::new(err).wrap_err(format!("Failed to open url '{}'", url))
);
}
Handled::Yes
}
_ => Handled::No,
}
}

View file

@ -42,7 +42,6 @@ impl Lens<State, Option<Arc<ModInfo>>> for SelectedModLens {
/// A Lens that maps an `im::Vector<T>` to `im::Vector<(usize, T)>`,
/// where each element in the destination vector includes its index in the
/// source vector.
#[allow(dead_code)]
pub(crate) struct IndexedVectorLens;
impl<T: Data> Lens<Vector<T>, Vector<(usize, T)>> for IndexedVectorLens {

View file

@ -17,7 +17,6 @@ macro_rules! make_color {
}
make_color!(TOP_BAR_BACKGROUND_COLOR, COLOR_BG1);
make_color!(LINK_COLOR, COLOR_ACCENT);
#[allow(dead_code)]
pub mod gruvbox_dark {
@ -69,10 +68,23 @@ pub mod gruvbox_dark {
}
pub trait ColorExt {
fn lighten(&self, fac: f32) -> Self;
fn darken(&self, fac: f32) -> Self;
}
impl ColorExt for Color {
fn lighten(&self, fac: f32) -> Self {
let (r, g, b, a) = self.as_rgba();
let rgb = Rgb::from(r as f32, g as f32, b as f32);
let rgb = rgb.lighten(fac);
Self::rgba(
rgb.get_red() as f64,
rgb.get_green() as f64,
rgb.get_blue() as f64,
a,
)
}
fn darken(&self, fac: f32) -> Self {
let (r, g, b, a) = self.as_rgba();
let rgb = Rgb::from(r as f32, g as f32, b as f32);

View file

@ -3,8 +3,8 @@ use usvg::{
Error, Fill, LineCap, LineJoin, NodeKind, NonZeroPositiveF64, Options, Paint, Stroke, Tree,
};
pub static ALERT_CIRCLE: &str = include_str!("../../../assets/tabler-icons/alert-circle.svg");
pub static CLOUD_DOWNLOAD: &str = include_str!("../../../assets/tabler-icons/cloud-download.svg");
pub static ALERT_CIRCLE: &str = include_str!("../../../assets/icons/icons/alert-circle.svg");
pub static ALERT_TRIANGLE: &str = include_str!("../../../assets/icons/icons/alert-triangle.svg");
pub fn parse_svg(svg: &str) -> Result<Tree, Error> {
let opt = Options::default();

View file

@ -80,19 +80,11 @@ impl<W: Widget<State>> Controller<State, W> for DirtyStateController {
) {
// Only start tracking changes after the initial load has finished
if old_data.loading == data.loading {
if compare_state_fields!(
old_data,
data,
mods,
game_dir,
data_dir,
nexus_api_key,
is_io_enabled
) {
if compare_state_fields!(old_data, data, mods, game_dir, data_dir, nexus_api_key) {
ctx.submit_command(ACTION_START_SAVE_SETTINGS);
}
if compare_state_fields!(old_data, data, mods, game_dir, is_io_enabled) {
if compare_state_fields!(old_data, data, mods, game_dir) {
ctx.submit_command(ACTION_SET_DIRTY);
}
}

View file

@ -2,11 +2,16 @@ use std::path::PathBuf;
use std::sync::Arc;
use druid::text::Formatter;
use druid::{Data, Widget};
pub mod border;
pub mod button;
pub mod controller;
pub trait ExtraWidgetExt<T: Data>: Widget<T> + Sized + 'static {}
impl<T: Data, W: Widget<T> + 'static> ExtraWidgetExt<T> for W {}
pub(crate) struct PathBufFormatter;
impl PathBufFormatter {

View file

@ -1,49 +1,19 @@
use color_eyre::{Handler, HelpInfo, Report};
use druid::widget::{CrossAxisAlignment, Flex, Label, LineBreaking};
use druid::{Data, WidgetExt, WindowDesc, WindowHandle};
use color_eyre::Report;
use druid::widget::{CrossAxisAlignment, Flex, Label, LineBreaking, MainAxisAlignment};
use druid::{Data, WidgetExt, WindowDesc, WindowHandle, WindowSizePolicy};
use crate::ui::theme;
use crate::ui::widget::button::Button;
const WINDOW_SIZE: (f64, f64) = (600., 250.);
/// Show an error dialog.
/// The title and message are extracted from the error chain in the given `Report`.
pub fn error<T: Data>(err: Report, _parent: WindowHandle) -> WindowDesc<T> {
let (title, msg) = {
let count = err.chain().count();
if count == 1 {
// If there is only one error, that's all we can show.
(
String::from("An error occurred!"),
err.root_cause().to_string(),
)
let msg = {
let msg = format!("A critical error ocurred: {:?}", err);
if let Ok(stripped) = strip_ansi_escapes::strip(msg.as_bytes()) {
String::from_utf8_lossy(&stripped).to_string()
} else {
let first = err.chain().next().unwrap();
let root = err.root_cause();
// If there is more than one error in the chain we want to show
// - The first one: This will describe the overall operation that failed
// - The root cause: The actual thing that failed (e.g. 'No such file or directory')
// - The one before the root cause: With diligent `wrap_err` usage, this will provide
// context to the root cause (e.g. the file name we failed to access)
//
// If there are only two errors, the first one is also the context to the root cause.
if count > 2 {
// The second to last one, the context to the root cause
let context = err.chain().nth(count - 2).unwrap();
(format!("{first}!"), format!("{}: {}", context, root))
} else {
("An error occurred!".to_string(), format!("{}: {}", first, root))
}
msg
}
};
let title = Label::new(title)
.with_text_size(24.)
.with_text_color(theme::COLOR_RED_LIGHT);
let text = Label::new(msg).with_line_break_mode(LineBreaking::WordWrap);
let button = Button::with_label("Ok")
@ -52,40 +22,19 @@ pub fn error<T: Data>(err: Report, _parent: WindowHandle) -> WindowDesc<T> {
})
.align_right();
let mut widget = Flex::column()
.cross_axis_alignment(CrossAxisAlignment::Start)
.with_child(title)
.with_default_spacer()
.with_child(text);
if let Some(handler) = err.handler().downcast_ref::<Handler>() {
let mut first = true;
for section in handler.sections() {
if let HelpInfo::Suggestion(data, _) = section {
if first {
widget.add_default_spacer();
first = false;
}
let w = Flex::row()
.cross_axis_alignment(CrossAxisAlignment::Start)
.with_child(Label::new("Suggestion:").with_text_color(theme::COLOR_GREEN_LIGHT))
.with_spacer(2.)
.with_child(
Label::new(data.to_string()).with_line_break_mode(LineBreaking::WordWrap),
);
widget.add_child(w);
}
}
}
let widget = widget.with_flex_spacer(1.).with_child(button).padding(10.);
let widget = Flex::column()
.main_axis_alignment(MainAxisAlignment::SpaceBetween)
.must_fill_main_axis(true)
.cross_axis_alignment(CrossAxisAlignment::End)
.with_child(text)
.with_spacer(20.)
.with_child(button)
.padding(10.);
WindowDesc::new(widget)
.title("Critical Error")
.title("Error")
.show_titlebar(true)
.with_min_size(WINDOW_SIZE)
.window_size_policy(WindowSizePolicy::Content)
.set_always_on_top(true)
.resizable(false)
}

View file

@ -2,7 +2,6 @@ use std::str::FromStr;
use std::sync::Arc;
use druid::im::Vector;
use druid::text::RichTextBuilder;
use druid::widget::{
Checkbox, CrossAxisAlignment, Either, Flex, Image, Label, LineBreaking, List,
MainAxisAlignment, Maybe, Scroll, SizedBox, Split, Svg, SvgData, TextBox, ViewSwitcher,
@ -17,12 +16,11 @@ use druid_widget_nursery::WidgetExt as _;
use lazy_static::lazy_static;
use crate::state::{
ModInfo, NexusInfo, NexusInfoLens, State, View, ACTION_ADD_MOD, ACTION_OPEN_LINK,
ACTION_SELECTED_MOD_DOWN, ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, ACTION_SET_WINDOW_HANDLE,
ACTION_START_CHECK_UPDATE, ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY,
ACTION_START_RESET_DEPLOYMENT,
ModInfo, NexusInfo, NexusInfoLens, State, View, ACTION_ADD_MOD, ACTION_SELECTED_MOD_DOWN,
ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, ACTION_SET_WINDOW_HANDLE, ACTION_START_CHECK_UPDATE,
ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY, ACTION_START_RESET_DEPLOYMENT,
};
use crate::ui::theme::{self, ColorExt, COLOR_GREEN_LIGHT};
use crate::ui::theme::{self, ColorExt, COLOR_YELLOW_LIGHT};
use crate::ui::widget::border::Border;
use crate::ui::widget::button::Button;
use crate::ui::widget::controller::{
@ -137,22 +135,17 @@ fn build_mod_list() -> impl Widget<State> {
})
.lens(lens!((usize, Arc<ModInfo>, bool), 1).then(ModInfo::enabled.in_arc()));
let name = Label::dynamic(|info: &Arc<ModInfo>, _| {
info.nexus
.as_ref()
.map(|n| n.name.clone())
.unwrap_or_else(|| info.name.clone())
})
.lens(lens!((usize, Arc<ModInfo>, bool), 1));
let name =
Label::raw().lens(lens!((usize, Arc<ModInfo>, bool), 1).then(ModInfo::name.in_arc()));
let version = {
let icon = {
let tree =
theme::icons::parse_svg(theme::icons::CLOUD_DOWNLOAD).expect("invalid SVG");
theme::icons::parse_svg(theme::icons::ALERT_TRIANGLE).expect("invalid SVG");
let tree = theme::icons::recolor_icon(tree, true, COLOR_GREEN_LIGHT);
let tree = theme::icons::recolor_icon(tree, true, COLOR_YELLOW_LIGHT);
Svg::new(tree).fix_height(druid::theme::TEXT_SIZE_NORMAL)
Svg::new(Arc::new(tree)).fix_height(druid::theme::TEXT_SIZE_NORMAL)
};
Either::new(
@ -309,11 +302,13 @@ fn build_mod_details_info() -> impl Widget<State> {
// Force the label to take up the entire width of the details pane,
// so that we can center-align it.
.expand_width()
.lens(NexusInfoLens::new(NexusInfo::name, ModInfo::name).in_arc());
.lens(ModInfo::name.in_arc());
let summary = Label::raw()
.with_line_break_mode(LineBreaking::WordWrap)
.lens(NexusInfoLens::new(NexusInfo::summary, ModInfo::summary).in_arc());
// TODO: Image/icon?
let version_line = Label::dynamic(|info: &Arc<ModInfo>, _| {
let author = info
.nexus
@ -345,28 +340,6 @@ fn build_mod_details_info() -> impl Widget<State> {
}
});
let nexus_link = Maybe::or_empty(|| {
let link = Label::raw().lens(NexusInfo::id.map(
|id| {
let url = format!("https://nexusmods.com/warhammer40kdarktide/mods/{}", id);
let mut builder = RichTextBuilder::new();
builder
.push("Open on Nexusmods")
.underline(true)
.text_color(theme::LINK_COLOR)
.link(ACTION_OPEN_LINK.with(Arc::new(url)));
builder.build()
},
|_, _| {},
));
Flex::column()
.cross_axis_alignment(CrossAxisAlignment::Start)
.main_axis_alignment(MainAxisAlignment::Start)
.with_child(link)
.with_spacer(4.)
})
.lens(ModInfo::nexus.in_arc());
let details = Flex::column()
.cross_axis_alignment(CrossAxisAlignment::Start)
.main_axis_alignment(MainAxisAlignment::Start)
@ -374,7 +347,6 @@ fn build_mod_details_info() -> impl Widget<State> {
.with_spacer(4.)
.with_child(summary)
.with_spacer(4.)
.with_child(nexus_link)
.with_child(version_line)
.with_spacer(4.)
.with_child(categories)
@ -389,6 +361,8 @@ fn build_mod_details_info() -> impl Widget<State> {
.must_fill_main_axis(true)
.cross_axis_alignment(CrossAxisAlignment::Start)
.with_child(image)
// .with_spacer(4.)
// .with_flex_child(details, 1.)
.with_child(details)
},
Flex::column,
@ -451,29 +425,6 @@ fn build_view_settings() -> impl Widget<State> {
.with_flex_child(TextBox::new().expand_width().lens(State::nexus_api_key), 1.)
.expand_width();
let io_setting = Flex::row()
.must_fill_main_axis(true)
.main_axis_alignment(MainAxisAlignment::Start)
.with_child(Label::new("Enable unsafe I/O:"))
.with_default_spacer()
.with_child(Checkbox::from_label(Label::dynamic(
|enabled: &bool, _: &Env| {
if *enabled {
"Enabled".into()
} else {
"Disabled".into()
}
},
)))
.lens(State::is_io_enabled)
.tooltip(|_: &State, _: &Env| {
"Enabling this gives ANY mod full access to your files \
and the ability to load arbitrary software libraries.\n\
Only enable this if it is crucial for a mod's functionality, \
and you are sure none of the ones you have installed are malicious."
})
.expand_width();
let content = Flex::column()
.must_fill_main_axis(true)
.cross_axis_alignment(CrossAxisAlignment::Start)
@ -481,8 +432,6 @@ fn build_view_settings() -> impl Widget<State> {
.with_default_spacer()
.with_child(game_dir_setting)
.with_default_spacer()
.with_child(io_setting)
.with_default_spacer()
.with_child(nexus_apy_key_setting);
SizedBox::new(content)
@ -502,18 +451,17 @@ fn build_main() -> impl Widget<State> {
}
fn build_log_view() -> impl Widget<State> {
let list = List::new(|| {
Label::raw()
.with_font(FontDescriptor::new(FontFamily::MONOSPACE))
let font = FontDescriptor::new(FontFamily::MONOSPACE);
let label = Label::raw()
.with_font(font)
.with_line_break_mode(LineBreaking::WordWrap)
})
.lens(State::log)
.padding(4.)
.scroll()
.vertical()
.controller(AutoScrollController);
let inner = Border::new(list)
let inner = Border::new(label)
.with_color(theme::COLOR_FG2)
.with_top_border(1.);

View file

@ -1,92 +0,0 @@
use ansi_parser::{AnsiParser, AnsiSequence, Output};
use druid::text::{RichText, RichTextBuilder};
use druid::{Color, FontStyle, FontWeight};
use crate::ui::theme;
#[derive(Default, Debug)]
struct TextState {
color: Option<Color>,
dim: bool,
bold: bool,
underline: bool,
strikethrough: bool,
italic: bool,
}
pub fn ansi_to_rich_text(input: &str) -> RichText {
let mut builder = RichTextBuilder::new();
let mut state = TextState::default();
for token in input.ansi_parse() {
match token {
Output::TextBlock(text) => {
let mut attr = builder.push(text);
attr.underline(state.underline);
attr.strikethrough(state.strikethrough);
if state.bold {
attr.weight(FontWeight::BOLD);
}
if state.italic {
attr.style(FontStyle::Italic);
}
if let Some(color) = state.color {
attr.text_color(color);
}
}
Output::Escape(AnsiSequence::SetGraphicsMode(values)) => {
for v in values {
match v {
0 => {
state = Default::default();
break;
}
1 => state.bold = true,
2 => state.dim = true,
3 => state.italic = true,
4 => state.underline = true,
9 => state.strikethrough = true,
22 => {
state.bold = false;
state.dim = false;
}
23 => state.italic = false,
24 => state.underline = false,
29 => state.strikethrough = false,
30..=39 | 90..=97 => {
let mut col = v - 30;
if col > 9 {
state.bold = true;
col -= 60;
}
state.color = match col {
// This escape code is usually called 'black', but is actually used
// as the "foreground color", so that light themes are handled correctly.
0 => Some(theme::COLOR_FG),
1 => Some(theme::COLOR_RED_LIGHT),
2 => Some(theme::COLOR_GREEN_LIGHT),
3 => Some(theme::COLOR_YELLOW_LIGHT),
4 => Some(theme::COLOR_BLUE_LIGHT),
5 => Some(theme::COLOR_PURPLE_LIGHT),
6 => Some(theme::COLOR_AQUA_LIGHT),
// Similarly, 'white' is the background color
7 => Some(theme::COLOR_BG),
9 => None,
// 8 starts an extended (256-color / RGB) sequence, which this
// parser doesn't handle; fall back to the default color.
_ => None,
};
}
_ => {}
}
}
}
Output::Escape(_) => {}
}
}
builder.build()
}
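// Usage sketch (illustrative, not from the original source): feeding a line with
// SGR escape codes through the parser above.
//
//   let text = ansi_to_rich_text("\x1b[1;31merror\x1b[0m: something failed");
//
// "error" is rendered bold in `COLOR_RED_LIGHT`, the reset (`0`) clears the
// attributes, and the remainder keeps the default styling.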

View file

@ -30,7 +30,6 @@ pub(crate) struct ConfigSerialize<'a> {
data_dir: &'a Path,
nexus_api_key: &'a String,
mod_order: Vec<LoadOrderEntrySerialize<'a>>,
unsafe_io: bool,
}
impl<'a> From<&'a ActionState> for ConfigSerialize<'a> {
@ -39,7 +38,6 @@ impl<'a> From<&'a ActionState> for ConfigSerialize<'a> {
game_dir: &state.game_dir,
data_dir: &state.data_dir,
nexus_api_key: &state.nexus_api_key,
unsafe_io: state.is_io_enabled,
mod_order: state
.mods
.iter()
@ -63,8 +61,6 @@ pub(crate) struct Config {
#[serde(default = "get_default_data_dir")]
pub data_dir: PathBuf,
pub game_dir: Option<PathBuf>,
#[serde(default)]
pub unsafe_io: bool,
pub nexus_api_key: Option<String>,
#[serde(default)]
pub mod_order: Vec<LoadOrderEntry>,
@ -125,9 +121,6 @@ where
.wrap_err_with(|| format!("Invalid config file {}", path.display()))?;
cfg.path = path;
tracing::debug!("Read config file '{}': {:?}", cfg.path.display(), cfg);
Ok(cfg)
}
Err(err) if err.kind() == ErrorKind::NotFound => {
@ -136,11 +129,6 @@ where
.wrap_err_with(|| format!("Failed to read config file {}", path.display()))?;
}
tracing::debug!(
"Config file not found at '{}', creating default.",
path.display()
);
{
let parent = default_path
.parent()
@ -156,7 +144,6 @@ where
game_dir: None,
nexus_api_key: None,
mod_order: Vec::new(),
unsafe_io: false,
};
{
@ -164,7 +151,7 @@ where
.wrap_err("Failed to serialize default config value")?;
fs::write(&config.path, data).await.wrap_err_with(|| {
format!(
"Failed to write default config to {}",
"failed to write default config to {}",
config.path.display()
)
})?;

View file

@ -1,4 +1,3 @@
use clap::ValueEnum;
use tokio::sync::mpsc::UnboundedSender;
use tracing_error::ErrorLayer;
use tracing_subscriber::filter::FilterFn;
@ -8,34 +7,12 @@ use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::prelude::*;
use tracing_subscriber::EnvFilter;
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum LogLevel {
Trace,
Debug,
Info,
Warn,
Error,
}
impl From<LogLevel> for EnvFilter {
fn from(level: LogLevel) -> Self {
let filter = match level {
LogLevel::Trace => "error,dtmm=trace,sdk=trace",
LogLevel::Debug => "error,dtmm=debug,sdk=debug",
LogLevel::Info => "error,dtmm=info",
LogLevel::Warn => "error,dtmm=warn",
LogLevel::Error => "error",
};
EnvFilter::new(filter)
}
}
pub struct ChannelWriter {
tx: UnboundedSender<Vec<u8>>,
tx: UnboundedSender<String>,
}
impl ChannelWriter {
pub fn new(tx: UnboundedSender<Vec<u8>>) -> Self {
pub fn new(tx: UnboundedSender<String>) -> Self {
Self { tx }
}
}
@ -43,9 +20,12 @@ impl ChannelWriter {
impl std::io::Write for ChannelWriter {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
let tx = self.tx.clone();
let stripped = strip_ansi_escapes::strip(buf)?;
let string = String::from_utf8_lossy(&stripped).to_string();
// The `send` errors when the receiving end has closed.
// But there's not much we can do at that point, so we just ignore it.
let _ = tx.send(buf.to_vec());
let _ = tx.send(string);
Ok(buf.len())
}
@ -55,36 +35,27 @@ impl std::io::Write for ChannelWriter {
}
}
pub fn create_tracing_subscriber(level: Option<LogLevel>, tx: Option<UnboundedSender<Vec<u8>>>) {
let mut env_layer = if let Some(level) = level {
EnvFilter::from(level)
} else if cfg!(debug_assertions) {
pub fn create_tracing_subscriber(tx: UnboundedSender<String>) {
let env_layer = if cfg!(debug_assertions) {
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"))
} else {
EnvFilter::new("error,dtmm=info")
};
// The internal implementation of Druid's GTK file dialog turns
// cancelling the dialog into an error. The wrapper around it, which is
// also internal, then logs and swallows the error.
// As consumers of the library we therefore have no way to customize
// this behavior, and instead have to filter out the tracing event.
env_layer = env_layer.add_directive(
"druid_shell::backend::gtk::window=off"
.parse()
.expect("Invalid env filter directive"),
);
let stdout_layer = if cfg!(debug_assertions) {
let layer = fmt::layer().pretty();
Some(layer)
} else {
None
};
let stdout_layer = fmt::layer().pretty();
let channel_layer = tx.map(|tx| {
fmt::layer()
let channel_layer = fmt::layer()
// TODO: Re-enable and implement a formatter for the Druid widget
.with_ansi(false)
.event_format(dtmt_shared::Formatter)
.fmt_fields(debug_fn(dtmt_shared::format_fields))
.with_writer(move || ChannelWriter::new(tx.clone()))
.with_filter(FilterFn::new(dtmt_shared::filter_fields))
});
.with_filter(FilterFn::new(dtmt_shared::filter_fields));
tracing_subscriber::registry()
.with(env_layer)

View file

@ -4,40 +4,34 @@ version = "0.3.0"
edition = "2021"
[dependencies]
async-recursion = { workspace = true }
clap = { workspace = true }
cli-table = { workspace = true }
color-eyre = { workspace = true }
confy = { workspace = true }
csv-async = { workspace = true }
dtmt-shared = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
glob = { workspace = true }
luajit2-sys = { workspace = true }
minijinja = { workspace = true }
nanorand = { workspace = true }
notify = { workspace = true }
oodle = { workspace = true }
path-clean = { workspace = true }
path-slash = { workspace = true }
pin-project-lite = { workspace = true }
promptly = { workspace = true }
sdk = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true }
zip = { workspace = true }
# Cannot be a workspace dependency when it's optional
shlex = { version = "1.2.0", optional = true }
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] }
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
color-eyre = "0.6.2"
confy = "0.5.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
libloading = "0.7.4"
nanorand = "0.7.0"
oodle = { path = "../../lib/oodle", version = "*" }
pin-project-lite = "0.2.9"
promptly = "0.3.1"
sdk = { path = "../../lib/sdk", version = "*" }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
serde = { version = "1.0.147", features = ["derive"] }
string_template = "0.2.1"
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
tracing = { version = "0.1.37", features = ["async-await"] }
zip = "0.6.3"
path-clean = "1.0.1"
path-slash = "0.2.1"
async-recursion = "1.0.2"
notify = "5.1.0"
[dev-dependencies]
tempfile = "3.3.0"
[features]
shlex-bench = ["dep:shlex"]

View file

@ -55,7 +55,6 @@ pub(crate) fn command_definition() -> Command {
)
}
/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
#[tracing::instrument]
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
let (path, mut file) = if let Some(path) = dir {
@ -103,44 +102,39 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
Ok(cfg)
}
/// Iterate over the paths in the given `Package` and
/// compile each file by its file type.
#[tracing::instrument(skip_all)]
async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
let root = Arc::new(&cfg.dir);
let name_overrides = &cfg.name_overrides;
async fn compile_package_files<P>(pkg: &Package, root: P) -> Result<Vec<BundleFile>>
where
P: AsRef<Path> + std::fmt::Debug,
{
let root = Arc::new(root.as_ref());
let tasks = pkg
.iter()
.flat_map(|(file_type, names)| {
names.iter().map(|name| {
.flat_map(|(file_type, paths)| {
paths.iter().map(|path| {
(
*file_type,
name,
path,
// The `Arc` is cloned here because the next `.map` needs to `move` its closure
// parameters, and `root` must not be moved out before it has been cloned.
root.clone(),
)
})
})
.map(|(file_type, name, root)| async move {
let path = PathBuf::from(name);
let sjson = fs::read_to_string(&path)
.await
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
.map(|(file_type, path, root)| async move {
let sjson = fs::read_to_string(&path).await?;
let name = path.with_extension("").to_slash_lossy().to_string();
let name = if let Some(new_name) = name_overrides.get(&name) {
let new_name = match u64::from_str_radix(new_name, 16) {
Ok(hash) => IdString64::from(hash),
Err(_) => IdString64::from(new_name.clone()),
};
tracing::info!("Overriding '{}' -> '{}'", name, new_name.display());
new_name
} else {
IdString64::from(name.clone())
};
BundleFile::from_sjson(name, file_type, sjson, root.as_ref()).await
let mut path = path.clone();
path.set_extension("");
BundleFile::from_sjson(
path.to_slash_lossy().to_string(),
file_type,
sjson,
root.as_ref(),
)
.await
});
let results = futures::stream::iter(tasks)
@ -151,14 +145,13 @@ async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<Bun
results.into_iter().collect()
}
/// Read a `.package` file, collect the referenced files
/// and compile all of them into a bundle.
#[tracing::instrument]
async fn build_package(
cfg: &ModConfig,
package: impl AsRef<Path> + std::fmt::Debug,
) -> Result<Bundle> {
let root = &cfg.dir;
async fn build_package<P1, P2>(package: P1, root: P2) -> Result<Bundle>
where
P1: AsRef<Path> + std::fmt::Debug,
P2: AsRef<Path> + std::fmt::Debug,
{
let root = root.as_ref();
let package = package.as_ref();
let mut path = root.join(package);
@ -172,7 +165,7 @@ async fn build_package(
.await
.wrap_err_with(|| format!("Invalid package file {}", &pkg_name))?;
let files = compile_package_files(&pkg, cfg).await?;
let files = compile_package_files(&pkg, root).await?;
let mut bundle = Bundle::new(pkg_name);
for file in files {
bundle.add_file(file);
@ -181,8 +174,6 @@ async fn build_package(
Ok(bundle)
}
/// Cleans the path of internal parent (`../`) or self (`./`) components,
/// and ensures that it is relative.
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = path.as_ref();
@ -263,14 +254,14 @@ pub(crate) async fn read_project_config(dir: Option<PathBuf>) -> Result<ModConfi
Ok(cfg)
}
#[tracing::instrument]
pub(crate) async fn build<P>(
pub(crate) async fn build<P1, P2>(
cfg: &ModConfig,
out_path: impl AsRef<Path> + std::fmt::Debug,
game_dir: Arc<Option<P>>,
out_path: P1,
game_dir: Arc<Option<P2>>,
) -> Result<()>
where
P: AsRef<Path> + std::fmt::Debug,
P1: AsRef<Path>,
P2: AsRef<Path>,
{
let out_path = out_path.as_ref();
@ -295,7 +286,7 @@ where
);
}
let bundle = build_package(&cfg, path).await.wrap_err_with(|| {
let bundle = build_package(path, &cfg.dir).await.wrap_err_with(|| {
format!(
"Failed to build package '{}' at '{}'",
path.display(),

View file

@ -1,174 +0,0 @@
use std::{io::Cursor, path::PathBuf};
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::{eyre::Context as _, Result};
use sdk::murmur::{HashGroup, IdString64, Murmur64};
use sdk::{BundleDatabase, FromBinary as _};
use tokio::fs;
pub(crate) fn command_definition() -> Command {
Command::new("db")
.about("Various operations regarding `bundle_database.data`.")
.subcommand_required(true)
.subcommand(
Command::new("list-files")
.about("List bundle contents")
.arg(
Arg::new("database")
.required(true)
.help("Path to the bundle database")
.value_parser(value_parser!(PathBuf)),
)
.arg(
Arg::new("bundle")
.help("The bundle name. If omitted, all bundles will be listed.")
.required(false),
),
)
.subcommand(
Command::new("list-bundles").about("List bundles").arg(
Arg::new("database")
.required(true)
.help("Path to the bundle database")
.value_parser(value_parser!(PathBuf)),
),
)
.subcommand(
Command::new("find-file")
.about("Find the bundle a file belongs to")
.arg(
Arg::new("database")
.required(true)
.help("Path to the bundle database")
.value_parser(value_parser!(PathBuf)),
)
.arg(
Arg::new("file-name")
.required(true)
.help("Name of the file. May be a hash in hex representation or a string"),
),
)
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
let Some((op, sub_matches)) = matches.subcommand() else {
unreachable!("clap is configured to require a subcommand");
};
let database = {
let path = sub_matches
.get_one::<PathBuf>("database")
.expect("argument is required");
let binary = fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
let mut r = Cursor::new(binary);
BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?
};
match op {
"list-files" => {
let index = database.files();
if let Some(bundle) = sub_matches.get_one::<String>("bundle") {
let hash = u64::from_str_radix(bundle, 16)
.map(Murmur64::from)
.wrap_err("Invalid hex sequence")?;
if let Some(files) = index.get(&hash) {
for file in files {
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
let extension = file.extension.ext_name();
println!("{}.{}", name.display(), extension);
}
} else {
tracing::info!("Bundle {} not found in the database", bundle);
}
} else {
for (bundle_hash, files) in index.iter() {
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
match bundle_name {
IdString64::String(name) => {
println!("{:016x} {}", bundle_hash, name);
}
IdString64::Hash(hash) => {
println!("{:016x}", hash);
}
}
for file in files {
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
let extension = file.extension.ext_name();
match name {
IdString64::String(name) => {
println!("\t{:016x}.{:<12} {}", file.name, extension, name);
}
IdString64::Hash(hash) => {
println!("\t{:016x}.{}", hash, extension);
}
}
}
println!();
}
}
Ok(())
}
"list-bundles" => {
for bundle_hash in database.bundles().keys() {
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
match bundle_name {
IdString64::String(name) => {
println!("{:016x} {}", bundle_hash, name);
}
IdString64::Hash(hash) => {
println!("{:016x}", hash);
}
}
}
Ok(())
}
"find-file" => {
let name = sub_matches
.get_one::<String>("file-name")
.expect("required argument");
let name = match u64::from_str_radix(name, 16).map(Murmur64::from) {
Ok(hash) => hash,
Err(_) => Murmur64::hash(name),
};
let bundles = database.files().iter().filter_map(|(bundle_hash, files)| {
if files.iter().any(|file| file.name == name) {
Some(bundle_hash)
} else {
None
}
});
let mut found = false;
for bundle in bundles {
found = true;
println!("{:016x}", bundle);
}
if !found {
std::process::exit(1);
}
Ok(())
}
_ => unreachable!(
"clap is configured to require a subcommand, and they're all handled above"
),
}
}
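// Example invocations implied by the clap definitions above. File paths and the
// hex hash are placeholders, and the parent command path is omitted since it is
// not shown here:
//
//   <parent> db list-bundles bundle_database.data
//   <parent> db list-files bundle_database.data 0123456789abcdef
//   <parent> db find-file bundle_database.data 0123456789abcdef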

View file

@ -1,20 +1,17 @@
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::{self, bail, Context, Result};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::{Help, Report};
use futures::future::try_join_all;
use futures::StreamExt;
use glob::Pattern;
use sdk::{Bundle, BundleFile, CmdLine};
use sdk::{Bundle, BundleFile};
use tokio::fs;
use crate::cmd::util::resolve_bundle_paths;
use crate::shell_parse::ShellParser;
#[inline]
fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
match Pattern::new(s) {
Ok(p) => Ok(p),
@ -22,7 +19,6 @@ fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
}
}
#[inline]
fn flatten_name(s: &str) -> String {
s.replace('/', "_")
}
@ -93,81 +89,30 @@ pub(crate) fn command_definition() -> Command {
Arg::new("ljd")
.long("ljd")
.help(
"A custom command line to execute ljd with. It is treated as follows:\n\
* if the argument is a valid path to an existing file:\n\
** if the file is called 'main.py', it is assumed that 'python.exe' \
exists in PATH to execute this with.\n\
** otherwise it is treated as an executable\n\
* if it's a single word, it's treated as an executable in PATH\n\
* otherwise it is treated as a command line template.\n\
In any case, the application being run must accept ljd's flags '-c' and '-f'.",
"Path to a custom ljd executable. If not set, \
`ljd` will be called from PATH.",
)
.default_value("ljd"),
)
// .arg(
// Arg::new("revorb")
// .long("revorb")
// .help(
// "Path to a custom revorb executable. If not set, \
// `revorb` will be called from PATH.",
// )
// .default_value("revorb"),
// )
// .arg(
// Arg::new("ww2ogg")
// .long("ww2ogg")
// .help(
// "Path to a custom ww2ogg executable. If not set, \
// `ww2ogg` will be called from PATH.\nSee the documentation for how \
// to set up the script for this.",
// )
// .default_value("ww2ogg"),
// )
}
#[tracing::instrument]
async fn parse_command_line_template(tmpl: &String) -> Result<CmdLine> {
if tmpl.trim().is_empty() {
eyre::bail!("Command line template must not be empty");
}
let mut cmd = if matches!(fs::try_exists(tmpl).await, Ok(true)) {
let path = PathBuf::from(tmpl);
if path.file_name() == Some(OsStr::new("main.py")) {
let mut cmd = CmdLine::new("python");
cmd.arg(path);
cmd
} else {
CmdLine::new(path)
}
} else {
let mut parsed = ShellParser::new(tmpl.as_bytes());
// Safety: The initial `tmpl` was a `&String` (i.e. valid UTF-8), and `shlex` does not
// insert or remove characters, nor does it split UTF-8 characters.
// So the resulting byte stream is still valid UTF-8.
let mut cmd = CmdLine::new(unsafe {
let bytes = parsed.next().expect("Template is not empty");
String::from_utf8_unchecked(bytes.to_vec())
});
while let Some(arg) = parsed.next() {
// Safety: See above.
cmd.arg(unsafe { String::from_utf8_unchecked(arg.to_vec()) });
}
if parsed.errored {
bail!("Invalid command line template");
}
cmd
};
// Add ljd flags
cmd.arg("-c");
tracing::debug!("Parsed command line template: {:?}", cmd);
Ok(cmd)
.arg(
Arg::new("revorb")
.long("revorb")
.help(
"Path to a custom revorb executable. If not set, \
`revorb` will be called from PATH.",
)
.default_value("revorb"),
)
.arg(
Arg::new("ww2ogg")
.long("ww2ogg")
.help(
"Path to a custom ww2ogg executable. If not set, \
`ww2ogg` will be called from PATH.\nSee the documentation for how \
to set up the script for this.",
)
.default_value("ww2ogg"),
)
}
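// Worked examples for `parse_command_line_template` above (paths are
// hypothetical; in every case the ljd flag `-c` is appended at the end):
//
//   --ljd C:/tools/ljd/main.py       -> `python C:/tools/ljd/main.py -c`  (existing file named main.py)
//   --ljd C:/tools/ljd/ljd.exe       -> `C:/tools/ljd/ljd.exe -c`         (existing file, treated as executable)
//   --ljd ljd                        -> `ljd -c`                          (single word, resolved via PATH)
//   --ljd "python3 /opt/ljd/main.py" -> `python3 /opt/ljd/main.py -c`     (shell-split command line template)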
#[tracing::instrument(skip_all)]
@ -176,19 +121,16 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
let ljd_bin = matches
.get_one::<String>("ljd")
.expect("no default value for 'ljd' parameter");
// let revorb_bin = matches
// .get_one::<String>("revorb")
// .expect("no default value for 'revorb' parameter");
// let ww2ogg_bin = matches
// .get_one::<String>("ww2ogg")
// .expect("no default value for 'ww2ogg' parameter");
let revorb_bin = matches
.get_one::<String>("revorb")
.expect("no default value for 'revorb' parameter");
let ww2ogg_bin = matches
.get_one::<String>("ww2ogg")
.expect("no default value for 'ww2ogg' parameter");
ctx.ljd = parse_command_line_template(ljd_bin)
.await
.map(Option::Some)
.wrap_err("Failed to parse command line template for flag 'ljd'")?;
// ctx.revorb = Some(revorb_bin.clone());
// ctx.ww2ogg = Some(ww2ogg_bin.clone());
ctx.ljd = Some(ljd_bin.clone());
ctx.revorb = Some(revorb_bin.clone());
ctx.ww2ogg = Some(ww2ogg_bin.clone());
}
let includes = match matches.get_many::<Pattern>("include") {

View file

@ -36,18 +36,6 @@ enum OutputFormat {
Text,
}
fn format_byte_size(size: usize) -> String {
if size < 1024 {
format!("{} Bytes", size)
} else if size < 1024 * 1024 {
format!("{} kB", size / 1024)
} else if size < 1024 * 1024 * 1024 {
format!("{} MB", size / (1024 * 1024))
} else {
format!("{} GB", size / (1024 * 1024 * 1024))
}
}
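// Worked examples for `format_byte_size` above (integer division, so values
// are truncated rather than rounded):
//
//   format_byte_size(512)           == "512 Bytes"
//   format_byte_size(2_048)         == "2 kB"
//   format_byte_size(5_242_880)     == "5 MB"
//   format_byte_size(3_221_225_472) == "3 GB"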
#[tracing::instrument(skip(ctx))]
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
where
@ -62,11 +50,7 @@ where
match fmt {
OutputFormat::Text => {
println!(
"Bundle: {} ({:016x})",
bundle.name().display(),
bundle.name()
);
println!("Bundle: {}", bundle.name().display());
for f in bundle.files().iter() {
if f.variants().len() != 1 {
@ -79,10 +63,9 @@ where
let v = &f.variants()[0];
println!(
"\t{}.{}: {} ({})",
"\t{}.{}: {} bytes",
f.base_name().display(),
f.file_type().ext_name(),
format_byte_size(v.size()),
v.size()
);
}

View file

@ -1,7 +1,6 @@
use clap::{ArgMatches, Command};
use color_eyre::eyre::Result;
mod db;
mod decompress;
mod extract;
mod inject;
@ -15,7 +14,6 @@ pub(crate) fn command_definition() -> Command {
.subcommand(extract::command_definition())
.subcommand(inject::command_definition())
.subcommand(list::command_definition())
.subcommand(db::command_definition())
}
#[tracing::instrument(skip_all)]
@ -25,7 +23,6 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,
Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await,
Some(("list", sub_matches)) => list::run(ctx, sub_matches).await,
Some(("db", sub_matches)) => db::run(ctx, sub_matches).await,
_ => unreachable!(
"clap is configured to require a subcommand, and they're all handled above"
),

View file

@ -145,10 +145,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
.get_one::<HashGroup>("group")
.expect("required argument not found");
let r: BufReader<Box<dyn tokio::io::AsyncRead + std::marker::Unpin>> = if let Some(name) =
path.file_name()
&& name == "-"
{
let r: BufReader<Box<dyn tokio::io::AsyncRead + std::marker::Unpin>> = if let Some(name) = path.file_name() && name == "-" {
let f = tokio::io::stdin();
BufReader::new(Box::new(f))
} else {

View file

@ -1,407 +0,0 @@
use std::collections::HashMap;
use std::ffi::{CStr, CString};
use std::path::{Path, PathBuf};
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::{self, Context};
use color_eyre::{Help, Report, Result};
use dtmt_shared::{ModConfig, ModConfigResources, ModDependency};
use futures::FutureExt;
use luajit2_sys as lua;
use tokio::fs;
use tokio_stream::wrappers::ReadDirStream;
use tokio_stream::StreamExt;
pub(crate) fn command_definition() -> Command {
Command::new("migrate")
.about("Migrate a mod project from the loose file structure to DTMT.")
.arg(
Arg::new("mod-file")
.required(true)
.value_parser(value_parser!(PathBuf))
.help("The path to the mod's '<id>.mod' file."),
)
.arg(
Arg::new("directory")
.required(true)
.value_parser(value_parser!(PathBuf))
.help(
"The directory to create the mod in. Within this directory, \
DTMT will create a new folder named after the mod ID and migrate files \
into that folder.",
),
)
}
#[derive(Clone, Debug)]
struct ModFile {
id: String,
init: PathBuf,
data: Option<PathBuf>,
localization: Option<PathBuf>,
}
// This piece of Lua code stubs DMF functions and runs a mod's `.mod` file to extract
// the contained information.
static MOD_FILE_RUNNER: &str = r#"
_DATA = {}
function fassert() end
function new_mod(id, options)
_DATA.id = id
_DATA.init = options.mod_script
_DATA.data = options.mod_data
_DATA.localization = options.mod_localization
end
dmf = {
dofile = function(self, file)
_DATA.init = file
end
}
_MOD().run()
"#;
#[tracing::instrument]
async fn evaluate_mod_file(path: impl AsRef<Path> + std::fmt::Debug) -> Result<ModFile> {
let path = path.as_ref();
let code = fs::read(path)
.await
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
tokio::task::spawn_blocking(move || unsafe {
let state = lua::luaL_newstate();
lua::luaL_openlibs(state);
let code = CString::new(code).expect("Cannot build CString");
let name = CString::new("_MOD").expect("Cannot build CString");
match lua::luaL_loadstring(state, code.as_ptr()) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRSYNTAX => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Invalid syntax: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory")
}
_ => unreachable!(),
}
tracing::trace!("Loaded '.mod' code");
lua::lua_setglobal(state, name.as_ptr());
let code = CString::new(MOD_FILE_RUNNER).expect("Cannot build CString");
match lua::luaL_loadstring(state, code.as_ptr()) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRSYNTAX => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Invalid syntax: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory")
}
_ => unreachable!(),
}
match lua::lua_pcall(state, 0, 1, 0) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRRUN => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Failed to evaluate '.mod' file: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory")
}
// We don't use an error handler function, so this should be unreachable
lua::LUA_ERRERR => unreachable!(),
_ => unreachable!(),
}
tracing::trace!("Loaded file runner code");
let name = CString::new("_DATA").expect("Cannot build CString");
lua::lua_getglobal(state, name.as_ptr());
let id = {
let name = CString::new("id").expect("Cannot build CString");
lua::lua_getfield(state, -1, name.as_ptr());
let val = {
let ptr = lua::lua_tostring(state, -1);
let str = CStr::from_ptr(ptr);
str.to_str()
.expect("ID value is not a valid string")
.to_string()
};
lua::lua_pop(state, 1);
val
};
let path_prefix = format!("{id}/");
let init = {
let name = CString::new("init").expect("Cannot build CString");
lua::lua_getfield(state, -1, name.as_ptr());
let val = {
let ptr = lua::lua_tostring(state, -1);
let str = CStr::from_ptr(ptr);
str.to_str().expect("ID value is not a valid string")
};
lua::lua_pop(state, 1);
PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val))
};
let data = {
let name = CString::new("data").expect("Cannot build CString");
lua::lua_getfield(state, -1, name.as_ptr());
if lua::lua_isnil(state, -1) > 0 {
None
} else {
let val = {
let ptr = lua::lua_tostring(state, -1);
let str = CStr::from_ptr(ptr);
str.to_str().expect("ID value is not a valid string")
};
lua::lua_pop(state, 1);
Some(PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val)))
}
};
let localization = {
let name = CString::new("localization").expect("Cannot build CString");
lua::lua_getfield(state, -1, name.as_ptr());
if lua::lua_isnil(state, -1) > 0 {
None
} else {
let val = {
let ptr = lua::lua_tostring(state, -1);
let str = CStr::from_ptr(ptr);
str.to_str().expect("ID value is not a valid string")
};
lua::lua_pop(state, 1);
Some(PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val)))
}
};
lua::lua_close(state);
let mod_file = ModFile {
id,
init,
data,
localization,
};
tracing::trace!(?mod_file);
Ok(mod_file)
})
.await
.map_err(Report::new)
.flatten()
.wrap_err("Failed to run mod file handler")
}
#[async_recursion::async_recursion]
#[tracing::instrument]
async fn process_directory<P1, P2>(path: P1, prefix: P2) -> Result<()>
where
P1: AsRef<Path> + std::fmt::Debug + std::marker::Send,
P2: AsRef<Path> + std::fmt::Debug + std::marker::Send,
{
let path = path.as_ref();
let prefix = prefix.as_ref();
let read_dir = fs::read_dir(&path)
.await
.wrap_err_with(|| format!("Failed to read directory '{}'", path.display()))?;
let stream = ReadDirStream::new(read_dir).map(|res| res.wrap_err("Failed to read dir entry"));
tokio::pin!(stream);
while let Some(res) = stream.next().await {
let entry = res?;
let in_path = entry.path();
let out_path = prefix.join(entry.file_name());
let t = entry.file_type().await?;
if t.is_dir() {
process_directory(in_path, out_path).await?;
} else {
tracing::trace!(
"Copying file '{}' -> '{}'",
in_path.display(),
out_path.display()
);
let res = fs::create_dir_all(prefix)
.then(|_| fs::copy(&in_path, &out_path))
.await
.wrap_err_with(|| {
format!(
"Failed to copy '{}' -> '{}'",
in_path.display(),
out_path.display()
)
});
if let Err(err) = res {
tracing::error!("{:?}", err);
}
}
}
Ok(())
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
let (mod_file, in_dir) = {
let path = matches
.get_one::<PathBuf>("mod-file")
.expect("Parameter is required");
let mod_file = evaluate_mod_file(&path)
.await
.wrap_err("Failed to evaluate '.mod' file")?;
(
mod_file,
path.parent().expect("A file path always has a parent"),
)
};
let out_dir = matches
.get_one::<PathBuf>("directory")
.expect("Parameter is required");
{
let is_dir = fs::metadata(out_dir)
.await
.map(|meta| meta.is_dir())
.unwrap_or(false);
if !is_dir {
let err = eyre::eyre!("Invalid output directory '{}'", out_dir.display());
return Err(err)
.with_suggestion(|| "Make sure the directory exists and is writable.".to_string());
}
}
let out_dir = out_dir.join(&mod_file.id);
fs::create_dir(&out_dir)
.await
.wrap_err_with(|| format!("Failed to create mod directory '{}'", out_dir.display()))?;
tracing::info!("Created mod directory '{}'", out_dir.display());
println!(
"Enter additional information about your mod '{}'!",
&mod_file.id
);
let name = promptly::prompt_default("Display name", mod_file.id.clone())
.map(|s: String| s.trim().to_string())?;
let summary = promptly::prompt("Short summary").map(|s: String| s.trim().to_string())?;
let author =
promptly::prompt_opt("Author").map(|opt| opt.map(|s: String| s.trim().to_string()))?;
let version = promptly::prompt_default("Version", String::from("0.1.0"))
.map(|s: String| s.trim().to_string())?;
let categories = promptly::prompt("Categories (comma separated list)")
.map(|s: String| s.trim().to_string())
.map(|s: String| s.split(',').map(|s| s.trim().to_string()).collect())?;
let packages = vec![PathBuf::from("packages/mods").join(&mod_file.id)];
let dtmt_cfg = ModConfig {
dir: out_dir,
id: mod_file.id,
name,
summary,
author,
version,
description: None,
image: None,
categories,
packages,
resources: ModConfigResources {
init: mod_file.init,
data: mod_file.data,
localization: mod_file.localization,
},
depends: vec![ModDependency::ID(String::from("DMF"))],
bundled: true,
name_overrides: HashMap::new(),
};
tracing::debug!(?dtmt_cfg);
{
let path = dtmt_cfg.dir.join("dtmt.cfg");
let data = serde_sjson::to_string(&dtmt_cfg).wrap_err("Failed to serialize dtmt.cfg")?;
fs::write(&path, &data)
.await
.wrap_err_with(|| format!("Failed to write '{}'", path.display()))?;
tracing::info!("Created mod configuration at '{}'", path.display());
}
{
let path = dtmt_cfg
.dir
.join(&dtmt_cfg.packages[0])
.with_extension("package");
let data = {
let mut map = HashMap::new();
map.insert("lua", vec![format!("scripts/mods/{}/*", dtmt_cfg.id)]);
map
};
let data = serde_sjson::to_string(&data).wrap_err("Failed to serialize package file")?;
fs::create_dir_all(path.parent().unwrap())
.then(|_| fs::write(&path, &data))
.await
.wrap_err_with(|| format!("Failed to write '{}'", path.display()))?;
tracing::info!("Created package file at '{}'", path.display());
}
{
let path = in_dir.join("scripts");
let scripts_dir = dtmt_cfg.dir.join("scripts");
process_directory(&path, &scripts_dir)
.await
.wrap_err_with(|| {
format!(
"Failed to copy files from '{}' to '{}'",
path.display(),
scripts_dir.display()
)
})?;
tracing::info!("Copied script files to '{}'", scripts_dir.display());
}
Ok(())
}

View file

@ -1,30 +1,18 @@
use std::collections::HashMap;
use std::path::PathBuf;
use clap::{Arg, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::Help;
use futures::{StreamExt, TryStreamExt};
use minijinja::Environment;
use string_template::Template;
use tokio::fs::{self, DirBuilder};
const TEMPLATES: [(&str, &str); 5] = [
(
"dtmt.cfg",
r#"//
// This is your mod's main configuration file. It tells DTMT how to build the mod,
// and DTMM what to display to your users.
// Certain files have been pre-filled by the template, the ones commented out (`//`)
// are optional.
//
// A unique identifier (preferably lower case, alphanumeric)
id = "{{id}}"
// The display name that your users will see.
// This doesn't have to be unique, but you still want to avoid being confused with other
// mods.
r#"id = "{{id}}"
name = "{{name}}"
// It's good practice to increase this number whenever you publish changes.
// It's up to you if you use SemVer or something simpler like `1970-12-24`. It should sort and
// compare well, though.
version = "0.1.0"
// author = ""
@ -44,25 +32,16 @@ categories = [
// A list of mod IDs that this mod depends on. You can find
// those IDs by downloading the mod and extracting their `dtmt.cfg`.
// To make your fellow modders' lives easier, publish your own mods' IDs
// somewhere visible, such as the Nexusmods page.
depends = [
DMF
]
// The primary resources that serve as the entry point to your
// mod's code. Unless for very specific use cases, the generated
// values shouldn't be changed.
resources = {
init = "scripts/mods/{{id}}/init"
data = "scripts/mods/{{id}}/data"
localization = "scripts/mods/{{id}}/localization"
}
// The list of packages, or bundles, to build.
// Each one corresponds to a package definition in the named folder.
// For mods that contain only code and/or a few small assets, a single
// package will suffice.
packages = [
"packages/mods/{{id}}"
]
@ -80,6 +59,7 @@ packages = [
r#"local mod = get_mod("{{id}}")
-- Your mod code goes here.
-- https://vmf-docs.verminti.de
"#,
),
(
@ -157,23 +137,19 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
tracing::debug!(root = %root.display(), name, id);
let render_ctx = minijinja::context!(name => name.as_str(), id => id.as_str());
let env = Environment::new();
let mut data = HashMap::new();
data.insert("name", name.as_str());
data.insert("id", id.as_str());
let templates = TEMPLATES
.iter()
.map(|(path_tmpl, content_tmpl)| {
env.render_str(path_tmpl, &render_ctx)
.wrap_err_with(|| format!("Failed to render template: {}", path_tmpl))
.and_then(|path| {
env.render_named_str(&path, content_tmpl, &render_ctx)
.wrap_err_with(|| format!("Failed to render template '{}'", &path))
.map(|content| (root.join(path), content))
let path = Template::new(path_tmpl).render(&data);
let content = Template::new(content_tmpl).render(&data);
(root.join(path), content)
})
})
.map(|res| async move {
match res {
Ok((path, content)) => {
.map(|(path, content)| async move {
let dir = path
.parent()
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
@ -182,20 +158,13 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
.recursive(true)
.create(&dir)
.await
.wrap_err_with(|| {
format!("Failed to create directory {}", dir.display())
})?;
.wrap_err_with(|| format!("Failed to create directory {}", dir.display()))?;
tracing::trace!("Writing file {}", path.display());
fs::write(&path, content.as_bytes())
.await
.wrap_err_with(|| {
format!("Failed to write content to path {}", path.display())
})
}
Err(e) => Err(e),
}
.wrap_err_with(|| format!("Failed to write content to path {}", path.display()))
});
futures::stream::iter(templates)

View file

@ -1,5 +1,6 @@
use std::io::{Cursor, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::{Context, Result};
@ -7,9 +8,9 @@ use color_eyre::Help;
use dtmt_shared::ModConfig;
use path_slash::{PathBufExt, PathExt};
use tokio::fs;
use tokio::sync::Mutex;
use tokio_stream::wrappers::ReadDirStream;
use tokio_stream::StreamExt;
use zip::write::SimpleFileOptions;
use zip::ZipWriter;
use crate::cmd::build::read_project_config;
@ -50,7 +51,11 @@ pub(crate) fn command_definition() -> Command {
}
#[async_recursion::async_recursion]
async fn process_directory<P1, P2, W>(zip: &mut ZipWriter<W>, path: P1, prefix: P2) -> Result<()>
async fn process_directory<P1, P2, W>(
zip: Arc<Mutex<ZipWriter<W>>>,
path: P1,
prefix: P2,
) -> Result<()>
where
P1: AsRef<Path> + std::marker::Send,
P2: AsRef<Path> + std::marker::Send,
@ -59,7 +64,9 @@ where
let path = path.as_ref();
let prefix = prefix.as_ref();
zip.add_directory(prefix.to_slash_lossy(), SimpleFileOptions::default())?;
zip.lock()
.await
.add_directory(prefix.to_slash_lossy(), Default::default())?;
let read_dir = fs::read_dir(&path)
.await
@ -80,11 +87,12 @@ where
.await
.wrap_err_with(|| format!("Failed to read '{}'", in_path.display()))?;
{
zip.start_file(out_path.to_slash_lossy(), SimpleFileOptions::default())?;
let mut zip = zip.lock().await;
zip.start_file(out_path.to_slash_lossy(), Default::default())?;
zip.write_all(&data)?;
}
} else if t.is_dir() {
process_directory(zip, in_path, out_path).await?;
process_directory(zip.clone(), in_path, out_path).await?;
}
}
@ -99,12 +107,16 @@ where
let path = path.as_ref();
let dest = dest.as_ref();
let mut zip = ZipWriter::new(Cursor::new(Vec::with_capacity(1024)));
let data = Cursor::new(Vec::new());
let zip = ZipWriter::new(data);
let zip = Arc::new(Mutex::new(zip));
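// The writer is shared behind Arc<Mutex<_>> so the recursive async directory
// walk can append entries from every call; each entry locks it only briefly.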
process_directory(&mut zip, path, PathBuf::from(&cfg.id))
process_directory(zip.clone(), path, PathBuf::from(&cfg.id))
.await
.wrap_err("Failed to add directory to archive")?;
let mut zip = zip.lock().await;
{
let name = PathBuf::from(&cfg.id).join("dtmt.cfg");
let path = cfg.dir.join("dtmt.cfg");
@ -113,7 +125,7 @@ where
.await
.wrap_err_with(|| format!("Failed to read mod config at {}", path.display()))?;
zip.start_file(name.to_slash_lossy(), SimpleFileOptions::default())?;
zip.start_file(name.to_slash_lossy(), Default::default())?;
zip.write_all(&data)?;
}

View file

@ -8,7 +8,7 @@ use tokio::fs;
use tokio_stream::wrappers::ReadDirStream;
#[tracing::instrument]
pub async fn process_path<P>(path: P) -> Vec<PathBuf>
pub async fn foo<P>(path: P) -> Vec<PathBuf>
where
P: AsRef<Path> + std::fmt::Debug,
{
@ -98,10 +98,7 @@ where
I: Iterator<Item = PathBuf> + std::fmt::Debug,
{
let tasks = paths.map(|p| async move {
// Clippy doesn't understand that the block here is required to `move` in the reference.
// The task is spawned to make sure tokio can distribute these over threads.
#[allow(clippy::redundant_async_block)]
match tokio::spawn(async move { process_path(&p).await }).await {
match tokio::spawn(async move { foo(&p).await }).await {
Ok(paths) => paths,
Err(err) => {
tracing::error!(%err, "failed to spawn task to resolve bundle paths");
@ -114,9 +111,6 @@ where
results.into_iter().flatten().collect()
}
// `tracing::instrument` generates code that triggers this warning.
// Not much we can do to prevent that.
#[allow(clippy::let_with_type_underscore)]
#[tracing::instrument(skip_all)]
pub fn resolve_bundle_paths<I>(paths: I) -> impl Stream<Item = PathBuf>
where
@ -135,12 +129,12 @@ mod tests {
use tempfile::tempdir;
use tokio::process::Command;
use super::process_path;
use super::foo;
#[tokio::test]
async fn resolve_single_file() {
let path = PathBuf::from("foo");
let paths = process_path(&path).await;
let paths = foo(&path).await;
assert_eq!(paths.len(), 1);
assert_eq!(paths[0], path);
}
@ -148,7 +142,7 @@ mod tests {
#[tokio::test]
async fn resolve_empty_directory() {
let dir = tempdir().expect("failed to create temporary directory");
let paths = process_path(dir).await;
let paths = foo(dir).await;
assert!(paths.is_empty());
}
@ -176,7 +170,7 @@ mod tests {
.await
.expect("failed to create temporary files");
let paths = process_path(dir).await;
let paths = foo(dir).await;
assert_eq!(bundle_names.len(), paths.len());

View file

@ -77,14 +77,17 @@ pub(crate) fn command_definition() -> Command {
)
}
#[tracing::instrument]
async fn compile(
async fn compile<P1, P2, P3>(
cfg: &ModConfig,
out_path: impl AsRef<Path> + std::fmt::Debug,
archive_path: impl AsRef<Path> + std::fmt::Debug,
game_dir: Arc<Option<impl AsRef<Path> + std::fmt::Debug>>,
) -> Result<()> {
let out_path = out_path.as_ref();
out_path: P1,
archive_path: P2,
game_dir: Arc<Option<P3>>,
) -> Result<()>
where
P1: AsRef<Path> + std::marker::Copy,
P2: AsRef<Path>,
P3: AsRef<Path>,
{
build(cfg, out_path, game_dir)
.await
.wrap_err("Failed to build bundles")?;

View file

@ -1,7 +1,5 @@
#![feature(io_error_more)]
#![feature(let_chains)]
#![feature(result_flattening)]
#![feature(test)]
#![windows_subsystem = "console"]
use std::path::PathBuf;
@ -21,14 +19,12 @@ mod cmd {
pub mod build;
pub mod bundle;
pub mod dictionary;
pub mod migrate;
pub mod murmur;
pub mod new;
pub mod package;
mod util;
pub mod watch;
}
mod shell_parse;
#[derive(Default, Deserialize, Serialize)]
struct GlobalConfig {
@ -56,7 +52,6 @@ async fn main() -> Result<()> {
.subcommand(cmd::build::command_definition())
.subcommand(cmd::bundle::command_definition())
.subcommand(cmd::dictionary::command_definition())
.subcommand(cmd::migrate::command_definition())
.subcommand(cmd::murmur::command_definition())
.subcommand(cmd::new::command_definition())
.subcommand(cmd::package::command_definition())
@ -133,7 +128,6 @@ async fn main() -> Result<()> {
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,
Some(("package", sub_matches)) => cmd::package::run(ctx, sub_matches).await?,

View file

@ -1,189 +0,0 @@
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ParserState {
Start,
Word,
SingleQuote,
DoubleQuote,
}
pub struct ShellParser<'a> {
bytes: &'a [u8],
offset: usize,
pub errored: bool,
}
impl<'a> ShellParser<'a> {
pub fn new(bytes: &'a [u8]) -> Self {
Self {
bytes,
offset: 0,
errored: false,
}
}
fn parse_word(&mut self) -> Option<&'a [u8]> {
// The start of the current word. Certain leading characters should be ignored,
// so this might change.
let mut start = self.offset;
let mut state = ParserState::Start;
while self.offset < self.bytes.len() {
let c = self.bytes[self.offset];
self.offset += 1;
match state {
ParserState::Start => match c {
// Ignore leading whitespace
b' ' | b'\t' | b'\n' => start += 1,
b'\'' => {
state = ParserState::SingleQuote;
start += 1;
}
b'"' => {
state = ParserState::DoubleQuote;
start += 1;
}
_ => {
state = ParserState::Word;
}
},
ParserState::Word => match c {
// Unquoted whitespace ends the current word
b' ' | b'\t' | b'\n' => {
return Some(&self.bytes[start..self.offset - 1]);
}
_ => {}
},
ParserState::SingleQuote => match c {
b'\'' => {
return Some(&self.bytes[start..(self.offset - 1)]);
}
_ => {}
},
ParserState::DoubleQuote => match c {
b'"' => {
return Some(&self.bytes[start..(self.offset - 1)]);
}
_ => {}
},
}
}
match state {
ParserState::Start => None,
ParserState::Word => Some(&self.bytes[start..self.offset]),
ParserState::SingleQuote | ParserState::DoubleQuote => {
self.errored = true;
None
}
}
}
}
impl<'a> Iterator for ShellParser<'a> {
type Item = &'a [u8];
fn next(&mut self) -> Option<Self::Item> {
self.parse_word()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_one_word() {
let mut it = ShellParser::new(b"hello");
assert_eq!(it.next(), Some("hello".as_bytes()));
assert_eq!(it.next(), None);
}
#[test]
fn test_one_single() {
let mut it = ShellParser::new(b"'hello'");
assert_eq!(it.next(), Some("hello".as_bytes()));
assert_eq!(it.next(), None);
}
#[test]
fn test_open_quote() {
let mut it = ShellParser::new(b"'hello");
assert_eq!(it.next(), None);
assert!(it.errored)
}
#[test]
fn test_ww2ogg() {
let mut it = ShellParser::new(
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
);
assert_eq!(it.next(), Some("ww2ogg.exe".as_bytes()));
assert_eq!(it.next(), Some("--pcb".as_bytes()));
assert_eq!(
it.next(),
Some("/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin".as_bytes())
);
assert_eq!(it.next(), None);
}
}
#[cfg(test)]
mod bench {
extern crate test;
use super::*;
#[cfg(feature = "shlex-bench")]
use shlex::bytes::Shlex;
use test::Bencher;
mod ww2ogg {
use super::*;
#[bench]
fn custom(b: &mut Bencher) {
let val = test::black_box(
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
);
b.iter(|| {
let it = ShellParser::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
#[cfg(feature = "shlex-bench")]
#[bench]
fn shlex(b: &mut Bencher) {
let val = test::black_box(
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
);
b.iter(|| {
let it = Shlex::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
}
mod one_single {
use super::*;
#[bench]
fn custom(b: &mut Bencher) {
let val = test::black_box(b"'hello'");
b.iter(|| {
let it = ShellParser::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
#[cfg(feature = "shlex-bench")]
#[bench]
fn shlex(b: &mut Bencher) {
let val = test::black_box(b"'hello'");
b.iter(|| {
let it = Shlex::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
}
}

Binary file not shown.

Before: 130 B
After: 58 KiB

@ -1 +0,0 @@
Subproject commit 27beb4bc1ffd2865a432e13f0588b5351ff419bf

@ -1 +0,0 @@
Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751

View file

@ -6,11 +6,10 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ansi_term = { workspace = true }
color-eyre = { workspace = true }
serde = { workspace = true }
steamlocate = { workspace = true }
time = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true }
color-eyre = "0.6.2"
serde = "1.0.152"
steamlocate = { path = "../../lib/steamlocate-rs", version = "*" }
time = { version = "0.3.19", features = ["formatting", "local-offset", "macros"] }
tracing = "0.1.37"
tracing-error = "0.2.0"
tracing-subscriber = "0.3.16"

View file

@ -1,75 +1,52 @@
use std::collections::HashMap;
use std::path::PathBuf;
use color_eyre::eyre::{OptionExt as _, WrapErr as _};
use color_eyre::Result;
use serde::{Deserialize, Serialize};
use steamlocate::SteamDir;
use time::OffsetDateTime;
pub use log::*;
mod log;
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub use log::*;
use serde::Deserialize;
use steamlocate::SteamDir;
use time::OffsetDateTime;
#[derive(Clone, Debug, Default, Deserialize)]
pub struct ModConfigResources {
pub init: PathBuf,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub data: Option<PathBuf>,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub localization: Option<PathBuf>,
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ModOrder {
Before,
After,
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(untagged)]
pub enum ModDependency {
ID(String),
Config { id: String, order: ModOrder },
}
// A bit dumb, but serde doesn't support literal values with the
// `default` attribute, only paths.
fn default_true() -> bool {
true
}
// Similarly dumb, as the `skip_serializing_if` attribute needs a function
fn is_true(val: &bool) -> bool {
*val
}
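// Net effect for `bundled` below: a missing value defaults to `true`, and a
// `true` value is skipped when serializing, so only `bundled = false` is ever
// written out.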
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
#[derive(Clone, Debug, Default, Deserialize)]
pub struct ModConfig {
#[serde(skip)]
pub dir: PathBuf,
pub id: String,
pub name: String,
pub summary: String,
pub version: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub author: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub version: String,
pub image: Option<PathBuf>,
#[serde(default)]
pub categories: Vec<String>,
#[serde(default)]
pub packages: Vec<PathBuf>,
pub resources: ModConfigResources,
#[serde(default)]
pub depends: Vec<ModDependency>,
#[serde(default = "default_true", skip_serializing_if = "is_true")]
pub bundled: bool,
#[serde(default)]
pub name_overrides: HashMap<String, String>,
}
pub const STEAMAPP_ID: u32 = 1361210;
@ -80,23 +57,30 @@ pub struct GameInfo {
pub last_updated: OffsetDateTime,
}
pub fn collect_game_info() -> Result<Option<GameInfo>> {
let dir = SteamDir::locate().wrap_err("Failed to locate Steam installation")?;
let found = dir
.find_app(STEAMAPP_ID)
.wrap_err("Failed to look up game by Steam app ID")?;
let Some((app, _)) = found else {
return Ok(None);
pub fn collect_game_info() -> Option<GameInfo> {
let mut dir = if let Some(dir) = SteamDir::locate() {
dir
} else {
tracing::debug!("Failed to locate Steam installation");
return None;
};
let last_updated = app
.last_updated
.ok_or_eyre("Missing field 'last_updated'")?;
let found = dir
.app(&STEAMAPP_ID)
.and_then(|app| app.vdf.get("LastUpdated").map(|v| (app.path.clone(), v)));
Ok(Some(GameInfo {
path: app.install_dir.into(),
last_updated: last_updated.into(),
}))
let Some((path, last_updated)) = found else {
tracing::debug!("Found Steam, but failed to find game installation");
return None;
};
let Some(last_updated) = last_updated
.as_value()
.and_then(|v| v.to::<i64>())
.and_then(|v| OffsetDateTime::from_unix_timestamp(v).ok()) else {
tracing::error!("Found Steam game, but couldn't read 'LastUpdate'.");
return None;
};
Some(GameInfo { path, last_updated })
}

View file

@ -1,11 +1,10 @@
use std::fmt::Result;
use ansi_term::Color;
use time::format_description::FormatItem;
use time::macros::format_description;
use time::OffsetDateTime;
use tracing::field::Field;
use tracing::{Event, Level, Metadata, Subscriber};
use tracing::{Event, Metadata, Subscriber};
use tracing_error::ErrorLayer;
use tracing_subscriber::filter::FilterFn;
use tracing_subscriber::fmt::format::{debug_fn, Writer};
@ -50,28 +49,7 @@ where
let time = OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc());
let time = time.format(TIME_FORMAT).map_err(|_| std::fmt::Error)?;
let level = meta.level();
// Sadly, tracing's `Level` is a struct, not an enum, so we can't properly `match` it.
let color = if *level == Level::TRACE {
Color::Purple
} else if *level == Level::DEBUG {
Color::Blue
} else if *level == Level::INFO {
Color::Green
} else if *level == Level::WARN {
Color::Yellow
} else if *level == Level::ERROR {
Color::Red
} else {
unreachable!()
};
write!(
writer,
"[{}] [{:>5}] ",
time,
color.bold().paint(format!("{}", level))
)?;
write!(writer, "[{}] [{:>5}] ", time, meta.level())?;
ctx.field_format().format_fields(writer.by_ref(), event)?;
@ -84,7 +62,7 @@ pub fn create_tracing_subscriber() {
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
let fmt_layer = fmt::layer().pretty();
(Some(fmt_layer), None, None)
} else {
// Creates a layer that
@ -93,7 +71,6 @@ pub fn create_tracing_subscriber() {
// - does not print spans/targets
// - only prints time, not date
let fmt_layer = fmt::layer()
.with_writer(std::io::stderr)
.event_format(Formatter)
.fmt_fields(debug_fn(format_fields));

@ -1 +0,0 @@
Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241

View file

@ -9,10 +9,10 @@ edition = "2021"
futures = "0.3.26"
lazy_static = "1.4.0"
regex = "1.7.1"
reqwest = { version = "0.12.4" }
reqwest = { version = "0.11.14" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.94"
thiserror = "2.0.0"
thiserror = "1.0.39"
time = { version = "0.3.20", features = ["serde"] }
tracing = "0.1.37"
url = { version = "2.3.1", features = ["serde"] }

View file

@ -4,7 +4,7 @@ use std::convert::Infallible;
use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header::{HeaderMap, HeaderValue, InvalidHeaderValue};
use reqwest::{Client, IntoUrl, RequestBuilder, Url};
use reqwest::{Client, RequestBuilder, Url};
use serde::Deserialize;
use thiserror::Error;
@ -28,7 +28,7 @@ pub enum Error {
HTTP(#[from] reqwest::Error),
#[error("invalid URL: {0:?}")]
URLParseError(#[from] url::ParseError),
#[error("failed to deserialize due to {error}: {json}")]
#[error("failed to deserialize '{error}': {json}")]
Deserialize {
json: String,
error: serde_json::Error,
@ -37,10 +37,8 @@ pub enum Error {
InvalidHeaderValue(#[from] InvalidHeaderValue),
#[error("this error cannot happen")]
Infallible(#[from] Infallible),
#[error("invalid NXM URL '{url}': {0}", url = .1.as_str())]
#[error("invalid NXM URL '{}': {0}", .1.as_str())]
InvalidNXM(&'static str, Url),
#[error("{0}")]
Custom(String),
}
pub type Result<T> = std::result::Result<T, Error>;
@ -104,45 +102,6 @@ impl Api {
self.send(req).await
}
#[tracing::instrument(skip(self))]
pub async fn file_version<T>(&self, id: u64, timestamp: T) -> Result<String>
where
T: std::fmt::Debug,
OffsetDateTime: PartialEq<T>,
{
let url = BASE_URL_GAME.join(&format!("mods/{id}/files.json"))?;
let req = self.client.get(url);
let files: FileList = self.send(req).await?;
let Some(file) = files
.files
.into_iter()
.find(|file| file.uploaded_timestamp == timestamp)
else {
let err = Error::Custom("Timestamp does not match any file".into());
return Err(err);
};
Ok(file.version)
}
#[tracing::instrument(skip(self))]
pub async fn picture(&self, url: impl IntoUrl + std::fmt::Debug) -> Result<Vec<u8>> {
let res = self.client.get(url).send().await?.error_for_status()?;
res.bytes()
.await
.map(|bytes| bytes.to_vec())
.map_err(From::from)
}
#[tracing::instrument(skip(self))]
pub async fn get_file_by_id(&self, mod_id: u64, file_id: u64) -> Result<File> {
let url = BASE_URL_GAME.join(&format!("mods/{mod_id}/files/{file_id}.json"))?;
let req = self.client.get(url);
self.send(req).await
}
pub fn parse_file_name<S: AsRef<str>>(
name: S,
) -> Option<(String, u64, String, OffsetDateTime)> {
@ -153,7 +112,7 @@ impl Api {
RE.captures(name.as_ref()).and_then(|cap| {
let name = cap.name("name").map(|s| s.as_str().to_string())?;
let mod_id = cap.name("mod_id").and_then(|s| s.as_str().parse().ok())?;
let version = cap.name("version").map(|s| s.as_str().replace('-', "."))?;
let version = cap.name("version").map(|s| s.as_str().to_string())?;
let updated = cap
.name("updated")
.and_then(|s| s.as_str().parse().ok())
@ -181,7 +140,7 @@ impl Api {
self.send(req).await
}
pub async fn handle_nxm(&self, url: Url) -> Result<(Mod, File, Vec<u8>)> {
pub async fn handle_nxm(&self, url: Url) -> Result<(Mod, Vec<u8>)> {
let nxm = Self::parse_nxm(url.clone())?;
let user = self.user_validate().await?;
@ -190,20 +149,19 @@ impl Api {
return Err(Error::InvalidNXM("user_id mismatch", url));
}
let (mod_data, file_info, download_info) = futures::try_join!(
let (mod_data, download_info) = futures::try_join!(
self.mods_id(nxm.mod_id),
self.get_file_by_id(nxm.mod_id, nxm.file_id),
self.mods_download_link(nxm.mod_id, nxm.file_id, nxm.key, nxm.expires)
)?;
let Some(download_url) = download_info.first().map(|i| i.uri.clone()) else {
let Some(download_url) = download_info.get(0).map(|i| i.uri.clone()) else {
return Err(Error::InvalidNXM("no download link", url));
};
let req = self.client.get(download_url);
let data = req.send().await?.bytes().await?;
Ok((mod_data, file_info, data.to_vec()))
Ok((mod_data, data.to_vec()))
}
pub fn parse_nxm(nxm: Url) -> Result<Nxm> {
@ -218,14 +176,11 @@ impl Api {
}
let Some(mut segments) = nxm.path_segments() else {
return Err(Error::InvalidNXM("Missing path segments", nxm));
return Err(Error::InvalidNXM("Cannot be a base", nxm));
};
if segments.next() != Some("mods") {
return Err(Error::InvalidNXM(
"Unexpected path segment, expected 'mods'",
nxm,
));
return Err(Error::InvalidNXM("Unexpected path segment", nxm));
}
let Some(mod_id) = segments.next().and_then(|id| id.parse().ok()) else {
@ -233,10 +188,7 @@ impl Api {
};
if segments.next() != Some("files") {
return Err(Error::InvalidNXM(
"Unexpected path segment, expected 'files'",
nxm,
));
return Err(Error::InvalidNXM("Unexpected path segment", nxm));
}
let Some(file_id) = segments.next().and_then(|id| id.parse().ok()) else {
@ -251,7 +203,7 @@ impl Api {
}
let Some(key) = query.get("key") else {
return Err(Error::InvalidNXM("Missing query field 'key'", nxm));
return Err(Error::InvalidNXM("Missing 'key'", nxm));
};
let expires = query
@ -259,12 +211,12 @@ impl Api {
.and_then(|expires| expires.parse().ok())
.and_then(|expires| OffsetDateTime::from_unix_timestamp(expires).ok());
let Some(expires) = expires else {
return Err(Error::InvalidNXM("Missing query field 'expires'", nxm));
return Err(Error::InvalidNXM("Missing 'expires'", nxm));
};
let user_id = query.get("user_id").and_then(|id| id.parse().ok());
let Some(user_id) = user_id else {
return Err(Error::InvalidNXM("Missing query field 'user_id'", nxm));
return Err(Error::InvalidNXM("Missing 'user_id'", nxm));
};
Ok(Nxm {

View file

@ -64,35 +64,6 @@ pub struct Mod {
// pub contains_adult_content: bool,
}
#[derive(Debug, Deserialize)]
pub struct File {
pub id: Vec<u64>,
pub uid: u64,
pub file_id: u64,
pub name: String,
pub version: String,
pub category_id: u64,
pub category_name: String,
pub is_primary: bool,
pub size: u64,
pub file_name: String,
#[serde(with = "time::serde::timestamp")]
pub uploaded_timestamp: OffsetDateTime,
pub mod_version: String,
pub external_virus_scan_url: String,
pub description: String,
pub size_kb: u64,
pub size_in_bytes: u64,
pub changelog_html: Option<String>,
pub content_preview_link: String,
}
#[derive(Debug, Deserialize)]
pub struct FileList {
pub files: Vec<File>,
// pub file_updates: Vec<serde_json::Value>,
}
#[derive(Debug, Deserialize)]
pub struct DownloadLink {
pub name: String,

View file

@ -6,8 +6,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
color-eyre = { workspace = true }
tracing = { workspace = true }
color-eyre = "0.6.2"
tracing = "0.1.37"
[build-dependencies]
bindgen = "0.71.0"
bindgen = "0.64.0"

View file

@ -1,21 +1,27 @@
extern crate bindgen;
use std::env;
use std::path::PathBuf;
fn main() {
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").expect("No CARGO_MANIFEST_DIR");
println!("cargo:rustc-link-search=native={}", &manifest_dir);
// Tell cargo to look for shared libraries in the specified directory
if let Ok(manifest_dir) = std::env::var("CARGO_MANIFEST_DIR") {
println!("cargo:rustc-link-search={}", manifest_dir);
dbg!(&manifest_dir);
}
if std::env::var("CARGO_CFG_TARGET_FAMILY") == Ok(String::from("windows")) {
let lib_name = if cfg!(debug_assertions) {
let lib_name = if std::env::var("CARGO_CFG_WINDOWS").is_ok() {
if cfg!(debug_assertions) {
"oo2core_win64_debug"
} else {
"oo2core_win64"
};
println!("cargo:rustc-link-lib=static={}", lib_name);
} else {
println!("cargo:rustc-link-lib=static=oo2corelinux64");
println!("cargo:rustc-link-lib=stdc++");
}
} else {
"oo2corelinux64"
};
println!("cargo:rustc-link-lib={}", lib_name);
dbg!(&lib_name);
println!("cargo:rerun-if-changed=oodle2.h");
@ -31,7 +37,7 @@ fn main() {
.blocklist_file("stdlib.h")
// Tell cargo to invalidate the built crate whenever any of the
// included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.

View file

@ -7,7 +7,6 @@ use std::ptr;
use color_eyre::{eyre, Result};
#[allow(dead_code)]
#[allow(clippy::identity_op)]
mod bindings {
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}

View file

@ -4,23 +4,24 @@ version = "0.3.0"
edition = "2021"
[dependencies]
async-recursion = { workspace = true }
bitflags = { workspace = true }
byteorder = { workspace = true }
color-eyre = { workspace = true }
csv-async = { workspace = true }
fastrand = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
glob = { workspace = true }
luajit2-sys = { workspace = true }
nanorand = { workspace = true }
oodle = { workspace = true }
path-slash = { workspace = true }
pin-project-lite = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
bitflags = "1.3.2"
byteorder = "1.4.3"
color-eyre = "0.6.2"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
fastrand = "1.8.0"
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
libloading = "0.7.4"
nanorand = "0.7.0"
pin-project-lite = "0.2.9"
serde = { version = "1.0.147", features = ["derive"] }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
oodle = { path = "../../lib/oodle", version = "*" }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
luajit2-sys = "0.0.2"
async-recursion = "1.0.2"
path-slash = "0.2.1"

View file

@ -43,7 +43,6 @@ impl<T: FromBinary> FromBinary for Vec<T> {
}
pub mod sync {
use std::ffi::CStr;
use std::io::{self, Read, Seek, SeekFrom};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@ -134,23 +133,6 @@ pub mod sync {
make_skip!(skip_u8, read_u8, u8);
make_skip!(skip_u32, read_u32, u32);
// Implementation based on https://en.wikipedia.com/wiki/LEB128
fn read_uleb128(&mut self) -> io::Result<u64> {
let mut result: u64 = 0;
let mut shift: u64 = 0;
loop {
let byte = ReadExt::read_u8(self)? as u64;
result |= (byte & 0x7f) << shift;
if byte < 0x80 {
return Ok(result);
}
shift += 7;
}
}
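// Worked example: the bytes [0xE5, 0x8E, 0x26] decode as
// 0x65 | (0x0E << 7) | (0x26 << 14) = 624485 (0x98765); the third byte has
// its high bit clear, so the loop stops there.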
fn skip_padding(&mut self) -> io::Result<()> {
let pos = self.stream_position()?;
let padding_size = 16 - (pos % 16);
@ -166,13 +148,25 @@ pub mod sync {
}
fn read_string_len(&mut self, len: usize) -> Result<String> {
let pos = self.stream_position();
let mut buf = vec![0; len];
let res = self
.read_exact(&mut buf)
.map_err(Report::new)
.and_then(|_| {
String::from_utf8(buf).map_err(|err| {
let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
let bytes = format!("{:?}", err.as_bytes());
Report::new(err)
.with_section(move || bytes.header("Bytes:"))
.with_section(move || ascii.header("ASCII:"))
})
});
let res = read_string_len(self, len);
if res.is_ok() {
return res;
}
let pos = self.stream_position();
if pos.is_ok() {
res.with_section(|| {
format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
@ -232,22 +226,4 @@ pub mod sync {
Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
}
fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
let mut buf = vec![0; len];
r.read_exact(&mut buf)
.wrap_err_with(|| format!("Failed to read {} bytes", len))?;
let res = match CStr::from_bytes_until_nul(&buf) {
Ok(s) => {
let s = s.to_str()?;
Ok(s.to_string())
}
Err(_) => String::from_utf8(buf.clone()).map_err(Report::new),
};
res.wrap_err("Invalid binary for UTF8 string")
.with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCII:"))
.with_section(|| format!("{:x?}", buf).header("Bytes:"))
}
}

View file

@ -13,21 +13,21 @@ use crate::binary::ToBinary;
use crate::murmur::Murmur64;
use crate::Bundle;
use super::filetype::BundleFileType;
use super::file::BundleFileType;
const DATABASE_VERSION: u32 = 0x6;
const FILE_VERSION: u32 = 0x4;
pub struct BundleFile {
pub name: String,
pub stream: String,
pub platform_specific: bool,
pub file_time: u64,
name: String,
stream: String,
platform_specific: bool,
file_time: u64,
}
pub struct FileName {
pub extension: BundleFileType,
pub name: Murmur64,
extension: BundleFileType,
name: Murmur64,
}
pub struct BundleDatabase {
@ -36,34 +36,7 @@ pub struct BundleDatabase {
bundle_contents: HashMap<Murmur64, Vec<FileName>>,
}
// Implements the partial Murmur that's used by the engine to compute bundle resource hashes,
// but in a way that the loop can be done outside the function.
#[inline(always)]
fn add_to_resource_hash(mut k: u64, name: impl Into<u64>) -> u64 {
const M: u64 = 0xc6a4a7935bd1e995;
const R: u64 = 47;
let mut h: u64 = name.into();
k = k.wrapping_mul(M);
k ^= k >> R;
k = k.wrapping_mul(M);
h ^= k;
k = M.wrapping_mul(h);
k
}
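// Intended usage is to fold this over all file names of a bundle, as done in
// `add_bundle` below:
//   let mut hash = 0;
//   for f in bundle.files() {
//       hash = add_to_resource_hash(hash, f.base_name().to_murmur64());
//   }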
impl BundleDatabase {
pub fn bundles(&self) -> &HashMap<Murmur64, Vec<BundleFile>> {
&self.stored_files
}
pub fn files(&self) -> &HashMap<Murmur64, Vec<FileName>> {
&self.bundle_contents
}
pub fn add_bundle(&mut self, bundle: &Bundle) {
let hash = bundle.name().to_murmur64();
let name = hash.to_string();
@ -96,26 +69,20 @@ impl BundleDatabase {
}
}
let mut resource_hash = 0;
for f in bundle.files() {
let name = f.base_name().to_murmur64();
let file_name = FileName {
extension: f.file_type(),
name,
name: f.base_name().to_murmur64(),
};
resource_hash = add_to_resource_hash(resource_hash, name);
// TODO: Compute actual resource hash
self.resource_hashes.insert(hash, 0);
// TODO: Make sure each file name only exists once. Probably best to turn
// the `Vec` into a sorted `HashSet`.
self.bundle_contents
.entry(hash)
.or_default()
.push(file_name);
}
self.resource_hashes.insert(hash, resource_hash);
}
}
@ -136,7 +103,7 @@ impl FromBinary for BundleDatabase {
let mut stored_files = HashMap::with_capacity(num_entries);
for _ in 0..num_entries {
let hash = r.read_u64().map(Murmur64::from)?;
let hash = Murmur64::from(r.read_u64()?);
let num_files = r.read_u32()? as usize;
let mut files = Vec::with_capacity(num_files);
@ -194,7 +161,7 @@ impl FromBinary for BundleDatabase {
let mut resource_hashes = HashMap::with_capacity(num_hashes);
for _ in 0..num_hashes {
let name = r.read_u64().map(Murmur64::from)?;
let name = Murmur64::from(r.read_u64()?);
let hash = r.read_u64()?;
resource_hashes.insert(name, hash);
@ -204,14 +171,14 @@ impl FromBinary for BundleDatabase {
let mut bundle_contents = HashMap::with_capacity(num_contents);
for _ in 0..num_contents {
let hash = r.read_u64().map(Murmur64::from)?;
let hash = Murmur64::from(r.read_u64()?);
let num_files = r.read_u32()? as usize;
let mut files = Vec::with_capacity(num_files);
for _ in 0..num_files {
let extension = r.read_u64().map(BundleFileType::from)?;
let name = r.read_u64().map(Murmur64::from)?;
let extension = BundleFileType::from(r.read_u64()?);
let name = Murmur64::from(r.read_u64()?);
files.push(FileName { extension, name });
}

View file

@ -5,12 +5,407 @@ use bitflags::bitflags;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Result};
use futures::future::join_all;
use serde::Serialize;
use crate::binary::sync::*;
use crate::filetype::*;
use crate::murmur::{HashGroup, IdString64, Murmur64};
use super::filetype::BundleFileType;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
Animation,
AnimationCurves,
Apb,
BakedLighting,
Bik,
BlendSet,
Bones,
Chroma,
CommonPackage,
Config,
Crypto,
Data,
Entity,
Flow,
Font,
Ies,
Ini,
Input,
Ivf,
Keys,
Level,
Lua,
Material,
Mod,
MouseCursor,
NavData,
NetworkConfig,
OddleNet,
Package,
Particles,
PhysicsProperties,
RenderConfig,
RtPipeline,
Scene,
Shader,
ShaderLibrary,
ShaderLibraryGroup,
ShadingEnvionmentMapping,
ShadingEnvironment,
Slug,
SlugAlbum,
SoundEnvironment,
SpuJob,
StateMachine,
StaticPVS,
Strings,
SurfaceProperties,
Texture,
TimpaniBank,
TimpaniMaster,
Tome,
Ugg,
Unit,
Upb,
VectorField,
Wav,
WwiseBank,
WwiseDep,
WwiseEvent,
WwiseMetadata,
WwiseStream,
Xml,
Unknown(Murmur64),
}
impl BundleFileType {
pub fn ext_name(&self) -> String {
match self {
BundleFileType::AnimationCurves => String::from("animation_curves"),
BundleFileType::Animation => String::from("animation"),
BundleFileType::Apb => String::from("apb"),
BundleFileType::BakedLighting => String::from("baked_lighting"),
BundleFileType::Bik => String::from("bik"),
BundleFileType::BlendSet => String::from("blend_set"),
BundleFileType::Bones => String::from("bones"),
BundleFileType::Chroma => String::from("chroma"),
BundleFileType::CommonPackage => String::from("common_package"),
BundleFileType::Config => String::from("config"),
BundleFileType::Crypto => String::from("crypto"),
BundleFileType::Data => String::from("data"),
BundleFileType::Entity => String::from("entity"),
BundleFileType::Flow => String::from("flow"),
BundleFileType::Font => String::from("font"),
BundleFileType::Ies => String::from("ies"),
BundleFileType::Ini => String::from("ini"),
BundleFileType::Input => String::from("input"),
BundleFileType::Ivf => String::from("ivf"),
BundleFileType::Keys => String::from("keys"),
BundleFileType::Level => String::from("level"),
BundleFileType::Lua => String::from("lua"),
BundleFileType::Material => String::from("material"),
BundleFileType::Mod => String::from("mod"),
BundleFileType::MouseCursor => String::from("mouse_cursor"),
BundleFileType::NavData => String::from("nav_data"),
BundleFileType::NetworkConfig => String::from("network_config"),
BundleFileType::OddleNet => String::from("oodle_net"),
BundleFileType::Package => String::from("package"),
BundleFileType::Particles => String::from("particles"),
BundleFileType::PhysicsProperties => String::from("physics_properties"),
BundleFileType::RenderConfig => String::from("render_config"),
BundleFileType::RtPipeline => String::from("rt_pipeline"),
BundleFileType::Scene => String::from("scene"),
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
BundleFileType::ShaderLibrary => String::from("shader_library"),
BundleFileType::Shader => String::from("shader"),
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
BundleFileType::SlugAlbum => String::from("slug_album"),
BundleFileType::Slug => String::from("slug"),
BundleFileType::SoundEnvironment => String::from("sound_environment"),
BundleFileType::SpuJob => String::from("spu_job"),
BundleFileType::StateMachine => String::from("state_machine"),
BundleFileType::StaticPVS => String::from("static_pvs"),
BundleFileType::Strings => String::from("strings"),
BundleFileType::SurfaceProperties => String::from("surface_properties"),
BundleFileType::Texture => String::from("texture"),
BundleFileType::TimpaniBank => String::from("timpani_bank"),
BundleFileType::TimpaniMaster => String::from("timpani_master"),
BundleFileType::Tome => String::from("tome"),
BundleFileType::Ugg => String::from("ugg"),
BundleFileType::Unit => String::from("unit"),
BundleFileType::Upb => String::from("upb"),
BundleFileType::VectorField => String::from("vector_field"),
BundleFileType::Wav => String::from("wav"),
BundleFileType::WwiseBank => String::from("wwise_bank"),
BundleFileType::WwiseDep => String::from("wwise_dep"),
BundleFileType::WwiseEvent => String::from("wwise_event"),
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
BundleFileType::WwiseStream => String::from("wwise_stream"),
BundleFileType::Xml => String::from("xml"),
BundleFileType::Unknown(s) => format!("{s:016X}"),
}
}
pub fn decompiled_ext_name(&self) -> String {
match self {
BundleFileType::Texture => String::from("dds"),
BundleFileType::WwiseBank => String::from("bnk"),
BundleFileType::WwiseStream => String::from("ogg"),
_ => self.ext_name(),
}
}
pub fn hash(&self) -> Murmur64 {
Murmur64::from(*self)
}
}
impl std::str::FromStr for BundleFileType {
type Err = color_eyre::Report;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let val = match s {
"animation_curves" => BundleFileType::AnimationCurves,
"animation" => BundleFileType::Animation,
"apb" => BundleFileType::Apb,
"baked_lighting" => BundleFileType::BakedLighting,
"bik" => BundleFileType::Bik,
"blend_set" => BundleFileType::BlendSet,
"bones" => BundleFileType::Bones,
"chroma" => BundleFileType::Chroma,
"common_package" => BundleFileType::CommonPackage,
"config" => BundleFileType::Config,
"crypto" => BundleFileType::Crypto,
"data" => BundleFileType::Data,
"entity" => BundleFileType::Entity,
"flow" => BundleFileType::Flow,
"font" => BundleFileType::Font,
"ies" => BundleFileType::Ies,
"ini" => BundleFileType::Ini,
"input" => BundleFileType::Input,
"ivf" => BundleFileType::Ivf,
"keys" => BundleFileType::Keys,
"level" => BundleFileType::Level,
"lua" => BundleFileType::Lua,
"material" => BundleFileType::Material,
"mod" => BundleFileType::Mod,
"mouse_cursor" => BundleFileType::MouseCursor,
"nav_data" => BundleFileType::NavData,
"network_config" => BundleFileType::NetworkConfig,
"oodle_net" => BundleFileType::OddleNet,
"package" => BundleFileType::Package,
"particles" => BundleFileType::Particles,
"physics_properties" => BundleFileType::PhysicsProperties,
"render_config" => BundleFileType::RenderConfig,
"rt_pipeline" => BundleFileType::RtPipeline,
"scene" => BundleFileType::Scene,
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
"shader_library" => BundleFileType::ShaderLibrary,
"shader" => BundleFileType::Shader,
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
"shading_environment" => BundleFileType::ShadingEnvironment,
"slug_album" => BundleFileType::SlugAlbum,
"slug" => BundleFileType::Slug,
"sound_environment" => BundleFileType::SoundEnvironment,
"spu_job" => BundleFileType::SpuJob,
"state_machine" => BundleFileType::StateMachine,
"static_pvs" => BundleFileType::StaticPVS,
"strings" => BundleFileType::Strings,
"surface_properties" => BundleFileType::SurfaceProperties,
"texture" => BundleFileType::Texture,
"timpani_bank" => BundleFileType::TimpaniBank,
"timpani_master" => BundleFileType::TimpaniMaster,
"tome" => BundleFileType::Tome,
"ugg" => BundleFileType::Ugg,
"unit" => BundleFileType::Unit,
"upb" => BundleFileType::Upb,
"vector_field" => BundleFileType::VectorField,
"wav" => BundleFileType::Wav,
"wwise_bank" => BundleFileType::WwiseBank,
"wwise_dep" => BundleFileType::WwiseDep,
"wwise_event" => BundleFileType::WwiseEvent,
"wwise_metadata" => BundleFileType::WwiseMetadata,
"wwise_stream" => BundleFileType::WwiseStream,
"xml" => BundleFileType::Xml,
s => eyre::bail!("Unknown type string '{}'", s),
};
Ok(val)
}
}
impl Serialize for BundleFileType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let value = self.ext_name();
value.serialize(serializer)
}
}
impl From<Murmur64> for BundleFileType {
fn from(value: Murmur64) -> Self {
Self::from(Into::<u64>::into(value))
}
}
impl From<u64> for BundleFileType {
fn from(hash: u64) -> BundleFileType {
match hash {
0x931e336d7646cc26 => BundleFileType::Animation,
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
0x3eed05ba83af5090 => BundleFileType::Apb,
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
0xaa5965f03029fa18 => BundleFileType::Bik,
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
0x18dead01056b72e9 => BundleFileType::Bones,
0xb7893adf7567506a => BundleFileType::Chroma,
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
0x82645835e6b73232 => BundleFileType::Config,
0x69108ded1e3e634b => BundleFileType::Crypto,
0x8fd0d44d20650b68 => BundleFileType::Data,
0x9831ca893b0d087d => BundleFileType::Entity,
0x92d3ee038eeb610d => BundleFileType::Flow,
0x9efe0a916aae7880 => BundleFileType::Font,
0x8f7d5a2c0f967655 => BundleFileType::Ies,
0xd526a27da14f1dc5 => BundleFileType::Ini,
0x2bbcabe5074ade9e => BundleFileType::Input,
0xfa4a8e091a91201e => BundleFileType::Ivf,
0xa62f9297dc969e85 => BundleFileType::Keys,
0x2a690fd348fe9ac5 => BundleFileType::Level,
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
0xeac0b497876adedf => BundleFileType::Material,
0x3fcdd69156a46417 => BundleFileType::Mod,
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
0x169de9566953d264 => BundleFileType::NavData,
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
0xad9c6d9ed1e5e77a => BundleFileType::Package,
0xa8193123526fad64 => BundleFileType::Particles,
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
0x27862fe24795319c => BundleFileType::RenderConfig,
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
0x9d0a795bfe818d19 => BundleFileType::Scene,
0xcce8d5b5f5ae333f => BundleFileType::Shader,
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
0xf97af9983c05b950 => BundleFileType::SpuJob,
0xa486d4045106165c => BundleFileType::StateMachine,
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
0x0d972bab10b40fd3 => BundleFileType::Strings,
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
0xcd4238c6a0c69e32 => BundleFileType::Texture,
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
0x19c792357c99f49b => BundleFileType::Tome,
0x712d6e3dd1024c9c => BundleFileType::Ugg,
0xe0a48d0be9a7453f => BundleFileType::Unit,
0xa99510c6e86dd3c2 => BundleFileType::Upb,
0xf7505933166d6755 => BundleFileType::VectorField,
0x786f65c00a816b19 => BundleFileType::Wav,
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
0x504b55235d21440e => BundleFileType::WwiseStream,
0x76015845a6003765 => BundleFileType::Xml,
_ => BundleFileType::Unknown(Murmur64::from(hash)),
}
}
}
impl From<BundleFileType> for u64 {
fn from(t: BundleFileType) -> u64 {
match t {
BundleFileType::Animation => 0x931e336d7646cc26,
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
BundleFileType::Apb => 0x3eed05ba83af5090,
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
BundleFileType::Bik => 0xaa5965f03029fa18,
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
BundleFileType::Bones => 0x18dead01056b72e9,
BundleFileType::Chroma => 0xb7893adf7567506a,
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
BundleFileType::Config => 0x82645835e6b73232,
BundleFileType::Crypto => 0x69108ded1e3e634b,
BundleFileType::Data => 0x8fd0d44d20650b68,
BundleFileType::Entity => 0x9831ca893b0d087d,
BundleFileType::Flow => 0x92d3ee038eeb610d,
BundleFileType::Font => 0x9efe0a916aae7880,
BundleFileType::Ies => 0x8f7d5a2c0f967655,
BundleFileType::Ini => 0xd526a27da14f1dc5,
BundleFileType::Input => 0x2bbcabe5074ade9e,
BundleFileType::Ivf => 0xfa4a8e091a91201e,
BundleFileType::Keys => 0xa62f9297dc969e85,
BundleFileType::Level => 0x2a690fd348fe9ac5,
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
BundleFileType::Material => 0xeac0b497876adedf,
BundleFileType::Mod => 0x3fcdd69156a46417,
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
BundleFileType::NavData => 0x169de9566953d264,
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
BundleFileType::Particles => 0xa8193123526fad64,
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
BundleFileType::RenderConfig => 0x27862fe24795319c,
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
BundleFileType::Scene => 0x9d0a795bfe818d19,
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
BundleFileType::SpuJob => 0xf97af9983c05b950,
BundleFileType::StateMachine => 0xa486d4045106165c,
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
BundleFileType::Strings => 0x0d972bab10b40fd3,
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
BundleFileType::Texture => 0xcd4238c6a0c69e32,
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
BundleFileType::Tome => 0x19c792357c99f49b,
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
BundleFileType::Unit => 0xe0a48d0be9a7453f,
BundleFileType::Upb => 0xa99510c6e86dd3c2,
BundleFileType::VectorField => 0xf7505933166d6755,
BundleFileType::Wav => 0x786f65c00a816b19,
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
BundleFileType::WwiseStream => 0x504b55235d21440e,
BundleFileType::Xml => 0x76015845a6003765,
BundleFileType::Unknown(hash) => hash.into(),
}
}
}
impl From<BundleFileType> for Murmur64 {
fn from(t: BundleFileType) -> Murmur64 {
let hash: u64 = t.into();
Murmur64::from(hash)
}
}
impl std::fmt::Display for BundleFileType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.ext_name())
}
}
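
The impls above give `BundleFileType` a lossless round trip between enum variants, the engine's Murmur64 type hashes, and the extension name used by `Display`. A minimal sketch of what that buys a caller, assuming `BundleFileType` is imported from this library and still derives `PartialEq`/`Copy` as in the pre-split file further down:

fn roundtrip() {
    let t = BundleFileType::from(0xa14e8dfa2cd117e2u64); // hash -> variant
    assert_eq!(t, BundleFileType::Lua);
    let back: u64 = t.into();                            // variant -> hash
    assert_eq!(back, 0xa14e8dfa2cd117e2);
    assert_eq!(t.to_string(), "lua");                    // Display goes through ext_name()
}
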
#[derive(Debug)]
struct BundleFileHeader {
@@ -106,7 +501,7 @@ impl BundleFileVariant {
}
bitflags! {
#[derive(Default, Clone, Copy, Debug)]
#[derive(Default)]
pub struct Properties: u32 {
const DATA = 0b100;
}
@@ -120,7 +515,7 @@ pub struct BundleFile {
}
impl BundleFile {
pub fn new(name: impl Into<IdString64>, file_type: BundleFileType) -> Self {
pub fn new(name: String, file_type: BundleFileType) -> Self {
Self {
file_type,
name: name.into(),
@@ -252,15 +647,20 @@ impl BundleFile {
Ok(w.into_inner())
}
#[tracing::instrument("File::from_sjson", skip(sjson, name), fields(name = %name.display()))]
pub async fn from_sjson(
name: IdString64,
#[tracing::instrument(name = "File::from_sjson", skip(sjson))]
pub async fn from_sjson<P, S>(
name: String,
file_type: BundleFileType,
sjson: impl AsRef<str>,
root: impl AsRef<Path> + std::fmt::Debug,
) -> Result<Self> {
sjson: S,
root: P,
) -> Result<Self>
where
P: AsRef<Path> + std::fmt::Debug,
S: AsRef<str>,
{
match file_type {
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
BundleFileType::Lua => lua::compile(name.clone(), sjson)
.wrap_err_with(|| format!("Failed to compile Lua file '{}'", name)),
BundleFileType::Unknown(_) => {
eyre::bail!("Unknown file type. Cannot compile from SJSON");
}
@@ -299,7 +699,10 @@ impl BundleFile {
s
}
pub fn matches_name(&self, name: impl Into<IdString64>) -> bool {
pub fn matches_name<S>(&self, name: S) -> bool
where
S: Into<IdString64>,
{
let name = name.into();
if self.name == name {
return true;


@@ -1,400 +0,0 @@
use color_eyre::{eyre, Result};
use serde::Serialize;
use crate::murmur::Murmur64;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
Animation,
AnimationCurves,
Apb,
BakedLighting,
Bik,
BlendSet,
Bones,
Chroma,
CommonPackage,
Config,
Crypto,
Data,
Entity,
Flow,
Font,
Ies,
Ini,
Input,
Ivf,
Keys,
Level,
Lua,
Material,
Mod,
MouseCursor,
NavData,
NetworkConfig,
OddleNet,
Package,
Particles,
PhysicsProperties,
RenderConfig,
RtPipeline,
Scene,
Shader,
ShaderLibrary,
ShaderLibraryGroup,
ShadingEnvionmentMapping,
ShadingEnvironment,
Slug,
SlugAlbum,
SoundEnvironment,
SpuJob,
StateMachine,
StaticPVS,
Strings,
SurfaceProperties,
Texture,
TimpaniBank,
TimpaniMaster,
Tome,
Ugg,
Unit,
Upb,
VectorField,
Wav,
WwiseBank,
WwiseDep,
WwiseEvent,
WwiseMetadata,
WwiseStream,
Xml,
Unknown(Murmur64),
}
impl BundleFileType {
pub fn ext_name(&self) -> String {
match self {
BundleFileType::AnimationCurves => String::from("animation_curves"),
BundleFileType::Animation => String::from("animation"),
BundleFileType::Apb => String::from("apb"),
BundleFileType::BakedLighting => String::from("baked_lighting"),
BundleFileType::Bik => String::from("bik"),
BundleFileType::BlendSet => String::from("blend_set"),
BundleFileType::Bones => String::from("bones"),
BundleFileType::Chroma => String::from("chroma"),
BundleFileType::CommonPackage => String::from("common_package"),
BundleFileType::Config => String::from("config"),
BundleFileType::Crypto => String::from("crypto"),
BundleFileType::Data => String::from("data"),
BundleFileType::Entity => String::from("entity"),
BundleFileType::Flow => String::from("flow"),
BundleFileType::Font => String::from("font"),
BundleFileType::Ies => String::from("ies"),
BundleFileType::Ini => String::from("ini"),
BundleFileType::Input => String::from("input"),
BundleFileType::Ivf => String::from("ivf"),
BundleFileType::Keys => String::from("keys"),
BundleFileType::Level => String::from("level"),
BundleFileType::Lua => String::from("lua"),
BundleFileType::Material => String::from("material"),
BundleFileType::Mod => String::from("mod"),
BundleFileType::MouseCursor => String::from("mouse_cursor"),
BundleFileType::NavData => String::from("nav_data"),
BundleFileType::NetworkConfig => String::from("network_config"),
BundleFileType::OddleNet => String::from("oodle_net"),
BundleFileType::Package => String::from("package"),
BundleFileType::Particles => String::from("particles"),
BundleFileType::PhysicsProperties => String::from("physics_properties"),
BundleFileType::RenderConfig => String::from("render_config"),
BundleFileType::RtPipeline => String::from("rt_pipeline"),
BundleFileType::Scene => String::from("scene"),
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
BundleFileType::ShaderLibrary => String::from("shader_library"),
BundleFileType::Shader => String::from("shader"),
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
BundleFileType::SlugAlbum => String::from("slug_album"),
BundleFileType::Slug => String::from("slug"),
BundleFileType::SoundEnvironment => String::from("sound_environment"),
BundleFileType::SpuJob => String::from("spu_job"),
BundleFileType::StateMachine => String::from("state_machine"),
BundleFileType::StaticPVS => String::from("static_pvs"),
BundleFileType::Strings => String::from("strings"),
BundleFileType::SurfaceProperties => String::from("surface_properties"),
BundleFileType::Texture => String::from("texture"),
BundleFileType::TimpaniBank => String::from("timpani_bank"),
BundleFileType::TimpaniMaster => String::from("timpani_master"),
BundleFileType::Tome => String::from("tome"),
BundleFileType::Ugg => String::from("ugg"),
BundleFileType::Unit => String::from("unit"),
BundleFileType::Upb => String::from("upb"),
BundleFileType::VectorField => String::from("vector_field"),
BundleFileType::Wav => String::from("wav"),
BundleFileType::WwiseBank => String::from("wwise_bank"),
BundleFileType::WwiseDep => String::from("wwise_dep"),
BundleFileType::WwiseEvent => String::from("wwise_event"),
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
BundleFileType::WwiseStream => String::from("wwise_stream"),
BundleFileType::Xml => String::from("xml"),
BundleFileType::Unknown(s) => format!("{s:016X}"),
}
}
pub fn decompiled_ext_name(&self) -> String {
match self {
BundleFileType::Texture => String::from("dds"),
BundleFileType::WwiseBank => String::from("bnk"),
BundleFileType::WwiseStream => String::from("ogg"),
_ => self.ext_name(),
}
}
pub fn hash(&self) -> Murmur64 {
Murmur64::from(*self)
}
}
impl std::str::FromStr for BundleFileType {
type Err = color_eyre::Report;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let val = match s {
"animation_curves" => BundleFileType::AnimationCurves,
"animation" => BundleFileType::Animation,
"apb" => BundleFileType::Apb,
"baked_lighting" => BundleFileType::BakedLighting,
"bik" => BundleFileType::Bik,
"blend_set" => BundleFileType::BlendSet,
"bones" => BundleFileType::Bones,
"chroma" => BundleFileType::Chroma,
"common_package" => BundleFileType::CommonPackage,
"config" => BundleFileType::Config,
"crypto" => BundleFileType::Crypto,
"data" => BundleFileType::Data,
"entity" => BundleFileType::Entity,
"flow" => BundleFileType::Flow,
"font" => BundleFileType::Font,
"ies" => BundleFileType::Ies,
"ini" => BundleFileType::Ini,
"input" => BundleFileType::Input,
"ivf" => BundleFileType::Ivf,
"keys" => BundleFileType::Keys,
"level" => BundleFileType::Level,
"lua" => BundleFileType::Lua,
"material" => BundleFileType::Material,
"mod" => BundleFileType::Mod,
"mouse_cursor" => BundleFileType::MouseCursor,
"nav_data" => BundleFileType::NavData,
"network_config" => BundleFileType::NetworkConfig,
"oodle_net" => BundleFileType::OddleNet,
"package" => BundleFileType::Package,
"particles" => BundleFileType::Particles,
"physics_properties" => BundleFileType::PhysicsProperties,
"render_config" => BundleFileType::RenderConfig,
"rt_pipeline" => BundleFileType::RtPipeline,
"scene" => BundleFileType::Scene,
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
"shader_library" => BundleFileType::ShaderLibrary,
"shader" => BundleFileType::Shader,
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
"shading_environment" => BundleFileType::ShadingEnvironment,
"slug_album" => BundleFileType::SlugAlbum,
"slug" => BundleFileType::Slug,
"sound_environment" => BundleFileType::SoundEnvironment,
"spu_job" => BundleFileType::SpuJob,
"state_machine" => BundleFileType::StateMachine,
"static_pvs" => BundleFileType::StaticPVS,
"strings" => BundleFileType::Strings,
"surface_properties" => BundleFileType::SurfaceProperties,
"texture" => BundleFileType::Texture,
"timpani_bank" => BundleFileType::TimpaniBank,
"timpani_master" => BundleFileType::TimpaniMaster,
"tome" => BundleFileType::Tome,
"ugg" => BundleFileType::Ugg,
"unit" => BundleFileType::Unit,
"upb" => BundleFileType::Upb,
"vector_field" => BundleFileType::VectorField,
"wav" => BundleFileType::Wav,
"wwise_bank" => BundleFileType::WwiseBank,
"wwise_dep" => BundleFileType::WwiseDep,
"wwise_event" => BundleFileType::WwiseEvent,
"wwise_metadata" => BundleFileType::WwiseMetadata,
"wwise_stream" => BundleFileType::WwiseStream,
"xml" => BundleFileType::Xml,
s => eyre::bail!("Unknown type string '{}'", s),
};
Ok(val)
}
}
impl Serialize for BundleFileType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let value = self.ext_name();
value.serialize(serializer)
}
}
impl From<Murmur64> for BundleFileType {
fn from(value: Murmur64) -> Self {
Self::from(Into::<u64>::into(value))
}
}
impl From<u64> for BundleFileType {
fn from(hash: u64) -> BundleFileType {
match hash {
0x931e336d7646cc26 => BundleFileType::Animation,
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
0x3eed05ba83af5090 => BundleFileType::Apb,
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
0xaa5965f03029fa18 => BundleFileType::Bik,
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
0x18dead01056b72e9 => BundleFileType::Bones,
0xb7893adf7567506a => BundleFileType::Chroma,
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
0x82645835e6b73232 => BundleFileType::Config,
0x69108ded1e3e634b => BundleFileType::Crypto,
0x8fd0d44d20650b68 => BundleFileType::Data,
0x9831ca893b0d087d => BundleFileType::Entity,
0x92d3ee038eeb610d => BundleFileType::Flow,
0x9efe0a916aae7880 => BundleFileType::Font,
0x8f7d5a2c0f967655 => BundleFileType::Ies,
0xd526a27da14f1dc5 => BundleFileType::Ini,
0x2bbcabe5074ade9e => BundleFileType::Input,
0xfa4a8e091a91201e => BundleFileType::Ivf,
0xa62f9297dc969e85 => BundleFileType::Keys,
0x2a690fd348fe9ac5 => BundleFileType::Level,
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
0xeac0b497876adedf => BundleFileType::Material,
0x3fcdd69156a46417 => BundleFileType::Mod,
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
0x169de9566953d264 => BundleFileType::NavData,
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
0xad9c6d9ed1e5e77a => BundleFileType::Package,
0xa8193123526fad64 => BundleFileType::Particles,
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
0x27862fe24795319c => BundleFileType::RenderConfig,
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
0x9d0a795bfe818d19 => BundleFileType::Scene,
0xcce8d5b5f5ae333f => BundleFileType::Shader,
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
0xf97af9983c05b950 => BundleFileType::SpuJob,
0xa486d4045106165c => BundleFileType::StateMachine,
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
0x0d972bab10b40fd3 => BundleFileType::Strings,
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
0xcd4238c6a0c69e32 => BundleFileType::Texture,
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
0x19c792357c99f49b => BundleFileType::Tome,
0x712d6e3dd1024c9c => BundleFileType::Ugg,
0xe0a48d0be9a7453f => BundleFileType::Unit,
0xa99510c6e86dd3c2 => BundleFileType::Upb,
0xf7505933166d6755 => BundleFileType::VectorField,
0x786f65c00a816b19 => BundleFileType::Wav,
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
0x504b55235d21440e => BundleFileType::WwiseStream,
0x76015845a6003765 => BundleFileType::Xml,
_ => BundleFileType::Unknown(Murmur64::from(hash)),
}
}
}
impl From<BundleFileType> for u64 {
fn from(t: BundleFileType) -> u64 {
match t {
BundleFileType::Animation => 0x931e336d7646cc26,
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
BundleFileType::Apb => 0x3eed05ba83af5090,
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
BundleFileType::Bik => 0xaa5965f03029fa18,
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
BundleFileType::Bones => 0x18dead01056b72e9,
BundleFileType::Chroma => 0xb7893adf7567506a,
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
BundleFileType::Config => 0x82645835e6b73232,
BundleFileType::Crypto => 0x69108ded1e3e634b,
BundleFileType::Data => 0x8fd0d44d20650b68,
BundleFileType::Entity => 0x9831ca893b0d087d,
BundleFileType::Flow => 0x92d3ee038eeb610d,
BundleFileType::Font => 0x9efe0a916aae7880,
BundleFileType::Ies => 0x8f7d5a2c0f967655,
BundleFileType::Ini => 0xd526a27da14f1dc5,
BundleFileType::Input => 0x2bbcabe5074ade9e,
BundleFileType::Ivf => 0xfa4a8e091a91201e,
BundleFileType::Keys => 0xa62f9297dc969e85,
BundleFileType::Level => 0x2a690fd348fe9ac5,
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
BundleFileType::Material => 0xeac0b497876adedf,
BundleFileType::Mod => 0x3fcdd69156a46417,
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
BundleFileType::NavData => 0x169de9566953d264,
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
BundleFileType::Particles => 0xa8193123526fad64,
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
BundleFileType::RenderConfig => 0x27862fe24795319c,
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
BundleFileType::Scene => 0x9d0a795bfe818d19,
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
BundleFileType::SpuJob => 0xf97af9983c05b950,
BundleFileType::StateMachine => 0xa486d4045106165c,
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
BundleFileType::Strings => 0x0d972bab10b40fd3,
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
BundleFileType::Texture => 0xcd4238c6a0c69e32,
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
BundleFileType::Tome => 0x19c792357c99f49b,
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
BundleFileType::Unit => 0xe0a48d0be9a7453f,
BundleFileType::Upb => 0xa99510c6e86dd3c2,
BundleFileType::VectorField => 0xf7505933166d6755,
BundleFileType::Wav => 0x786f65c00a816b19,
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
BundleFileType::WwiseStream => 0x504b55235d21440e,
BundleFileType::Xml => 0x76015845a6003765,
BundleFileType::Unknown(hash) => hash.into(),
}
}
}
impl From<BundleFileType> for Murmur64 {
fn from(t: BundleFileType) -> Murmur64 {
let hash: u64 = t.into();
Murmur64::from(hash)
}
}
impl std::fmt::Display for BundleFileType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.ext_name())
}
}


@@ -12,10 +12,8 @@ use crate::murmur::{HashGroup, IdString64, Murmur64};
pub(crate) mod database;
pub(crate) mod file;
pub(crate) mod filetype;
pub use file::{BundleFile, BundleFileVariant};
pub use filetype::BundleFileType;
pub use file::{BundleFile, BundleFileType, BundleFileVariant};
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
enum BundleFormat {
@@ -229,9 +227,12 @@ impl Bundle {
let _enter = span.enter();
tracing::trace!(num_files = self.files.len());
self.files.iter().try_fold(Vec::new(), |mut data, file| {
self.files
.iter()
.fold(Ok::<Vec<u8>, Report>(Vec::new()), |data, file| {
let mut data = data?;
data.append(&mut file.to_binary()?);
Ok::<_, Report>(data)
Ok(data)
})?
};
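
The change above swaps a manual `fold` over an explicitly `Ok`-wrapped accumulator for `try_fold`, which threads the `Result` implicitly and short-circuits on the first `Err`. A self-contained illustration of the same pattern on unrelated data:

fn main() {
    let nums = ["1", "2", "3"];
    // try_fold stops at the first parse error; otherwise it yields Ok(sum).
    let sum: Result<i32, std::num::ParseIntError> =
        nums.iter().try_fold(0i32, |acc, s| Ok(acc + s.parse::<i32>()?));
    assert_eq!(sum, Ok(6));
}
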


@@ -1,59 +1,10 @@
use std::process::Command;
use std::{ffi::OsString, path::PathBuf};
use std::path::PathBuf;
use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64};
pub struct CmdLine {
cmd: OsString,
args: Vec<OsString>,
}
impl CmdLine {
pub fn new(cmd: impl Into<OsString>) -> Self {
Self {
cmd: cmd.into(),
args: vec![],
}
}
pub fn arg(&mut self, arg: impl Into<OsString>) -> &mut Self {
self.args.push(arg.into());
self
}
}
impl std::fmt::Debug for CmdLine {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("CmdLine")
.field("cmd", &self.cmd)
.field("args", &self.args)
.finish()
}
}
impl std::fmt::Display for CmdLine {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "\"{}\"", self.cmd.to_string_lossy())?;
for arg in &self.args {
write!(f, " \"{}\"", arg.to_string_lossy())?;
}
Ok(())
}
}
impl From<&CmdLine> for Command {
fn from(value: &CmdLine) -> Self {
let mut cmd = Command::new(&value.cmd);
cmd.args(&value.args);
cmd
}
}
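
For context on the removed `CmdLine` type: it is a small builder that keeps the program name and arguments as `OsString`s, provides a quoted `Display` for logging, and converts into `std::process::Command`. A usage sketch, assuming `CmdLine` is imported from this library's root where it was re-exported; the `ljd` invocation mirrors the Lua decompiler below and the temp path is made up:

use std::process::Command;

fn build_ljd_command() -> Command {
    let mut ljd = CmdLine::new("ljd");
    ljd.arg("--catch_asserts").arg("-f").arg("/tmp/dump.luao");
    // Display renders as: "ljd" "--catch_asserts" "-f" "/tmp/dump.luao"
    println!("Executing command: {}", ljd);
    Command::from(&ljd) // uses the From<&CmdLine> impl shown above
}
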
pub struct Context {
pub lookup: Dictionary,
pub ljd: Option<CmdLine>,
pub ljd: Option<String>,
pub revorb: Option<String>,
pub ww2ogg: Option<String>,
pub game_dir: Option<PathBuf>,


@@ -1,139 +1,41 @@
use std::env;
use std::ffi::CStr;
use std::ffi::CString;
use std::io::Cursor;
use std::io::Read;
use std::io::Write;
use std::process::Command;
use color_eyre::eyre;
use color_eyre::eyre::Context;
use color_eyre::Result;
use luajit2_sys as lua;
use tokio::fs;
use crate::binary::sync::ReadExt;
use crate::binary::sync::WriteExt;
use crate::bundle::file::{BundleFileVariant, UserFile};
use crate::murmur::IdString64;
use crate::{BundleFile, BundleFileType};
const BITSQUID_LUAJIT_HEADER: u32 = 0x8253461B;
#[tracing::instrument(skip_all, fields(buf_len = data.as_ref().len()))]
pub(crate) async fn decompile<T>(ctx: &crate::Context, data: T) -> Result<Vec<UserFile>>
pub(crate) async fn decompile<T>(_ctx: &crate::Context, data: T) -> Result<Vec<UserFile>>
where
T: AsRef<[u8]>,
{
let data = data.as_ref();
let length = {
let mut r = Cursor::new(data);
r.read_u32()? as usize
};
// This skips the unknown bytes 5..12
let content = &data[12..];
eyre::ensure!(
content.len() == length,
"Content length doesn't match. Expected {}, got {}",
length,
content.len()
);
let name = {
let mut r = Cursor::new(content);
eyre::ensure!(
r.read_u32()? == BITSQUID_LUAJIT_HEADER,
"Invalid magic bytes"
);
// Skip additional header bytes
let _ = r.read_uleb128()?;
let length = r.read_uleb128()? as usize;
let mut buf = vec![0u8; length];
r.read_exact(&mut buf)?;
let mut s =
String::from_utf8(buf).wrap_err("Invalid byte sequence for LuaJIT bytecode name")?;
// Remove the leading `@`
s.remove(0);
s
};
let mut temp = env::temp_dir();
// Using the actual file name and keeping it in case of an error makes debugging easier.
// But to avoid creating a bunch of folders, we flatten the name.
temp.push(name.replace('/', "_"));
temp.set_extension("luao");
tracing::debug!(
"Writing temporary LuaJIT bytecode file to '{}'",
temp.display()
);
fs::write(&temp, content)
.await
.wrap_err_with(|| format!("Failed to write LuaJIT bytecode to '{}'", temp.display()))?;
let mut cmd = ctx
.ljd
.as_ref()
.map(|c| c.into())
.unwrap_or_else(|| Command::new("ljd"));
cmd.arg("--catch_asserts")
.args(["--function_def_sugar", "false"])
.args(["--function_def_self_arg", "true"])
.args(["--unsafe", "false"])
.arg("-f")
.arg(&temp);
tracing::debug!("Executing command: '{:?}'", cmd);
let output = cmd.output().wrap_err("Failed to run ljd")?;
if !output.status.success() {
let err = eyre::eyre!(
"LJD exited with code {:?}:\n{}",
output.status.code(),
String::from_utf8_lossy(&output.stderr)
);
tracing::error!("Failed to decompile '{}':\n{:?}", name, err);
}
let content = output.stdout;
// No need to wait for this, so we move it to a separate task.
tokio::spawn(async move {
if let Err(err) = fs::remove_file(&temp)
.await
.wrap_err_with(|| format!("Failed to remove temporary file '{}'", temp.display()))
{
tracing::warn!("{:?}", err);
}
});
Ok(vec![UserFile::with_name(content, name)])
let mut _r = Cursor::new(data.as_ref());
todo!();
}
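
For readers skimming the removed implementation above, the framing it parses is: a `u32` content length, eight skipped bytes (offsets 4..12), then the LuaJIT chunk itself starting with the magic `0x8253461B`, two ULEB128 values (the second being the length of the chunk name), and the name prefixed with `@`. A condensed sketch of just the outer framing step; it assumes little-endian lengths, whereas the real code delegates endianness to the crate's `ReadExt`:

use std::io::{Cursor, Read};

fn split_bitsquid_lua(data: &[u8]) -> std::io::Result<(usize, &[u8])> {
    let mut r = Cursor::new(data);
    let mut len = [0u8; 4];
    r.read_exact(&mut len)?;                              // bytes 0..4: content length
    let content_len = u32::from_le_bytes(len) as usize;   // assumption: little-endian
    let content = &data[12..];                            // bytes 4..12 skipped, chunk follows
    debug_assert_eq!(content.len(), content_len);
    Ok((content_len, content))
}
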
#[tracing::instrument(skip_all)]
pub fn compile(name: impl Into<IdString64>, code: impl AsRef<str>) -> Result<BundleFile> {
pub fn compile<S, C>(name: S, code: C) -> Result<BundleFile>
where
S: Into<String>,
C: AsRef<str>,
{
let name = name.into();
let code = code.as_ref();
tracing::trace!(
"Compiling '{}', {} bytes of code",
name.display(),
code.as_bytes().len()
);
let bytecode = unsafe {
let state = lua::luaL_newstate();
lua::luaL_openlibs(state);
let name = CString::new(format!("@{}", name.display()).into_bytes())
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name.display()))?;
let name = CString::new(name.as_bytes())
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name))?;
match lua::luaL_loadbuffer(
state,
code.as_ptr() as _,


@@ -7,22 +7,13 @@ use std::str::FromStr;
use async_recursion::async_recursion;
use color_eyre::eyre::{self, Context};
use color_eyre::Result;
use path_slash::PathBufExt;
use tokio::fs;
use crate::binary::sync::{ReadExt, WriteExt};
use crate::bundle::file::UserFile;
use crate::bundle::filetype::BundleFileType;
use crate::murmur::{HashGroup, IdString64, Murmur64};
use crate::bundle::file::{BundleFileType, UserFile};
use crate::murmur::{HashGroup, Murmur64};
/// Resolves a relative path that might contain wildcards into a list of
/// paths that exist on disk and match that wildcard.
/// This is similar to globbing in Unix shells, but with far fewer features.
///
/// The only wildcard character allowed is `*`, and only at the end of the string,
/// where it matches all files recursively in that directory.
///
/// `t` is an optional extension name that may be used to force a wildcard
/// path to only match that file type `t`.
#[tracing::instrument]
#[async_recursion]
async fn resolve_wildcard<P1, P2>(
@@ -99,12 +90,12 @@ where
Ok(paths)
}
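
The doc comment above pins down the wildcard rule: only a trailing `*` is supported, and it matches everything under that directory recursively, optionally filtered by file type. A simplified, self-contained restatement of just the matching rule, not the crate's actual implementation (which also walks the filesystem):

fn matches_pattern(pattern: &str, path: &str) -> bool {
    match pattern.strip_suffix('*') {
        // Trailing `*`: match every path under the prefix, recursively.
        Some(prefix) => path.starts_with(prefix),
        // No wildcard: the path must match exactly.
        None => path == pattern,
    }
}

fn main() {
    assert!(matches_pattern("scripts/mods/*", "scripts/mods/ui/hud.lua"));
    assert!(!matches_pattern("scripts/mods/*", "textures/icon.texture"));
    assert!(matches_pattern("scripts/mods/init.lua", "scripts/mods/init.lua"));
}
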
type PackageType = HashMap<BundleFileType, HashSet<String>>;
type PackageType = HashMap<BundleFileType, HashSet<PathBuf>>;
type PackageDefinition = HashMap<String, HashSet<String>>;
#[derive(Default)]
pub struct Package {
_name: IdString64,
_name: String,
_root: PathBuf,
inner: PackageType,
flags: u8,
@@ -125,9 +116,9 @@ impl DerefMut for Package {
}
impl Package {
pub fn new(name: impl Into<IdString64>, root: PathBuf) -> Self {
pub fn new(name: String, root: PathBuf) -> Self {
Self {
_name: name.into(),
_name: name,
_root: root,
inner: Default::default(),
flags: 1,
@@ -138,22 +129,17 @@ impl Package {
self.values().fold(0, |total, files| total + files.len())
}
pub fn add_file(&mut self, file_type: BundleFileType, name: impl Into<String>) {
pub fn add_file<P: Into<PathBuf>>(&mut self, file_type: BundleFileType, name: P) {
self.inner.entry(file_type).or_default().insert(name.into());
}
#[tracing::instrument("Package::from_sjson", skip(sjson), fields(sjson_len = sjson.as_ref().len()))]
pub async fn from_sjson<P, S>(
sjson: S,
name: impl Into<IdString64> + std::fmt::Debug,
root: P,
) -> Result<Self>
pub async fn from_sjson<P, S>(sjson: S, name: String, root: P) -> Result<Self>
where
P: AsRef<Path> + std::fmt::Debug,
S: AsRef<str>,
{
let root = root.as_ref();
let name = name.into();
let definition: PackageDefinition = serde_sjson::from_str(sjson.as_ref())?;
let mut inner: PackageType = Default::default();
@@ -187,11 +173,7 @@ impl Package {
continue;
};
tracing::debug!("Adding file {}", path.display());
inner
.entry(t)
.or_default()
.insert(path.display().to_string());
inner.entry(t).or_default().insert(path);
}
}
}
@@ -210,9 +192,11 @@ impl Package {
pub fn to_sjson(&self) -> Result<String> {
let mut map: PackageDefinition = Default::default();
for (t, names) in self.iter() {
for name in names.iter() {
map.entry(t.ext_name()).or_default().insert(name.clone());
for (t, paths) in self.iter() {
for path in paths.iter() {
map.entry(t.ext_name())
.or_default()
.insert(path.display().to_string());
}
}
@@ -238,11 +222,11 @@ impl Package {
for _ in 0..file_count {
let t = BundleFileType::from(r.read_u64()?);
let hash = Murmur64::from(r.read_u64()?);
let name = ctx.lookup_hash(hash, HashGroup::Filename);
let path = ctx.lookup_hash(hash, HashGroup::Filename);
inner
.entry(t)
.or_default()
.insert(name.display().to_string());
.insert(PathBuf::from(path.display().to_string()));
}
let flags = r.read_u8()?;
@@ -255,7 +239,7 @@
let pkg = Self {
inner,
_name: name.into(),
_name: name,
_root: PathBuf::new(),
flags,
};
@@ -271,10 +255,12 @@ impl Package {
w.write_u32(0x2b)?;
w.write_u32(self.values().flatten().count() as u32)?;
for (t, names) in self.iter() {
for name in names.iter() {
for (t, paths) in self.iter() {
for path in paths.iter() {
w.write_u64(t.hash().into())?;
w.write_u64(Murmur64::hash(name.as_bytes()).into())?;
let hash = Murmur64::hash(path.to_slash_lossy().as_bytes());
w.write_u64(hash.into())?;
}
}
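
The new binary writer hashes `path.to_slash_lossy()` rather than the raw path string, so the Murmur64 value is computed over engine-style forward slashes regardless of the host's path separator. A small sketch of what `path_slash` normalizes, using a made-up path:

use std::path::PathBuf;
use path_slash::PathBufExt;

fn main() {
    // Join with the platform separator, then normalize to forward slashes.
    let p: PathBuf = ["scripts", "mods", "init.lua"].iter().collect();
    assert_eq!(p.to_slash_lossy(), "scripts/mods/init.lua");
}
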
@@ -294,11 +280,17 @@
Ok(vec![UserFile::new(s.into_bytes())])
}
// #[tracing::instrument(skip_all)]
// pub fn compile(_ctx: &crate::Context, data: String) -> Result<Vec<u8>> {
// let pkg = Package::from_sjson(data)?;
// pkg.to_binary()
// }
#[cfg(test)]
mod test {
use std::path::PathBuf;
use crate::bundle::filetype::BundleFileType;
use crate::BundleFileType;
use super::resolve_wildcard;
use super::Package;


@@ -1,5 +1,3 @@
#![feature(test)]
mod binary;
mod bundle;
mod context;
@@ -10,4 +8,4 @@ pub use binary::{FromBinary, ToBinary};
pub use bundle::database::BundleDatabase;
pub use bundle::decompress;
pub use bundle::{Bundle, BundleFile, BundleFileType, BundleFileVariant};
pub use context::{CmdLine, Context};
pub use context::Context;


@@ -147,14 +147,14 @@ impl Dictionary {
Ok(())
}
pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));
pub fn add(&mut self, value: String, group: HashGroup) {
let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64));
let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED));
let entry = Entry {
long,
short,
value: String::from_utf8_lossy(value.as_ref()).to_string(),
value,
group,
};


@@ -1,162 +0,0 @@
use std::fmt;
use serde::{Deserializer, Serializer};
use super::Murmur32;
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when writing a previously read bundle back out, as the hashes
// need to stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringified hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString32 {
Hash(Murmur32),
String(String),
}
impl IdString32 {
pub fn to_murmur32(&self) -> Murmur32 {
match self {
Self::Hash(hash) => *hash,
Self::String(s) => Murmur32::hash(s.as_bytes()),
}
}
pub fn display(&self) -> IdString32Display {
let s = match self {
IdString32::Hash(hash) => hash.to_string(),
IdString32::String(s) => s.clone(),
};
IdString32Display(s)
}
pub fn is_string(&self) -> bool {
match self {
IdString32::Hash(_) => false,
IdString32::String(_) => true,
}
}
pub fn is_hash(&self) -> bool {
match self {
IdString32::Hash(_) => true,
IdString32::String(_) => false,
}
}
}
impl From<String> for IdString32 {
fn from(value: String) -> Self {
Self::String(value)
}
}
impl From<u32> for IdString32 {
fn from(value: u32) -> Self {
Self::Hash(value.into())
}
}
impl From<IdString32> for u32 {
fn from(value: IdString32) -> Self {
value.to_murmur32().into()
}
}
impl From<Murmur32> for IdString32 {
fn from(value: Murmur32) -> Self {
Self::Hash(value)
}
}
impl From<IdString32> for Murmur32 {
fn from(value: IdString32) -> Self {
value.to_murmur32()
}
}
impl PartialEq for IdString32 {
fn eq(&self, other: &Self) -> bool {
self.to_murmur32() == other.to_murmur32()
}
}
impl std::hash::Hash for IdString32 {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u32(self.to_murmur32().into());
}
}
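
As the comment at the top of this file explains, both representations must behave identically wherever only the hash matters, so `PartialEq` and `Hash` both go through `to_murmur32()`. A short sketch, assuming `IdString32` and `Murmur32` are imported from this crate's `murmur` module and using a made-up name:

fn main() {
    let by_name = IdString32::String(String::from("content/fonts/arial"));
    let by_hash = IdString32::Hash(Murmur32::hash(b"content/fonts/arial"));
    // Equal despite different representations: both reduce to the same Murmur32.
    assert_eq!(by_name, by_hash);
    assert!(by_name.is_string() && by_hash.is_hash());
}
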
impl serde::Serialize for IdString32 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u32(self.to_murmur32().into())
}
}
struct IdString32Visitor;
impl<'de> serde::de::Visitor<'de> for IdString32Visitor {
type Value = IdString32;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an u32 or a string")
}
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString32::Hash(value.into()))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString32::String(v.to_string()))
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString32::String(v))
}
}
impl<'de> serde::Deserialize<'de> for IdString32 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u32(IdString32Visitor)
}
}
pub struct IdString32Display(String);
impl std::fmt::Display for IdString32Display {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::fmt::UpperHex for IdString32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::UpperHex::fmt(&self.to_murmur32(), f)
}
}
impl std::fmt::LowerHex for IdString32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::LowerHex::fmt(&self.to_murmur32(), f)
}
}

Some files were not shown because too many files have changed in this diff.