Compare commits
1 commit
master
...
feat/code-
Author | SHA1 | Date | |
---|---|---|---|
77475b6dee |
95 changed files with 3172 additions and 6235 deletions
|
@ -1,7 +1,35 @@
|
|||
FROM dtmt-ci-base-linux
|
||||
|
||||
# Create dummy crates and copy their Cargo.toml, so that dependencies can be cached
|
||||
RUN set -e; \
|
||||
cargo new --bin crates/dtmt; \
|
||||
cargo new --bin crates/dtmm; \
|
||||
cargo new --lib lib/dtmt-shared; \
|
||||
cargo new --lib lib/nexusmods; \
|
||||
cargo new --lib lib/sdk; \
|
||||
cargo new --lib lib/serde_sjson; \
|
||||
cargo new --lib lib/ansi-parser
|
||||
|
||||
COPY Cargo.toml Cargo.lock /src/dtmt/
|
||||
COPY crates/dtmt/Cargo.toml /src/dtmt/crates/dtmt/
|
||||
COPY crates/dtmm/Cargo.toml /src/dtmt/crates/dtmm/
|
||||
COPY lib/dtmt-shared/Cargo.toml /src/dtmt/lib/dtmt-shared/
|
||||
COPY lib/nexusmods/Cargo.toml /src/dtmt/lib/nexusmods/
|
||||
COPY lib/sdk/Cargo.toml /src/dtmt/lib/sdk/
|
||||
COPY lib/serde_sjson/Cargo.toml /src/dtmt/lib/serde_sjson/
|
||||
COPY lib/ansi-parser/Cargo.toml /src/dtmt/lib/ansi-parser/
|
||||
|
||||
# Crates with build scripts cannot be split that way, but they shouldn't change too often
|
||||
COPY lib/luajit2-sys /src/dtmt/lib/luajit2-sys
|
||||
COPY lib/oodle /src/dtmt/lib/oodle
|
||||
# color-eyre needs to be copied, too, then, as it's used by `oodle`
|
||||
COPY lib/color-eyre /src/dtmt/lib/color-eyre
|
||||
COPY --from=dtmt-ci-base-linux /src/*.lib /src/dtmt/lib/oodle/
|
||||
|
||||
RUN cargo build --release --locked
|
||||
RUN rm -r crates lib
|
||||
|
||||
COPY . /src/dtmt
|
||||
COPY --from=dtmt-ci-base-linux /src/*.lib /src/*.so /src/dtmt/lib/oodle/
|
||||
RUN --mount=type=cache,id=cargo-registry,target=/cargo/registry \
|
||||
--mount=type=cache,id=cargo-target,target=/src/dtmt/target \
|
||||
cargo build --release --locked
|
||||
|
||||
RUN cargo build --release --locked
|
||||
|
|
24
.ci/image/Dockerfile.linux
Normal file
24
.ci/image/Dockerfile.linux
Normal file
|
@ -0,0 +1,24 @@
|
|||
FROM rust:slim-bullseye
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
apt-get install --no-install-recommends -y \
|
||||
build-essential \
|
||||
curl \
|
||||
gpg \
|
||||
jq \
|
||||
libatk1.0-dev \
|
||||
libclang-13-dev \
|
||||
libglib2.0-dev \
|
||||
libgtk-3-dev \
|
||||
libpango1.0-dev \
|
||||
libssl-dev \
|
||||
libzstd-dev \
|
||||
pkg-config; \
|
||||
apt-get remove -y --auto-remove; \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
rustup default nightly
|
||||
|
||||
WORKDIR /src/dtmt
|
||||
|
||||
COPY *.so *.a /src/
|
|
@ -1,70 +1,14 @@
|
|||
# https://jake-shadle.github.io/xwin/
|
||||
FROM debian:bullseye-slim as xwin
|
||||
FROM dtmt-ci-base-linux
|
||||
|
||||
ARG XWIN_VERSION=0.5.2
|
||||
ARG XWIN_PREFIX="xwin-$XWIN_VERSION-x86_64-unknown-linux-musl"
|
||||
ADD https://github.com/Jake-Shadle/xwin/releases/download/$XWIN_VERSION/$XWIN_PREFIX.tar.gz /root/$XWIN_PREFIX.tar.gz
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
apt-get install --no-install-recommends -y \
|
||||
tar \
|
||||
; \
|
||||
# Install xwin to cargo/bin via github release. Note you could also just use `cargo install xwin`.
|
||||
tar -xzv -f /root/$XWIN_PREFIX.tar.gz -C /usr/bin --strip-components=1 $XWIN_PREFIX/xwin; \
|
||||
apt-get remove -y --auto-remove; \
|
||||
rm -rf \
|
||||
/var/lib/apt/lists/* \
|
||||
/root/$XWIN_PREFIX.tar.gz;
|
||||
|
||||
RUN set -eux; \
|
||||
# Splat the CRT and SDK files to /xwin/crt and /xwin/sdk respectively
|
||||
xwin \
|
||||
--log-level debug \
|
||||
--cache-dir /root/.xwin-cache \
|
||||
--manifest-version 16 \
|
||||
--accept-license \
|
||||
splat \
|
||||
--output /xwin; \
|
||||
# Even though this build step only exists temporary, to copy the
|
||||
# final data out of, it still generates a cache entry on the Docker host.
|
||||
# And to keep that to a minimum, we still delete the stuff we don't need.
|
||||
rm -rf /root/.xwin-cache;
|
||||
|
||||
FROM rust:slim-bullseye as linux
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
apt-get install --no-install-recommends -y \
|
||||
build-essential \
|
||||
cmake \
|
||||
curl \
|
||||
git \
|
||||
gpg \
|
||||
jq \
|
||||
libatk1.0-dev \
|
||||
libclang-13-dev \
|
||||
libglib2.0-dev \
|
||||
libgtk-3-dev \
|
||||
libpango1.0-dev \
|
||||
libssl-dev \
|
||||
libzstd-dev \
|
||||
pkg-config; \
|
||||
apt-get remove -y --auto-remove; \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
rustup default nightly
|
||||
|
||||
WORKDIR /src/dtmt
|
||||
|
||||
COPY lib/oodle/*.so lib/oodle/*.a /src/
|
||||
|
||||
FROM linux as msvc
|
||||
|
||||
ARG LLVM_VERSION=18
|
||||
ENV KEYRINGS /usr/local/share/keyrings
|
||||
ARG XWIN_VERSION=0.2.11
|
||||
ARG XWIN_PREFIX="xwin-$XWIN_VERSION-x86_64-unknown-linux-musl"
|
||||
ARG OSSLSIGNCODE_VERSION=2.7
|
||||
|
||||
ADD https://apt.llvm.org/llvm-snapshot.gpg.key /root/llvm-snapshot.gpg.key
|
||||
ADD https://dl.winehq.org/wine-builds/winehq.key /root/winehq.key
|
||||
ADD https://github.com/Jake-Shadle/xwin/releases/download/$XWIN_VERSION/$XWIN_PREFIX.tar.gz /root/$XWIN_PREFIX.tar.gz
|
||||
|
||||
RUN set -eux; \
|
||||
mkdir -p $KEYRINGS; \
|
||||
|
@ -72,22 +16,28 @@ RUN set -eux; \
|
|||
gpg --dearmor > $KEYRINGS/llvm.gpg < /root/llvm-snapshot.gpg.key; \
|
||||
# wine
|
||||
gpg --dearmor > $KEYRINGS/winehq.gpg < /root/winehq.key; \
|
||||
echo "deb [signed-by=$KEYRINGS/llvm.gpg] http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.list; \
|
||||
echo "deb [signed-by=$KEYRINGS/llvm.gpg] http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-13 main" > /etc/apt/sources.list.d/llvm.list; \
|
||||
echo "deb [signed-by=$KEYRINGS/winehq.gpg] https://dl.winehq.org/wine-builds/debian/ bullseye main" > /etc/apt/sources.list.d/winehq.list; \
|
||||
dpkg --add-architecture i386; \
|
||||
apt-get update; \
|
||||
apt-get install --no-install-recommends -y \
|
||||
libclang-${LLVM_VERSION}-dev \
|
||||
gcc-mingw-w64-x86-64 \
|
||||
clang-${LLVM_VERSION} \
|
||||
llvm-${LLVM_VERSION} \
|
||||
lld-${LLVM_VERSION} \
|
||||
winehq-staging \
|
||||
; \
|
||||
cmake \
|
||||
libssl-dev \
|
||||
libcurl4-openssl-dev \
|
||||
zlib1g-dev \
|
||||
python3 \
|
||||
libclang-13-dev \
|
||||
gcc-mingw-w64-x86-64 \
|
||||
clang-13 \
|
||||
llvm-13 \
|
||||
lld-13 \
|
||||
winehq-staging \
|
||||
tar \
|
||||
; \
|
||||
# ensure that clang/clang++ are callable directly
|
||||
ln -s clang-${LLVM_VERSION} /usr/bin/clang && ln -s clang /usr/bin/clang++ && ln -s lld-${LLVM_VERSION} /usr/bin/ld.lld; \
|
||||
ln -s clang-13 /usr/bin/clang && ln -s clang /usr/bin/clang++ && ln -s lld-13 /usr/bin/ld.lld; \
|
||||
# We also need to setup symlinks ourselves for the MSVC shims because they aren't in the debian packages
|
||||
ln -s clang-${LLVM_VERSION} /usr/bin/clang-cl && ln -s llvm-ar-${LLVM_VERSION} /usr/bin/llvm-lib && ln -s lld-link-${LLVM_VERSION} /usr/bin/lld-link; \
|
||||
ln -s clang-13 /usr/bin/clang-cl && ln -s llvm-ar-13 /usr/bin/llvm-lib && ln -s lld-link-13 /usr/bin/lld-link; \
|
||||
# Verify the symlinks are correct
|
||||
clang++ -v; \
|
||||
ld.lld -v; \
|
||||
|
@ -101,14 +51,31 @@ RUN set -eux; \
|
|||
update-alternatives --install /usr/bin/ld ld /usr/bin/ld.lld 100; \
|
||||
rustup target add x86_64-pc-windows-msvc; \
|
||||
rustup component add rust-src; \
|
||||
# Install xwin to cargo/bin via github release. Note you could also just use `cargo install xwin`.
|
||||
tar -xzv -f /root/$XWIN_PREFIX.tar.gz -C /usr/local/cargo/bin --strip-components=1 $XWIN_PREFIX/xwin; \
|
||||
# Splat the CRT and SDK files to /xwin/crt and /xwin/sdk respectively
|
||||
xwin --accept-license splat --output /xwin; \
|
||||
# Remove unneeded files to reduce image size
|
||||
apt-get remove -y --auto-remove; \
|
||||
rm -rf \
|
||||
/var/lib/apt/lists/* \
|
||||
/root/*.key;
|
||||
.xwin-cache \
|
||||
/usr/local/cargo/bin/xwin \
|
||||
/root/$XWIN_PREFIX.tar.gz \
|
||||
/var/lib/apt/lists/* \
|
||||
/root/*.key;
|
||||
|
||||
COPY lib/oodle/*.lib /src
|
||||
COPY --from=xwin /xwin /xwin
|
||||
ADD https://github.com/mtrojnar/osslsigncode/archive/refs/tags/${OSSLSIGNCODE_VERSION}.tar.gz /root/osslsigncode.tar.gz
|
||||
|
||||
RUN set -ex; \
|
||||
tar -xzv -f /root/osslsigncode.tar.gz;\
|
||||
cd osslsigncode-${OSSLSIGNCODE_VERSION}; \
|
||||
cmake -B build -S . -DCMAKE_BUILD_TYPE=Release; \
|
||||
cmake --build build; \
|
||||
cmake --install build; \
|
||||
cd ../; \
|
||||
rm -rf \
|
||||
/root/osslsigncode.tar.gz \
|
||||
osslsigncode-${OSSLSIGNCODE_VERSION};
|
||||
|
||||
# Note that we're using the full target triple for each variable instead of the
|
||||
# simple CC/CXX/AR shorthands to avoid issues when compiling any C/C++ code for
|
||||
|
@ -136,3 +103,7 @@ ENV CFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS" \
|
|||
# Run wineboot just to setup the default WINEPREFIX so we don't do it every
|
||||
# container run
|
||||
RUN wine wineboot --init
|
||||
|
||||
WORKDIR /src/dtmt
|
||||
|
||||
COPY *.lib /src
|
43
.ci/pipelines/base-pipeline.yml
Normal file
43
.ci/pipelines/base-pipeline.yml
Normal file
|
@ -0,0 +1,43 @@
|
|||
---
|
||||
|
||||
# The base pipeline that runs continuously, checks for branches and
|
||||
# creates a new pipeline instance for each of them.
|
||||
|
||||
resource_types:
|
||||
- name: gitea-pr
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/gitea-pr
|
||||
|
||||
resources:
|
||||
- name: repo-pr
|
||||
type: gitea-pr
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
owner: ((owner))
|
||||
repo: ((repo))
|
||||
url: https://git.sclu1034.dev
|
||||
- name: repo
|
||||
type: git
|
||||
source:
|
||||
uri: https://git.sclu1034.dev/bitsquid_dt/dtmt
|
||||
|
||||
jobs:
|
||||
- name: set-pipelines
|
||||
plan:
|
||||
- in_parallel:
|
||||
- get: repo-pr
|
||||
trigger: true
|
||||
- get: repo
|
||||
- load_var: prs
|
||||
file: repo-pr/prs.json
|
||||
- across:
|
||||
- var: pr
|
||||
values: ((.:prs))
|
||||
set_pipeline: dtmt-pr
|
||||
file: repo/.ci/pipelines/pr.yml
|
||||
vars:
|
||||
pr: ((.:pr))
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
instance_vars:
|
||||
pr: ((.:pr.number))
|
|
@ -1,230 +0,0 @@
|
|||
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config
|
||||
---
|
||||
|
||||
# The actual CI pipeline that is run per branch
|
||||
resource_types:
|
||||
- name: gitea-package
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/gitea-package
|
||||
|
||||
- name: gitea-status
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/gitea-status
|
||||
|
||||
- name: gitea-pr
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/gitea-pr
|
||||
|
||||
|
||||
resources:
|
||||
- name: repo
|
||||
type: git
|
||||
source:
|
||||
uri: http://forgejo:3000/bitsquid_dt/dtmt
|
||||
branch: master
|
||||
|
||||
- name: repo-pr
|
||||
type: gitea-pr
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
owner: ((owner))
|
||||
repo: ((repo))
|
||||
url: https://git.sclu1034.dev
|
||||
|
||||
- name: gitea-package
|
||||
type: gitea-package
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
type: generic
|
||||
name: dtmt
|
||||
|
||||
|
||||
- name: status-build-msvc
|
||||
type: gitea-status
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
repo: dtmt
|
||||
context: build/msvc
|
||||
description: "Build for the target platform: msvc"
|
||||
|
||||
- name: status-build-linux
|
||||
type: gitea-status
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
repo: dtmt
|
||||
context: build/linux
|
||||
description: "Build for the target platform: linux"
|
||||
|
||||
|
||||
jobs:
|
||||
- name: set-pipelines
|
||||
plan:
|
||||
- in_parallel:
|
||||
- get: repo-pr
|
||||
trigger: true
|
||||
|
||||
- get: repo
|
||||
|
||||
- load_var: prs
|
||||
file: repo-pr/prs.json
|
||||
|
||||
- across:
|
||||
- var: pr
|
||||
values: ((.:prs))
|
||||
set_pipeline: dtmt-pr
|
||||
file: repo/.ci/pipelines/pr.yml
|
||||
vars:
|
||||
pr: ((.:pr))
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
instance_vars:
|
||||
number: ((.:pr.number))
|
||||
|
||||
|
||||
- name: build-msvc
|
||||
on_success:
|
||||
put: state-success
|
||||
resource: status-build-msvc
|
||||
no_get: true
|
||||
params:
|
||||
state: success
|
||||
sha: ((.:git_sha))
|
||||
|
||||
on_failure:
|
||||
put: state-failure
|
||||
resource: status-build-msvc
|
||||
no_get: true
|
||||
params:
|
||||
state: failure
|
||||
sha: ((.:git_sha))
|
||||
|
||||
plan:
|
||||
- get: repo
|
||||
trigger: true
|
||||
|
||||
- load_var: git_sha
|
||||
file: repo/.git/ref
|
||||
|
||||
- put: state-pending
|
||||
resource: status-build-msvc
|
||||
no_get: true
|
||||
params:
|
||||
state: pending
|
||||
sha: ((.:git_sha))
|
||||
|
||||
- task: build
|
||||
file: repo/.ci/tasks/build.yml
|
||||
vars:
|
||||
pr: ""
|
||||
target: msvc
|
||||
|
||||
- load_var: version_number
|
||||
reveal: true
|
||||
file: artifact/version
|
||||
|
||||
- put: package
|
||||
resource: gitea-package
|
||||
no_get: true
|
||||
inputs:
|
||||
- artifact
|
||||
params:
|
||||
version: ((.:version_number))
|
||||
fail_fast: true
|
||||
override: true
|
||||
globs:
|
||||
- artifact/*.exe
|
||||
- artifact/*.exe.sha256
|
||||
|
||||
- put: package
|
||||
resource: gitea-package
|
||||
no_get: true
|
||||
inputs:
|
||||
- artifact
|
||||
params:
|
||||
version: master
|
||||
fail_fast: true
|
||||
override: true
|
||||
globs:
|
||||
- artifact/*.exe
|
||||
- artifact/*.exe.sha256
|
||||
|
||||
- name: build-linux
|
||||
on_success:
|
||||
put: state-success
|
||||
resource: status-build-linux
|
||||
no_get: true
|
||||
params:
|
||||
state: success
|
||||
sha: ((.:git_sha))
|
||||
|
||||
on_failure:
|
||||
put: state-failure
|
||||
resource: status-build-linux
|
||||
no_get: true
|
||||
params:
|
||||
state: failure
|
||||
sha: ((.:git_sha))
|
||||
|
||||
plan:
|
||||
- get: repo
|
||||
trigger: true
|
||||
|
||||
- load_var: git_sha
|
||||
file: repo/.git/ref
|
||||
|
||||
- put: state-pending
|
||||
resource: status-build-linux
|
||||
no_get: true
|
||||
params:
|
||||
state: pending
|
||||
sha: ((.:git_sha))
|
||||
|
||||
- task: build
|
||||
file: repo/.ci/tasks/build.yml
|
||||
vars:
|
||||
pr: ""
|
||||
target: linux
|
||||
gitea_url: http://forgejo:3000
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
|
||||
- load_var: version_number
|
||||
reveal: true
|
||||
file: artifact/version
|
||||
|
||||
- put: package
|
||||
resource: gitea-package
|
||||
no_get: true
|
||||
inputs:
|
||||
- artifact
|
||||
params:
|
||||
version: ((.:version_number))
|
||||
fail_fast: true
|
||||
override: true
|
||||
globs:
|
||||
- artifact/dtmt
|
||||
- artifact/dtmm
|
||||
- artifact/dtmm.sha256
|
||||
- artifact/dtmt.sha256
|
||||
|
||||
- put: package
|
||||
resource: gitea-package
|
||||
no_get: true
|
||||
inputs:
|
||||
- artifact
|
||||
params:
|
||||
version: master
|
||||
fail_fast: true
|
||||
override: true
|
||||
globs:
|
||||
- artifact/dtmt
|
||||
- artifact/dtmm
|
||||
- artifact/dtmm.sha256
|
||||
- artifact/dtmt.sha256
|
|
@ -2,17 +2,6 @@
|
|||
---
|
||||
|
||||
# The actual CI pipeline that is run per branch
|
||||
resource_types:
|
||||
- name: gitea-package
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/gitea-package
|
||||
|
||||
- name: gitea-status
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/gitea-status
|
||||
|
||||
|
||||
resources:
|
||||
- name: repo
|
||||
|
@ -21,197 +10,53 @@ resources:
|
|||
uri: http://forgejo:3000/bitsquid_dt/dtmt
|
||||
branch: ((pr.head.ref))
|
||||
|
||||
- name: gitea-package
|
||||
type: gitea-package
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
type: generic
|
||||
name: dtmt
|
||||
|
||||
- name: pr-status-lint-clippy
|
||||
type: gitea-status
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
repo: dtmt
|
||||
context: lint/clippy
|
||||
description: Checking for common mistakes and opportunities for code improvement
|
||||
|
||||
- name: pr-status-build-msvc
|
||||
type: gitea-status
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
repo: dtmt
|
||||
context: build/msvc
|
||||
description: "Build for the target platform: msvc"
|
||||
|
||||
- name: pr-status-build-linux
|
||||
type: gitea-status
|
||||
source:
|
||||
access_token: ((gitea_api_key))
|
||||
url: http://forgejo:3000
|
||||
owner: bitsquid_dt
|
||||
repo: dtmt
|
||||
context: build/linux
|
||||
description: "Build for the target platform: linux"
|
||||
|
||||
|
||||
jobs:
|
||||
- name: clippy
|
||||
on_success:
|
||||
put: state-success
|
||||
resource: pr-status-lint-clippy
|
||||
no_get: true
|
||||
params:
|
||||
state: success
|
||||
sha: ((.:git_sha))
|
||||
|
||||
on_failure:
|
||||
put: state-failure
|
||||
resource: pr-status-lint-clippy
|
||||
no_get: true
|
||||
params:
|
||||
state: failure
|
||||
sha: ((.:git_sha))
|
||||
|
||||
plan:
|
||||
- get: repo
|
||||
trigger: true
|
||||
|
||||
- load_var: git_sha
|
||||
- load_var: ref
|
||||
file: repo/.git/ref
|
||||
|
||||
- put: state-pending
|
||||
resource: pr-status-lint-clippy
|
||||
no_get: true
|
||||
params:
|
||||
state: pending
|
||||
sha: ((.:git_sha))
|
||||
|
||||
- task: check
|
||||
file: repo/.ci/tasks/clippy.yml
|
||||
vars:
|
||||
ref: ((.:ref))
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
|
||||
|
||||
- name: build-msvc
|
||||
on_success:
|
||||
put: state-success
|
||||
resource: pr-status-build-msvc
|
||||
no_get: true
|
||||
params:
|
||||
state: success
|
||||
sha: ((.:git_sha))
|
||||
|
||||
on_failure:
|
||||
put: state-failure
|
||||
resource: pr-status-build-msvc
|
||||
no_get: true
|
||||
params:
|
||||
state: failure
|
||||
sha: ((.:git_sha))
|
||||
|
||||
plan:
|
||||
- get: repo
|
||||
trigger: true
|
||||
|
||||
- load_var: git_sha
|
||||
- load_var: ref
|
||||
file: repo/.git/ref
|
||||
|
||||
- put: state-pending
|
||||
resource: pr-status-build-msvc
|
||||
no_get: true
|
||||
params:
|
||||
state: pending
|
||||
sha: ((.:git_sha))
|
||||
|
||||
- task: build
|
||||
file: repo/.ci/tasks/build.yml
|
||||
vars:
|
||||
target: msvc
|
||||
pr: ((pr))
|
||||
output: artifact
|
||||
ref: ((.:ref))
|
||||
gitea_url: http://forgejo:3000
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
|
||||
- load_var: version_number
|
||||
reveal: true
|
||||
file: artifact/version
|
||||
|
||||
- put: package
|
||||
resource: gitea-package
|
||||
no_get: true
|
||||
inputs:
|
||||
- artifact
|
||||
params:
|
||||
version: ((.:version_number))
|
||||
fail_fast: true
|
||||
override: true
|
||||
globs:
|
||||
- artifact/dtmt
|
||||
- artifact/dtmm
|
||||
- artifact/*.exe
|
||||
- artifact/*.sha256
|
||||
|
||||
- name: build-linux
|
||||
on_success:
|
||||
put: state-success
|
||||
resource: pr-status-build-linux
|
||||
no_get: true
|
||||
params:
|
||||
state: success
|
||||
sha: ((.:git_sha))
|
||||
|
||||
on_failure:
|
||||
put: state-failure
|
||||
resource: pr-status-build-linux
|
||||
no_get: true
|
||||
params:
|
||||
state: failure
|
||||
sha: ((.:git_sha))
|
||||
|
||||
plan:
|
||||
- get: repo
|
||||
trigger: true
|
||||
|
||||
- load_var: git_sha
|
||||
- load_var: ref
|
||||
file: repo/.git/ref
|
||||
|
||||
- put: state-pending
|
||||
resource: pr-status-build-linux
|
||||
no_get: true
|
||||
params:
|
||||
state: pending
|
||||
sha: ((.:git_sha))
|
||||
|
||||
- task: build
|
||||
file: repo/.ci/tasks/build.yml
|
||||
vars:
|
||||
target: linux
|
||||
pr: ((pr))
|
||||
output: artifact
|
||||
ref: ((.:ref))
|
||||
gitea_url: http://forgejo:3000
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
|
||||
- load_var: version_number
|
||||
reveal: true
|
||||
file: artifact/version
|
||||
|
||||
- put: package
|
||||
resource: gitea-package
|
||||
no_get: true
|
||||
inputs:
|
||||
- artifact
|
||||
params:
|
||||
version: ((.:version_number))
|
||||
fail_fast: true
|
||||
override: true
|
||||
globs:
|
||||
- artifact/dtmt
|
||||
- artifact/dtmm
|
||||
- artifact/*.exe
|
||||
- artifact/*.sha256
|
||||
|
||||
- task: upload
|
||||
file: repo/.ci/tasks/upload.yml
|
||||
vars:
|
||||
input: artifact
|
||||
pr: ((.:pr))
|
||||
gitea_api_key: ((gitea_api_key))
|
||||
gitea_user: bitsquid_dt
|
||||
gitea_url: http://forgejo:3000
|
||||
|
|
|
@ -1,62 +1,27 @@
|
|||
#!/bin/bash
|
||||
#!/bin/sh
|
||||
|
||||
set -eu
|
||||
|
||||
if [ -n "$OUTPUT" ]; then
|
||||
OUTPUT="$PWD/$OUTPUT"
|
||||
else
|
||||
OUTPUT=$(mktemp -d)
|
||||
fi
|
||||
|
||||
title() {
|
||||
printf "\033[1m%s\033[0m\n" "$1"
|
||||
}
|
||||
|
||||
install_artifact() {
|
||||
install -v -t "$OUTPUT/" "$1"
|
||||
sha256sum "$1" | cut -d' ' -f1 > "$OUTPUT/$(basename "$1").sha256"
|
||||
}
|
||||
|
||||
cd "repo"
|
||||
|
||||
PR=${PR:-}
|
||||
|
||||
if [ -n "$PR" ]; then
|
||||
title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
|
||||
ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$(cat .git/ref || echo "HEAD")" 2>/dev/null || echo 'manual')"
|
||||
elif [ -f ".git/branch"]; then
|
||||
ref=$(cat .git/branch)-$(git rev-parse --short $ref)
|
||||
else
|
||||
ref=$(git rev-parse --short "$(cat .git/ref || echo "HEAD")")
|
||||
fi
|
||||
|
||||
title "Version: '$ref'"
|
||||
echo "$ref" > "$OUTPUT/version"
|
||||
set -eux
|
||||
|
||||
case "$TARGET" in
|
||||
msvc)
|
||||
cp /src/*.lib ./lib/oodle/
|
||||
|
||||
title "Building project for target $TARGET"
|
||||
cargo build --color always --locked --release --target x86_64-pc-windows-msvc -Zbuild-std
|
||||
|
||||
title "Install artifacts"
|
||||
install_artifact target/x86_64-pc-windows-msvc/release/dtmt.exe
|
||||
install_artifact target/x86_64-pc-windows-msvc/release/dtmm.exe
|
||||
if [ -d "$OUTPUT" ]; then
|
||||
install -t "$OUTPUT/" target/x86_64-pc-windows-msvc/release/dtmt.exe
|
||||
install -t "$OUTPUT/" target/x86_64-pc-windows-msvc/release/dtmm.exe
|
||||
fi
|
||||
;;
|
||||
linux)
|
||||
cp /src/*.a ./lib/oodle/
|
||||
|
||||
title "Building project for target $TARGET"
|
||||
cargo build --color always --locked --profile release-lto
|
||||
|
||||
title "Installing artifacts"
|
||||
install_artifact target/release-lto/dtmt
|
||||
install_artifact target/release-lto/dtmm
|
||||
if [ -d "$OUTPUT" ]; then
|
||||
install -t "$OUTPUT/" target/release/dtmt
|
||||
install -t "$OUTPUT/" target/release/dtmm
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo -e "\033[31;1mEnv var 'TARGET' must either be 'msvc' or 'linux'. Got '$TARGET'.\033[0m" >&2
|
||||
echo "Env var 'TARGET' must either be 'msvc' or 'linux'. Got '$TARGET'." >&2
|
||||
exit 1
|
||||
esac
|
||||
|
||||
title "Done"
|
||||
|
|
|
@ -13,7 +13,7 @@ inputs:
|
|||
- name: repo
|
||||
|
||||
outputs:
|
||||
- name: artifact
|
||||
- name: artifacts
|
||||
|
||||
caches:
|
||||
- path: repo/target
|
||||
|
@ -22,8 +22,14 @@ caches:
|
|||
params:
|
||||
CI: "true"
|
||||
TARGET: ((target))
|
||||
PR: ((pr))
|
||||
OUTPUT: artifact
|
||||
GITEA_API_KEY: ((gitea_api_key))
|
||||
REF: ((ref))
|
||||
OUTPUT: artifacts
|
||||
|
||||
run:
|
||||
path: repo/.ci/tasks/build.sh
|
||||
path: .ci/util/run.sh
|
||||
dir: repo
|
||||
args:
|
||||
- .ci/tasks/build.sh
|
||||
- build/((target))
|
||||
- "Build for the target platform: ((target))"
|
||||
|
|
|
@ -1,15 +1,7 @@
|
|||
#!/bin/sh
|
||||
|
||||
set -eu
|
||||
set -eux
|
||||
|
||||
title() {
|
||||
printf "\033[1m%s\033[0m\n" "$1"
|
||||
}
|
||||
|
||||
title "Install clippy"
|
||||
rustup component add clippy
|
||||
|
||||
title "Run clippy"
|
||||
cargo clippy --color always --no-deps
|
||||
|
||||
title "Done"
|
||||
|
|
|
@ -19,8 +19,13 @@ caches:
|
|||
params:
|
||||
CI: "true"
|
||||
GITEA_API_KEY: ((gitea_api_key))
|
||||
REF: ((ref))
|
||||
|
||||
run:
|
||||
path: .ci/tasks/clippy.sh
|
||||
path: .ci/util/run.sh
|
||||
dir: repo
|
||||
args:
|
||||
- .ci/tasks/clippy.sh
|
||||
- lint/clippy
|
||||
- "Checking for common mistakes and opportunities for code improvement"
|
||||
|
||||
|
|
31
.ci/tasks/upload.sh
Executable file
31
.ci/tasks/upload.sh
Executable file
|
@ -0,0 +1,31 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -eu
|
||||
|
||||
artifacts="$PWD/artifacts"
|
||||
repo="$PWD/repo"
|
||||
|
||||
base_url="${GITEA_URL}/api/packages/${GITEA_USER}/generic"
|
||||
|
||||
cd "$repo"
|
||||
|
||||
if [ -n "$PR" ]; then
|
||||
echo "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
|
||||
ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short HEAD 2>/dev/null || echo 'manual')"
|
||||
else
|
||||
ref=$(git describe --tags)
|
||||
fi
|
||||
|
||||
echo "ref: $ref"
|
||||
|
||||
# TODO: If this is a tag, check the tag name to determine which
|
||||
# binary was affected and only upload that.
|
||||
for f in dtmt dtmt.exe dtmm dtmm.exe; do
|
||||
if [ -f "$artifacts/$f" ]; then
|
||||
url="$base_url/$(basename -s .exe $f)/$ref/$f"
|
||||
curl -i -X 'PUT' \
|
||||
--user "concourse:$GITEA_API_KEY" \
|
||||
--upload-file "$artifacts/$f" \
|
||||
"$url"
|
||||
fi
|
||||
done
|
24
.ci/tasks/upload.yml
Normal file
24
.ci/tasks/upload.yml
Normal file
|
@ -0,0 +1,24 @@
|
|||
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig
|
||||
---
|
||||
platform: linux
|
||||
|
||||
image_resource:
|
||||
name: python-script
|
||||
type: registry-image
|
||||
source:
|
||||
repository: registry.local:5000/python-script
|
||||
tag: latest
|
||||
|
||||
inputs:
|
||||
- name: repo
|
||||
- name: ((input))
|
||||
|
||||
params:
|
||||
CI: "true"
|
||||
GITEA_API_KEY: ((gitea_api_key))
|
||||
GITEA_URL: ((gitea_url))
|
||||
GITEA_USER: ((user))
|
||||
PR: ((pr))
|
||||
|
||||
run:
|
||||
path: repo/.ci/tasks/upload.sh
|
6
.gitattributes
vendored
6
.gitattributes
vendored
|
@ -1,6 +0,0 @@
|
|||
* text=auto
|
||||
|
||||
*.xcf filter=lfs diff=lfs merge=lfs -text
|
||||
*.ico filter=lfs diff=lfs merge=lfs -text
|
||||
*.png filter=lfs diff=lfs merge=lfs -text
|
||||
*.jpg filter=lfs diff=lfs merge=lfs -text
|
3
.gitignore
vendored
3
.gitignore
vendored
|
@ -6,3 +6,6 @@
|
|||
*.dll
|
||||
*.lib
|
||||
dictionary.csv
|
||||
|
||||
signing/*
|
||||
!signing/.gitkeep
|
||||
|
|
1
.gitmodules
vendored
1
.gitmodules
vendored
|
@ -7,7 +7,6 @@
|
|||
[submodule "lib/color-eyre"]
|
||||
path = lib/color-eyre
|
||||
url = https://github.com/sclu1034/color-eyre.git
|
||||
branch = "fork"
|
||||
[submodule "lib/ansi-parser"]
|
||||
path = lib/ansi-parser
|
||||
url = https://gitlab.com/lschwiderski/ansi-parser.git
|
||||
|
|
15
.renovaterc
15
.renovaterc
|
@ -1,15 +0,0 @@
|
|||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:recommended",
|
||||
":combinePatchMinorReleases",
|
||||
":enableVulnerabilityAlerts",
|
||||
":rebaseStalePrs"
|
||||
],
|
||||
"prConcurrentLimit": 10,
|
||||
"branchPrefix": "renovate/",
|
||||
"baseBranches": [
|
||||
"$default",
|
||||
"/^release\\/.*/"
|
||||
]
|
||||
}
|
|
@ -16,12 +16,6 @@
|
|||
- dtmt: add utility to migrate mod projects
|
||||
- dtmm: reset dtkit-patch installations
|
||||
- sdk: implement decompiling Lua files
|
||||
- dtmm: fetch cover image for Nexus mods
|
||||
- dtmm: fetch file version for Nexus mods
|
||||
- dtmm: handle `nxm://` URIs via IPC and import the corresponding mod
|
||||
- dtmm: Add button to open mod on nexusmods.com
|
||||
- dtmt: Implement commands to list bundles and contents
|
||||
- dtmt: Implement command to search for files
|
||||
|
||||
=== Fixed
|
||||
|
||||
|
|
1992
Cargo.lock
generated
1992
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
60
Cargo.toml
60
Cargo.toml
|
@ -7,57 +7,15 @@ members = [
|
|||
"lib/oodle",
|
||||
"lib/sdk",
|
||||
"lib/serde_sjson",
|
||||
"lib/luajit2-sys",
|
||||
"lib/color-eyre",
|
||||
]
|
||||
exclude = ["lib/color-eyre"]
|
||||
exclude = [
|
||||
"lib/color-eyre",
|
||||
"lib/ansi-parser",
|
||||
]
|
||||
|
||||
[workspace.dependencies]
|
||||
ansi-parser = "0.9.1"
|
||||
ansi_term = "0.12.1"
|
||||
async-recursion = "1.0.5"
|
||||
bincode = "1.3.3"
|
||||
bitflags = "2.5.0"
|
||||
byteorder = "1.4.3"
|
||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
|
||||
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
|
||||
[patch.crates-io]
|
||||
color-eyre = { path = "lib/color-eyre" }
|
||||
colors-transform = "0.2.11"
|
||||
confy = "0.6.1"
|
||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
|
||||
druid-widget-nursery = "0.1"
|
||||
dtmt-shared = { path = "lib/dtmt-shared" }
|
||||
fastrand = "2.1.0"
|
||||
futures = "0.3.25"
|
||||
futures-util = "0.3.24"
|
||||
glob = "0.3.0"
|
||||
interprocess = "2.1.0"
|
||||
lazy_static = "1.4.0"
|
||||
luajit2-sys = { path = "lib/luajit2-sys" }
|
||||
minijinja = { version = "2.0.1", default-features = false }
|
||||
nanorand = "0.7.0"
|
||||
nexusmods = { path = "lib/nexusmods" }
|
||||
notify = "8.0.0"
|
||||
oodle = { path = "lib/oodle" }
|
||||
open = "5.0.1"
|
||||
path-clean = "1.0.1"
|
||||
path-slash = "0.2.1"
|
||||
pin-project-lite = "0.2.9"
|
||||
promptly = "0.3.1"
|
||||
sdk = { path = "lib/sdk" }
|
||||
serde = { version = "1.0.152", features = ["derive", "rc"] }
|
||||
serde_sjson = { path = "lib/serde_sjson" }
|
||||
steamlocate = "2.0.0-beta.2"
|
||||
strip-ansi-escapes = "0.2.0"
|
||||
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
|
||||
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
|
||||
tracing = { version = "0.1.37", features = ["async-await"] }
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||
usvg = "0.25.0"
|
||||
zip = { version = "2.1.3", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
|
||||
ansi-parser = { path = "lib/ansi-parser" }
|
||||
|
||||
[profile.dev.package.backtrace]
|
||||
opt-level = 3
|
||||
|
@ -71,9 +29,3 @@ strip = "debuginfo"
|
|||
[profile.release-lto]
|
||||
inherits = "release"
|
||||
lto = true
|
||||
|
||||
[profile.perf]
|
||||
inherits = "release"
|
||||
strip = false
|
||||
lto = true
|
||||
debug = "line-tables-only"
|
||||
|
|
48
Justfile
48
Justfile
|
@ -1,13 +1,3 @@
|
|||
set positional-arguments
|
||||
|
||||
fly_target := "main"
|
||||
|
||||
build-perf-dtmt:
|
||||
cargo build --profile perf --bin dtmt
|
||||
|
||||
perf-dtmt *args='': build-perf-dtmt
|
||||
perf record --call-graph dwarf ./target/perf/dtmt "$@"
|
||||
|
||||
ci-build: ci-build-msvc ci-build-linux
|
||||
|
||||
ci-build-msvc:
|
||||
|
@ -24,36 +14,14 @@ build-image-msvc:
|
|||
build-image-linux:
|
||||
docker build -f .ci/Dockerfile.linux .
|
||||
|
||||
ci-image:
|
||||
# The MSVC image depends on the Linux image. So by building that first,
|
||||
# we actually build both, and cache them, so that "building" the
|
||||
# Linux image afterwards merely needs to pull the cache.
|
||||
docker build --target msvc -t dtmt-ci-base-msvc -f .ci/image/Dockerfile .
|
||||
docker build --target linux -t dtmt-ci-base-linux -f .ci/image/Dockerfile .
|
||||
ci-image: ci-image-msvc ci-image-linux
|
||||
|
||||
ci-image-msvc: ci-image-linux
|
||||
docker build -t dtmt-ci-base-msvc -f .ci/image/Dockerfile.msvc .ci/image
|
||||
docker tag dtmt-ci-base-msvc registry.sclu1034.dev/dtmt-ci-base-msvc
|
||||
docker tag dtmt-ci-base-linux registry.sclu1034.dev/dtmt-ci-base-linux
|
||||
docker push registry.sclu1034.dev/dtmt-ci-base-msvc
|
||||
|
||||
ci-image-linux:
|
||||
docker build -t dtmt-ci-base-linux -f .ci/image/Dockerfile.linux .ci/image
|
||||
docker tag dtmt-ci-base-linux registry.sclu1034.dev/dtmt-ci-base-linux
|
||||
docker push registry.sclu1034.dev/dtmt-ci-base-linux
|
||||
|
||||
set-base-pipeline:
|
||||
fly -t {{fly_target}} set-pipeline \
|
||||
--pipeline dtmt \
|
||||
--config .ci/pipelines/base.yml \
|
||||
-v gitea_api_key=${GITEA_API_KEY} \
|
||||
-v owner=bitsquid_dt \
|
||||
-v repo=dtmt
|
||||
|
||||
set-pr-pipeline pr:
|
||||
curl \
|
||||
-H "Authorization: ${GITEA_API_KEY}" \
|
||||
-H 'Accept: application/json' \
|
||||
'https://git.sclu1034.dev/api/v1/repos/bitsquid_dt/dtmt/pulls/{{pr}}' \
|
||||
| yq -y '.' - > 'pr-{{pr}}.yaml'
|
||||
fly -t main set-pipeline \
|
||||
--pipeline dtmt-pr \
|
||||
--config .ci/pipelines/pr.yml \
|
||||
-v gitea_api_key=${GITEA_API_KEY} \
|
||||
-i number={{pr}} \
|
||||
-y branch="$(yq -y '.head.ref' 'pr-{{pr}}.yaml')" \
|
||||
-y pr="$(cat 'pr-{{pr}}.yaml')"
|
||||
|
||||
|
|
|
@ -2,48 +2,34 @@
|
|||
name = "dtmm"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
authors = ["Lucas Schwiderski <lucas@lschwiderski.de>"]
|
||||
description = "DTMM is a GUI application to install and manage mods for the game."
|
||||
documentation = "https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki"
|
||||
repository = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
|
||||
homepage = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
|
||||
license-file = "LICENSE"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ansi-parser = { workspace = true }
|
||||
async-recursion = { workspace = true }
|
||||
bincode = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
colors-transform = { workspace = true }
|
||||
confy = { workspace = true }
|
||||
druid = { workspace = true }
|
||||
druid-widget-nursery = { workspace = true }
|
||||
dtmt-shared = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
interprocess = { workspace = true }
|
||||
lazy_static = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
minijinja = { workspace = true }
|
||||
nexusmods = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
open = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
sdk = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
strip-ansi-escapes = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
usvg = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
winres = "0.1.12"
|
||||
bitflags = "1.3.2"
|
||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
|
||||
color-eyre = "0.6.2"
|
||||
confy = "0.5.1"
|
||||
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
|
||||
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
|
||||
futures = "0.3.25"
|
||||
oodle = { path = "../../lib/oodle", version = "*" }
|
||||
sdk = { path = "../../lib/sdk", version = "*" }
|
||||
nexusmods = { path = "../../lib/nexusmods", version = "*" }
|
||||
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||
serde = { version = "1.0.152", features = ["derive", "rc"] }
|
||||
tokio = { version = "1.23.0", features = ["rt", "fs", "tracing", "sync"] }
|
||||
tracing = "0.1.37"
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||
zip = "0.6.4"
|
||||
tokio-stream = { version = "0.1.12", features = ["fs"] }
|
||||
path-slash = "0.2.1"
|
||||
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset"] }
|
||||
strip-ansi-escapes = "0.1.1"
|
||||
lazy_static = "1.4.0"
|
||||
colors-transform = "0.2.11"
|
||||
usvg = "0.25.0"
|
||||
druid-widget-nursery = "0.1"
|
||||
ansi-parser = "0.8.0"
|
||||
string_template = "0.2.1"
|
||||
|
|
BIN
crates/dtmm/assets/DTMM_logo.xcf
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo.xcf
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_256.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_256.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_48.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_48.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_64.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_64.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_border.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_border.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_faint_glow.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_faint_glow.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_small.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_small.png
(Stored with Git LFS)
Binary file not shown.
|
@ -1,11 +0,0 @@
|
|||
[Desktop Entry]
|
||||
Name=DTMM
|
||||
GenericName=Mod Manager
|
||||
Comment=A graphical mod manager for Warhammer 40,000: Darktide
|
||||
Exec=dtmm %u
|
||||
Type=Application
|
||||
Keywords=Mod;
|
||||
StartupNotify=true
|
||||
Categories=Utility;
|
||||
MimeType=x-scheme-handler/nxm;
|
||||
Icon=dtmm
|
BIN
crates/dtmm/assets/dtmm.ico
(Stored with Git LFS)
BIN
crates/dtmm/assets/dtmm.ico
(Stored with Git LFS)
Binary file not shown.
|
@ -1,70 +0,0 @@
|
|||
local StateGame = require("scripts/game_states/state_game")
|
||||
local StateSplash = require("scripts/game_states/game/state_splash")
|
||||
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
|
||||
|
||||
local function hook(obj, fn_name, cb)
|
||||
local orig = obj[fn_name]
|
||||
|
||||
obj[fn_name] = function(...)
|
||||
return cb(orig, ...)
|
||||
end
|
||||
end
|
||||
|
||||
function init(mod_data, boot_gui)
|
||||
local ModLoader = require("scripts/mods/mod_loader")
|
||||
local mod_loader = ModLoader:new(mod_data, boot_gui)
|
||||
|
||||
-- The mod loader needs to remain active during game play, to
|
||||
-- enable reloads
|
||||
hook(StateGame, "update", function(func, dt, ...)
|
||||
mod_loader:update(dt)
|
||||
return func(dt, ...)
|
||||
end)
|
||||
|
||||
-- Skip splash view
|
||||
hook(StateSplash, "on_enter", function(func, self, ...)
|
||||
local result = func(self, ...)
|
||||
|
||||
self._should_skip = true
|
||||
self._continue = true
|
||||
|
||||
return result
|
||||
end)
|
||||
|
||||
-- Trigger state change events
|
||||
hook(GameStateMachine, "_change_state", function(func, self, ...)
|
||||
local old_state = self._state
|
||||
local old_state_name = old_state and self:current_state_name()
|
||||
|
||||
if old_state_name then
|
||||
mod_loader:on_game_state_changed("exit", old_state_name, old_state)
|
||||
end
|
||||
|
||||
local result = func(self, ...)
|
||||
|
||||
local new_state = self._state
|
||||
local new_state_name = new_state and self:current_state_name()
|
||||
|
||||
if new_state_name then
|
||||
mod_loader:on_game_state_changed("enter", new_state_name, new_state)
|
||||
end
|
||||
|
||||
return result
|
||||
end)
|
||||
|
||||
-- Trigger ending state change event
|
||||
hook(GameStateMachine, "destroy", function(func, self, ...)
|
||||
local old_state = self._state
|
||||
local old_state_name = old_state and self:current_state_name()
|
||||
|
||||
if old_state_name then
|
||||
mod_loader:on_game_state_changed("exit", old_state_name)
|
||||
end
|
||||
|
||||
return func(self, ...)
|
||||
end)
|
||||
|
||||
return mod_loader
|
||||
end
|
||||
|
||||
return init
|
|
@ -1,28 +0,0 @@
|
|||
return {
|
||||
{% for mod in mods %}
|
||||
{
|
||||
id = "{{ mod.id }}",
|
||||
name = "{{ mod.name }}",
|
||||
bundled = {{ mod.bundled }},
|
||||
version = {{ mod.version }},
|
||||
packages = {
|
||||
{% for pkg in mod.packages %}
|
||||
"{{ pkg }}",
|
||||
{% endfor %}
|
||||
},
|
||||
run = function()
|
||||
{% if mod.data is none %}
|
||||
return dofile("{{ mod.init }}")
|
||||
{% else %}
|
||||
new_mod("{{ mod.id }}", {
|
||||
mod_script = "{{ mod.init }}",
|
||||
mod_data = "{{ mod.data }}",
|
||||
{% if not mod.localization is none %}
|
||||
mod_localization = "{{ mod.localization }}",
|
||||
{% endif %}
|
||||
})
|
||||
{% endif %}
|
||||
end,
|
||||
},
|
||||
{% endfor %}
|
||||
}
|
|
@ -1,412 +0,0 @@
|
|||
-- Copyright on this file is owned by Fatshark.
|
||||
-- It is extracted, used and modified with permission only for
|
||||
-- the purpose of loading mods within Warhammer 40,000: Darktide.
|
||||
local ModLoader = class("ModLoader")
|
||||
|
||||
local table_unpack = table.unpack or unpack
|
||||
local table_pack = table.pack or pack
|
||||
|
||||
local ScriptGui = require("scripts/foundation/utilities/script_gui")
|
||||
|
||||
local FONT_MATERIAL = "content/ui/fonts/arial"
|
||||
|
||||
local LOG_LEVELS = {
|
||||
spew = 4,
|
||||
info = 3,
|
||||
warning = 2,
|
||||
error = 1
|
||||
}
|
||||
local DEFAULT_SETTINGS = {
|
||||
log_level = LOG_LEVELS.error,
|
||||
developer_mode = false
|
||||
}
|
||||
|
||||
local Keyboard = Keyboard
|
||||
local BUTTON_INDEX_R = Keyboard.button_index("r")
|
||||
local BUTTON_INDEX_LEFT_SHIFT = Keyboard.button_index("left shift")
|
||||
local BUTTON_INDEX_LEFT_CTRL = Keyboard.button_index("left ctrl")
|
||||
|
||||
ModLoader.init = function(self, mod_data, boot_gui)
|
||||
table.dump(mod_data, nil, 5, function(...) Log.info("ModLoader", ...) end)
|
||||
|
||||
self._mod_data = mod_data
|
||||
self._gui = boot_gui
|
||||
|
||||
self._settings = Application.user_setting("mod_settings") or DEFAULT_SETTINGS
|
||||
|
||||
self._mods = {}
|
||||
self._num_mods = nil
|
||||
self._chat_print_buffer = {}
|
||||
self._reload_data = {}
|
||||
self._ui_time = 0
|
||||
|
||||
self._state = "scanning"
|
||||
end
|
||||
|
||||
ModLoader.developer_mode_enabled = function(self)
|
||||
return self._settings.developer_mode
|
||||
end
|
||||
|
||||
ModLoader.set_developer_mode = function(self, enabled)
|
||||
self._settings.developer_mode = enabled
|
||||
end
|
||||
|
||||
ModLoader._draw_state_to_gui = function(self, gui, dt)
|
||||
local state = self._state
|
||||
local t = self._ui_time + dt
|
||||
self._ui_time = t
|
||||
local status_str = "Loading mods"
|
||||
|
||||
if state == "scanning" then
|
||||
status_str = "Scanning for mods"
|
||||
elseif state == "loading" or state == "initializing" then
|
||||
local mod = self._mods[self._mod_load_index]
|
||||
status_str = string.format("Loading mod %q", mod.name)
|
||||
end
|
||||
|
||||
local msg = status_str .. string.rep(".", (2 * t) % 4)
|
||||
ScriptGui.text(gui, msg, FONT_MATERIAL, 25, Vector3(20, 30, 1), Color.white())
|
||||
end
|
||||
|
||||
ModLoader.remove_gui = function(self)
|
||||
self._gui = nil
|
||||
end
|
||||
|
||||
ModLoader.mod_data = function(self, id)
|
||||
-- Since this primarily exists for DMF,
|
||||
-- we can optimize the search for its use case of looking for the
|
||||
-- mod currently being loaded
|
||||
local mod_data = self._mods[self._mod_load_index]
|
||||
|
||||
if mod_data.id ~= id then
|
||||
mod_data = nil
|
||||
|
||||
for _, v in ipairs(self._mods) do
|
||||
if v.id == id then
|
||||
mod_data = v
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return mod_data
|
||||
end
|
||||
|
||||
ModLoader._check_reload = function()
|
||||
return Keyboard.pressed(BUTTON_INDEX_R) and
|
||||
Keyboard.button(BUTTON_INDEX_LEFT_SHIFT) +
|
||||
Keyboard.button(BUTTON_INDEX_LEFT_CTRL) == 2
|
||||
end
|
||||
|
||||
ModLoader.update = function(self, dt)
|
||||
local chat_print_buffer = self._chat_print_buffer
|
||||
local num_delayed_prints = #chat_print_buffer
|
||||
|
||||
if num_delayed_prints > 0 and Managers.chat then
|
||||
for i = 1, num_delayed_prints, 1 do
|
||||
-- TODO: Use new chat system
|
||||
-- Managers.chat:add_local_system_message(1, chat_print_buffer[i], true)
|
||||
|
||||
chat_print_buffer[i] = nil
|
||||
end
|
||||
end
|
||||
|
||||
local old_state = self._state
|
||||
|
||||
if self._settings.developer_mode and self:_check_reload() then
|
||||
self._reload_requested = true
|
||||
end
|
||||
|
||||
if self._reload_requested and old_state == "done" then
|
||||
self:_reload_mods()
|
||||
end
|
||||
|
||||
if old_state == "done" then
|
||||
self:_run_callbacks("update", dt)
|
||||
elseif old_state == "scanning" then
|
||||
Log.info("ModLoader", "Scanning for mods")
|
||||
self:_build_mod_table()
|
||||
|
||||
self._state = self:_load_mod(1)
|
||||
self._ui_time = 0
|
||||
elseif old_state == "loading" then
|
||||
local handle = self._loading_resource_handle
|
||||
|
||||
if ResourcePackage.has_loaded(handle) then
|
||||
ResourcePackage.flush(handle)
|
||||
|
||||
local mod = self._mods[self._mod_load_index]
|
||||
local next_index = mod.package_index + 1
|
||||
local mod_data = mod.data
|
||||
|
||||
if next_index <= #mod_data.packages then
|
||||
self:_load_package(mod, next_index)
|
||||
else
|
||||
self._state = "initializing"
|
||||
end
|
||||
end
|
||||
elseif old_state == "initializing" then
|
||||
local mod = self._mods[self._mod_load_index]
|
||||
local mod_data = mod.data
|
||||
|
||||
Log.info("ModLoader", "Initializing mod %q", mod.name)
|
||||
|
||||
mod.state = "running"
|
||||
local ok, object = xpcall(mod_data.run, function(err)
|
||||
if type(err) == "string" then
|
||||
return err .. "\n" .. Script.callstack()
|
||||
else
|
||||
return err
|
||||
end
|
||||
end)
|
||||
|
||||
if not ok then
|
||||
if object.error then
|
||||
object = string.format(
|
||||
"%s\n<<Lua Stack>>\n%s\n<</Lua Stack>>\n<<Lua Locals>>\n%s\n<</Lua Locals>>\n<<Lua Self>>\n%s\n<</Lua Self>>",
|
||||
object.error, object.traceback, object.locals, object.self)
|
||||
end
|
||||
|
||||
Log.error("ModLoader", "Failed 'run' for %q: %s", mod.name, object)
|
||||
end
|
||||
|
||||
mod.object = object or {}
|
||||
|
||||
self:_run_callback(mod, "init", self._reload_data[mod.id])
|
||||
|
||||
Log.info("ModLoader", "Finished loading %q", mod.name)
|
||||
|
||||
self._state = self:_load_mod(self._mod_load_index + 1)
|
||||
end
|
||||
|
||||
local gui = self._gui
|
||||
if gui then
|
||||
self:_draw_state_to_gui(gui, dt)
|
||||
end
|
||||
|
||||
if old_state ~= self._state then
|
||||
Log.info("ModLoader", "%s -> %s", old_state, self._state)
|
||||
end
|
||||
end
|
||||
|
||||
ModLoader.all_mods_loaded = function(self)
|
||||
return self._state == "done"
|
||||
end
|
||||
|
||||
ModLoader.destroy = function(self)
|
||||
self:_run_callbacks("on_destroy")
|
||||
self:unload_all_mods()
|
||||
end
|
||||
|
||||
ModLoader._run_callbacks = function(self, callback_name, ...)
|
||||
for i = 1, self._num_mods, 1 do
|
||||
local mod = self._mods[i]
|
||||
|
||||
if mod and not mod.callbacks_disabled then
|
||||
self:_run_callback(mod, callback_name, ...)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
ModLoader._run_callback = function(self, mod, callback_name, ...)
|
||||
local object = mod.object
|
||||
local cb = object[callback_name]
|
||||
|
||||
if not cb then
|
||||
return
|
||||
end
|
||||
|
||||
local args = table_pack(...)
|
||||
|
||||
local success, val = xpcall(
|
||||
function() return cb(object, table_unpack(args)) end,
|
||||
function(err)
|
||||
if type(err) == "string" then
|
||||
return err .. "\n" .. Script.callstack()
|
||||
else
|
||||
return err
|
||||
end
|
||||
end
|
||||
)
|
||||
|
||||
if success then
|
||||
return val
|
||||
else
|
||||
Log.error("ModLoader", "Failed to run callback %q for mod %q with id %q. Disabling callbacks until reload.",
|
||||
callback_name, mod.name, mod.id)
|
||||
if val.error then
|
||||
Log.error("ModLoader",
|
||||
"Error: %s\n<<Lua Stack>>\n%s<</Lua Stack>>\n<<Lua Locals>>\n%s<</Lua Locals>>\n<<Lua Self>>\n%s<</Lua Self>>",
|
||||
val.error, val.traceback, val.locals, val.self)
|
||||
else
|
||||
Log.error("ModLoader", "Error: %s", val or "[unknown error]")
|
||||
end
|
||||
|
||||
mod.callbacks_disabled = true
|
||||
end
|
||||
end
|
||||
|
||||
ModLoader._start_scan = function(self)
|
||||
Log.info("ModLoader", "Starting mod scan")
|
||||
self._state = "scanning"
|
||||
end
|
||||
|
||||
ModLoader._build_mod_table = function(self)
|
||||
fassert(table.is_empty(self._mods), "Trying to add mods to non-empty mod table")
|
||||
|
||||
for i, mod_data in ipairs(self._mod_data) do
|
||||
Log.info(
|
||||
"ModLoader",
|
||||
"mods[%d] = id=%q | name=%q | version=%q | bundled=%s",
|
||||
i,
|
||||
mod_data.id,
|
||||
mod_data.name,
|
||||
mod_data.version,
|
||||
tostring(mod_data.bundled)
|
||||
)
|
||||
|
||||
self._mods[i] = {
|
||||
id = mod_data.id,
|
||||
state = "not_loaded",
|
||||
callbacks_disabled = false,
|
||||
name = mod_data.name,
|
||||
loaded_packages = {},
|
||||
packages = mod_data.packages,
|
||||
data = mod_data,
|
||||
bundled = mod_data.bundled or false,
|
||||
}
|
||||
end
|
||||
|
||||
self._num_mods = #self._mods
|
||||
|
||||
Log.info("ModLoader", "Found %i mods", self._num_mods)
|
||||
end
|
||||
|
||||
ModLoader._load_mod = function(self, index)
|
||||
self._ui_time = 0
|
||||
local mods = self._mods
|
||||
local mod = mods[index]
|
||||
|
||||
if not mod then
|
||||
table.clear(self._reload_data)
|
||||
|
||||
return "done"
|
||||
end
|
||||
|
||||
Log.info("ModLoader", "Loading mod %q", mod.id)
|
||||
|
||||
mod.state = "loading"
|
||||
|
||||
Crashify.print_property(string.format("Mod:%s", mod.name), true)
|
||||
|
||||
self._mod_load_index = index
|
||||
|
||||
if mod.bundled and mod.packages[1] then
|
||||
self:_load_package(mod, 1)
|
||||
return "loading"
|
||||
else
|
||||
return "initializing"
|
||||
end
|
||||
end
|
||||
|
||||
ModLoader._load_package = function(self, mod, index)
|
||||
mod.package_index = index
|
||||
local package_name = mod.packages[index]
|
||||
|
||||
if not package_name then
|
||||
return
|
||||
end
|
||||
|
||||
Log.info("ModLoader", "Loading package %q", package_name)
|
||||
|
||||
local resource_handle = Application.resource_package(package_name)
|
||||
self._loading_resource_handle = resource_handle
|
||||
|
||||
ResourcePackage.load(resource_handle)
|
||||
|
||||
table.insert(mod.loaded_packages, resource_handle)
|
||||
end
|
||||
|
||||
ModLoader.unload_all_mods = function(self)
|
||||
if self._state ~= "done" then
|
||||
Log.error("ModLoader", "Mods can't be unloaded, mod state is not \"done\". current: %q", self._state)
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
Log.info("ModLoader", "Unload all mod packages")
|
||||
|
||||
for i = self._num_mods, 1, -1 do
|
||||
local mod = self._mods[i]
|
||||
|
||||
if mod then
|
||||
self:unload_mod(i)
|
||||
end
|
||||
|
||||
self._mods[i] = nil
|
||||
end
|
||||
|
||||
self._num_mods = nil
|
||||
self._state = "unloaded"
|
||||
end
|
||||
|
||||
ModLoader.unload_mod = function(self, index)
|
||||
local mod = self._mods[index]
|
||||
|
||||
if mod then
|
||||
Log.info("ModLoader", "Unloading %q.", mod.name)
|
||||
|
||||
for _, handle in ipairs(mod.loaded_packages) do
|
||||
ResourcePackage.unload(handle)
|
||||
Application.release_resource_package(handle)
|
||||
end
|
||||
|
||||
mod.state = "not_loaded"
|
||||
else
|
||||
Log.error("ModLoader", "Mod index %i can't be unloaded, has not been loaded", index)
|
||||
end
|
||||
end
|
||||
|
||||
ModLoader._reload_mods = function(self)
|
||||
Log.info("ModLoader", "reloading mods")
|
||||
|
||||
for i = 1, self._num_mods, 1 do
|
||||
local mod = self._mods[i]
|
||||
|
||||
if mod and mod.state == "running" then
|
||||
Log.info("ModLoader", "reloading %s", mod.name)
|
||||
|
||||
self._reload_data[mod.id] = self:_run_callback(mod, "on_reload")
|
||||
else
|
||||
Log.info("ModLoader", "not reloading mod, state: %s", mod.state)
|
||||
end
|
||||
end
|
||||
|
||||
self:unload_all_mods()
|
||||
self:_start_scan()
|
||||
|
||||
self._reload_requested = false
|
||||
end
|
||||
|
||||
ModLoader.on_game_state_changed = function(self, status, state_name, state_object)
|
||||
if self._state == "done" then
|
||||
self:_run_callbacks("on_game_state_changed", status, state_name, state_object)
|
||||
else
|
||||
Log.warning("ModLoader", "Ignored on_game_state_changed call due to being in state %q", self._state)
|
||||
end
|
||||
end
|
||||
|
||||
ModLoader.print = function(self, level, str, ...)
|
||||
local f = Log[level]
|
||||
if f then
|
||||
f("ModLoader", str, ...)
|
||||
else
|
||||
local message = string.format("[ModLoader][" .. level .. "] " .. str, ...)
|
||||
local log_level = LOG_LEVELS[level] or 99
|
||||
|
||||
if log_level <= 2 then
|
||||
print(message)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
return ModLoader
|
|
@ -11,15 +11,108 @@ local log = function(category, format, ...)
|
|||
end
|
||||
end
|
||||
|
||||
-- Patch `GameStateMachine.init` to add our own state for loading mods.
|
||||
-- In the future, Fatshark might provide us with a dedicated way to do this.
|
||||
local function patch_mod_loading_state()
|
||||
local StateBootSubStateBase = require("scripts/game_states/boot/state_boot_sub_state_base")
|
||||
|
||||
-- A necessary override.
|
||||
-- The original does not proxy `dt` to `_state_update`, but we need that.
|
||||
StateBootSubStateBase.update = function(self, dt)
|
||||
local done, error = self:_state_update(dt)
|
||||
local params = self._params
|
||||
|
||||
if error then
|
||||
return StateError, { error }
|
||||
elseif done then
|
||||
local next_index = params.sub_state_index + 1
|
||||
params.sub_state_index = next_index
|
||||
local next_state_data = params.states[next_index]
|
||||
|
||||
if next_state_data then
|
||||
return next_state_data[1], self._params
|
||||
else
|
||||
self._parent:sub_states_done()
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
local StateBootLoadMods = class("StateBootLoadMods", "StateBootSubStateBase")
|
||||
|
||||
StateBootLoadMods.on_enter = function(self, parent, params)
|
||||
log("StateBootLoadMods", "Entered")
|
||||
StateBootLoadMods.super.on_enter(self, parent, params)
|
||||
|
||||
local state_params = self:_state_params()
|
||||
local package_manager = state_params.package_manager
|
||||
|
||||
self._state = "load_package"
|
||||
self._package_manager = package_manager
|
||||
self._package_handles = {
|
||||
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadMods", nil),
|
||||
["packages/dml"] = package_manager:load("packages/dml", "StateBootLoadMods", nil),
|
||||
}
|
||||
end
|
||||
|
||||
StateBootLoadMods._state_update = function(self, dt)
|
||||
local state = self._state
|
||||
local package_manager = self._package_manager
|
||||
|
||||
if state == "load_package" and package_manager:update() then
|
||||
log("StateBootLoadMods", "Packages loaded, loading mods")
|
||||
self._state = "load_mods"
|
||||
local ModLoader = require("scripts/mods/dml/init")
|
||||
|
||||
local mod_data = require("scripts/mods/mod_data")
|
||||
local mod_loader = ModLoader:new(mod_data, self._parent:gui())
|
||||
|
||||
self._mod_loader = mod_loader
|
||||
Managers.mod = mod_loader
|
||||
elseif state == "load_mods" and self._mod_loader:update(dt) then
|
||||
log("StateBootLoadMods", "Mods loaded, exiting")
|
||||
return true, false
|
||||
end
|
||||
|
||||
return false, false
|
||||
end
|
||||
|
||||
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
|
||||
|
||||
local patched = false
|
||||
|
||||
local GameStateMachine_init = GameStateMachine.init
|
||||
GameStateMachine.init = function(self, parent, start_state, params, ...)
|
||||
if not patched then
|
||||
log("mod_main", "Injecting mod loading state")
|
||||
patched = true
|
||||
|
||||
-- Hardcoded position after `StateRequireScripts`.
|
||||
-- We do want to wait until then, so that most of the game's core
|
||||
-- systems are at least loaded and can be hooked, even if they aren't
|
||||
-- running, yet.
|
||||
local pos = 4
|
||||
table.insert(params.states, pos, {
|
||||
StateBootLoadMods,
|
||||
{
|
||||
package_manager = params.package_manager,
|
||||
},
|
||||
})
|
||||
end
|
||||
|
||||
GameStateMachine_init(self, parent, start_state, params, ...)
|
||||
end
|
||||
|
||||
log("mod_main", "Mod patching complete")
|
||||
end
|
||||
|
||||
log("mod_main", "Initializing mods...")
|
||||
log("mod_main", "[DTMM] Deployment data:\n{{ deployment_info }}")
|
||||
|
||||
local require_store = {}
|
||||
|
||||
-- This token is treated as a string template and filled by DTMM during deployment.
|
||||
-- This allows hiding unsafe I/O functions behind a setting.
|
||||
-- When not replaced, it's also a valid table definition, thereby degrading gracefully.
|
||||
local is_io_enabled = {{ is_io_enabled }} -- luacheck: ignore 113
|
||||
-- It's also a valid table definition, thereby degrading gracefully when not replaced.
|
||||
local is_io_enabled = { { is_io_enabled } } -- luacheck: ignore 113
|
||||
local lua_libs = {
|
||||
debug = debug,
|
||||
os = {
|
||||
|
@ -45,8 +138,7 @@ Mods = {
|
|||
-- Fatshark's code scrubs them.
|
||||
-- The loader can then decide to pass them on to mods, or ignore them
|
||||
lua = setmetatable({}, { __index = lua_libs }),
|
||||
require_store = require_store,
|
||||
original_require = require,
|
||||
require_store = require_store
|
||||
}
|
||||
|
||||
local can_insert = function(filepath, new_result)
|
||||
|
@ -106,102 +198,6 @@ end
|
|||
require("scripts/main")
|
||||
log("mod_main", "'scripts/main' loaded")
|
||||
|
||||
-- We need to inject two states into two different state machines:
|
||||
-- First, we inject one into the `"Main"` state machine at a specific location, so that we're
|
||||
-- still early in the process, but right after `StateRequireScripts` where most game files
|
||||
-- are already available to `require` and hook.
|
||||
-- This is where the `ModLoader` is created initially.
|
||||
-- Then, we inject into the very first position of the `"Game"` state machine. This runs right
|
||||
-- after `StateGame._init_managers`, at which point all the parts needed for DMF and other mods
|
||||
-- have been initialized.
|
||||
-- This is where `ModLoader` will finally start loading mods.
|
||||
local function patch_mod_loading_state()
|
||||
local StateBootLoadDML = class("StateBootLoadDML", "StateBootSubStateBase")
|
||||
local StateGameLoadMods = class("StateGameLoadMods")
|
||||
|
||||
StateBootLoadDML.on_enter = function(self, parent, params)
|
||||
log("StateBootLoadDML", "Entered")
|
||||
StateBootLoadDML.super.on_enter(self, parent, params)
|
||||
|
||||
local state_params = self:_state_params()
|
||||
local package_manager = state_params.package_manager
|
||||
|
||||
self._package_manager = package_manager
|
||||
self._package_handles = {
|
||||
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadDML", nil),
|
||||
}
|
||||
end
|
||||
|
||||
StateBootLoadDML._state_update = function(self, _)
|
||||
local package_manager = self._package_manager
|
||||
|
||||
if package_manager:update() then
|
||||
local mod_data = require("scripts/mods/mod_data")
|
||||
|
||||
local create_mod_loader = require("scripts/mods/init")
|
||||
local mod_loader = create_mod_loader(mod_data)
|
||||
|
||||
Managers.mod = mod_loader
|
||||
|
||||
log("StateBootLoadDML", "DML loaded, exiting")
|
||||
return true, false
|
||||
end
|
||||
|
||||
return false, false
|
||||
end
|
||||
|
||||
|
||||
function StateGameLoadMods:on_enter(_, params)
|
||||
log("StateGameLoadMods", "Entered")
|
||||
self._next_state = require("scripts/game_states/game/state_splash")
|
||||
self._next_state_params = params
|
||||
end
|
||||
|
||||
function StateGameLoadMods:update(_)
|
||||
-- We're relying on the fact that DML internally makes sure
|
||||
-- that `Managers.mod:update()` is being called appropriately.
|
||||
-- The implementation as of this writing is to hook `StateGame.update`.
|
||||
if Managers.mod:all_mods_loaded() then
|
||||
Log.info("StateGameLoadMods", "Mods loaded, exiting")
|
||||
return self._next_state, self._next_state_params
|
||||
end
|
||||
end
|
||||
|
||||
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
|
||||
local GameStateMachine_init = GameStateMachine.init
|
||||
GameStateMachine.init = function(self, parent, start_state, params, creation_context, state_change_callbacks, name)
|
||||
if name == "Main" then
|
||||
log("mod_main", "Injecting StateBootLoadDML")
|
||||
|
||||
-- Hardcoded position after `StateRequireScripts`.
|
||||
-- We need to wait until then to even begin most of our stuff,
|
||||
-- so that most of the game's core systems are at least loaded and can be hooked,
|
||||
-- even if they aren't running, yet.
|
||||
local pos = 4
|
||||
table.insert(params.states, pos, {
|
||||
StateBootLoadDML,
|
||||
{
|
||||
package_manager = params.package_manager,
|
||||
},
|
||||
})
|
||||
|
||||
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
|
||||
elseif name == "Game" then
|
||||
log("mod_main", "Injection StateGameLoadMods")
|
||||
-- The second time around, we want to be the first, so we pass our own
|
||||
-- 'start_state'.
|
||||
-- We can't just have the state machine be initialized and then change its `_next_state`, as by the end of
|
||||
-- `init`, a bunch of stuff will already be initialized.
|
||||
GameStateMachine_init(self, parent, StateGameLoadMods, params, creation_context, state_change_callbacks, name)
|
||||
-- And since we're done now, we can revert the function to its original
|
||||
GameStateMachine.init = GameStateMachine_init
|
||||
else
|
||||
-- In all other cases, simply call the original
|
||||
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Override `init` to run our injection
|
||||
function init()
|
||||
patch_mod_loading_state()
|
||||
|
@ -212,5 +208,3 @@ function init()
|
|||
|
||||
Main:init()
|
||||
end
|
||||
|
||||
-- vim: ft=lua
|
|
@ -1,8 +0,0 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-cloud-download" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
|
||||
<path d="M19 18a3.5 3.5 0 0 0 0 -7h-1a5 4.5 0 0 0 -11 -2a4.6 4.4 0 0 0 -2.1 8.4" />
|
||||
<path d="M12 13l0 9" />
|
||||
<path d="M9 19l3 3l3 -3" />
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 439 B |
|
@ -1,7 +0,0 @@
|
|||
fn main() {
|
||||
if cfg!(target_os = "windows") {
|
||||
let mut res = winres::WindowsResource::new();
|
||||
res.set_icon("assets/dtmm.ico");
|
||||
res.compile().unwrap();
|
||||
}
|
||||
}
|
|
@ -1,17 +1,18 @@
|
|||
use std::collections::HashMap;
|
||||
use std::io::ErrorKind;
|
||||
use std::io::{Cursor, ErrorKind, Read};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use color_eyre::eyre::{self, Context};
|
||||
use color_eyre::{Help, Report, Result};
|
||||
use druid::im::Vector;
|
||||
use druid::ImageBuf;
|
||||
use druid::{FileInfo, ImageBuf};
|
||||
use dtmt_shared::ModConfig;
|
||||
use nexusmods::Api as NexusApi;
|
||||
use tokio::fs::{self, DirEntry, File};
|
||||
use tokio_stream::wrappers::ReadDirStream;
|
||||
use tokio_stream::StreamExt;
|
||||
use zip::ZipArchive;
|
||||
|
||||
use crate::state::{ActionState, InitialLoadResult, ModInfo, ModOrder, NexusInfo, PackageInfo};
|
||||
use crate::util;
|
||||
|
@ -19,6 +20,161 @@ use crate::util::config::{ConfigSerialize, LoadOrderEntry};
|
|||
|
||||
use super::read_sjson_file;
|
||||
|
||||
#[tracing::instrument(skip(state))]
|
||||
pub(crate) async fn import_mod(state: ActionState, info: FileInfo) -> Result<ModInfo> {
|
||||
let data = fs::read(&info.path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file {}", info.path.display()))?;
|
||||
let data = Cursor::new(data);
|
||||
|
||||
let nexus = if let Some((_, id, _, _)) = info
|
||||
.path
|
||||
.file_name()
|
||||
.and_then(|s| s.to_str())
|
||||
.and_then(NexusApi::parse_file_name)
|
||||
{
|
||||
if !state.nexus_api_key.is_empty() {
|
||||
let api = NexusApi::new(state.nexus_api_key.to_string())?;
|
||||
let mod_info = api
|
||||
.mods_id(id)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?;
|
||||
Some(NexusInfo::from(mod_info))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut archive = ZipArchive::new(data).wrap_err("Failed to open ZIP archive")?;
|
||||
|
||||
if tracing::enabled!(tracing::Level::DEBUG) {
|
||||
let names = archive.file_names().fold(String::new(), |mut s, name| {
|
||||
s.push('\n');
|
||||
s.push_str(name);
|
||||
s
|
||||
});
|
||||
tracing::debug!("Archive contents:{}", names);
|
||||
}
|
||||
|
||||
let dir_name = {
|
||||
let f = archive.by_index(0).wrap_err("Archive is empty")?;
|
||||
|
||||
if !f.is_dir() {
|
||||
let err = eyre::eyre!("archive does not have a top-level directory");
|
||||
return Err(err).with_suggestion(|| "Use 'dtmt build' to create the mod archive.");
|
||||
}
|
||||
|
||||
let name = f.name();
|
||||
// The directory name is returned with a trailing slash, which we don't want
|
||||
name[..(name.len().saturating_sub(1))].to_string()
|
||||
};
|
||||
|
||||
tracing::info!("Importing mod {}", dir_name);
|
||||
|
||||
let names: Vec<_> = archive.file_names().map(|s| s.to_string()).collect();
|
||||
|
||||
let mod_cfg: ModConfig = {
|
||||
let name = names
|
||||
.iter()
|
||||
.find(|name| name.ends_with("dtmt.cfg"))
|
||||
.ok_or_else(|| eyre::eyre!("archive does not contain mod config"))?;
|
||||
|
||||
let mut f = archive
|
||||
.by_name(name)
|
||||
.wrap_err("Failed to read mod config from archive")?;
|
||||
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read mod config from archive")?;
|
||||
|
||||
let data = String::from_utf8(buf).wrap_err("Mod config is not valid UTF-8")?;
|
||||
|
||||
serde_sjson::from_str(&data).wrap_err("Failed to deserialize mod config")?
|
||||
};
|
||||
|
||||
tracing::debug!(?mod_cfg);
|
||||
|
||||
let files: HashMap<String, Vec<String>> = {
|
||||
let name = names
|
||||
.iter()
|
||||
.find(|name| name.ends_with("files.sjson"))
|
||||
.ok_or_else(|| eyre::eyre!("archive does not contain file index"))?;
|
||||
|
||||
let mut f = archive
|
||||
.by_name(name)
|
||||
.wrap_err("Failed to read file index from archive")?;
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read file index from archive")?;
|
||||
|
||||
let data = String::from_utf8(buf).wrap_err("File index is not valid UTF-8")?;
|
||||
|
||||
serde_sjson::from_str(&data).wrap_err("Failed to deserialize file index")?
|
||||
};
|
||||
|
||||
tracing::trace!(?files);
|
||||
|
||||
let image = if let Some(path) = &mod_cfg.image {
|
||||
let name = names
|
||||
.iter()
|
||||
.find(|name| name.ends_with(&path.display().to_string()))
|
||||
.ok_or_else(|| eyre::eyre!("archive does not contain configured image file"))?;
|
||||
|
||||
let mut f = archive
|
||||
.by_name(name)
|
||||
.wrap_err("Failed to read image file from archive")?;
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read file index from archive")?;
|
||||
|
||||
// Druid somehow doesn't return an error compatible with eyre, here.
|
||||
// So we have to wrap through `Display` manually.
|
||||
let img = match ImageBuf::from_data(&buf) {
|
||||
Ok(img) => img,
|
||||
Err(err) => {
|
||||
let err = Report::msg(err.to_string()).wrap_err("Invalid image data");
|
||||
return Err(err).with_suggestion(|| {
|
||||
"Supported formats are: PNG, JPEG, Bitmap and WebP".to_string()
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
Some(img)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mod_dir = state.mod_dir;
|
||||
|
||||
tracing::trace!("Creating mods directory {}", mod_dir.display());
|
||||
fs::create_dir_all(Arc::as_ref(&mod_dir))
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to create data directory {}", mod_dir.display()))?;
|
||||
|
||||
tracing::trace!("Extracting mod archive to {}", mod_dir.display());
|
||||
archive
|
||||
.extract(Arc::as_ref(&mod_dir))
|
||||
.wrap_err_with(|| format!("Failed to extract archive to {}", mod_dir.display()))?;
|
||||
|
||||
if let Some(nexus) = &nexus {
|
||||
let data = serde_sjson::to_string(nexus).wrap_err("Failed to serialize Nexus info")?;
|
||||
let path = mod_dir.join(&mod_cfg.id).join("nexus.sjson");
|
||||
fs::write(&path, data.as_bytes())
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write Nexus info to '{}'", path.display()))?;
|
||||
}
|
||||
|
||||
let packages = files
|
||||
.into_iter()
|
||||
.map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect())))
|
||||
.collect();
|
||||
let info = ModInfo::new(mod_cfg, packages, image, nexus);
|
||||
|
||||
Ok(info)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(state))]
|
||||
pub(crate) async fn delete_mod(state: ActionState, info: &ModInfo) -> Result<()> {
|
||||
let mod_dir = state.mod_dir.join(&info.id);
|
||||
|
@ -73,13 +229,9 @@ async fn read_mod_dir_entry(res: Result<DirEntry>) -> Result<ModInfo> {
|
|||
Err(err) => return Err(err),
|
||||
};
|
||||
|
||||
let files: HashMap<String, Vec<String>> = if cfg.bundled {
|
||||
read_sjson_file(&index_path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file index '{}'", index_path.display()))?
|
||||
} else {
|
||||
Default::default()
|
||||
};
|
||||
let files: HashMap<String, Vec<String>> = read_sjson_file(&index_path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file index '{}'", index_path.display()))?;
|
||||
|
||||
let image = if let Some(path) = &cfg.image {
|
||||
let path = entry.path().join(path);
|
||||
|
@ -161,21 +313,27 @@ where
|
|||
}
|
||||
|
||||
pub(crate) fn check_mod_order(state: &ActionState) -> Result<()> {
|
||||
{
|
||||
let first = state.mods.get(0);
|
||||
if first.is_none() || !(first.unwrap().id == "dml" && first.unwrap().enabled) {
|
||||
// TODO: Add a suggestion where to get it, once that's published
|
||||
eyre::bail!("'Darktide Mod Loader' needs to be installed, enabled and at the top of the load order");
|
||||
}
|
||||
}
|
||||
|
||||
if tracing::enabled!(tracing::Level::DEBUG) {
|
||||
let order = state
|
||||
.mods
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(_, i)| i.enabled)
|
||||
.fold(String::new(), |mut s, (i, info)| {
|
||||
let order = state.mods.iter().filter(|i| i.enabled).enumerate().fold(
|
||||
String::new(),
|
||||
|mut s, (i, info)| {
|
||||
s.push_str(&format!("{}: {} - {}\n", i, info.id, info.name));
|
||||
s
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
tracing::debug!("Mod order:\n{}", order);
|
||||
}
|
||||
|
||||
for (i, mod_info) in state.mods.iter().enumerate().filter(|(_, i)| i.enabled) {
|
||||
for (i, mod_info) in state.mods.iter().filter(|i| i.enabled).enumerate() {
|
||||
for dep in &mod_info.depends {
|
||||
let dep_info = state.mods.iter().enumerate().find(|(_, m)| m.id == dep.id);
|
||||
|
||||
|
@ -280,18 +438,12 @@ pub(crate) async fn load_initial(path: PathBuf, is_default: bool) -> Result<Init
|
|||
|
||||
let game_info = tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
|
||||
.await
|
||||
.wrap_err("Failed to spawn task to collect Steam game info")?;
|
||||
.wrap_err("Failed to collect Steam game info")?;
|
||||
|
||||
let game_info = match game_info {
|
||||
Ok(game_info) => game_info,
|
||||
Err(err) => {
|
||||
tracing::error!("Failed to collect game info: {:?}", err);
|
||||
None
|
||||
{
|
||||
if config.game_dir.is_none() && game_info.is_none() {
|
||||
tracing::error!("No Game Directory set. Head to the 'Settings' tab to set it manually",);
|
||||
}
|
||||
};
|
||||
|
||||
if config.game_dir.is_none() && game_info.is_none() {
|
||||
tracing::error!("No Game Directory set. Head to the 'Settings' tab to set it manually",);
|
||||
}
|
||||
|
||||
let mod_dir = config.data_dir.join("mods");
|
||||
|
|
|
@ -1,816 +0,0 @@
|
|||
use std::io::{Cursor, ErrorKind};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{eyre, Help, Report, Result};
|
||||
use futures::StreamExt;
|
||||
use futures::{stream, TryStreamExt};
|
||||
use minijinja::Environment;
|
||||
use sdk::filetype::lua;
|
||||
use sdk::filetype::package::Package;
|
||||
use sdk::murmur::Murmur64;
|
||||
use sdk::{
|
||||
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use time::OffsetDateTime;
|
||||
use tokio::fs::{self, DirEntry};
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tracing::Instrument;
|
||||
|
||||
use super::read_sjson_file;
|
||||
use crate::controller::app::check_mod_order;
|
||||
use crate::state::{ActionState, PackageInfo};
|
||||
|
||||
pub const MOD_BUNDLE_NAME: &str = "packages/mods";
|
||||
pub const BOOT_BUNDLE_NAME: &str = "packages/boot";
|
||||
pub const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
|
||||
pub const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
|
||||
pub const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
|
||||
pub const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini";
|
||||
pub const DEPLOYMENT_DATA_PATH: &str = "dtmm-deployment.sjson";
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct DeploymentData {
|
||||
pub bundles: Vec<String>,
|
||||
pub mod_folders: Vec<String>,
|
||||
#[serde(with = "time::serde::iso8601")]
|
||||
pub timestamp: OffsetDateTime,
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
{
|
||||
let path = path.as_ref();
|
||||
let backup_path = {
|
||||
let mut p = PathBuf::from(path);
|
||||
let ext = if let Some(ext) = p.extension() {
|
||||
ext.to_string_lossy().to_string() + ".bak"
|
||||
} else {
|
||||
String::from("bak")
|
||||
};
|
||||
p.set_extension(ext);
|
||||
p
|
||||
};
|
||||
|
||||
let file_name = path
|
||||
.file_name()
|
||||
.map(|s| s.to_string_lossy().to_string())
|
||||
.unwrap_or_else(|| String::from("file"));
|
||||
|
||||
let bin = match fs::read(&backup_path).await {
|
||||
Ok(bin) => bin,
|
||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
||||
// TODO: This doesn't need to be awaited here, yet.
|
||||
// I only need to make sure it has finished before writing the changed bundle.
|
||||
tracing::debug!(
|
||||
"Backup does not exist. Backing up original {} to '{}'",
|
||||
file_name,
|
||||
backup_path.display()
|
||||
);
|
||||
fs::copy(path, &backup_path).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to back up {} '{}' to '{}'",
|
||||
file_name,
|
||||
path.display(),
|
||||
backup_path.display()
|
||||
)
|
||||
})?;
|
||||
|
||||
tracing::debug!("Reading {} from original '{}'", file_name, path.display());
|
||||
fs::read(path).await.wrap_err_with(|| {
|
||||
format!("Failed to read {} file: {}", file_name, path.display())
|
||||
})?
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err).wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to read {} from backup '{}'",
|
||||
file_name,
|
||||
backup_path.display()
|
||||
)
|
||||
});
|
||||
}
|
||||
};
|
||||
Ok(bin)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn patch_game_settings(state: Arc<ActionState>) -> Result<()> {
|
||||
let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH);
|
||||
|
||||
let settings = read_file_with_backup(&settings_path)
|
||||
.await
|
||||
.wrap_err("Failed to read settings.ini")?;
|
||||
let settings = String::from_utf8(settings).wrap_err("Settings.ini is not valid UTF-8")?;
|
||||
|
||||
let mut f = fs::File::create(&settings_path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to open {}", settings_path.display()))?;
|
||||
|
||||
let Some(i) = settings.find("boot_script =") else {
|
||||
eyre::bail!("couldn't find 'boot_script' field");
|
||||
};
|
||||
|
||||
f.write_all(settings[0..i].as_bytes()).await?;
|
||||
f.write_all(b"boot_script = \"scripts/mod_main\"").await?;
|
||||
|
||||
let Some(j) = settings[i..].find('\n') else {
|
||||
eyre::bail!("couldn't find end of 'boot_script' field");
|
||||
};
|
||||
|
||||
f.write_all(settings[(i + j)..].as_bytes()).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(package = info.name))]
|
||||
fn make_package(info: &PackageInfo) -> Result<Package> {
|
||||
let mut pkg = Package::new(info.name.clone(), PathBuf::new());
|
||||
|
||||
for f in &info.files {
|
||||
let mut it = f.rsplit('.');
|
||||
let file_type = it
|
||||
.next()
|
||||
.ok_or_else(|| eyre::eyre!("missing file extension"))
|
||||
.and_then(BundleFileType::from_str)
|
||||
.wrap_err("Invalid file name in package info")?;
|
||||
let name: String = it.collect();
|
||||
pkg.add_file(file_type, name);
|
||||
}
|
||||
|
||||
Ok(pkg)
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
async fn copy_recursive(
|
||||
from: impl Into<PathBuf> + std::fmt::Debug,
|
||||
to: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<()> {
|
||||
let to = to.as_ref();
|
||||
|
||||
#[tracing::instrument]
|
||||
async fn handle_dir(from: PathBuf) -> Result<Vec<(bool, DirEntry)>> {
|
||||
let mut dir = fs::read_dir(&from)
|
||||
.await
|
||||
.wrap_err("Failed to read directory")?;
|
||||
let mut entries = Vec::new();
|
||||
|
||||
while let Some(entry) = dir.next_entry().await? {
|
||||
let meta = entry.metadata().await.wrap_err_with(|| {
|
||||
format!("Failed to get metadata for '{}'", entry.path().display())
|
||||
})?;
|
||||
entries.push((meta.is_dir(), entry));
|
||||
}
|
||||
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
let base = from.into();
|
||||
stream::unfold(vec![base.clone()], |mut state| async {
|
||||
let from = state.pop()?;
|
||||
let inner = match handle_dir(from).await {
|
||||
Ok(entries) => {
|
||||
for (is_dir, entry) in &entries {
|
||||
if *is_dir {
|
||||
state.push(entry.path());
|
||||
}
|
||||
}
|
||||
stream::iter(entries).map(Ok).left_stream()
|
||||
}
|
||||
Err(e) => stream::once(async { Err(e) }).right_stream(),
|
||||
};
|
||||
|
||||
Some((inner, state))
|
||||
})
|
||||
.flatten()
|
||||
.try_for_each(|(is_dir, entry)| {
|
||||
let path = entry.path();
|
||||
let dest = path
|
||||
.strip_prefix(&base)
|
||||
.map(|suffix| to.join(suffix))
|
||||
.expect("all entries are relative to the directory we are walking");
|
||||
|
||||
async move {
|
||||
if is_dir {
|
||||
tracing::trace!("Creating directory '{}'", dest.display());
|
||||
// Instead of trying to filter "already exists" errors out explicitly,
|
||||
// we just ignore all. It'll fail eventually with the next copy operation.
|
||||
let _ = fs::create_dir(&dest).await;
|
||||
Ok(())
|
||||
} else {
|
||||
tracing::trace!("Copying file '{}' -> '{}'", path.display(), dest.display());
|
||||
fs::copy(&path, &dest).await.map(|_| ()).wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to copy file '{}' -> '{}'",
|
||||
path.display(),
|
||||
dest.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map(|_| ())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(state))]
|
||||
async fn copy_mod_folders(state: Arc<ActionState>) -> Result<Vec<String>> {
|
||||
let game_dir = Arc::clone(&state.game_dir);
|
||||
|
||||
let mut tasks = Vec::new();
|
||||
|
||||
for mod_info in state.mods.iter().filter(|m| m.enabled && !m.bundled) {
|
||||
let span = tracing::trace_span!("copying legacy mod", name = mod_info.name);
|
||||
let _enter = span.enter();
|
||||
|
||||
let mod_id = mod_info.id.clone();
|
||||
let mod_dir = Arc::clone(&state.mod_dir);
|
||||
let game_dir = Arc::clone(&game_dir);
|
||||
|
||||
let task = async move {
|
||||
let from = mod_dir.join(&mod_id);
|
||||
let to = game_dir.join("mods").join(&mod_id);
|
||||
|
||||
tracing::debug!(from = %from.display(), to = %to.display(), "Copying legacy mod '{}'", mod_id);
|
||||
let _ = fs::create_dir_all(&to).await;
|
||||
copy_recursive(&from, &to).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to copy legacy mod from '{}' to '{}'",
|
||||
from.display(),
|
||||
to.display()
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok::<_, Report>(mod_id)
|
||||
};
|
||||
tasks.push(task);
|
||||
}
|
||||
|
||||
let ids = futures::future::try_join_all(tasks).await?;
|
||||
Ok(ids)
|
||||
}
|
||||
|
||||
fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
|
||||
#[derive(Serialize)]
|
||||
struct TemplateDataMod {
|
||||
id: String,
|
||||
name: String,
|
||||
bundled: bool,
|
||||
version: String,
|
||||
init: String,
|
||||
data: Option<String>,
|
||||
localization: Option<String>,
|
||||
packages: Vec<String>,
|
||||
}
|
||||
|
||||
let mut env = Environment::new();
|
||||
env.set_trim_blocks(true);
|
||||
env.set_lstrip_blocks(true);
|
||||
env.add_template("mod_data.lua", include_str!("../../assets/mod_data.lua.j2"))
|
||||
.wrap_err("Failed to compile template for `mod_data.lua`")?;
|
||||
let tmpl = env
|
||||
.get_template("mod_data.lua")
|
||||
.wrap_err("Failed to get template `mod_data.lua`")?;
|
||||
|
||||
let data: Vec<TemplateDataMod> = state
|
||||
.mods
|
||||
.iter()
|
||||
.filter_map(|m| {
|
||||
if !m.enabled {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(TemplateDataMod {
|
||||
id: m.id.clone(),
|
||||
name: m.name.clone(),
|
||||
bundled: m.bundled,
|
||||
version: m.version.clone(),
|
||||
init: m.resources.init.to_string_lossy().to_string(),
|
||||
data: m
|
||||
.resources
|
||||
.data
|
||||
.as_ref()
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
localization: m
|
||||
.resources
|
||||
.localization
|
||||
.as_ref()
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
packages: m.packages.iter().map(|p| p.name.clone()).collect(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let lua = tmpl
|
||||
.render(minijinja::context!(mods => data))
|
||||
.wrap_err("Failed to render template `mod_data.lua`")?;
|
||||
|
||||
tracing::debug!("mod_data.lua:\n{}", lua);
|
||||
|
||||
Ok(lua)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
|
||||
let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string());
|
||||
let mut tasks = Vec::new();
|
||||
|
||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
||||
|
||||
let mut bundles = Vec::new();
|
||||
|
||||
let mut add_lua_asset = |name: &str, data: &str| {
|
||||
let span = tracing::info_span!("Compiling Lua", name, data_len = data.len());
|
||||
let _enter = span.enter();
|
||||
|
||||
let file = lua::compile(name.to_string(), data).wrap_err("Failed to compile Lua")?;
|
||||
|
||||
mod_bundle.add_file(file);
|
||||
|
||||
Ok::<_, Report>(())
|
||||
};
|
||||
|
||||
build_mod_data_lua(state.clone())
|
||||
.wrap_err("Failed to build 'mod_data.lua'")
|
||||
.and_then(|data| add_lua_asset(MOD_DATA_SCRIPT, &data))?;
|
||||
add_lua_asset("scripts/mods/init", include_str!("../../assets/init.lua"))?;
|
||||
add_lua_asset(
|
||||
"scripts/mods/mod_loader",
|
||||
include_str!("../../assets/mod_loader.lua"),
|
||||
)?;
|
||||
|
||||
tracing::trace!("Preparing tasks to deploy bundle files");
|
||||
|
||||
for mod_info in state.mods.iter().filter(|m| m.enabled && m.bundled) {
|
||||
let span = tracing::trace_span!("building mod packages", name = mod_info.name);
|
||||
let _enter = span.enter();
|
||||
|
||||
let mod_dir = state.mod_dir.join(&mod_info.id);
|
||||
for pkg_info in &mod_info.packages {
|
||||
let span = tracing::trace_span!("building package", name = pkg_info.name);
|
||||
let _enter = span.enter();
|
||||
|
||||
tracing::trace!(
|
||||
"Building package {} for mod {}",
|
||||
pkg_info.name,
|
||||
mod_info.name
|
||||
);
|
||||
|
||||
let pkg = make_package(pkg_info).wrap_err("Failed to make package")?;
|
||||
let mut variant = BundleFileVariant::new();
|
||||
let bin = pkg
|
||||
.to_binary()
|
||||
.wrap_err("Failed to serialize package to binary")?;
|
||||
variant.set_data(bin);
|
||||
let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package);
|
||||
file.add_variant(variant);
|
||||
|
||||
tracing::trace!(
|
||||
"Compiled package {} for mod {}",
|
||||
pkg_info.name,
|
||||
mod_info.name
|
||||
);
|
||||
|
||||
mod_bundle.add_file(file);
|
||||
|
||||
let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name));
|
||||
let src = mod_dir.join(&bundle_name);
|
||||
let dest = bundle_dir.join(&bundle_name);
|
||||
let pkg_name = pkg_info.name.clone();
|
||||
let mod_name = mod_info.name.clone();
|
||||
|
||||
// Explicitely drop the guard, so that we can move the span
|
||||
// into the async operation
|
||||
drop(_enter);
|
||||
|
||||
let ctx = state.ctx.clone();
|
||||
|
||||
let task = async move {
|
||||
let bundle = {
|
||||
let bin = fs::read(&src).await.wrap_err_with(|| {
|
||||
format!("Failed to read bundle file '{}'", src.display())
|
||||
})?;
|
||||
let name = Bundle::get_name_from_path(&ctx, &src);
|
||||
Bundle::from_binary(&ctx, name, bin)
|
||||
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?
|
||||
};
|
||||
|
||||
tracing::debug!(
|
||||
src = %src.display(),
|
||||
dest = %dest.display(),
|
||||
"Copying bundle '{}' for mod '{}'",
|
||||
pkg_name,
|
||||
mod_name,
|
||||
);
|
||||
// We attempt to remove any previous file, so that the hard link can be created.
|
||||
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
|
||||
// may be possible despite an error here, or the error will be reported by it anyways.
|
||||
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
|
||||
// hashes, it's low enough for now, and the setup required to detect
|
||||
// "game bundle vs mod bundle" is non-trivial.
|
||||
let _ = fs::remove_file(&dest).await;
|
||||
fs::copy(&src, &dest).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
|
||||
src.display(),
|
||||
dest.display()
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok::<Bundle, color_eyre::Report>(bundle)
|
||||
}
|
||||
.instrument(span);
|
||||
|
||||
tasks.push(task);
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Copying {} mod bundles", tasks.len());
|
||||
|
||||
let mut tasks = stream::iter(tasks).buffer_unordered(10);
|
||||
|
||||
while let Some(res) = tasks.next().await {
|
||||
let bundle = res?;
|
||||
bundles.push(bundle);
|
||||
}
|
||||
|
||||
{
|
||||
let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64()));
|
||||
tracing::trace!("Writing mod bundle to '{}'", path.display());
|
||||
fs::write(&path, mod_bundle.to_binary()?)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
|
||||
}
|
||||
|
||||
bundles.push(mod_bundle);
|
||||
|
||||
Ok(bundles)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn patch_boot_bundle(
|
||||
state: Arc<ActionState>,
|
||||
deployment_info: &String,
|
||||
) -> Result<Vec<Bundle>> {
|
||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
||||
let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));
|
||||
|
||||
let mut bundles = Vec::with_capacity(2);
|
||||
|
||||
let mut boot_bundle = async {
|
||||
let bin = read_file_with_backup(&bundle_path)
|
||||
.await
|
||||
.wrap_err("Failed to read boot bundle")?;
|
||||
|
||||
Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin)
|
||||
.wrap_err("Failed to parse boot bundle")
|
||||
}
|
||||
.instrument(tracing::trace_span!("read boot bundle"))
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;
|
||||
|
||||
{
|
||||
tracing::trace!("Adding mod package file to boot bundle");
|
||||
let span = tracing::trace_span!("create mod package file");
|
||||
let _enter = span.enter();
|
||||
|
||||
let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new());
|
||||
|
||||
for mod_info in &state.mods {
|
||||
for pkg_info in &mod_info.packages {
|
||||
pkg.add_file(BundleFileType::Package, &pkg_info.name);
|
||||
}
|
||||
}
|
||||
|
||||
pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT);
|
||||
|
||||
let mut variant = BundleFileVariant::new();
|
||||
variant.set_data(pkg.to_binary()?);
|
||||
let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package);
|
||||
f.add_variant(variant);
|
||||
|
||||
boot_bundle.add_file(f);
|
||||
}
|
||||
|
||||
{
|
||||
let span = tracing::debug_span!("Importing mod main script");
|
||||
let _enter = span.enter();
|
||||
|
||||
let mut env = Environment::new();
|
||||
env.set_trim_blocks(true);
|
||||
env.set_lstrip_blocks(true);
|
||||
env.add_template("mod_main.lua", include_str!("../../assets/mod_main.lua.j2"))
|
||||
.wrap_err("Failed to compile template for `mod_main.lua`")?;
|
||||
let tmpl = env
|
||||
.get_template("mod_main.lua")
|
||||
.wrap_err("Failed to get template `mod_main.lua`")?;
|
||||
|
||||
let is_io_enabled = if state.is_io_enabled { "true" } else { "false" };
|
||||
let deployment_info = deployment_info.replace("\"", "\\\"").replace("\n", "\\n");
|
||||
let lua = tmpl
|
||||
.render(minijinja::context!(is_io_enabled => is_io_enabled, deployment_info => deployment_info))
|
||||
.wrap_err("Failed to render template `mod_main.lua`")?;
|
||||
|
||||
tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua);
|
||||
let file = lua::compile(MOD_BOOT_SCRIPT.to_string(), lua)
|
||||
.wrap_err("Failed to compile mod main Lua file")?;
|
||||
|
||||
boot_bundle.add_file(file);
|
||||
}
|
||||
|
||||
async {
|
||||
let bin = boot_bundle
|
||||
.to_binary()
|
||||
.wrap_err("Failed to serialize boot bundle")?;
|
||||
fs::write(&bundle_path, bin)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write main bundle: {}", bundle_path.display()))
|
||||
}
|
||||
.instrument(tracing::trace_span!("write boot bundle"))
|
||||
.await?;
|
||||
|
||||
bundles.push(boot_bundle);
|
||||
|
||||
Ok(bundles)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
|
||||
async fn patch_bundle_database<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
|
||||
where
|
||||
B: AsRef<[Bundle]>,
|
||||
{
|
||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
||||
let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME);
|
||||
|
||||
let mut db = {
|
||||
let bin = read_file_with_backup(&database_path)
|
||||
.await
|
||||
.wrap_err("Failed to read bundle database")?;
|
||||
let mut r = Cursor::new(bin);
|
||||
let db = BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?;
|
||||
tracing::trace!("Finished parsing bundle database");
|
||||
db
|
||||
};
|
||||
|
||||
for bundle in bundles.as_ref() {
|
||||
tracing::trace!("Adding '{}' to bundle database", bundle.name().display());
|
||||
db.add_bundle(bundle);
|
||||
}
|
||||
|
||||
{
|
||||
let bin = db
|
||||
.to_binary()
|
||||
.wrap_err("Failed to serialize bundle database")?;
|
||||
fs::write(&database_path, bin).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"failed to write bundle database to '{}'",
|
||||
database_path.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
|
||||
fn build_deployment_data(
|
||||
bundles: impl AsRef<[Bundle]>,
|
||||
mod_folders: impl AsRef<[String]>,
|
||||
) -> Result<String> {
|
||||
let info = DeploymentData {
|
||||
timestamp: OffsetDateTime::now_utc(),
|
||||
bundles: bundles
|
||||
.as_ref()
|
||||
.iter()
|
||||
.map(|bundle| format!("{:x}", bundle.name().to_murmur64()))
|
||||
.collect(),
|
||||
// TODO:
|
||||
mod_folders: mod_folders
|
||||
.as_ref()
|
||||
.iter()
|
||||
.map(|folder| folder.clone())
|
||||
.collect(),
|
||||
};
|
||||
serde_sjson::to_string(&info).wrap_err("Failed to serizalize deployment data")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(
|
||||
game_dir = %state.game_dir.display(),
|
||||
mods = state.mods.len()
|
||||
))]
|
||||
pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
|
||||
let state = Arc::new(state);
|
||||
let bundle_dir = state.game_dir.join("bundle");
|
||||
let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()));
|
||||
|
||||
if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999")))
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
let err = eyre::eyre!("Found dtkit-patch-based mod installation.");
|
||||
return Err(err)
|
||||
.with_suggestion(|| {
|
||||
"If you're a mod author and saved projects directly in 'mods/', \
|
||||
use DTMT to migrate them to the new project structure."
|
||||
.to_string()
|
||||
})
|
||||
.with_suggestion(|| {
|
||||
"Click 'Reset Game' to remove the previous mod installation.".to_string()
|
||||
});
|
||||
}
|
||||
|
||||
let (_, game_info, deployment_info) = tokio::try_join!(
|
||||
async {
|
||||
fs::metadata(&bundle_dir)
|
||||
.await
|
||||
.wrap_err("Failed to open game bundle directory")
|
||||
.with_suggestion(|| "Double-check 'Game Directory' in the Settings tab.")
|
||||
},
|
||||
async {
|
||||
tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
|
||||
.await
|
||||
.map_err(Report::new)
|
||||
},
|
||||
async {
|
||||
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
|
||||
match read_sjson_file::<_, DeploymentData>(&path).await {
|
||||
Ok(data) => Ok(Some(data)),
|
||||
Err(err) => {
|
||||
if let Some(err) = err.downcast_ref::<std::io::Error>()
|
||||
&& err.kind() == ErrorKind::NotFound
|
||||
{
|
||||
Ok(None)
|
||||
} else {
|
||||
Err(err).wrap_err(format!(
|
||||
"Failed to read deployment data from: {}",
|
||||
path.display()
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
.wrap_err("Failed to gather deployment information")?;
|
||||
|
||||
let game_info = match game_info {
|
||||
Ok(game_info) => game_info,
|
||||
Err(err) => {
|
||||
tracing::error!("Failed to collect game info: {:#?}", err);
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
tracing::debug!(?game_info, ?deployment_info);
|
||||
|
||||
if let Some(game_info) = game_info {
|
||||
if deployment_info
|
||||
.as_ref()
|
||||
.map(|i| game_info.last_updated > i.timestamp)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
tracing::warn!(
|
||||
"Game was updated since last mod deployment. \
|
||||
Attempting to reconcile game files."
|
||||
);
|
||||
|
||||
tokio::try_join!(
|
||||
async {
|
||||
let path = bundle_dir.join(BUNDLE_DATABASE_NAME);
|
||||
let backup_path = path.with_extension("data.bak");
|
||||
|
||||
fs::copy(&path, &backup_path)
|
||||
.await
|
||||
.wrap_err("Failed to re-create backup for bundle database.")
|
||||
},
|
||||
async {
|
||||
let path = bundle_dir.join(boot_bundle_path);
|
||||
let backup_path = path.with_extension("bak");
|
||||
|
||||
fs::copy(&path, &backup_path)
|
||||
.await
|
||||
.wrap_err("Failed to re-create backup for boot bundle")
|
||||
}
|
||||
)
|
||||
.with_suggestion(|| {
|
||||
"Reset the game using 'Reset Game', then verify game files.".to_string()
|
||||
})?;
|
||||
|
||||
tracing::info!(
|
||||
"Successfully re-created game file backups. \
|
||||
Continuing mod deployment."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
check_mod_order(&state)?;
|
||||
|
||||
tracing::info!(
|
||||
"Deploying {} mods to '{}'.",
|
||||
state.mods.iter().filter(|i| i.enabled).count(),
|
||||
bundle_dir.display()
|
||||
);
|
||||
|
||||
tracing::info!("Copy legacy mod folders");
|
||||
let mod_folders = copy_mod_folders(state.clone())
|
||||
.await
|
||||
.wrap_err("Failed to copy mod folders")?;
|
||||
|
||||
tracing::info!("Build mod bundles");
|
||||
let mut bundles = build_bundles(state.clone())
|
||||
.await
|
||||
.wrap_err("Failed to build mod bundles")?;
|
||||
|
||||
let new_deployment_info = build_deployment_data(&bundles, &mod_folders)
|
||||
.wrap_err("Failed to build new deployment data")?;
|
||||
|
||||
tracing::info!("Patch boot bundle");
|
||||
let mut boot_bundles = patch_boot_bundle(state.clone(), &new_deployment_info)
|
||||
.await
|
||||
.wrap_err("Failed to patch boot bundle")?;
|
||||
bundles.append(&mut boot_bundles);
|
||||
|
||||
if let Some(info) = &deployment_info {
|
||||
let bundle_dir = Arc::new(bundle_dir);
|
||||
// Remove bundles from the previous deployment that don't match the current one.
|
||||
// I.e. mods that used to be installed/enabled but aren't anymore.
|
||||
{
|
||||
let tasks = info.bundles.iter().cloned().filter_map(|file_name| {
|
||||
let is_being_deployed = bundles.iter().any(|b2| {
|
||||
let name = format!("{:016x}", b2.name());
|
||||
file_name == name
|
||||
});
|
||||
|
||||
if !is_being_deployed {
|
||||
let bundle_dir = bundle_dir.clone();
|
||||
let task = async move {
|
||||
let path = bundle_dir.join(&file_name);
|
||||
|
||||
tracing::debug!("Removing unused bundle '{}'", file_name);
|
||||
|
||||
if let Err(err) = fs::remove_file(&path).await.wrap_err_with(|| {
|
||||
format!("Failed to remove unused bundle '{}'", path.display())
|
||||
}) {
|
||||
tracing::error!("{:?}", err);
|
||||
}
|
||||
};
|
||||
Some(task)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
|
||||
// Do the same thing for mod folders
|
||||
{
|
||||
let tasks = info.mod_folders.iter().filter_map(|mod_id| {
|
||||
let is_being_deployed = mod_folders.iter().any(|id| id == mod_id);
|
||||
|
||||
if !is_being_deployed {
|
||||
let path = bundle_dir.join("mods").join(mod_id);
|
||||
tracing::debug!("Removing unused mod folder '{}'", path.display());
|
||||
|
||||
let task = async move {
|
||||
if let Err(err) = fs::remove_dir_all(&path).await.wrap_err_with(|| {
|
||||
format!("Failed to remove unused legacy mod '{}'", path.display())
|
||||
}) {
|
||||
tracing::error!("{:?}", err);
|
||||
}
|
||||
};
|
||||
|
||||
Some(task)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::info!("Patch game settings");
|
||||
patch_game_settings(state.clone())
|
||||
.await
|
||||
.wrap_err("Failed to patch game settings")?;
|
||||
|
||||
tracing::info!("Patching bundle database");
|
||||
patch_bundle_database(state.clone(), &bundles)
|
||||
.await
|
||||
.wrap_err("Failed to patch bundle database")?;
|
||||
|
||||
tracing::info!("Writing deployment data");
|
||||
{
|
||||
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
|
||||
fs::write(&path, &new_deployment_info)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
|
||||
}
|
||||
|
||||
tracing::info!("Finished deploying mods");
|
||||
Ok(())
|
||||
}
|
|
@ -1,19 +1,46 @@
|
|||
use std::io::{self, ErrorKind};
|
||||
use std::collections::HashMap;
|
||||
use std::io::{self, Cursor, ErrorKind};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{eyre, Result};
|
||||
use color_eyre::{eyre, Help, Report, Result};
|
||||
use futures::stream;
|
||||
use futures::StreamExt;
|
||||
use path_slash::PathBufExt;
|
||||
use sdk::filetype::lua;
|
||||
use sdk::filetype::package::Package;
|
||||
use sdk::murmur::Murmur64;
|
||||
use tokio::fs::{self};
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
use crate::controller::deploy::{
|
||||
DeploymentData, BOOT_BUNDLE_NAME, BUNDLE_DATABASE_NAME, DEPLOYMENT_DATA_PATH,
|
||||
use sdk::{
|
||||
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
|
||||
};
|
||||
use crate::state::ActionState;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use string_template::Template;
|
||||
use time::OffsetDateTime;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tracing::Instrument;
|
||||
|
||||
use super::deploy::SETTINGS_FILE_PATH;
|
||||
use super::read_sjson_file;
|
||||
use crate::controller::app::check_mod_order;
|
||||
use crate::state::{ActionState, PackageInfo};
|
||||
|
||||
// Engine-facing names of the bundles/scripts this tool writes into the game.
const MOD_BUNDLE_NAME: &str = "packages/mods";
|
||||
const BOOT_BUNDLE_NAME: &str = "packages/boot";
|
||||
const DML_BUNDLE_NAME: &str = "packages/dml";
|
||||
// File name of the game's bundle index inside the `bundle` directory.
const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
|
||||
// Lua entry points compiled into the patched boot bundle / mod bundle.
const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
|
||||
const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
|
||||
// Engine settings file that gets patched during deployment.
const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini";
|
||||
// SJSON file (relative to the game directory) recording the last deployment.
const DEPLOYMENT_DATA_PATH: &str = "dtmm-deployment.sjson";
|
||||
|
||||
// Snapshot of a finished deployment. `deploy_mods` reads it back on the next
// run to detect game updates (via `timestamp`) and to remove bundles that are
// no longer part of the mod list (via `bundles`).
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct DeploymentData {
|
||||
    // Hashed file names of the bundles written by the deployment.
    bundles: Vec<String>,
|
||||
    #[serde(with = "time::serde::iso8601")]
|
||||
    // When the deployment happened; compared against the game's last update.
    timestamp: OffsetDateTime,
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
|
||||
|
@ -103,6 +130,585 @@ async fn patch_game_settings(state: Arc<ActionState>) -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(package = info.name))]
|
||||
fn make_package(info: &PackageInfo) -> Result<Package> {
|
||||
let mut pkg = Package::new(info.name.clone(), PathBuf::new());
|
||||
|
||||
for f in &info.files {
|
||||
let mut it = f.rsplit('.');
|
||||
let file_type = it
|
||||
.next()
|
||||
.ok_or_else(|| eyre::eyre!("missing file extension"))
|
||||
.and_then(BundleFileType::from_str)
|
||||
.wrap_err("Invalid file name in package info")?;
|
||||
let name: String = it.collect();
|
||||
pkg.add_file(file_type, name);
|
||||
}
|
||||
|
||||
Ok(pkg)
|
||||
}
|
||||
|
||||
fn build_mod_data_lua(state: Arc<ActionState>) -> String {
|
||||
let mut lua = String::from("return {\n");
|
||||
|
||||
// DMF is handled explicitely by the loading procedures, as it actually drives most of that
|
||||
// and should therefore not show up in the load order.
|
||||
for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) {
|
||||
lua.push_str(" {\n name = \"");
|
||||
lua.push_str(&mod_info.name);
|
||||
|
||||
lua.push_str("\",\n id = \"");
|
||||
lua.push_str(&mod_info.id);
|
||||
|
||||
lua.push_str("\",\n run = function()\n");
|
||||
|
||||
let resources = &mod_info.resources;
|
||||
if resources.data.is_some() || resources.localization.is_some() {
|
||||
lua.push_str(" new_mod(\"");
|
||||
lua.push_str(&mod_info.id);
|
||||
lua.push_str("\", {\n mod_script = \"");
|
||||
lua.push_str(&resources.init.to_slash_lossy());
|
||||
|
||||
if let Some(data) = resources.data.as_ref() {
|
||||
lua.push_str("\",\n mod_data = \"");
|
||||
lua.push_str(&data.to_slash_lossy());
|
||||
}
|
||||
|
||||
if let Some(localization) = &resources.localization {
|
||||
lua.push_str("\",\n mod_localization = \"");
|
||||
lua.push_str(&localization.to_slash_lossy());
|
||||
}
|
||||
|
||||
lua.push_str("\",\n })\n");
|
||||
} else {
|
||||
lua.push_str(" return dofile(\"");
|
||||
lua.push_str(&resources.init.to_slash_lossy());
|
||||
lua.push_str("\")\n");
|
||||
}
|
||||
|
||||
lua.push_str(" end,\n packages = {\n");
|
||||
|
||||
for pkg_info in &mod_info.packages {
|
||||
lua.push_str(" \"");
|
||||
lua.push_str(&pkg_info.name);
|
||||
lua.push_str("\",\n");
|
||||
}
|
||||
|
||||
lua.push_str(" },\n },\n");
|
||||
}
|
||||
|
||||
lua.push('}');
|
||||
|
||||
tracing::debug!("mod_data_lua:\n{}", lua);
|
||||
|
||||
lua
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
|
||||
let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string());
|
||||
let mut tasks = Vec::new();
|
||||
|
||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
||||
|
||||
let mut bundles = Vec::new();
|
||||
|
||||
{
|
||||
tracing::trace!("Building mod data script");
|
||||
|
||||
let span = tracing::debug_span!("Building mod data script");
|
||||
let _enter = span.enter();
|
||||
|
||||
let lua = build_mod_data_lua(state.clone());
|
||||
|
||||
tracing::trace!("Compiling mod data script");
|
||||
|
||||
let file =
|
||||
lua::compile(MOD_DATA_SCRIPT, &lua).wrap_err("Failed to compile mod data Lua file")?;
|
||||
|
||||
tracing::trace!("Compile mod data script");
|
||||
|
||||
mod_bundle.add_file(file);
|
||||
}
|
||||
|
||||
tracing::trace!("Preparing tasks to deploy bundle files");
|
||||
|
||||
for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) {
|
||||
let span = tracing::trace_span!("building mod packages", name = mod_info.name);
|
||||
let _enter = span.enter();
|
||||
|
||||
let mod_dir = state.mod_dir.join(&mod_info.id);
|
||||
for pkg_info in &mod_info.packages {
|
||||
let span = tracing::trace_span!("building package", name = pkg_info.name);
|
||||
let _enter = span.enter();
|
||||
|
||||
tracing::trace!(
|
||||
"Building package {} for mod {}",
|
||||
pkg_info.name,
|
||||
mod_info.name
|
||||
);
|
||||
|
||||
let pkg = make_package(pkg_info).wrap_err("Failed to make package")?;
|
||||
let mut variant = BundleFileVariant::new();
|
||||
let bin = pkg
|
||||
.to_binary()
|
||||
.wrap_err("Failed to serialize package to binary")?;
|
||||
variant.set_data(bin);
|
||||
let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package);
|
||||
file.add_variant(variant);
|
||||
|
||||
tracing::trace!(
|
||||
"Compiled package {} for mod {}",
|
||||
pkg_info.name,
|
||||
mod_info.name
|
||||
);
|
||||
|
||||
mod_bundle.add_file(file);
|
||||
|
||||
let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name));
|
||||
let src = mod_dir.join(&bundle_name);
|
||||
let dest = bundle_dir.join(&bundle_name);
|
||||
let pkg_name = pkg_info.name.clone();
|
||||
let mod_name = mod_info.name.clone();
|
||||
|
||||
// Explicitely drop the guard, so that we can move the span
|
||||
// into the async operation
|
||||
drop(_enter);
|
||||
|
||||
let ctx = state.ctx.clone();
|
||||
|
||||
let task = async move {
|
||||
let bundle = {
|
||||
let bin = fs::read(&src).await.wrap_err_with(|| {
|
||||
format!("Failed to read bundle file '{}'", src.display())
|
||||
})?;
|
||||
let name = Bundle::get_name_from_path(&ctx, &src);
|
||||
Bundle::from_binary(&ctx, name, bin)
|
||||
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?
|
||||
};
|
||||
|
||||
tracing::debug!(
|
||||
src = %src.display(),
|
||||
dest = %dest.display(),
|
||||
"Copying bundle '{}' for mod '{}'",
|
||||
pkg_name,
|
||||
mod_name,
|
||||
);
|
||||
// We attempt to remove any previous file, so that the hard link can be created.
|
||||
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
|
||||
// may be possible despite an error here, or the error will be reported by it anyways.
|
||||
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
|
||||
// hashes, it's low enough for now, and the setup required to detect
|
||||
// "game bundle vs mod bundle" is non-trivial.
|
||||
let _ = fs::remove_file(&dest).await;
|
||||
fs::copy(&src, &dest).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
|
||||
src.display(),
|
||||
dest.display()
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok::<Bundle, color_eyre::Report>(bundle)
|
||||
}
|
||||
.instrument(span);
|
||||
|
||||
tasks.push(task);
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Copying {} mod bundles", tasks.len());
|
||||
|
||||
let mut tasks = stream::iter(tasks).buffer_unordered(10);
|
||||
|
||||
while let Some(res) = tasks.next().await {
|
||||
let bundle = res?;
|
||||
bundles.push(bundle);
|
||||
}
|
||||
|
||||
{
|
||||
let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64()));
|
||||
tracing::trace!("Writing mod bundle to '{}'", path.display());
|
||||
fs::write(&path, mod_bundle.to_binary()?)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
|
||||
}
|
||||
|
||||
bundles.push(mod_bundle);
|
||||
|
||||
Ok(bundles)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn patch_boot_bundle(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
|
||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
||||
let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));
|
||||
|
||||
let mut bundles = Vec::with_capacity(2);
|
||||
|
||||
let mut boot_bundle = async {
|
||||
let bin = read_file_with_backup(&bundle_path)
|
||||
.await
|
||||
.wrap_err("Failed to read boot bundle")?;
|
||||
|
||||
Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin)
|
||||
.wrap_err("Failed to parse boot bundle")
|
||||
}
|
||||
.instrument(tracing::trace_span!("read boot bundle"))
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;
|
||||
|
||||
{
|
||||
tracing::trace!("Adding mod package file to boot bundle");
|
||||
let span = tracing::trace_span!("create mod package file");
|
||||
let _enter = span.enter();
|
||||
|
||||
let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new());
|
||||
|
||||
for mod_info in &state.mods {
|
||||
for pkg_info in &mod_info.packages {
|
||||
pkg.add_file(BundleFileType::Package, &pkg_info.name);
|
||||
}
|
||||
}
|
||||
|
||||
pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT);
|
||||
|
||||
let mut variant = BundleFileVariant::new();
|
||||
variant.set_data(pkg.to_binary()?);
|
||||
let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package);
|
||||
f.add_variant(variant);
|
||||
|
||||
boot_bundle.add_file(f);
|
||||
}
|
||||
|
||||
{
|
||||
tracing::trace!("Handling DML packages and bundle");
|
||||
let span = tracing::trace_span!("handle DML");
|
||||
let _enter = span.enter();
|
||||
|
||||
let mut variant = BundleFileVariant::new();
|
||||
|
||||
let mod_info = state
|
||||
.mods
|
||||
.iter()
|
||||
.find(|m| m.id == "dml")
|
||||
.ok_or_else(|| eyre::eyre!("DML not found in mod list"))?;
|
||||
let pkg_info = mod_info
|
||||
.packages
|
||||
.get(0)
|
||||
.ok_or_else(|| eyre::eyre!("invalid mod package for DML"))
|
||||
.with_suggestion(|| "Re-download and import the newest version.".to_string())?;
|
||||
let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name));
|
||||
let src = state.mod_dir.join(&mod_info.id).join(&bundle_name);
|
||||
|
||||
{
|
||||
let bin = fs::read(&src)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read bundle file '{}'", src.display()))?;
|
||||
let name = Bundle::get_name_from_path(&state.ctx, &src);
|
||||
|
||||
let dml_bundle = Bundle::from_binary(&state.ctx, name, bin)
|
||||
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?;
|
||||
|
||||
bundles.push(dml_bundle);
|
||||
};
|
||||
|
||||
{
|
||||
let dest = bundle_dir.join(&bundle_name);
|
||||
let pkg_name = pkg_info.name.clone();
|
||||
let mod_name = mod_info.name.clone();
|
||||
|
||||
tracing::debug!(
|
||||
"Copying bundle {} for mod {}: {} -> {}",
|
||||
pkg_name,
|
||||
mod_name,
|
||||
src.display(),
|
||||
dest.display()
|
||||
);
|
||||
// We attempt to remove any previous file, so that the hard link can be created.
|
||||
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
|
||||
// may be possible despite an error here, or the error will be reported by it anyways.
|
||||
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
|
||||
// hashes, it's low enough for now, and the setup required to detect
|
||||
// "game bundle vs mod bundle" is non-trivial.
|
||||
let _ = fs::remove_file(&dest).await;
|
||||
fs::copy(&src, &dest).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
|
||||
src.display(),
|
||||
dest.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
let pkg = make_package(pkg_info).wrap_err("Failed to create package file for dml")?;
|
||||
variant.set_data(pkg.to_binary()?);
|
||||
|
||||
let mut f = BundleFile::new(DML_BUNDLE_NAME.to_string(), BundleFileType::Package);
|
||||
f.add_variant(variant);
|
||||
|
||||
boot_bundle.add_file(f);
|
||||
}
|
||||
|
||||
{
|
||||
let span = tracing::debug_span!("Importing mod main script");
|
||||
let _enter = span.enter();
|
||||
|
||||
let is_io_enabled = format!("{}", state.is_io_enabled);
|
||||
let mut data = HashMap::new();
|
||||
data.insert("is_io_enabled", is_io_enabled.as_str());
|
||||
|
||||
let tmpl = include_str!("../../assets/mod_main.lua");
|
||||
let lua = Template::new(tmpl).render(&data);
|
||||
tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua);
|
||||
let file =
|
||||
lua::compile(MOD_BOOT_SCRIPT, lua).wrap_err("Failed to compile mod main Lua file")?;
|
||||
|
||||
boot_bundle.add_file(file);
|
||||
}
|
||||
|
||||
async {
|
||||
let bin = boot_bundle
|
||||
.to_binary()
|
||||
.wrap_err("Failed to serialize boot bundle")?;
|
||||
fs::write(&bundle_path, bin)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write main bundle: {}", bundle_path.display()))
|
||||
}
|
||||
.instrument(tracing::trace_span!("write boot bundle"))
|
||||
.await?;
|
||||
|
||||
bundles.push(boot_bundle);
|
||||
|
||||
Ok(bundles)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
|
||||
async fn patch_bundle_database<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
|
||||
where
|
||||
B: AsRef<[Bundle]>,
|
||||
{
|
||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
||||
let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME);
|
||||
|
||||
let mut db = {
|
||||
let bin = read_file_with_backup(&database_path)
|
||||
.await
|
||||
.wrap_err("Failed to read bundle database")?;
|
||||
let mut r = Cursor::new(bin);
|
||||
let db = BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?;
|
||||
tracing::trace!("Finished parsing bundle database");
|
||||
db
|
||||
};
|
||||
|
||||
for bundle in bundles.as_ref() {
|
||||
tracing::trace!("Adding '{}' to bundle database", bundle.name().display());
|
||||
db.add_bundle(bundle);
|
||||
}
|
||||
|
||||
{
|
||||
let bin = db
|
||||
.to_binary()
|
||||
.wrap_err("Failed to serialize bundle database")?;
|
||||
fs::write(&database_path, bin).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"failed to write bundle database to '{}'",
|
||||
database_path.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
|
||||
async fn write_deployment_data<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
|
||||
where
|
||||
B: AsRef<[Bundle]>,
|
||||
{
|
||||
let info = DeploymentData {
|
||||
timestamp: OffsetDateTime::now_utc(),
|
||||
bundles: bundles
|
||||
.as_ref()
|
||||
.iter()
|
||||
.map(|bundle| format!("{:x}", bundle.name().to_murmur64()))
|
||||
.collect(),
|
||||
};
|
||||
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
|
||||
let data = serde_sjson::to_string(&info).wrap_err("Failed to serizalie deployment data")?;
|
||||
|
||||
fs::write(&path, &data)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all, fields(
|
||||
game_dir = %state.game_dir.display(),
|
||||
mods = state.mods.len()
|
||||
))]
|
||||
pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
|
||||
let state = Arc::new(state);
|
||||
let bundle_dir = state.game_dir.join("bundle");
|
||||
let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()));
|
||||
|
||||
if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999")))
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
let err = eyre::eyre!("Found dtkit-patch-based mod installation.");
|
||||
return Err(err)
|
||||
.with_suggestion(|| {
|
||||
"If you're a mod author and saved projects directly in 'mods/', \
|
||||
use DTMT to migrate them to the new project structure."
|
||||
.to_string()
|
||||
})
|
||||
.with_suggestion(|| {
|
||||
"Click 'Reset Game' to remove the previous mod installation.".to_string()
|
||||
});
|
||||
}
|
||||
|
||||
let (_, game_info, deployment_info) = tokio::try_join!(
|
||||
async {
|
||||
fs::metadata(&bundle_dir)
|
||||
.await
|
||||
.wrap_err("Failed to open game bundle directory")
|
||||
.with_suggestion(|| "Double-check 'Game Directory' in the Settings tab.")
|
||||
},
|
||||
async {
|
||||
tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
|
||||
.await
|
||||
.map_err(Report::new)
|
||||
},
|
||||
async {
|
||||
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
|
||||
match read_sjson_file::<_, DeploymentData>(path)
|
||||
.await
|
||||
{
|
||||
Ok(data) => Ok(Some(data)),
|
||||
Err(err) => {
|
||||
if let Some(err) = err.downcast_ref::<std::io::Error>() && err.kind() == ErrorKind::NotFound {
|
||||
Ok(None)
|
||||
} else {
|
||||
Err(err).wrap_err("Failed to read deployment data")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
.wrap_err("Failed to gather deployment information")?;
|
||||
|
||||
tracing::debug!(?game_info, ?deployment_info);
|
||||
|
||||
if let Some(game_info) = game_info {
|
||||
if deployment_info
|
||||
.as_ref()
|
||||
.map(|i| game_info.last_updated > i.timestamp)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
tracing::warn!(
|
||||
"Game was updated since last mod deployment. \
|
||||
Attempting to reconcile game files."
|
||||
);
|
||||
|
||||
tokio::try_join!(
|
||||
async {
|
||||
let path = bundle_dir.join(BUNDLE_DATABASE_NAME);
|
||||
let backup_path = path.with_extension("data.bak");
|
||||
|
||||
fs::copy(&path, &backup_path)
|
||||
.await
|
||||
.wrap_err("Failed to re-create backup for bundle database.")
|
||||
},
|
||||
async {
|
||||
let path = bundle_dir.join(boot_bundle_path);
|
||||
let backup_path = path.with_extension("bak");
|
||||
|
||||
fs::copy(&path, &backup_path)
|
||||
.await
|
||||
.wrap_err("Failed to re-create backup for boot bundle")
|
||||
}
|
||||
)
|
||||
.with_suggestion(|| {
|
||||
"Reset the game using 'Reset Game', then verify game files.".to_string()
|
||||
})?;
|
||||
|
||||
tracing::info!(
|
||||
"Successfully re-created game file backups. \
|
||||
Continuing mod deployment."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
check_mod_order(&state)?;
|
||||
|
||||
tracing::info!(
|
||||
"Deploying {} mods to '{}'.",
|
||||
state.mods.iter().filter(|i| i.enabled).count(),
|
||||
bundle_dir.display()
|
||||
);
|
||||
|
||||
tracing::info!("Build mod bundles");
|
||||
let mut bundles = build_bundles(state.clone())
|
||||
.await
|
||||
.wrap_err("Failed to build mod bundles")?;
|
||||
|
||||
tracing::info!("Patch boot bundle");
|
||||
let mut more_bundles = patch_boot_bundle(state.clone())
|
||||
.await
|
||||
.wrap_err("Failed to patch boot bundle")?;
|
||||
bundles.append(&mut more_bundles);
|
||||
|
||||
if let Some(info) = &deployment_info {
|
||||
let bundle_dir = Arc::new(bundle_dir);
|
||||
let tasks = info.bundles.iter().cloned().filter_map(|file_name| {
|
||||
let contains = bundles.iter().any(|b2| {
|
||||
let name = format!("{:016x}", b2.name());
|
||||
file_name == name
|
||||
});
|
||||
|
||||
if !contains {
|
||||
let bundle_dir = bundle_dir.clone();
|
||||
let task = async move {
|
||||
let path = bundle_dir.join(&file_name);
|
||||
|
||||
tracing::debug!("Removing unused bundle '{}'", file_name);
|
||||
|
||||
if let Err(err) = fs::remove_file(&path).await.wrap_err_with(|| {
|
||||
format!("Failed to remove unused bundle '{}'", path.display())
|
||||
}) {
|
||||
tracing::error!("{:?}", err);
|
||||
}
|
||||
};
|
||||
Some(task)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
|
||||
tracing::info!("Patch game settings");
|
||||
patch_game_settings(state.clone())
|
||||
.await
|
||||
.wrap_err("Failed to patch game settings")?;
|
||||
|
||||
tracing::info!("Patching bundle database");
|
||||
patch_bundle_database(state.clone(), &bundles)
|
||||
.await
|
||||
.wrap_err("Failed to patch bundle database")?;
|
||||
|
||||
tracing::info!("Writing deployment data");
|
||||
write_deployment_data(state.clone(), &bundles)
|
||||
.await
|
||||
.wrap_err("Failed to write deployment data")?;
|
||||
|
||||
tracing::info!("Finished deploying mods");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn reset_dtkit_patch(state: ActionState) -> Result<()> {
|
||||
let bundle_dir = state.game_dir.join("bundle");
|
||||
|
|
|
@ -1,584 +0,0 @@
|
|||
use std::collections::HashMap;
|
||||
use std::ffi::CStr;
|
||||
use std::io::{Cursor, Read, Seek, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use color_eyre::eyre::{self, Context};
|
||||
use color_eyre::{Help, Report, Result};
|
||||
use druid::im::Vector;
|
||||
use druid::{FileInfo, ImageBuf};
|
||||
use dtmt_shared::{ModConfig, ModConfigResources};
|
||||
use luajit2_sys as lua;
|
||||
use nexusmods::Api as NexusApi;
|
||||
use tokio::fs;
|
||||
use zip::ZipArchive;
|
||||
|
||||
use crate::state::{ActionState, ModInfo, NexusInfo, PackageInfo};
|
||||
|
||||
fn find_archive_file<R: Read + Seek>(
|
||||
archive: &ZipArchive<R>,
|
||||
name: impl AsRef<str>,
|
||||
) -> Option<String> {
|
||||
let path = archive
|
||||
.file_names()
|
||||
.find(|path| path.ends_with(name.as_ref()))
|
||||
.map(|s| s.to_string());
|
||||
path
|
||||
}
|
||||
|
||||
fn image_data_to_buffer(data: impl AsRef<[u8]>) -> Result<ImageBuf> {
|
||||
// Druid somehow doesn't return an error compatible with eyre, here.
|
||||
// So we have to wrap through `Display` manually.
|
||||
ImageBuf::from_data(data.as_ref()).map_err(|err| {
|
||||
Report::msg(err.to_string())
|
||||
.wrap_err("Invalid image data")
|
||||
.suggestion("Supported formats are: PNG, JPEG, Bitmap and WebP")
|
||||
})
|
||||
}
|
||||
|
||||
// Runs the content of a `.mod` file to extract what data we can get
|
||||
// from legacy mods.
|
||||
// 1. Create a global function `new_mod` that stores
|
||||
// the relevant bits in global variables.
|
||||
// 2. Run the `.mod` file, which will return a table.
|
||||
// 3. Run the `run` function from that table.
|
||||
// 4. Access the global variables from #1.
|
||||
#[tracing::instrument]
|
||||
fn parse_mod_id_file(data: &str) -> Result<(String, ModConfigResources)> {
|
||||
tracing::debug!("Parsing mod file:\n{}", data);
|
||||
|
||||
let ret = unsafe {
|
||||
let state = lua::luaL_newstate();
|
||||
lua::luaL_openlibs(state);
|
||||
|
||||
let run = b"
|
||||
function fassert() end
|
||||
function new_mod(id, resources)
|
||||
_G.id = id
|
||||
_G.script = resources.mod_script
|
||||
_G.data = resources.mod_data
|
||||
_G.localization = resources.mod_localization
|
||||
end
|
||||
\0";
|
||||
match lua::luaL_loadstring(state, run.as_ptr() as _) as u32 {
|
||||
lua::LUA_OK => {}
|
||||
lua::LUA_ERRSYNTAX => {
|
||||
let err = lua::lua_tostring(state, -1);
|
||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
||||
|
||||
lua::lua_close(state);
|
||||
|
||||
eyre::bail!("Invalid syntax: {}", err);
|
||||
}
|
||||
lua::LUA_ERRMEM => {
|
||||
lua::lua_close(state);
|
||||
eyre::bail!("Failed to allocate sufficient memory to create `new_mod`")
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match lua::lua_pcall(state, 0, 0, 0) as u32 {
|
||||
lua::LUA_OK => {}
|
||||
lua::LUA_ERRRUN => {
|
||||
let err = lua::lua_tostring(state, -1);
|
||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
||||
|
||||
lua::lua_close(state);
|
||||
|
||||
eyre::bail!("Failed to run buffer: {}", err);
|
||||
}
|
||||
lua::LUA_ERRMEM => {
|
||||
lua::lua_close(state);
|
||||
eyre::bail!("Failed to allocate sufficient memory to run buffer")
|
||||
}
|
||||
// We don't use an error handler function, so this should be unreachable
|
||||
lua::LUA_ERRERR => unreachable!(),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
let name = b".mod\0";
|
||||
match lua::luaL_loadbuffer(
|
||||
state,
|
||||
data.as_ptr() as _,
|
||||
data.len() as _,
|
||||
name.as_ptr() as _,
|
||||
) as u32
|
||||
{
|
||||
lua::LUA_OK => {}
|
||||
lua::LUA_ERRSYNTAX => {
|
||||
let err = lua::lua_tostring(state, -1);
|
||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
||||
|
||||
lua::lua_close(state);
|
||||
|
||||
eyre::bail!("Invalid syntax: {}", err);
|
||||
}
|
||||
lua::LUA_ERRMEM => {
|
||||
lua::lua_close(state);
|
||||
eyre::bail!("Failed to allocate sufficient memory to load `.mod` file buffer")
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match lua::lua_pcall(state, 0, 1, 0) as u32 {
|
||||
lua::LUA_OK => {}
|
||||
lua::LUA_ERRRUN => {
|
||||
let err = lua::lua_tostring(state, -1);
|
||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
||||
|
||||
lua::lua_close(state);
|
||||
|
||||
eyre::bail!("Failed to run `.mod` file: {}", err);
|
||||
}
|
||||
lua::LUA_ERRMEM => {
|
||||
lua::lua_close(state);
|
||||
eyre::bail!("Failed to allocate sufficient memory to run `.mod` file")
|
||||
}
|
||||
// We don't use an error handler function, so this should be unreachable
|
||||
lua::LUA_ERRERR => unreachable!(),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
let key = b"run\0";
|
||||
lua::lua_pushstring(state, key.as_ptr() as _);
|
||||
lua::lua_gettable(state, -2);
|
||||
|
||||
match lua::lua_pcall(state, 0, 0, 0) as u32 {
|
||||
lua::LUA_OK => {}
|
||||
lua::LUA_ERRRUN => {
|
||||
let err = lua::lua_tostring(state, -1);
|
||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
||||
|
||||
lua::lua_close(state);
|
||||
|
||||
eyre::bail!("Failed to run `.mod.run`: {}", err);
|
||||
}
|
||||
lua::LUA_ERRMEM => {
|
||||
lua::lua_close(state);
|
||||
eyre::bail!("Failed to allocate sufficient memory to run `.mod.run`")
|
||||
}
|
||||
// We don't use an error handler function, so this should be unreachable
|
||||
lua::LUA_ERRERR => unreachable!(),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
let get_global = |state, key: &[u8]| {
|
||||
lua::lua_getglobal(state, key.as_ptr() as _);
|
||||
|
||||
if lua::lua_isnil(state, -1) != 0 {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let s = lua::lua_tostring(state, -1);
|
||||
|
||||
if s.is_null() {
|
||||
eyre::bail!("Expected string, got NULL");
|
||||
}
|
||||
|
||||
let ret = CStr::from_ptr(s).to_string_lossy().to_string();
|
||||
lua::lua_pop(state, 1);
|
||||
Ok(Some(ret))
|
||||
};
|
||||
|
||||
let mod_id = get_global(state, b"id\0")
|
||||
.and_then(|s| s.ok_or_else(|| eyre::eyre!("Got `nil`")))
|
||||
.wrap_err("Failed to get `id`")?;
|
||||
|
||||
let resources = ModConfigResources {
|
||||
init: get_global(state, b"script\0")
|
||||
.and_then(|s| s.map(PathBuf::from).ok_or_else(|| eyre::eyre!("Got `nil`")))
|
||||
.wrap_err("Failed to get `script`.")?,
|
||||
data: get_global(state, b"data\0")
|
||||
.wrap_err("Failed to get `data`.")?
|
||||
.map(PathBuf::from),
|
||||
localization: get_global(state, b"localization\0")
|
||||
.wrap_err("Failed to get `localization`")?
|
||||
.map(PathBuf::from),
|
||||
};
|
||||
|
||||
lua::lua_close(state);
|
||||
|
||||
(mod_id, resources)
|
||||
};
|
||||
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
// Extracts the mod configuration from the mod archive.
|
||||
// This may either be a proper `dtmt.cfg`, or the legacy `<mod_name>.mod` ID file.
|
||||
//
|
||||
// It also returns the directory where this file was found, used as root path. This
|
||||
// allows flexibility in what the directory structure is exactly, since many people
|
||||
// still end up creating tarbombs and Nexus does its own re-packaging.
|
||||
#[tracing::instrument(skip(archive))]
|
||||
fn extract_mod_config<R: Read + Seek>(archive: &mut ZipArchive<R>) -> Result<(ModConfig, String)> {
|
||||
let legacy_mod_data = if let Some(name) = find_archive_file(archive, ".mod") {
|
||||
let (mod_id, resources) = {
|
||||
let mut f = archive
|
||||
.by_name(&name)
|
||||
.wrap_err("Failed to read `.mod` file from archive")?;
|
||||
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read `.mod` file from archive")?;
|
||||
|
||||
let data = String::from_utf8(buf).wrap_err("`.mod` file is not valid UTF-8")?;
|
||||
parse_mod_id_file(&data)
|
||||
.wrap_err("Invalid `.mod` file")
|
||||
.note(
|
||||
"The `.mod` file's `run` function may not contain any additional logic \
|
||||
besides the default.",
|
||||
)
|
||||
.suggestion("Contact the mod author to fix this.")?
|
||||
};
|
||||
|
||||
let root = if let Some(index) = name.rfind('/') {
|
||||
name[..index].to_string()
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
|
||||
Some((mod_id, resources, root))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
tracing::debug!(?legacy_mod_data);
|
||||
|
||||
if let Some(name) = find_archive_file(archive, "dtmt.cfg") {
|
||||
let mut f = archive
|
||||
.by_name(&name)
|
||||
.wrap_err("Failed to read mod config from archive")?;
|
||||
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read mod config from archive")?;
|
||||
|
||||
let data = String::from_utf8(buf).wrap_err("Mod config is not valid UTF-8")?;
|
||||
|
||||
let mut cfg: ModConfig = serde_sjson::from_str(&data)
|
||||
.wrap_err("Failed to deserialize mod config")
|
||||
.suggestion("Contact the mod author to fix this.")?;
|
||||
|
||||
if let Some((mod_id, resources, root)) = legacy_mod_data {
|
||||
if cfg.id != mod_id {
|
||||
let err = eyre::eyre!("Mod ID in `dtmt.cfg` does not match mod ID in `.mod` file");
|
||||
return Err(err).suggestion("Contact the mod author to fix this.");
|
||||
}
|
||||
|
||||
cfg.resources = resources;
|
||||
|
||||
// Enforce that packages are skipped
|
||||
cfg.bundled = false;
|
||||
cfg.packages = vec![];
|
||||
|
||||
Ok((cfg, root))
|
||||
} else {
|
||||
let root = name
|
||||
.strip_suffix("dtmt.cfg")
|
||||
.expect("String must end with that suffix")
|
||||
.to_string();
|
||||
|
||||
Ok((cfg, root))
|
||||
}
|
||||
} else if let Some((mod_id, resources, root)) = legacy_mod_data {
|
||||
let cfg = ModConfig {
|
||||
bundled: false,
|
||||
dir: PathBuf::new(),
|
||||
id: mod_id.clone(),
|
||||
name: mod_id,
|
||||
summary: "A mod for the game Warhammer 40,000: Darktide".into(),
|
||||
version: "N/A".into(),
|
||||
description: None,
|
||||
author: None,
|
||||
image: None,
|
||||
categories: Vec::new(),
|
||||
packages: Vec::new(),
|
||||
resources,
|
||||
depends: Vec::new(),
|
||||
name_overrides: Default::default(),
|
||||
};
|
||||
|
||||
Ok((cfg, root))
|
||||
} else {
|
||||
eyre::bail!(
|
||||
"Mod needs a config file or `.mod` file. \
|
||||
Please get in touch with the author to provide a properly packaged mod."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(archive))]
|
||||
fn extract_bundled_mod<R: Read + Seek>(
|
||||
archive: &mut ZipArchive<R>,
|
||||
root: String,
|
||||
dest: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<Vector<Arc<PackageInfo>>> {
|
||||
let files: HashMap<String, Vec<String>> = {
|
||||
let name = archive
|
||||
.file_names()
|
||||
.find(|name| name.ends_with("files.sjson"))
|
||||
.map(|s| s.to_string())
|
||||
.ok_or_else(|| eyre::eyre!("archive does not contain file index"))?;
|
||||
|
||||
let mut f = archive
|
||||
.by_name(&name)
|
||||
.wrap_err("Failed to read file index from archive")?;
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read file index from archive")?;
|
||||
|
||||
let data = String::from_utf8(buf).wrap_err("File index is not valid UTF-8")?;
|
||||
serde_sjson::from_str(&data).wrap_err("Failed to deserialize file index")?
|
||||
};
|
||||
|
||||
tracing::trace!(?files);
|
||||
|
||||
let dest = dest.as_ref();
|
||||
tracing::trace!("Extracting mod archive to {}", dest.display());
|
||||
archive
|
||||
.extract(dest)
|
||||
.wrap_err_with(|| format!("Failed to extract archive to {}", dest.display()))?;
|
||||
|
||||
let packages = files
|
||||
.into_iter()
|
||||
.map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect())))
|
||||
.collect();
|
||||
|
||||
tracing::trace!(?packages);
|
||||
|
||||
Ok(packages)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(archive))]
|
||||
fn extract_legacy_mod<R: Read + Seek>(
|
||||
archive: &mut ZipArchive<R>,
|
||||
root: String,
|
||||
dest: impl Into<PathBuf> + std::fmt::Debug,
|
||||
) -> Result<()> {
|
||||
let dest = dest.into();
|
||||
let file_count = archive.len();
|
||||
|
||||
for i in 0..file_count {
|
||||
let mut f = archive
|
||||
.by_index(i)
|
||||
.wrap_err_with(|| format!("Failed to get file at index {}", i))?;
|
||||
|
||||
let Some(name) = f.enclosed_name().map(|p| p.to_path_buf()) else {
|
||||
let err = eyre::eyre!("File name in archive is not a safe path value.").suggestion(
|
||||
"Only use well-known applications to create the ZIP archive, \
|
||||
and don't create paths that point outside the archive directory.",
|
||||
);
|
||||
return Err(err);
|
||||
};
|
||||
|
||||
let Ok(suffix) = name.strip_prefix(&root) else {
|
||||
tracing::warn!(
|
||||
"Skipping file outside of the mod root directory: {}",
|
||||
name.display()
|
||||
);
|
||||
continue;
|
||||
};
|
||||
let name = dest.join(suffix);
|
||||
|
||||
if f.is_dir() {
|
||||
// The majority of errors will actually be "X already exists".
|
||||
// But rather than filter them invidually, we just ignore all of them.
|
||||
// If there is a legitimate error of "couldn't create X", it will eventually fail when
|
||||
// we try to put a file in there.
|
||||
tracing::trace!("Creating directory '{}'", name.display());
|
||||
let _ = std::fs::create_dir_all(&name);
|
||||
} else {
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", name.display()))?;
|
||||
|
||||
tracing::trace!("Writing file '{}'", name.display());
|
||||
let mut out = std::fs::OpenOptions::new()
|
||||
.write(true)
|
||||
.create(true)
|
||||
.open(&name)
|
||||
.wrap_err_with(|| format!("Failed to open file '{}'", name.display()))?;
|
||||
|
||||
out.write_all(&buf)
|
||||
.wrap_err_with(|| format!("Failed to write to '{}'", name.display()))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(state))]
|
||||
pub(crate) async fn import_from_file(state: ActionState, info: FileInfo) -> Result<ModInfo> {
|
||||
let data = fs::read(&info.path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file {}", info.path.display()))?;
|
||||
|
||||
let nexus = if let Some((_, id, version, timestamp)) = info
|
||||
.path
|
||||
.file_name()
|
||||
.and_then(|s| s.to_str())
|
||||
.and_then(NexusApi::parse_file_name)
|
||||
{
|
||||
if !state.nexus_api_key.is_empty() {
|
||||
let api = NexusApi::new(state.nexus_api_key.to_string())?;
|
||||
let mod_info = api
|
||||
.mods_id(id)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?;
|
||||
|
||||
let version = match api.file_version(id, timestamp).await {
|
||||
Ok(version) => version,
|
||||
Err(err) => {
|
||||
let err = Report::new(err);
|
||||
tracing::warn!(
|
||||
"Failed to fetch version for Nexus download. \
|
||||
Falling back to file name:\n{:?}",
|
||||
err
|
||||
);
|
||||
version
|
||||
}
|
||||
};
|
||||
|
||||
let info = NexusInfo::from(mod_info);
|
||||
tracing::debug!(version, ?info);
|
||||
|
||||
Some((info, version))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
tracing::trace!(?nexus);
|
||||
|
||||
import_mod(state, nexus, data).await
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(state))]
|
||||
pub(crate) async fn import_from_nxm(state: ActionState, uri: String) -> Result<ModInfo> {
|
||||
let url = uri
|
||||
.parse()
|
||||
.wrap_err_with(|| format!("Invalid Uri '{}'", uri))?;
|
||||
|
||||
let api = NexusApi::new(state.nexus_api_key.to_string())?;
|
||||
let (mod_info, file_info, data) = api
|
||||
.handle_nxm(url)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to download mod from NXM uri '{}'", uri))?;
|
||||
|
||||
let nexus = NexusInfo::from(mod_info);
|
||||
import_mod(state, Some((nexus, file_info.version)), data).await
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(state, data), fields(data = data.len()))]
|
||||
pub(crate) async fn import_mod(
|
||||
state: ActionState,
|
||||
nexus: Option<(NexusInfo, String)>,
|
||||
data: Vec<u8>,
|
||||
) -> Result<ModInfo> {
|
||||
let data = Cursor::new(data);
|
||||
let mut archive = ZipArchive::new(data).wrap_err("Failed to open ZIP archive")?;
|
||||
|
||||
if tracing::enabled!(tracing::Level::DEBUG) {
|
||||
let names = archive.file_names().fold(String::new(), |mut s, name| {
|
||||
s.push('\n');
|
||||
s.push_str(name);
|
||||
s
|
||||
});
|
||||
tracing::debug!("Archive contents:{}", names);
|
||||
}
|
||||
|
||||
let (mut mod_cfg, root) =
|
||||
extract_mod_config(&mut archive).wrap_err("Failed to extract mod configuration")?;
|
||||
tracing::info!("Importing mod {} ({})", mod_cfg.name, mod_cfg.id);
|
||||
|
||||
let mod_dir = state.data_dir.join(state.mod_dir.as_ref());
|
||||
let dest = mod_dir.join(&mod_cfg.id);
|
||||
tracing::trace!("Creating mods directory {}", dest.display());
|
||||
fs::create_dir_all(&dest)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to create data directory '{}'", dest.display()))?;
|
||||
|
||||
let image = if let Some(path) = &mod_cfg.image {
|
||||
let name = archive
|
||||
.file_names()
|
||||
.find(|name| name.ends_with(&path.display().to_string()))
|
||||
.map(|s| s.to_string())
|
||||
.ok_or_else(|| eyre::eyre!("archive does not contain configured image file"))?;
|
||||
|
||||
let mut f = archive
|
||||
.by_name(&name)
|
||||
.wrap_err("Failed to read image file from archive")?;
|
||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
||||
f.read_to_end(&mut buf)
|
||||
.wrap_err("Failed to read file index from archive")?;
|
||||
|
||||
let img = image_data_to_buffer(buf)?;
|
||||
Some(img)
|
||||
} else if let Some((nexus, _)) = &nexus {
|
||||
let api = NexusApi::new(state.nexus_api_key.to_string())?;
|
||||
let url = nexus.picture_url.as_ref();
|
||||
let data = api
|
||||
.picture(url)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to download Nexus image from '{}'", url))?;
|
||||
|
||||
let img = image_data_to_buffer(&data)?;
|
||||
|
||||
let name = "image.bin";
|
||||
let path = dest.join(name);
|
||||
match fs::write(&path, &data).await {
|
||||
Ok(_) => {
|
||||
mod_cfg.image = Some(name.into());
|
||||
Some(img)
|
||||
}
|
||||
Err(err) => {
|
||||
let err = Report::new(err).wrap_err(format!(
|
||||
"Failed to write Nexus picture to file '{}'",
|
||||
path.display()
|
||||
));
|
||||
tracing::error!("{:?}", err);
|
||||
None
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
tracing::trace!(?image);
|
||||
tracing::debug!(root, ?mod_cfg);
|
||||
|
||||
let packages = if mod_cfg.bundled {
|
||||
extract_bundled_mod(&mut archive, root, &mod_dir).wrap_err("Failed to extract mod")?
|
||||
} else {
|
||||
extract_legacy_mod(&mut archive, root, &dest).wrap_err("Failed to extract legacy mod")?;
|
||||
|
||||
if let Some((_, version)) = &nexus {
|
||||
// We use the version number stored in the `ModInfo` to compare against the `NexusInfo`
|
||||
// for version checks. So for this one, we can't actually rely on merely shadowing,
|
||||
// like with the other fields.
|
||||
mod_cfg.version = version.clone();
|
||||
}
|
||||
|
||||
let data = serde_sjson::to_string(&mod_cfg).wrap_err("Failed to serialize mod config")?;
|
||||
fs::write(dest.join("dtmt.cfg"), &data)
|
||||
.await
|
||||
.wrap_err("Failed to write mod config")?;
|
||||
|
||||
Default::default()
|
||||
};
|
||||
|
||||
if let Some((nexus, _)) = &nexus {
|
||||
let data = serde_sjson::to_string(nexus).wrap_err("Failed to serialize Nexus info")?;
|
||||
let path = dest.join("nexus.sjson");
|
||||
fs::write(&path, data.as_bytes())
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write Nexus info to '{}'", path.display()))?;
|
||||
}
|
||||
|
||||
let info = ModInfo::new(mod_cfg, packages, image, nexus.map(|(info, _)| info));
|
||||
Ok(info)
|
||||
}
|
|
@ -5,9 +5,7 @@ use serde::Deserialize;
|
|||
use tokio::fs;
|
||||
|
||||
pub mod app;
|
||||
pub mod deploy;
|
||||
pub mod game;
|
||||
pub mod import;
|
||||
pub mod worker;
|
||||
|
||||
#[tracing::instrument]
|
||||
|
|
|
@ -13,9 +13,7 @@ use tokio::sync::mpsc::UnboundedReceiver;
|
|||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::controller::app::*;
|
||||
use crate::controller::deploy::deploy_mods;
|
||||
use crate::controller::game::*;
|
||||
use crate::controller::import::*;
|
||||
use crate::state::AsyncAction;
|
||||
use crate::state::ACTION_FINISH_CHECK_UPDATE;
|
||||
use crate::state::ACTION_FINISH_LOAD_INITIAL;
|
||||
|
@ -38,9 +36,7 @@ async fn handle_action(
|
|||
action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
|
||||
) {
|
||||
while let Some(action) = action_queue.write().await.recv().await {
|
||||
if cfg!(debug_assertions) && !matches!(action, AsyncAction::Log(_)) {
|
||||
tracing::debug!(?action);
|
||||
}
|
||||
tracing::debug!(?action);
|
||||
|
||||
let event_sink = event_sink.clone();
|
||||
match action {
|
||||
|
@ -57,7 +53,7 @@ async fn handle_action(
|
|||
.expect("failed to send command");
|
||||
}),
|
||||
AsyncAction::AddMod(state, info) => tokio::spawn(async move {
|
||||
match import_from_file(state, info)
|
||||
match import_mod(state, info)
|
||||
.await
|
||||
.wrap_err("Failed to import mod")
|
||||
{
|
||||
|
@ -186,28 +182,6 @@ async fn handle_action(
|
|||
let _ = f.write_all(&line).await;
|
||||
}
|
||||
}),
|
||||
AsyncAction::NxmDownload(state, uri) => tokio::spawn(async move {
|
||||
match import_from_nxm(state, uri)
|
||||
.await
|
||||
.wrap_err("Failed to handle NXM URI")
|
||||
{
|
||||
Ok(mod_info) => {
|
||||
event_sink
|
||||
.write()
|
||||
.await
|
||||
.submit_command(
|
||||
ACTION_FINISH_ADD_MOD,
|
||||
SingleUse::new(Arc::new(mod_info)),
|
||||
Target::Auto,
|
||||
)
|
||||
.expect("failed to send command");
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!("{:?}", err);
|
||||
send_error(event_sink.clone(), err).await;
|
||||
}
|
||||
}
|
||||
}),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#![recursion_limit = "256"]
|
||||
#![feature(let_chains)]
|
||||
#![feature(iterator_try_collect)]
|
||||
#![feature(arc_unwrap_or_clone)]
|
||||
#![windows_subsystem = "windows"]
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
@ -9,13 +9,12 @@ use std::sync::Arc;
|
|||
use clap::parser::ValueSource;
|
||||
use clap::{command, value_parser, Arg};
|
||||
use color_eyre::eyre::{self, Context};
|
||||
use color_eyre::{Report, Result, Section};
|
||||
use color_eyre::{Report, Result};
|
||||
use druid::AppLauncher;
|
||||
use interprocess::local_socket::{prelude::*, GenericNamespaced, ListenerOptions};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::controller::worker::work_thread;
|
||||
use crate::state::{AsyncAction, ACTION_HANDLE_NXM};
|
||||
use crate::state::AsyncAction;
|
||||
use crate::state::{Delegate, State};
|
||||
use crate::ui::theme;
|
||||
use crate::util::log::LogLevel;
|
||||
|
@ -29,41 +28,6 @@ mod util {
|
|||
}
|
||||
mod ui;
|
||||
|
||||
// As explained in https://docs.rs/interprocess/2.1.0/interprocess/local_socket/struct.Name.html
|
||||
// namespaces are supported on both platforms we care about: Windows and Linux.
|
||||
const IPC_ADDRESS: &str = "dtmm.sock";
|
||||
|
||||
#[tracing::instrument]
|
||||
fn notify_nxm_download(
|
||||
uri: impl AsRef<str> + std::fmt::Debug,
|
||||
level: Option<LogLevel>,
|
||||
) -> Result<()> {
|
||||
util::log::create_tracing_subscriber(level, None);
|
||||
|
||||
tracing::debug!("Received Uri '{}', sending to main process.", uri.as_ref());
|
||||
|
||||
let mut stream = LocalSocketStream::connect(
|
||||
IPC_ADDRESS
|
||||
.to_ns_name::<GenericNamespaced>()
|
||||
.expect("Invalid socket name"),
|
||||
)
|
||||
.wrap_err_with(|| format!("Failed to connect to '{}'", IPC_ADDRESS))
|
||||
.suggestion("Make sure the main window is open.")?;
|
||||
|
||||
tracing::debug!("Connected to main process at '{}'", IPC_ADDRESS);
|
||||
|
||||
bincode::serialize_into(&mut stream, uri.as_ref()).wrap_err("Failed to send URI")?;
|
||||
|
||||
// We don't really care what the message is, we just need an acknowledgement.
|
||||
let _: String = bincode::deserialize_from(&mut stream).wrap_err("Failed to receive reply")?;
|
||||
|
||||
tracing::info!(
|
||||
"Notified DTMM with uri '{}'. Check the main window.",
|
||||
uri.as_ref()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
fn main() -> Result<()> {
|
||||
color_eyre::install()?;
|
||||
|
@ -88,25 +52,15 @@ fn main() -> Result<()> {
|
|||
.value_parser(value_parser!(LogLevel))
|
||||
.default_value("info"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("nxm")
|
||||
.help("An `nxm://` URI to download")
|
||||
.required(false),
|
||||
)
|
||||
.get_matches();
|
||||
|
||||
let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
|
||||
let level = if matches.value_source("log-level") == Some(ValueSource::DefaultValue) {
|
||||
None
|
||||
} else {
|
||||
matches.get_one::<LogLevel>("log-level").cloned()
|
||||
};
|
||||
|
||||
if let Some(uri) = matches.get_one::<String>("nxm") {
|
||||
return notify_nxm_download(uri, level).wrap_err("Failed to send NXM Uri to main window.");
|
||||
}
|
||||
|
||||
let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
|
||||
util::log::create_tracing_subscriber(level, Some(log_tx));
|
||||
util::log::create_tracing_subscriber(log_tx, level);
|
||||
|
||||
let (action_tx, action_rx) = tokio::sync::mpsc::unbounded_channel();
|
||||
|
||||
|
@ -129,65 +83,6 @@ fn main() -> Result<()> {
|
|||
|
||||
let event_sink = launcher.get_external_handle();
|
||||
|
||||
{
|
||||
let span = tracing::info_span!(IPC_ADDRESS, "nxm-socket");
|
||||
let _guard = span.enter();
|
||||
|
||||
let event_sink = event_sink.clone();
|
||||
let server = ListenerOptions::new()
|
||||
.name(
|
||||
IPC_ADDRESS
|
||||
.to_ns_name::<GenericNamespaced>()
|
||||
.expect("Invalid socket name"),
|
||||
)
|
||||
.create_sync()
|
||||
.wrap_err("Failed to create IPC listener")?;
|
||||
|
||||
tracing::debug!("IPC server listening on '{}'", IPC_ADDRESS);
|
||||
|
||||
// Drop the guard here, so that we can re-enter the same span in the thread.
|
||||
drop(_guard);
|
||||
|
||||
std::thread::Builder::new()
|
||||
.name("nxm-socket".into())
|
||||
.spawn(move || {
|
||||
let _guard = span.enter();
|
||||
|
||||
loop {
|
||||
let res = server.accept().wrap_err_with(|| {
|
||||
format!("IPC server failed to listen on '{}'", IPC_ADDRESS)
|
||||
});
|
||||
|
||||
match res {
|
||||
Ok(mut stream) => {
|
||||
let res = bincode::deserialize_from(&mut stream)
|
||||
.wrap_err("Failed to read message")
|
||||
.and_then(|uri: String| {
|
||||
tracing::trace!(uri, "Received NXM uri");
|
||||
|
||||
event_sink
|
||||
.submit_command(ACTION_HANDLE_NXM, uri, druid::Target::Auto)
|
||||
.wrap_err("Failed to start NXM download")
|
||||
});
|
||||
match res {
|
||||
Ok(()) => {
|
||||
let _ = bincode::serialize_into(&mut stream, "Ok");
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!("{:?}", err);
|
||||
let _ = bincode::serialize_into(&mut stream, "Error");
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!("Failed to receive client connection: {:?}", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.wrap_err("Failed to create thread")?;
|
||||
}
|
||||
|
||||
std::thread::Builder::new()
|
||||
.name("work-thread".into())
|
||||
.spawn(move || {
|
||||
|
@ -201,7 +96,7 @@ fn main() -> Result<()> {
|
|||
}
|
||||
}
|
||||
})
|
||||
.wrap_err("Failed to create thread")?;
|
||||
.wrap_err("Work thread panicked")?;
|
||||
|
||||
launcher.launch(State::new()).map_err(Report::new)
|
||||
}
|
||||
|
|
|
@ -72,40 +72,26 @@ impl From<dtmt_shared::ModDependency> for ModDependency {
|
|||
|
||||
#[derive(Clone, Data, Debug, Lens, serde::Serialize, serde::Deserialize)]
|
||||
pub(crate) struct NexusInfo {
|
||||
pub author: String,
|
||||
pub category_id: u64,
|
||||
pub created_timestamp: i64,
|
||||
pub description: Arc<String>,
|
||||
pub id: u64,
|
||||
pub name: String,
|
||||
pub picture_url: Arc<String>,
|
||||
pub summary: Arc<String>,
|
||||
pub uid: u64,
|
||||
pub updated_timestamp: i64,
|
||||
pub uploaded_by: String,
|
||||
pub version: String,
|
||||
pub author: String,
|
||||
pub summary: Arc<String>,
|
||||
pub description: Arc<String>,
|
||||
}
|
||||
|
||||
impl From<NexusMod> for NexusInfo {
|
||||
fn from(value: NexusMod) -> Self {
|
||||
Self {
|
||||
author: value.author,
|
||||
category_id: value.category_id,
|
||||
created_timestamp: value.created_timestamp.unix_timestamp(),
|
||||
description: Arc::new(value.description),
|
||||
id: value.mod_id,
|
||||
name: value.name,
|
||||
picture_url: Arc::new(value.picture_url.into()),
|
||||
summary: Arc::new(value.summary),
|
||||
uid: value.uid,
|
||||
updated_timestamp: value.updated_timestamp.unix_timestamp(),
|
||||
uploaded_by: value.uploaded_by,
|
||||
version: value.version,
|
||||
author: value.author,
|
||||
summary: Arc::new(value.summary),
|
||||
description: Arc::new(value.description),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Data, Lens)]
|
||||
#[derive(Clone, Data, Debug, Lens)]
|
||||
pub(crate) struct ModInfo {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
|
@ -116,51 +102,17 @@ pub(crate) struct ModInfo {
|
|||
pub image: Option<ImageBuf>,
|
||||
pub version: String,
|
||||
pub enabled: bool,
|
||||
pub depends: Vector<ModDependency>,
|
||||
pub bundled: bool,
|
||||
#[lens(ignore)]
|
||||
#[data(ignore)]
|
||||
pub packages: Vector<Arc<PackageInfo>>,
|
||||
#[lens(ignore)]
|
||||
#[data(ignore)]
|
||||
pub resources: ModResourceInfo,
|
||||
pub depends: Vector<ModDependency>,
|
||||
#[data(ignore)]
|
||||
pub nexus: Option<NexusInfo>,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for ModInfo {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("ModInfo")
|
||||
.field("id", &self.id)
|
||||
.field("name", &self.name)
|
||||
.field("summary", &self.summary)
|
||||
.field(
|
||||
"description",
|
||||
&(match &self.description {
|
||||
Some(desc) => format!("Some(String[0..{}])", desc.len()),
|
||||
None => "None".to_string(),
|
||||
}),
|
||||
)
|
||||
.field("categories", &self.categories)
|
||||
.field("author", &self.author)
|
||||
.field(
|
||||
"image",
|
||||
&(match &self.image {
|
||||
Some(image) => format!("Some(ImageBuf[{}x{}])", image.width(), image.height()),
|
||||
None => "None".to_string(),
|
||||
}),
|
||||
)
|
||||
.field("version", &self.version)
|
||||
.field("enabled", &self.enabled)
|
||||
.field("packages", &format!("Vec[0..{}]", self.packages.len()))
|
||||
.field("resources", &self.resources)
|
||||
.field("depends", &self.depends)
|
||||
.field("bundled", &self.bundled)
|
||||
.field("nexus", &self.nexus)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl ModInfo {
|
||||
pub fn new(
|
||||
cfg: ModConfig,
|
||||
|
@ -177,7 +129,6 @@ impl ModInfo {
|
|||
version: cfg.version,
|
||||
enabled: false,
|
||||
packages,
|
||||
bundled: cfg.bundled,
|
||||
image,
|
||||
categories: cfg.categories.into_iter().collect(),
|
||||
resources: ModResourceInfo {
|
||||
|
|
|
@ -32,7 +32,6 @@ pub(crate) const ACTION_START_RESET_DEPLOYMENT: Selector =
|
|||
pub(crate) const ACTION_FINISH_RESET_DEPLOYMENT: Selector =
|
||||
Selector::new("dtmm.action.finish-reset-deployment");
|
||||
|
||||
pub(crate) const ACTION_HANDLE_NXM: Selector<String> = Selector::new("dtmm.action.handle-nxm");
|
||||
pub(crate) const ACTION_ADD_MOD: Selector<FileInfo> = Selector::new("dtmm.action.add-mod");
|
||||
pub(crate) const ACTION_FINISH_ADD_MOD: Selector<SingleUse<Arc<ModInfo>>> =
|
||||
Selector::new("dtmm.action.finish-add-mod");
|
||||
|
@ -61,8 +60,6 @@ pub(crate) type InitialLoadResult = (Config, Vector<Arc<ModInfo>>);
|
|||
pub(crate) const ACTION_FINISH_LOAD_INITIAL: Selector<SingleUse<Option<InitialLoadResult>>> =
|
||||
Selector::new("dtmm.action.finish-load-initial");
|
||||
|
||||
pub(crate) const ACTION_OPEN_LINK: Selector<Arc<String>> = Selector::new("dtmm.action.open-link");
|
||||
|
||||
// A sub-selection of `State`'s fields that are required in `AsyncAction`s and that are
|
||||
// `Send + Sync`
|
||||
pub(crate) struct ActionState {
|
||||
|
@ -100,7 +97,6 @@ pub(crate) enum AsyncAction {
|
|||
CheckUpdates(ActionState),
|
||||
LoadInitial((PathBuf, bool)),
|
||||
Log((ActionState, Vec<u8>)),
|
||||
NxmDownload(ActionState, String),
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AsyncAction {
|
||||
|
@ -120,9 +116,6 @@ impl std::fmt::Debug for AsyncAction {
|
|||
path, is_default
|
||||
),
|
||||
AsyncAction::Log(_) => write!(f, "AsyncAction::Log(_)"),
|
||||
AsyncAction::NxmDownload(_, uri) => {
|
||||
write!(f, "AsyncAction::NxmDownload(_state, {})", uri)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -257,20 +250,6 @@ impl AppDelegate<State> for Delegate {
|
|||
|
||||
Handled::Yes
|
||||
}
|
||||
cmd if cmd.is(ACTION_HANDLE_NXM) => {
|
||||
let uri = cmd
|
||||
.get(ACTION_HANDLE_NXM)
|
||||
.expect("command type match but didn't contain the expected value");
|
||||
|
||||
if self
|
||||
.sender
|
||||
.send(AsyncAction::NxmDownload(state.clone().into(), uri.clone()))
|
||||
.is_err()
|
||||
{
|
||||
tracing::error!("Failed to queue action to download NXM mod");
|
||||
}
|
||||
Handled::Yes
|
||||
}
|
||||
cmd if cmd.is(ACTION_ADD_MOD) => {
|
||||
let info = cmd
|
||||
.get(ACTION_ADD_MOD)
|
||||
|
@ -432,7 +411,6 @@ impl AppDelegate<State> for Delegate {
|
|||
state.config_path = Arc::new(config.path);
|
||||
state.data_dir = Arc::new(config.data_dir);
|
||||
state.game_dir = Arc::new(config.game_dir.unwrap_or_default());
|
||||
state.nexus_api_key = Arc::new(config.nexus_api_key.unwrap_or_default());
|
||||
state.is_io_enabled = config.unsafe_io;
|
||||
}
|
||||
|
||||
|
@ -440,20 +418,6 @@ impl AppDelegate<State> for Delegate {
|
|||
|
||||
Handled::Yes
|
||||
}
|
||||
cmd if cmd.is(ACTION_OPEN_LINK) => {
|
||||
let url = cmd
|
||||
.get(ACTION_OPEN_LINK)
|
||||
.expect("command type matched but didn't contain the expected value");
|
||||
|
||||
if let Err(err) = open::that_detached(Arc::as_ref(url)) {
|
||||
tracing::error!(
|
||||
"{:?}",
|
||||
Report::new(err).wrap_err(format!("Failed to open url '{}'", url))
|
||||
);
|
||||
}
|
||||
|
||||
Handled::Yes
|
||||
}
|
||||
_ => Handled::No,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,7 +42,6 @@ impl Lens<State, Option<Arc<ModInfo>>> for SelectedModLens {
|
|||
/// A Lens that maps an `im::Vector<T>` to `im::Vector<(usize, T)>`,
|
||||
/// where each element in the destination vector includes its index in the
|
||||
/// source vector.
|
||||
#[allow(dead_code)]
|
||||
pub(crate) struct IndexedVectorLens;
|
||||
|
||||
impl<T: Data> Lens<Vector<T>, Vector<(usize, T)>> for IndexedVectorLens {
|
||||
|
|
|
@ -17,7 +17,6 @@ macro_rules! make_color {
|
|||
}
|
||||
|
||||
make_color!(TOP_BAR_BACKGROUND_COLOR, COLOR_BG1);
|
||||
make_color!(LINK_COLOR, COLOR_ACCENT);
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod gruvbox_dark {
|
||||
|
@ -69,10 +68,23 @@ pub mod gruvbox_dark {
|
|||
}
|
||||
|
||||
pub trait ColorExt {
|
||||
fn lighten(&self, fac: f32) -> Self;
|
||||
fn darken(&self, fac: f32) -> Self;
|
||||
}
|
||||
|
||||
impl ColorExt for Color {
|
||||
fn lighten(&self, fac: f32) -> Self {
|
||||
let (r, g, b, a) = self.as_rgba();
|
||||
let rgb = Rgb::from(r as f32, g as f32, b as f32);
|
||||
let rgb = rgb.lighten(fac);
|
||||
Self::rgba(
|
||||
rgb.get_red() as f64,
|
||||
rgb.get_green() as f64,
|
||||
rgb.get_blue() as f64,
|
||||
a,
|
||||
)
|
||||
}
|
||||
|
||||
fn darken(&self, fac: f32) -> Self {
|
||||
let (r, g, b, a) = self.as_rgba();
|
||||
let rgb = Rgb::from(r as f32, g as f32, b as f32);
|
||||
|
|
|
@ -4,7 +4,7 @@ use usvg::{
|
|||
};
|
||||
|
||||
pub static ALERT_CIRCLE: &str = include_str!("../../../assets/tabler-icons/alert-circle.svg");
|
||||
pub static CLOUD_DOWNLOAD: &str = include_str!("../../../assets/tabler-icons/cloud-download.svg");
|
||||
pub static ALERT_TRIANGLE: &str = include_str!("../../../assets/tabler-icons/alert-triangle.svg");
|
||||
|
||||
pub fn parse_svg(svg: &str) -> Result<Tree, Error> {
|
||||
let opt = Options::default();
|
||||
|
|
|
@ -2,11 +2,16 @@ use std::path::PathBuf;
|
|||
use std::sync::Arc;
|
||||
|
||||
use druid::text::Formatter;
|
||||
use druid::{Data, Widget};
|
||||
|
||||
pub mod border;
|
||||
pub mod button;
|
||||
pub mod controller;
|
||||
|
||||
pub trait ExtraWidgetExt<T: Data>: Widget<T> + Sized + 'static {}
|
||||
|
||||
impl<T: Data, W: Widget<T> + 'static> ExtraWidgetExt<T> for W {}
|
||||
|
||||
pub(crate) struct PathBufFormatter;
|
||||
|
||||
impl PathBufFormatter {
|
||||
|
|
|
@ -7,14 +7,11 @@ use crate::ui::widget::button::Button;
|
|||
|
||||
const WINDOW_SIZE: (f64, f64) = (600., 250.);
|
||||
|
||||
/// Show an error dialog.
|
||||
/// The title and message are extracted from the error chain in the given `Report`.
|
||||
pub fn error<T: Data>(err: Report, _parent: WindowHandle) -> WindowDesc<T> {
|
||||
let (title, msg) = {
|
||||
let count = err.chain().count();
|
||||
|
||||
if count == 1 {
|
||||
// If there is only one error, that's all we can show.
|
||||
(
|
||||
String::from("An error occurred!"),
|
||||
err.root_cause().to_string(),
|
||||
|
@ -23,20 +20,13 @@ pub fn error<T: Data>(err: Report, _parent: WindowHandle) -> WindowDesc<T> {
|
|||
let first = err.chain().next().unwrap();
|
||||
let root = err.root_cause();
|
||||
|
||||
// If there is more than one error in the chain we want to show
|
||||
// - The first one: This will describe the overall operation that failed
|
||||
// - The root cause: The actual thing that failed (e.g. 'No such file or directory')
|
||||
// - The one before the root cause: With diligent `wrap_err` usage, this will provide
|
||||
// context to the root cause (e.g. the file name we failed to access)
|
||||
//
|
||||
// If there are only two errors, the first one is also the context to the root cause.
|
||||
if count > 2 {
|
||||
// The second to last one, the context to the root cause
|
||||
let context = err.chain().nth(count - 2).unwrap();
|
||||
|
||||
(format!("{first}!"), format!("{}: {}", context, root))
|
||||
} else {
|
||||
("An error occurred!".to_string(), format!("{}: {}", first, root))
|
||||
(format!("{first}!"), root.to_string())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -2,7 +2,6 @@ use std::str::FromStr;
|
|||
use std::sync::Arc;
|
||||
|
||||
use druid::im::Vector;
|
||||
use druid::text::RichTextBuilder;
|
||||
use druid::widget::{
|
||||
Checkbox, CrossAxisAlignment, Either, Flex, Image, Label, LineBreaking, List,
|
||||
MainAxisAlignment, Maybe, Scroll, SizedBox, Split, Svg, SvgData, TextBox, ViewSwitcher,
|
||||
|
@ -17,12 +16,11 @@ use druid_widget_nursery::WidgetExt as _;
|
|||
use lazy_static::lazy_static;
|
||||
|
||||
use crate::state::{
|
||||
ModInfo, NexusInfo, NexusInfoLens, State, View, ACTION_ADD_MOD, ACTION_OPEN_LINK,
|
||||
ACTION_SELECTED_MOD_DOWN, ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, ACTION_SET_WINDOW_HANDLE,
|
||||
ACTION_START_CHECK_UPDATE, ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY,
|
||||
ACTION_START_RESET_DEPLOYMENT,
|
||||
ModInfo, NexusInfo, NexusInfoLens, State, View, ACTION_ADD_MOD, ACTION_SELECTED_MOD_DOWN,
|
||||
ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, ACTION_SET_WINDOW_HANDLE, ACTION_START_CHECK_UPDATE,
|
||||
ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY, ACTION_START_RESET_DEPLOYMENT,
|
||||
};
|
||||
use crate::ui::theme::{self, ColorExt, COLOR_GREEN_LIGHT};
|
||||
use crate::ui::theme::{self, ColorExt, COLOR_YELLOW_LIGHT};
|
||||
use crate::ui::widget::border::Border;
|
||||
use crate::ui::widget::button::Button;
|
||||
use crate::ui::widget::controller::{
|
||||
|
@ -137,22 +135,17 @@ fn build_mod_list() -> impl Widget<State> {
|
|||
})
|
||||
.lens(lens!((usize, Arc<ModInfo>, bool), 1).then(ModInfo::enabled.in_arc()));
|
||||
|
||||
let name = Label::dynamic(|info: &Arc<ModInfo>, _| {
|
||||
info.nexus
|
||||
.as_ref()
|
||||
.map(|n| n.name.clone())
|
||||
.unwrap_or_else(|| info.name.clone())
|
||||
})
|
||||
.lens(lens!((usize, Arc<ModInfo>, bool), 1));
|
||||
let name =
|
||||
Label::raw().lens(lens!((usize, Arc<ModInfo>, bool), 1).then(ModInfo::name.in_arc()));
|
||||
|
||||
let version = {
|
||||
let icon = {
|
||||
let tree =
|
||||
theme::icons::parse_svg(theme::icons::CLOUD_DOWNLOAD).expect("invalid SVG");
|
||||
theme::icons::parse_svg(theme::icons::ALERT_TRIANGLE).expect("invalid SVG");
|
||||
|
||||
let tree = theme::icons::recolor_icon(tree, true, COLOR_GREEN_LIGHT);
|
||||
let tree = theme::icons::recolor_icon(tree, true, COLOR_YELLOW_LIGHT);
|
||||
|
||||
Svg::new(tree).fix_height(druid::theme::TEXT_SIZE_NORMAL)
|
||||
Svg::new(Arc::new(tree)).fix_height(druid::theme::TEXT_SIZE_NORMAL)
|
||||
};
|
||||
|
||||
Either::new(
|
||||
|
@ -309,11 +302,13 @@ fn build_mod_details_info() -> impl Widget<State> {
|
|||
// Force the label to take up the entire details' pane width,
|
||||
// so that we can center-align it.
|
||||
.expand_width()
|
||||
.lens(NexusInfoLens::new(NexusInfo::name, ModInfo::name).in_arc());
|
||||
.lens(ModInfo::name.in_arc());
|
||||
let summary = Label::raw()
|
||||
.with_line_break_mode(LineBreaking::WordWrap)
|
||||
.lens(NexusInfoLens::new(NexusInfo::summary, ModInfo::summary).in_arc());
|
||||
|
||||
// TODO: Image/icon?
|
||||
|
||||
let version_line = Label::dynamic(|info: &Arc<ModInfo>, _| {
|
||||
let author = info
|
||||
.nexus
|
||||
|
@ -345,28 +340,6 @@ fn build_mod_details_info() -> impl Widget<State> {
|
|||
}
|
||||
});
|
||||
|
||||
let nexus_link = Maybe::or_empty(|| {
|
||||
let link = Label::raw().lens(NexusInfo::id.map(
|
||||
|id| {
|
||||
let url = format!("https://nexusmods.com/warhammer40kdarktide/mods/{}", id);
|
||||
let mut builder = RichTextBuilder::new();
|
||||
builder
|
||||
.push("Open on Nexusmods")
|
||||
.underline(true)
|
||||
.text_color(theme::LINK_COLOR)
|
||||
.link(ACTION_OPEN_LINK.with(Arc::new(url)));
|
||||
builder.build()
|
||||
},
|
||||
|_, _| {},
|
||||
));
|
||||
Flex::column()
|
||||
.cross_axis_alignment(CrossAxisAlignment::Start)
|
||||
.main_axis_alignment(MainAxisAlignment::Start)
|
||||
.with_child(link)
|
||||
.with_spacer(4.)
|
||||
})
|
||||
.lens(ModInfo::nexus.in_arc());
|
||||
|
||||
let details = Flex::column()
|
||||
.cross_axis_alignment(CrossAxisAlignment::Start)
|
||||
.main_axis_alignment(MainAxisAlignment::Start)
|
||||
|
@ -374,7 +347,6 @@ fn build_mod_details_info() -> impl Widget<State> {
|
|||
.with_spacer(4.)
|
||||
.with_child(summary)
|
||||
.with_spacer(4.)
|
||||
.with_child(nexus_link)
|
||||
.with_child(version_line)
|
||||
.with_spacer(4.)
|
||||
.with_child(categories)
|
||||
|
@ -389,6 +361,8 @@ fn build_mod_details_info() -> impl Widget<State> {
|
|||
.must_fill_main_axis(true)
|
||||
.cross_axis_alignment(CrossAxisAlignment::Start)
|
||||
.with_child(image)
|
||||
// .with_spacer(4.)
|
||||
// .with_flex_child(details, 1.)
|
||||
.with_child(details)
|
||||
},
|
||||
Flex::column,
|
||||
|
|
|
@ -125,9 +125,6 @@ where
|
|||
.wrap_err_with(|| format!("Invalid config file {}", path.display()))?;
|
||||
|
||||
cfg.path = path;
|
||||
|
||||
tracing::debug!("Read config file '{}': {:?}", cfg.path.display(), cfg);
|
||||
|
||||
Ok(cfg)
|
||||
}
|
||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
||||
|
@ -136,11 +133,6 @@ where
|
|||
.wrap_err_with(|| format!("Failed to read config file {}", path.display()))?;
|
||||
}
|
||||
|
||||
tracing::debug!(
|
||||
"Config file not found at '{}', creating default.",
|
||||
path.display()
|
||||
);
|
||||
|
||||
{
|
||||
let parent = default_path
|
||||
.parent()
|
||||
|
|
|
@ -8,7 +8,7 @@ use tracing_subscriber::layer::SubscriberExt;
|
|||
use tracing_subscriber::prelude::*;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
#[derive(Clone, Copy, Debug, ValueEnum)]
|
||||
#[derive(Clone, Copy, ValueEnum)]
|
||||
pub enum LogLevel {
|
||||
Trace,
|
||||
Debug,
|
||||
|
@ -55,8 +55,8 @@ impl std::io::Write for ChannelWriter {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn create_tracing_subscriber(level: Option<LogLevel>, tx: Option<UnboundedSender<Vec<u8>>>) {
|
||||
let mut env_layer = if let Some(level) = level {
|
||||
pub fn create_tracing_subscriber(tx: UnboundedSender<Vec<u8>>, level: Option<LogLevel>) {
|
||||
let env_layer = if let Some(level) = level {
|
||||
EnvFilter::from(level)
|
||||
} else if cfg!(debug_assertions) {
|
||||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"))
|
||||
|
@ -64,27 +64,13 @@ pub fn create_tracing_subscriber(level: Option<LogLevel>, tx: Option<UnboundedSe
|
|||
EnvFilter::new("error,dtmm=info")
|
||||
};
|
||||
|
||||
// The internal implementation of Druid's GTK file dialog turns
|
||||
// cancelling the dialog into an error. The, also internal, wrapper
|
||||
// then logs and swallows the error.
|
||||
// Therefore, as a consumer of the library, we don't have any way
|
||||
// to customize this behavior, and instead have to filter out the
|
||||
// tracing event.
|
||||
env_layer = env_layer.add_directive(
|
||||
"druid_shell::backend::gtk::window=off"
|
||||
.parse()
|
||||
.expect("Invalid env filter directive"),
|
||||
);
|
||||
|
||||
let stdout_layer = fmt::layer().pretty();
|
||||
|
||||
let channel_layer = tx.map(|tx| {
|
||||
fmt::layer()
|
||||
.event_format(dtmt_shared::Formatter)
|
||||
.fmt_fields(debug_fn(dtmt_shared::format_fields))
|
||||
.with_writer(move || ChannelWriter::new(tx.clone()))
|
||||
.with_filter(FilterFn::new(dtmt_shared::filter_fields))
|
||||
});
|
||||
let channel_layer = fmt::layer()
|
||||
.event_format(dtmt_shared::Formatter)
|
||||
.fmt_fields(debug_fn(dtmt_shared::format_fields))
|
||||
.with_writer(move || ChannelWriter::new(tx.clone()))
|
||||
.with_filter(FilterFn::new(dtmt_shared::filter_fields));
|
||||
|
||||
tracing_subscriber::registry()
|
||||
.with(env_layer)
|
||||
|
|
|
@ -4,40 +4,36 @@ version = "0.3.0"
|
|||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
async-recursion = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
cli-table = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
confy = { workspace = true }
|
||||
csv-async = { workspace = true }
|
||||
dtmt-shared = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
futures-util = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
minijinja = { workspace = true }
|
||||
nanorand = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
path-clean = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
pin-project-lite = { workspace = true }
|
||||
promptly = { workspace = true }
|
||||
sdk = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
||||
# Cannot be a workspace dependencies when it's optional
|
||||
shlex = { version = "1.2.0", optional = true }
|
||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] }
|
||||
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
|
||||
color-eyre = "0.6.2"
|
||||
confy = "0.5.1"
|
||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
|
||||
futures = "0.3.25"
|
||||
futures-util = "0.3.24"
|
||||
glob = "0.3.0"
|
||||
libloading = "0.7.4"
|
||||
nanorand = "0.7.0"
|
||||
oodle = { path = "../../lib/oodle", version = "*" }
|
||||
pin-project-lite = "0.2.9"
|
||||
promptly = "0.3.1"
|
||||
sdk = { path = "../../lib/sdk", version = "*" }
|
||||
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
string_template = "0.2.1"
|
||||
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
|
||||
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||
tracing = { version = "0.1.37", features = ["async-await"] }
|
||||
zip = "0.6.3"
|
||||
path-clean = "1.0.1"
|
||||
path-slash = "0.2.1"
|
||||
async-recursion = "1.0.2"
|
||||
notify = "5.1.0"
|
||||
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
|
||||
shlex = "1.2.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3.0"
|
||||
|
||||
[features]
|
||||
shlex-bench = ["dep:shlex"]
|
||||
|
|
|
@ -55,7 +55,6 @@ pub(crate) fn command_definition() -> Command {
|
|||
)
|
||||
}
|
||||
|
||||
/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
|
||||
#[tracing::instrument]
|
||||
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
|
||||
let (path, mut file) = if let Some(path) = dir {
|
||||
|
@ -103,44 +102,39 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
|
|||
Ok(cfg)
|
||||
}
|
||||
|
||||
/// Iterate over the paths in the given `Package` and
|
||||
/// compile each file by its file type.
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
|
||||
let root = Arc::new(&cfg.dir);
|
||||
let name_overrides = &cfg.name_overrides;
|
||||
async fn compile_package_files<P>(pkg: &Package, root: P) -> Result<Vec<BundleFile>>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
{
|
||||
let root = Arc::new(root.as_ref());
|
||||
|
||||
let tasks = pkg
|
||||
.iter()
|
||||
.flat_map(|(file_type, names)| {
|
||||
names.iter().map(|name| {
|
||||
.flat_map(|(file_type, paths)| {
|
||||
paths.iter().map(|path| {
|
||||
(
|
||||
*file_type,
|
||||
name,
|
||||
path,
|
||||
// Cloning the `Arc` here solves the issue that in the next `.map`, I need to
|
||||
// `move` the closure parameters, but can't `move` `root` before it was cloned.
|
||||
root.clone(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.map(|(file_type, name, root)| async move {
|
||||
let path = PathBuf::from(name);
|
||||
let sjson = fs::read_to_string(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
.map(|(file_type, path, root)| async move {
|
||||
let sjson = fs::read_to_string(&path).await?;
|
||||
|
||||
let name = path.with_extension("").to_slash_lossy().to_string();
|
||||
let name = if let Some(new_name) = name_overrides.get(&name) {
|
||||
let new_name = match u64::from_str_radix(new_name, 16) {
|
||||
Ok(hash) => IdString64::from(hash),
|
||||
Err(_) => IdString64::from(new_name.clone()),
|
||||
};
|
||||
tracing::info!("Overriding '{}' -> '{}'", name, new_name.display());
|
||||
new_name
|
||||
} else {
|
||||
IdString64::from(name.clone())
|
||||
};
|
||||
BundleFile::from_sjson(name, file_type, sjson, root.as_ref()).await
|
||||
let mut path = path.clone();
|
||||
path.set_extension("");
|
||||
|
||||
BundleFile::from_sjson(
|
||||
path.to_slash_lossy().to_string(),
|
||||
file_type,
|
||||
sjson,
|
||||
root.as_ref(),
|
||||
)
|
||||
.await
|
||||
});
|
||||
|
||||
let results = futures::stream::iter(tasks)
|
||||
|
@ -151,14 +145,13 @@ async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<Bun
|
|||
results.into_iter().collect()
|
||||
}
|
||||
|
||||
/// Read a `.package` file, collect the referenced files
|
||||
/// and compile all of them into a bundle.
|
||||
#[tracing::instrument]
|
||||
async fn build_package(
|
||||
cfg: &ModConfig,
|
||||
package: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<Bundle> {
|
||||
let root = &cfg.dir;
|
||||
async fn build_package<P1, P2>(package: P1, root: P2) -> Result<Bundle>
|
||||
where
|
||||
P1: AsRef<Path> + std::fmt::Debug,
|
||||
P2: AsRef<Path> + std::fmt::Debug,
|
||||
{
|
||||
let root = root.as_ref();
|
||||
let package = package.as_ref();
|
||||
|
||||
let mut path = root.join(package);
|
||||
|
@ -172,7 +165,7 @@ async fn build_package(
|
|||
.await
|
||||
.wrap_err_with(|| format!("Invalid package file {}", &pkg_name))?;
|
||||
|
||||
let files = compile_package_files(&pkg, cfg).await?;
|
||||
let files = compile_package_files(&pkg, root).await?;
|
||||
let mut bundle = Bundle::new(pkg_name);
|
||||
for file in files {
|
||||
bundle.add_file(file);
|
||||
|
@ -181,8 +174,6 @@ async fn build_package(
|
|||
Ok(bundle)
|
||||
}
|
||||
|
||||
/// Cleans the path of internal parent (`../`) or self (`./`) components,
|
||||
/// and ensures that it is relative.
|
||||
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
|
||||
let path = path.as_ref();
|
||||
|
||||
|
@ -263,14 +254,14 @@ pub(crate) async fn read_project_config(dir: Option<PathBuf>) -> Result<ModConfi
|
|||
Ok(cfg)
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
pub(crate) async fn build<P>(
|
||||
pub(crate) async fn build<P1, P2>(
|
||||
cfg: &ModConfig,
|
||||
out_path: impl AsRef<Path> + std::fmt::Debug,
|
||||
game_dir: Arc<Option<P>>,
|
||||
out_path: P1,
|
||||
game_dir: Arc<Option<P2>>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
P1: AsRef<Path>,
|
||||
P2: AsRef<Path>,
|
||||
{
|
||||
let out_path = out_path.as_ref();
|
||||
|
||||
|
@ -295,7 +286,7 @@ where
|
|||
);
|
||||
}
|
||||
|
||||
let bundle = build_package(&cfg, path).await.wrap_err_with(|| {
|
||||
let bundle = build_package(path, &cfg.dir).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to build package '{}' at '{}'",
|
||||
path.display(),
|
||||
|
|
|
@ -1,174 +0,0 @@
|
|||
use std::{io::Cursor, path::PathBuf};
|
||||
|
||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||
use color_eyre::{eyre::Context as _, Result};
|
||||
use sdk::murmur::{HashGroup, IdString64, Murmur64};
|
||||
use sdk::{BundleDatabase, FromBinary as _};
|
||||
use tokio::fs;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("db")
|
||||
.about("Various operations regarding `bundle_database.data`.")
|
||||
.subcommand_required(true)
|
||||
.subcommand(
|
||||
Command::new("list-files")
|
||||
.about("List bundle contents")
|
||||
.arg(
|
||||
Arg::new("database")
|
||||
.required(true)
|
||||
.help("Path to the bundle database")
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bundle")
|
||||
.help("The bundle name. If omitted, all bundles will be listed.")
|
||||
.required(false),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("list-bundles").about("List bundles").arg(
|
||||
Arg::new("database")
|
||||
.required(true)
|
||||
.help("Path to the bundle database")
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("find-file")
|
||||
.about("Find the bundle a file belongs to")
|
||||
.arg(
|
||||
Arg::new("database")
|
||||
.required(true)
|
||||
.help("Path to the bundle database")
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("file-name")
|
||||
.required(true)
|
||||
.help("Name of the file. May be a hash in hex representation or a string"),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
let Some((op, sub_matches)) = matches.subcommand() else {
|
||||
unreachable!("clap is configured to require a subcommand");
|
||||
};
|
||||
|
||||
let database = {
|
||||
let path = sub_matches
|
||||
.get_one::<PathBuf>("database")
|
||||
.expect("argument is required");
|
||||
|
||||
let binary = fs::read(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
|
||||
let mut r = Cursor::new(binary);
|
||||
|
||||
BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?
|
||||
};
|
||||
|
||||
match op {
|
||||
"list-files" => {
|
||||
let index = database.files();
|
||||
|
||||
if let Some(bundle) = sub_matches.get_one::<String>("bundle") {
|
||||
let hash = u64::from_str_radix(bundle, 16)
|
||||
.map(Murmur64::from)
|
||||
.wrap_err("Invalid hex sequence")?;
|
||||
|
||||
if let Some(files) = index.get(&hash) {
|
||||
for file in files {
|
||||
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
|
||||
let extension = file.extension.ext_name();
|
||||
println!("{}.{}", name.display(), extension);
|
||||
}
|
||||
} else {
|
||||
tracing::info!("Bundle {} not found in the database", bundle);
|
||||
}
|
||||
} else {
|
||||
for (bundle_hash, files) in index.iter() {
|
||||
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
|
||||
|
||||
match bundle_name {
|
||||
IdString64::String(name) => {
|
||||
println!("{:016x} {}", bundle_hash, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("{:016x}", hash);
|
||||
}
|
||||
}
|
||||
|
||||
for file in files {
|
||||
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
|
||||
let extension = file.extension.ext_name();
|
||||
|
||||
match name {
|
||||
IdString64::String(name) => {
|
||||
println!("\t{:016x}.{:<12} {}", file.name, extension, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("\t{:016x}.{}", hash, extension);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"list-bundles" => {
|
||||
for bundle_hash in database.bundles().keys() {
|
||||
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
|
||||
|
||||
match bundle_name {
|
||||
IdString64::String(name) => {
|
||||
println!("{:016x} {}", bundle_hash, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("{:016x}", hash);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"find-file" => {
|
||||
let name = sub_matches
|
||||
.get_one::<String>("file-name")
|
||||
.expect("required argument");
|
||||
let name = match u64::from_str_radix(name, 16).map(Murmur64::from) {
|
||||
Ok(hash) => hash,
|
||||
Err(_) => Murmur64::hash(name),
|
||||
};
|
||||
|
||||
let bundles = database.files().iter().filter_map(|(bundle_hash, files)| {
|
||||
if files.iter().any(|file| file.name == name) {
|
||||
Some(bundle_hash)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
let mut found = false;
|
||||
|
||||
for bundle in bundles {
|
||||
found = true;
|
||||
println!("{:016x}", bundle);
|
||||
}
|
||||
|
||||
if !found {
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
_ => unreachable!(
|
||||
"clap is configured to require a subcommand, and they're all handled above"
|
||||
),
|
||||
}
|
||||
}
|
|
@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
|
|||
use std::sync::Arc;
|
||||
|
||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
||||
use color_eyre::eyre::{self, bail, Context, Result};
|
||||
use color_eyre::eyre::{self, Context, Result};
|
||||
use color_eyre::{Help, Report};
|
||||
use futures::future::try_join_all;
|
||||
use futures::StreamExt;
|
||||
|
@ -12,9 +12,7 @@ use sdk::{Bundle, BundleFile, CmdLine};
|
|||
use tokio::fs;
|
||||
|
||||
use crate::cmd::util::resolve_bundle_paths;
|
||||
use crate::shell_parse::ShellParser;
|
||||
|
||||
#[inline]
|
||||
fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
|
||||
match Pattern::new(s) {
|
||||
Ok(p) => Ok(p),
|
||||
|
@ -22,7 +20,6 @@ fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
|
|||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flatten_name(s: &str) -> String {
|
||||
s.replace('/', "_")
|
||||
}
|
||||
|
@ -134,29 +131,26 @@ async fn parse_command_line_template(tmpl: &String) -> Result<CmdLine> {
|
|||
let mut cmd = if matches!(fs::try_exists(tmpl).await, Ok(true)) {
|
||||
let path = PathBuf::from(tmpl);
|
||||
if path.file_name() == Some(OsStr::new("main.py")) {
|
||||
let arg = path.display().to_string();
|
||||
let mut cmd = CmdLine::new("python");
|
||||
cmd.arg(path);
|
||||
cmd.arg(shlex::quote(&arg).to_string());
|
||||
cmd
|
||||
} else {
|
||||
CmdLine::new(path)
|
||||
}
|
||||
} else {
|
||||
let mut parsed = ShellParser::new(tmpl.as_bytes());
|
||||
// Safety: The initial `tmpl` was a `&String` (i.e. valid UTF-8), and `shlex` does not
|
||||
// insert or remove characters, nor does it split UTF-8 characters.
|
||||
// So the resulting byte stream is still valid UTF-8.
|
||||
let mut cmd = CmdLine::new(unsafe {
|
||||
let bytes = parsed.next().expect("Template is not empty");
|
||||
String::from_utf8_unchecked(bytes.to_vec())
|
||||
});
|
||||
let Some(args) = shlex::split(tmpl) else {
|
||||
eyre::bail!("Invalid shell syntax");
|
||||
};
|
||||
|
||||
while let Some(arg) = parsed.next() {
|
||||
// Safety: See above.
|
||||
cmd.arg(unsafe { String::from_utf8_unchecked(arg.to_vec()) });
|
||||
}
|
||||
// We already checked that the template is not empty
|
||||
let mut cmd = CmdLine::new(args[0].clone());
|
||||
let mut it = args.iter();
|
||||
// Skip the first one, that's the command name
|
||||
it.next();
|
||||
|
||||
if parsed.errored {
|
||||
bail!("Invalid command line template");
|
||||
for arg in it {
|
||||
cmd.arg(arg);
|
||||
}
|
||||
|
||||
cmd
|
||||
|
|
|
@ -36,18 +36,6 @@ enum OutputFormat {
|
|||
Text,
|
||||
}
|
||||
|
||||
fn format_byte_size(size: usize) -> String {
|
||||
if size < 1024 {
|
||||
format!("{} Bytes", size)
|
||||
} else if size < 1024 * 1024 {
|
||||
format!("{} kB", size / 1024)
|
||||
} else if size < 1024 * 1024 * 1024 {
|
||||
format!("{} MB", size / (1024 * 1024))
|
||||
} else {
|
||||
format!("{} GB", size / (1024 * 1024 * 1024))
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(ctx))]
|
||||
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
|
||||
where
|
||||
|
@ -62,11 +50,7 @@ where
|
|||
|
||||
match fmt {
|
||||
OutputFormat::Text => {
|
||||
println!(
|
||||
"Bundle: {} ({:016x})",
|
||||
bundle.name().display(),
|
||||
bundle.name()
|
||||
);
|
||||
println!("Bundle: {}", bundle.name().display());
|
||||
|
||||
for f in bundle.files().iter() {
|
||||
if f.variants().len() != 1 {
|
||||
|
@ -79,10 +63,9 @@ where
|
|||
|
||||
let v = &f.variants()[0];
|
||||
println!(
|
||||
"\t{}.{}: {} ({})",
|
||||
"\t{}.{}: {} bytes",
|
||||
f.base_name().display(),
|
||||
f.file_type().ext_name(),
|
||||
format_byte_size(v.size()),
|
||||
v.size()
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
use clap::{ArgMatches, Command};
|
||||
use color_eyre::eyre::Result;
|
||||
|
||||
mod db;
|
||||
mod decompress;
|
||||
mod extract;
|
||||
mod inject;
|
||||
|
@ -15,7 +14,6 @@ pub(crate) fn command_definition() -> Command {
|
|||
.subcommand(extract::command_definition())
|
||||
.subcommand(inject::command_definition())
|
||||
.subcommand(list::command_definition())
|
||||
.subcommand(db::command_definition())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
|
@ -25,7 +23,6 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,
|
||||
Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await,
|
||||
Some(("list", sub_matches)) => list::run(ctx, sub_matches).await,
|
||||
Some(("db", sub_matches)) => db::run(ctx, sub_matches).await,
|
||||
_ => unreachable!(
|
||||
"clap is configured to require a subcommand, and they're all handled above"
|
||||
),
|
||||
|
|
|
@ -145,10 +145,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
|
|||
.get_one::<HashGroup>("group")
|
||||
.expect("required argument not found");
|
||||
|
||||
let r: BufReader<Box<dyn tokio::io::AsyncRead + std::marker::Unpin>> = if let Some(name) =
|
||||
path.file_name()
|
||||
&& name == "-"
|
||||
{
|
||||
let r: BufReader<Box<dyn tokio::io::AsyncRead + std::marker::Unpin>> = if let Some(name) = path.file_name() && name == "-" {
|
||||
let f = tokio::io::stdin();
|
||||
BufReader::new(Box::new(f))
|
||||
} else {
|
||||
|
|
|
@ -350,8 +350,6 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
|
|||
localization: mod_file.localization,
|
||||
},
|
||||
depends: vec![ModDependency::ID(String::from("DMF"))],
|
||||
bundled: true,
|
||||
name_overrides: HashMap::new(),
|
||||
};
|
||||
|
||||
tracing::debug!(?dtmt_cfg);
|
||||
|
|
|
@ -1,30 +1,18 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::{Arg, ArgMatches, Command};
|
||||
use color_eyre::eyre::{self, Context, Result};
|
||||
use color_eyre::Help;
|
||||
use futures::{StreamExt, TryStreamExt};
|
||||
use minijinja::Environment;
|
||||
use string_template::Template;
|
||||
use tokio::fs::{self, DirBuilder};
|
||||
|
||||
const TEMPLATES: [(&str, &str); 5] = [
|
||||
(
|
||||
"dtmt.cfg",
|
||||
r#"//
|
||||
// This is your mod's main configuration file. It tells DTMT how to build the mod,
|
||||
// and DTMM what to display to your users.
|
||||
// Certain files have been pre-filled by the template, the ones commented out (`//`)
|
||||
// are optional.
|
||||
//
|
||||
// A unique identifier (preferably lower case, alphanumeric)
|
||||
id = "{{id}}"
|
||||
// The display name that your users will see.
|
||||
// This doesn't have to be unique, but you still want to avoid being confused with other
|
||||
// mods.
|
||||
r#"id = "{{id}}"
|
||||
name = "{{name}}"
|
||||
// It's good practice to increase this number whenever you publish changes.
|
||||
// It's up to you if you use SemVer or something simpler like `1970-12-24`. It should sort and
|
||||
// compare well, though.
|
||||
version = "0.1.0"
|
||||
// author = ""
|
||||
|
||||
|
@ -44,25 +32,16 @@ categories = [
|
|||
|
||||
// A list of mod IDs that this mod depends on. You can find
|
||||
// those IDs by downloading the mod and extracting their `dtmt.cfg`.
|
||||
// To make your fellow modders' lives easier, publish your own mods' IDs
|
||||
// somewhere visible, such as the Nexusmods page.
|
||||
depends = [
|
||||
DMF
|
||||
]
|
||||
|
||||
// The primary resources that serve as the entry point to your
|
||||
// mod's code. Unless for very specific use cases, the generated
|
||||
// values shouldn't be changed.
|
||||
resources = {
|
||||
init = "scripts/mods/{{id}}/init"
|
||||
data = "scripts/mods/{{id}}/data"
|
||||
localization = "scripts/mods/{{id}}/localization"
|
||||
}
|
||||
|
||||
// The list of packages, or bundles, to build.
|
||||
// Each one corresponds to a package definition in the named folder.
|
||||
// For mods that contain only code and/or a few small assets, a single
|
||||
// package will suffice.
|
||||
packages = [
|
||||
"packages/mods/{{id}}"
|
||||
]
|
||||
|
@ -80,6 +59,7 @@ packages = [
|
|||
r#"local mod = get_mod("{{id}}")
|
||||
|
||||
-- Your mod code goes here.
|
||||
-- https://vmf-docs.verminti.de
|
||||
"#,
|
||||
),
|
||||
(
|
||||
|
@ -157,45 +137,34 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
|
|||
|
||||
tracing::debug!(root = %root.display(), name, id);
|
||||
|
||||
let render_ctx = minijinja::context!(name => name.as_str(), id => id.as_str());
|
||||
let env = Environment::new();
|
||||
let mut data = HashMap::new();
|
||||
data.insert("name", name.as_str());
|
||||
data.insert("id", id.as_str());
|
||||
|
||||
let templates = TEMPLATES
|
||||
.iter()
|
||||
.map(|(path_tmpl, content_tmpl)| {
|
||||
env.render_str(path_tmpl, &render_ctx)
|
||||
.wrap_err_with(|| format!("Failed to render template: {}", path_tmpl))
|
||||
.and_then(|path| {
|
||||
env.render_named_str(&path, content_tmpl, &render_ctx)
|
||||
.wrap_err_with(|| format!("Failed to render template '{}'", &path))
|
||||
.map(|content| (root.join(path), content))
|
||||
})
|
||||
let path = Template::new(path_tmpl).render(&data);
|
||||
let content = Template::new(content_tmpl).render(&data);
|
||||
|
||||
(root.join(path), content)
|
||||
})
|
||||
.map(|res| async move {
|
||||
match res {
|
||||
Ok((path, content)) => {
|
||||
let dir = path
|
||||
.parent()
|
||||
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
|
||||
.map(|(path, content)| async move {
|
||||
let dir = path
|
||||
.parent()
|
||||
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
|
||||
|
||||
DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(&dir)
|
||||
.await
|
||||
.wrap_err_with(|| {
|
||||
format!("Failed to create directory {}", dir.display())
|
||||
})?;
|
||||
DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(&dir)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to create directory {}", dir.display()))?;
|
||||
|
||||
tracing::trace!("Writing file {}", path.display());
|
||||
tracing::trace!("Writing file {}", path.display());
|
||||
|
||||
fs::write(&path, content.as_bytes())
|
||||
.await
|
||||
.wrap_err_with(|| {
|
||||
format!("Failed to write content to path {}", path.display())
|
||||
})
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
fs::write(&path, content.as_bytes())
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write content to path {}", path.display()))
|
||||
});
|
||||
|
||||
futures::stream::iter(templates)
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use std::io::{Cursor, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||
use color_eyre::eyre::{Context, Result};
|
||||
|
@ -7,9 +8,9 @@ use color_eyre::Help;
|
|||
use dtmt_shared::ModConfig;
|
||||
use path_slash::{PathBufExt, PathExt};
|
||||
use tokio::fs;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio_stream::wrappers::ReadDirStream;
|
||||
use tokio_stream::StreamExt;
|
||||
use zip::write::SimpleFileOptions;
|
||||
use zip::ZipWriter;
|
||||
|
||||
use crate::cmd::build::read_project_config;
|
||||
|
@ -50,7 +51,11 @@ pub(crate) fn command_definition() -> Command {
|
|||
}
|
||||
|
||||
#[async_recursion::async_recursion]
|
||||
async fn process_directory<P1, P2, W>(zip: &mut ZipWriter<W>, path: P1, prefix: P2) -> Result<()>
|
||||
async fn process_directory<P1, P2, W>(
|
||||
zip: Arc<Mutex<ZipWriter<W>>>,
|
||||
path: P1,
|
||||
prefix: P2,
|
||||
) -> Result<()>
|
||||
where
|
||||
P1: AsRef<Path> + std::marker::Send,
|
||||
P2: AsRef<Path> + std::marker::Send,
|
||||
|
@ -59,7 +64,9 @@ where
|
|||
let path = path.as_ref();
|
||||
let prefix = prefix.as_ref();
|
||||
|
||||
zip.add_directory(prefix.to_slash_lossy(), SimpleFileOptions::default())?;
|
||||
zip.lock()
|
||||
.await
|
||||
.add_directory(prefix.to_slash_lossy(), Default::default())?;
|
||||
|
||||
let read_dir = fs::read_dir(&path)
|
||||
.await
|
||||
|
@ -80,11 +87,12 @@ where
|
|||
.await
|
||||
.wrap_err_with(|| format!("Failed to read '{}'", in_path.display()))?;
|
||||
{
|
||||
zip.start_file(out_path.to_slash_lossy(), SimpleFileOptions::default())?;
|
||||
let mut zip = zip.lock().await;
|
||||
zip.start_file(out_path.to_slash_lossy(), Default::default())?;
|
||||
zip.write_all(&data)?;
|
||||
}
|
||||
} else if t.is_dir() {
|
||||
process_directory(zip, in_path, out_path).await?;
|
||||
process_directory(zip.clone(), in_path, out_path).await?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,12 +107,16 @@ where
|
|||
let path = path.as_ref();
|
||||
let dest = dest.as_ref();
|
||||
|
||||
let mut zip = ZipWriter::new(Cursor::new(Vec::with_capacity(1024)));
|
||||
let data = Cursor::new(Vec::new());
|
||||
let zip = ZipWriter::new(data);
|
||||
let zip = Arc::new(Mutex::new(zip));
|
||||
|
||||
process_directory(&mut zip, path, PathBuf::from(&cfg.id))
|
||||
process_directory(zip.clone(), path, PathBuf::from(&cfg.id))
|
||||
.await
|
||||
.wrap_err("Failed to add directory to archive")?;
|
||||
|
||||
let mut zip = zip.lock().await;
|
||||
|
||||
{
|
||||
let name = PathBuf::from(&cfg.id).join("dtmt.cfg");
|
||||
let path = cfg.dir.join("dtmt.cfg");
|
||||
|
@ -113,7 +125,7 @@ where
|
|||
.await
|
||||
.wrap_err_with(|| format!("Failed to read mod config at {}", path.display()))?;
|
||||
|
||||
zip.start_file(name.to_slash_lossy(), SimpleFileOptions::default())?;
|
||||
zip.start_file(name.to_slash_lossy(), Default::default())?;
|
||||
zip.write_all(&data)?;
|
||||
}
|
||||
|
||||
|
|
|
@ -77,14 +77,17 @@ pub(crate) fn command_definition() -> Command {
|
|||
)
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
async fn compile(
|
||||
async fn compile<P1, P2, P3>(
|
||||
cfg: &ModConfig,
|
||||
out_path: impl AsRef<Path> + std::fmt::Debug,
|
||||
archive_path: impl AsRef<Path> + std::fmt::Debug,
|
||||
game_dir: Arc<Option<impl AsRef<Path> + std::fmt::Debug>>,
|
||||
) -> Result<()> {
|
||||
let out_path = out_path.as_ref();
|
||||
out_path: P1,
|
||||
archive_path: P2,
|
||||
game_dir: Arc<Option<P3>>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P1: AsRef<Path> + std::marker::Copy,
|
||||
P2: AsRef<Path>,
|
||||
P3: AsRef<Path>,
|
||||
{
|
||||
build(cfg, out_path, game_dir)
|
||||
.await
|
||||
.wrap_err("Failed to build bundles")?;
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
#![feature(io_error_more)]
|
||||
#![feature(let_chains)]
|
||||
#![feature(result_flattening)]
|
||||
#![feature(test)]
|
||||
#![windows_subsystem = "console"]
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
@ -28,7 +27,6 @@ mod cmd {
|
|||
mod util;
|
||||
pub mod watch;
|
||||
}
|
||||
mod shell_parse;
|
||||
|
||||
#[derive(Default, Deserialize, Serialize)]
|
||||
struct GlobalConfig {
|
||||
|
|
|
@ -1,189 +0,0 @@
|
|||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
enum ParserState {
|
||||
Start,
|
||||
Word,
|
||||
SingleQuote,
|
||||
DoubleQuote,
|
||||
}
|
||||
|
||||
pub struct ShellParser<'a> {
|
||||
bytes: &'a [u8],
|
||||
offset: usize,
|
||||
pub errored: bool,
|
||||
}
|
||||
|
||||
impl<'a> ShellParser<'a> {
|
||||
pub fn new(bytes: &'a [u8]) -> Self {
|
||||
Self {
|
||||
bytes,
|
||||
offset: 0,
|
||||
errored: false,
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_word(&mut self) -> Option<&'a [u8]> {
|
||||
// The start of the current word. Certain leading characters should be ignored,
|
||||
// so this might change.
|
||||
let mut start = self.offset;
|
||||
let mut state = ParserState::Start;
|
||||
|
||||
while self.offset < self.bytes.len() {
|
||||
let c = self.bytes[self.offset];
|
||||
self.offset += 1;
|
||||
|
||||
match state {
|
||||
ParserState::Start => match c {
|
||||
// Ignore leading whitespace
|
||||
b' ' | b'\t' | b'\n' => start += 1,
|
||||
b'\'' => {
|
||||
state = ParserState::SingleQuote;
|
||||
start += 1;
|
||||
}
|
||||
b'"' => {
|
||||
state = ParserState::DoubleQuote;
|
||||
start += 1;
|
||||
}
|
||||
_ => {
|
||||
state = ParserState::Word;
|
||||
}
|
||||
},
|
||||
ParserState::Word => match c {
|
||||
// Unquoted whitespace ends the current word
|
||||
b' ' | b'\t' | b'\n' => {
|
||||
return Some(&self.bytes[start..self.offset - 1]);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
ParserState::SingleQuote => match c {
|
||||
b'\'' => {
|
||||
return Some(&self.bytes[start..(self.offset - 1)]);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
ParserState::DoubleQuote => match c {
|
||||
b'"' => {
|
||||
return Some(&self.bytes[start..(self.offset - 1)]);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
match state {
|
||||
ParserState::Start => None,
|
||||
ParserState::Word => Some(&self.bytes[start..self.offset]),
|
||||
ParserState::SingleQuote | ParserState::DoubleQuote => {
|
||||
self.errored = true;
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for ShellParser<'a> {
|
||||
type Item = &'a [u8];
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.parse_word()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_one_word() {
|
||||
let mut it = ShellParser::new(b"hello");
|
||||
assert_eq!(it.next(), Some("hello".as_bytes()));
|
||||
assert_eq!(it.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_one_single() {
|
||||
let mut it = ShellParser::new(b"'hello'");
|
||||
assert_eq!(it.next(), Some("hello".as_bytes()));
|
||||
assert_eq!(it.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_open_quote() {
|
||||
let mut it = ShellParser::new(b"'hello");
|
||||
assert_eq!(it.next(), None);
|
||||
assert!(it.errored)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ww2ogg() {
|
||||
let mut it = ShellParser::new(
|
||||
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
|
||||
);
|
||||
assert_eq!(it.next(), Some("ww2ogg.exe".as_bytes()));
|
||||
assert_eq!(it.next(), Some("--pcb".as_bytes()));
|
||||
assert_eq!(
|
||||
it.next(),
|
||||
Some("/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin".as_bytes())
|
||||
);
|
||||
assert_eq!(it.next(), None);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod bench {
|
||||
extern crate test;
|
||||
|
||||
use super::*;
|
||||
#[cfg(feature = "shlex-bench")]
|
||||
use shlex::bytes::Shlex;
|
||||
use test::Bencher;
|
||||
|
||||
mod ww2ogg {
|
||||
use super::*;
|
||||
|
||||
#[bench]
|
||||
fn custom(b: &mut Bencher) {
|
||||
let val = test::black_box(
|
||||
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
|
||||
);
|
||||
b.iter(|| {
|
||||
let it = ShellParser::new(val);
|
||||
let _: Vec<_> = test::black_box(it.collect());
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(feature = "shlex-bench")]
|
||||
#[bench]
|
||||
fn shlex(b: &mut Bencher) {
|
||||
let val = test::black_box(
|
||||
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
|
||||
);
|
||||
b.iter(|| {
|
||||
let it = Shlex::new(val);
|
||||
let _: Vec<_> = test::black_box(it.collect());
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
mod one_single {
|
||||
use super::*;
|
||||
|
||||
#[bench]
|
||||
fn custom(b: &mut Bencher) {
|
||||
let val = test::black_box(b"'hello'");
|
||||
b.iter(|| {
|
||||
let it = ShellParser::new(val);
|
||||
let _: Vec<_> = test::black_box(it.collect());
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(feature = "shlex-bench")]
|
||||
#[bench]
|
||||
fn shlex(b: &mut Bencher) {
|
||||
let val = test::black_box(b"'hello'");
|
||||
b.iter(|| {
|
||||
let it = Shlex::new(val);
|
||||
let _: Vec<_> = test::black_box(it.collect());
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
Binary file not shown.
Before Width: | Height: | Size: 130 B After Width: | Height: | Size: 58 KiB |
|
@ -1 +1 @@
|
|||
Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751
|
||||
Subproject commit 55f8c6b7481d462e50ee4a03a43253d80d648ae2
|
|
@ -6,11 +6,11 @@ edition = "2021"
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ansi_term = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
steamlocate = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
ansi_term = "0.12.1"
|
||||
color-eyre = "0.6.2"
|
||||
serde = "1.0.152"
|
||||
steamlocate = "2.0.0-alpha.0"
|
||||
time = { version = "0.3.19", features = ["formatting", "local-offset", "macros"] }
|
||||
tracing = "0.1.37"
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = "0.3.16"
|
||||
|
|
|
@ -1,16 +1,12 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use color_eyre::eyre::{OptionExt as _, WrapErr as _};
|
||||
use color_eyre::Result;
|
||||
mod log;
|
||||
|
||||
pub use log::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use steamlocate::SteamDir;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
pub use log::*;
|
||||
|
||||
mod log;
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
|
||||
pub struct ModConfigResources {
|
||||
pub init: PathBuf,
|
||||
|
@ -34,17 +30,6 @@ pub enum ModDependency {
|
|||
Config { id: String, order: ModOrder },
|
||||
}
|
||||
|
||||
// A bit dumb, but serde doesn't support literal values with the
|
||||
// `default` attribute, only paths.
|
||||
fn default_true() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// Similarly dumb, as the `skip_serializing_if` attribute needs a function
|
||||
fn is_true(val: &bool) -> bool {
|
||||
*val
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
|
||||
pub struct ModConfig {
|
||||
#[serde(skip)]
|
||||
|
@ -66,10 +51,6 @@ pub struct ModConfig {
|
|||
pub resources: ModConfigResources,
|
||||
#[serde(default)]
|
||||
pub depends: Vec<ModDependency>,
|
||||
#[serde(default = "default_true", skip_serializing_if = "is_true")]
|
||||
pub bundled: bool,
|
||||
#[serde(default)]
|
||||
pub name_overrides: HashMap<String, String>,
|
||||
}
|
||||
|
||||
pub const STEAMAPP_ID: u32 = 1361210;
|
||||
|
@ -80,23 +61,25 @@ pub struct GameInfo {
|
|||
pub last_updated: OffsetDateTime,
|
||||
}
|
||||
|
||||
pub fn collect_game_info() -> Result<Option<GameInfo>> {
|
||||
let dir = SteamDir::locate().wrap_err("Failed to locate Steam installation")?;
|
||||
|
||||
let found = dir
|
||||
.find_app(STEAMAPP_ID)
|
||||
.wrap_err("Failed to look up game by Steam app ID")?;
|
||||
|
||||
let Some((app, _)) = found else {
|
||||
return Ok(None);
|
||||
pub fn collect_game_info() -> Option<GameInfo> {
|
||||
let mut dir = if let Some(dir) = SteamDir::locate() {
|
||||
dir
|
||||
} else {
|
||||
tracing::debug!("Failed to locate Steam installation");
|
||||
return None;
|
||||
};
|
||||
|
||||
let last_updated = app
|
||||
.last_updated
|
||||
.ok_or_eyre("Missing field 'last_updated'")?;
|
||||
let found = dir
|
||||
.app(&STEAMAPP_ID)
|
||||
.and_then(|app| app.last_updated.map(|v| (app.path.clone(), v)));
|
||||
|
||||
Ok(Some(GameInfo {
|
||||
path: app.install_dir.into(),
|
||||
let Some((path, last_updated)) = found else {
|
||||
tracing::debug!("Found Steam, but failed to find game installation");
|
||||
return None;
|
||||
};
|
||||
|
||||
Some(GameInfo {
|
||||
path,
|
||||
last_updated: last_updated.into(),
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
|
|
@ -84,7 +84,7 @@ pub fn create_tracing_subscriber() {
|
|||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
|
||||
|
||||
let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
|
||||
let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
|
||||
let fmt_layer = fmt::layer().pretty();
|
||||
(Some(fmt_layer), None, None)
|
||||
} else {
|
||||
// Creates a layer that
|
||||
|
@ -93,7 +93,6 @@ pub fn create_tracing_subscriber() {
|
|||
// - does not print spans/targets
|
||||
// - only prints time, not date
|
||||
let fmt_layer = fmt::layer()
|
||||
.with_writer(std::io::stderr)
|
||||
.event_format(Formatter)
|
||||
.fmt_fields(debug_fn(format_fields));
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241
|
||||
Subproject commit 24da35e631099e914d6fc1bcc863228c48e540ec
|
|
@ -9,10 +9,10 @@ edition = "2021"
|
|||
futures = "0.3.26"
|
||||
lazy_static = "1.4.0"
|
||||
regex = "1.7.1"
|
||||
reqwest = { version = "0.12.4" }
|
||||
reqwest = { version = "0.11.14" }
|
||||
serde = { version = "1.0.152", features = ["derive"] }
|
||||
serde_json = "1.0.94"
|
||||
thiserror = "2.0.0"
|
||||
thiserror = "1.0.39"
|
||||
time = { version = "0.3.20", features = ["serde"] }
|
||||
tracing = "0.1.37"
|
||||
url = { version = "2.3.1", features = ["serde"] }
|
||||
|
|
|
@ -4,7 +4,7 @@ use std::convert::Infallible;
|
|||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use reqwest::header::{HeaderMap, HeaderValue, InvalidHeaderValue};
|
||||
use reqwest::{Client, IntoUrl, RequestBuilder, Url};
|
||||
use reqwest::{Client, RequestBuilder, Url};
|
||||
use serde::Deserialize;
|
||||
use thiserror::Error;
|
||||
|
||||
|
@ -28,7 +28,7 @@ pub enum Error {
|
|||
HTTP(#[from] reqwest::Error),
|
||||
#[error("invalid URL: {0:?}")]
|
||||
URLParseError(#[from] url::ParseError),
|
||||
#[error("failed to deserialize due to {error}: {json}")]
|
||||
#[error("failed to deserialize '{error}': {json}")]
|
||||
Deserialize {
|
||||
json: String,
|
||||
error: serde_json::Error,
|
||||
|
@ -37,10 +37,8 @@ pub enum Error {
|
|||
InvalidHeaderValue(#[from] InvalidHeaderValue),
|
||||
#[error("this error cannot happen")]
|
||||
Infallible(#[from] Infallible),
|
||||
#[error("invalid NXM URL '{url}': {0}", url = .1.as_str())]
|
||||
#[error("invalid NXM URL '{}': {0}", .1.as_str())]
|
||||
InvalidNXM(&'static str, Url),
|
||||
#[error("{0}")]
|
||||
Custom(String),
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
@ -104,45 +102,6 @@ impl Api {
|
|||
self.send(req).await
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn file_version<T>(&self, id: u64, timestamp: T) -> Result<String>
|
||||
where
|
||||
T: std::fmt::Debug,
|
||||
OffsetDateTime: PartialEq<T>,
|
||||
{
|
||||
let url = BASE_URL_GAME.join(&format!("mods/{id}/files.json"))?;
|
||||
let req = self.client.get(url);
|
||||
let files: FileList = self.send(req).await?;
|
||||
|
||||
let Some(file) = files
|
||||
.files
|
||||
.into_iter()
|
||||
.find(|file| file.uploaded_timestamp == timestamp)
|
||||
else {
|
||||
let err = Error::Custom("Timestamp does not match any file".into());
|
||||
return Err(err);
|
||||
};
|
||||
|
||||
Ok(file.version)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn picture(&self, url: impl IntoUrl + std::fmt::Debug) -> Result<Vec<u8>> {
|
||||
let res = self.client.get(url).send().await?.error_for_status()?;
|
||||
|
||||
res.bytes()
|
||||
.await
|
||||
.map(|bytes| bytes.to_vec())
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn get_file_by_id(&self, mod_id: u64, file_id: u64) -> Result<File> {
|
||||
let url = BASE_URL_GAME.join(&format!("mods/{mod_id}/files/{file_id}.json"))?;
|
||||
let req = self.client.get(url);
|
||||
self.send(req).await
|
||||
}
|
||||
|
||||
pub fn parse_file_name<S: AsRef<str>>(
|
||||
name: S,
|
||||
) -> Option<(String, u64, String, OffsetDateTime)> {
|
||||
|
@ -153,7 +112,7 @@ impl Api {
|
|||
RE.captures(name.as_ref()).and_then(|cap| {
|
||||
let name = cap.name("name").map(|s| s.as_str().to_string())?;
|
||||
let mod_id = cap.name("mod_id").and_then(|s| s.as_str().parse().ok())?;
|
||||
let version = cap.name("version").map(|s| s.as_str().replace('-', "."))?;
|
||||
let version = cap.name("version").map(|s| s.as_str().to_string())?;
|
||||
let updated = cap
|
||||
.name("updated")
|
||||
.and_then(|s| s.as_str().parse().ok())
|
||||
|
@ -181,7 +140,7 @@ impl Api {
|
|||
self.send(req).await
|
||||
}
|
||||
|
||||
pub async fn handle_nxm(&self, url: Url) -> Result<(Mod, File, Vec<u8>)> {
|
||||
pub async fn handle_nxm(&self, url: Url) -> Result<(Mod, Vec<u8>)> {
|
||||
let nxm = Self::parse_nxm(url.clone())?;
|
||||
|
||||
let user = self.user_validate().await?;
|
||||
|
@ -190,20 +149,19 @@ impl Api {
|
|||
return Err(Error::InvalidNXM("user_id mismtach", url));
|
||||
}
|
||||
|
||||
let (mod_data, file_info, download_info) = futures::try_join!(
|
||||
let (mod_data, download_info) = futures::try_join!(
|
||||
self.mods_id(nxm.mod_id),
|
||||
self.get_file_by_id(nxm.mod_id, nxm.file_id),
|
||||
self.mods_download_link(nxm.mod_id, nxm.file_id, nxm.key, nxm.expires)
|
||||
)?;
|
||||
|
||||
let Some(download_url) = download_info.first().map(|i| i.uri.clone()) else {
|
||||
let Some(download_url) = download_info.get(0).map(|i| i.uri.clone()) else {
|
||||
return Err(Error::InvalidNXM("no download link", url));
|
||||
};
|
||||
|
||||
let req = self.client.get(download_url);
|
||||
let data = req.send().await?.bytes().await?;
|
||||
|
||||
Ok((mod_data, file_info, data.to_vec()))
|
||||
Ok((mod_data, data.to_vec()))
|
||||
}
|
||||
|
||||
pub fn parse_nxm(nxm: Url) -> Result<Nxm> {
|
||||
|
@ -212,20 +170,17 @@ impl Api {
|
|||
}
|
||||
|
||||
// Now it makes sense, why Nexus calls this field `game_domain_name`, when it's just
|
||||
// another path segment in the regular API calls.
|
||||
// another path segmentin the regular API calls.
|
||||
if nxm.host_str() != Some(GAME_ID) {
|
||||
return Err(Error::InvalidNXM("Invalid game domain name", nxm));
|
||||
}
|
||||
|
||||
let Some(mut segments) = nxm.path_segments() else {
|
||||
return Err(Error::InvalidNXM("Missing path segments", nxm));
|
||||
return Err(Error::InvalidNXM("Cannot be a base", nxm));
|
||||
};
|
||||
|
||||
if segments.next() != Some("mods") {
|
||||
return Err(Error::InvalidNXM(
|
||||
"Unexpected path segment, expected 'mods'",
|
||||
nxm,
|
||||
));
|
||||
return Err(Error::InvalidNXM("Unexpected path segment", nxm));
|
||||
}
|
||||
|
||||
let Some(mod_id) = segments.next().and_then(|id| id.parse().ok()) else {
|
||||
|
@ -233,10 +188,7 @@ impl Api {
|
|||
};
|
||||
|
||||
if segments.next() != Some("files") {
|
||||
return Err(Error::InvalidNXM(
|
||||
"Unexpected path segment, expected 'files'",
|
||||
nxm,
|
||||
));
|
||||
return Err(Error::InvalidNXM("Unexpected path segment", nxm));
|
||||
}
|
||||
|
||||
let Some(file_id) = segments.next().and_then(|id| id.parse().ok()) else {
|
||||
|
@ -251,7 +203,7 @@ impl Api {
|
|||
}
|
||||
|
||||
let Some(key) = query.get("key") else {
|
||||
return Err(Error::InvalidNXM("Missing query field 'key'", nxm));
|
||||
return Err(Error::InvalidNXM("Missing 'key'", nxm));
|
||||
};
|
||||
|
||||
let expires = query
|
||||
|
@ -259,12 +211,12 @@ impl Api {
|
|||
.and_then(|expires| expires.parse().ok())
|
||||
.and_then(|expires| OffsetDateTime::from_unix_timestamp(expires).ok());
|
||||
let Some(expires) = expires else {
|
||||
return Err(Error::InvalidNXM("Missing query field 'expires'", nxm));
|
||||
return Err(Error::InvalidNXM("Missing 'expires'", nxm));
|
||||
};
|
||||
|
||||
let user_id = query.get("user_id").and_then(|id| id.parse().ok());
|
||||
let Some(user_id) = user_id else {
|
||||
return Err(Error::InvalidNXM("Missing query field 'user_id'", nxm));
|
||||
let Some(user_id) = user_id else {
|
||||
return Err(Error::InvalidNXM("Missing 'user_id'", nxm));
|
||||
};
|
||||
|
||||
Ok(Nxm {
|
||||
|
|
|
@ -64,35 +64,6 @@ pub struct Mod {
|
|||
// pub contains_adult_content: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct File {
|
||||
pub id: Vec<u64>,
|
||||
pub uid: u64,
|
||||
pub file_id: u64,
|
||||
pub name: String,
|
||||
pub version: String,
|
||||
pub category_id: u64,
|
||||
pub category_name: String,
|
||||
pub is_primary: bool,
|
||||
pub size: u64,
|
||||
pub file_name: String,
|
||||
#[serde(with = "time::serde::timestamp")]
|
||||
pub uploaded_timestamp: OffsetDateTime,
|
||||
pub mod_version: String,
|
||||
pub external_virus_scan_url: String,
|
||||
pub description: String,
|
||||
pub size_kb: u64,
|
||||
pub size_in_bytes: u64,
|
||||
pub changelog_html: Option<String>,
|
||||
pub content_preview_link: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct FileList {
|
||||
pub files: Vec<File>,
|
||||
// pub file_updates: Vec<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct DownloadLink {
|
||||
pub name: String,
|
||||
|
|
|
@ -6,8 +6,8 @@ edition = "2021"
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
color-eyre = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
color-eyre = "0.6.2"
|
||||
tracing = "0.1.37"
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = "0.71.0"
|
||||
bindgen = "0.64.0"
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
extern crate bindgen;
|
||||
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
|
@ -31,7 +33,7 @@ fn main() {
|
|||
.blocklist_file("stdlib.h")
|
||||
// Tell cargo to invalidate the built crate whenever any of the
|
||||
// included header files changed.
|
||||
.parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
|
||||
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
|
||||
// Finish the builder and generate the bindings.
|
||||
.generate()
|
||||
// Unwrap the Result and panic on failure.
|
||||
|
|
|
@ -7,7 +7,6 @@ use std::ptr;
|
|||
use color_eyre::{eyre, Result};
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[allow(clippy::identity_op)]
|
||||
mod bindings {
|
||||
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
||||
}
|
||||
|
|
|
@ -4,23 +4,24 @@ version = "0.3.0"
|
|||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
async-recursion = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
byteorder = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
csv-async = { workspace = true }
|
||||
fastrand = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
futures-util = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
nanorand = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
pin-project-lite = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
bitflags = "1.3.2"
|
||||
byteorder = "1.4.3"
|
||||
color-eyre = "0.6.2"
|
||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||
fastrand = "1.8.0"
|
||||
futures = "0.3.25"
|
||||
futures-util = "0.3.24"
|
||||
glob = "0.3.0"
|
||||
libloading = "0.7.4"
|
||||
nanorand = "0.7.0"
|
||||
pin-project-lite = "0.2.9"
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||
oodle = { path = "../../lib/oodle", version = "*" }
|
||||
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
|
||||
tracing = { version = "0.1.37", features = ["async-await"] }
|
||||
tracing-error = "0.2.0"
|
||||
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
|
||||
async-recursion = "1.0.2"
|
||||
path-slash = "0.2.1"
|
||||
|
|
|
@ -43,7 +43,6 @@ impl<T: FromBinary> FromBinary for Vec<T> {
|
|||
}
|
||||
|
||||
pub mod sync {
|
||||
use std::ffi::CStr;
|
||||
use std::io::{self, Read, Seek, SeekFrom};
|
||||
|
||||
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
|
||||
|
@ -166,13 +165,25 @@ pub mod sync {
|
|||
}
|
||||
|
||||
fn read_string_len(&mut self, len: usize) -> Result<String> {
|
||||
let pos = self.stream_position();
|
||||
let mut buf = vec![0; len];
|
||||
let res = self
|
||||
.read_exact(&mut buf)
|
||||
.map_err(Report::new)
|
||||
.and_then(|_| {
|
||||
String::from_utf8(buf).map_err(|err| {
|
||||
let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
|
||||
let bytes = format!("{:?}", err.as_bytes());
|
||||
Report::new(err)
|
||||
.with_section(move || bytes.header("Bytes:"))
|
||||
.with_section(move || ascii.header("ASCII:"))
|
||||
})
|
||||
});
|
||||
|
||||
let res = read_string_len(self, len);
|
||||
if res.is_ok() {
|
||||
return res;
|
||||
}
|
||||
|
||||
let pos = self.stream_position();
|
||||
if pos.is_ok() {
|
||||
res.with_section(|| {
|
||||
format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
|
||||
|
@ -232,22 +243,4 @@ pub mod sync {
|
|||
|
||||
Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
|
||||
}
|
||||
|
||||
fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
|
||||
let mut buf = vec![0; len];
|
||||
r.read_exact(&mut buf)
|
||||
.wrap_err_with(|| format!("Failed to read {} bytes", len))?;
|
||||
|
||||
let res = match CStr::from_bytes_until_nul(&buf) {
|
||||
Ok(s) => {
|
||||
let s = s.to_str()?;
|
||||
Ok(s.to_string())
|
||||
}
|
||||
Err(_) => String::from_utf8(buf.clone()).map_err(Report::new),
|
||||
};
|
||||
|
||||
res.wrap_err("Invalid binary for UTF8 string")
|
||||
.with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCI:"))
|
||||
.with_section(|| format!("{:x?}", buf).header("Bytes:"))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,21 +13,21 @@ use crate::binary::ToBinary;
|
|||
use crate::murmur::Murmur64;
|
||||
use crate::Bundle;
|
||||
|
||||
use super::filetype::BundleFileType;
|
||||
use super::file::BundleFileType;
|
||||
|
||||
const DATABASE_VERSION: u32 = 0x6;
|
||||
const FILE_VERSION: u32 = 0x4;
|
||||
|
||||
pub struct BundleFile {
|
||||
pub name: String,
|
||||
pub stream: String,
|
||||
pub platform_specific: bool,
|
||||
pub file_time: u64,
|
||||
name: String,
|
||||
stream: String,
|
||||
platform_specific: bool,
|
||||
file_time: u64,
|
||||
}
|
||||
|
||||
pub struct FileName {
|
||||
pub extension: BundleFileType,
|
||||
pub name: Murmur64,
|
||||
extension: BundleFileType,
|
||||
name: Murmur64,
|
||||
}
|
||||
|
||||
pub struct BundleDatabase {
|
||||
|
@ -36,34 +36,7 @@ pub struct BundleDatabase {
|
|||
bundle_contents: HashMap<Murmur64, Vec<FileName>>,
|
||||
}
|
||||
|
||||
// Implements the partial Murmur that's used by the engine to compute bundle resource hashes,
|
||||
// but in a way that the loop can be done outside the function.
|
||||
#[inline(always)]
|
||||
fn add_to_resource_hash(mut k: u64, name: impl Into<u64>) -> u64 {
|
||||
const M: u64 = 0xc6a4a7935bd1e995;
|
||||
const R: u64 = 47;
|
||||
|
||||
let mut h: u64 = name.into();
|
||||
|
||||
k = k.wrapping_mul(M);
|
||||
k ^= k >> R;
|
||||
k = k.wrapping_mul(M);
|
||||
|
||||
h ^= k;
|
||||
k = M.wrapping_mul(h);
|
||||
|
||||
k
|
||||
}
|
||||
|
||||
impl BundleDatabase {
|
||||
pub fn bundles(&self) -> &HashMap<Murmur64, Vec<BundleFile>> {
|
||||
&self.stored_files
|
||||
}
|
||||
|
||||
pub fn files(&self) -> &HashMap<Murmur64, Vec<FileName>> {
|
||||
&self.bundle_contents
|
||||
}
|
||||
|
||||
pub fn add_bundle(&mut self, bundle: &Bundle) {
|
||||
let hash = bundle.name().to_murmur64();
|
||||
let name = hash.to_string();
|
||||
|
@ -96,26 +69,20 @@ impl BundleDatabase {
|
|||
}
|
||||
}
|
||||
|
||||
let mut resource_hash = 0;
|
||||
|
||||
for f in bundle.files() {
|
||||
let name = f.base_name().to_murmur64();
|
||||
let file_name = FileName {
|
||||
extension: f.file_type(),
|
||||
name,
|
||||
name: f.base_name().to_murmur64(),
|
||||
};
|
||||
|
||||
resource_hash = add_to_resource_hash(resource_hash, name);
|
||||
// TODO: Compute actual resource hash
|
||||
self.resource_hashes.insert(hash, 0);
|
||||
|
||||
// TODO: Make sure each file name only exists once. Probably best to turn
|
||||
// the `Vec` into a sorted `HashSet`.
|
||||
self.bundle_contents
|
||||
.entry(hash)
|
||||
.or_default()
|
||||
.push(file_name);
|
||||
}
|
||||
|
||||
self.resource_hashes.insert(hash, resource_hash);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -136,7 +103,7 @@ impl FromBinary for BundleDatabase {
|
|||
let mut stored_files = HashMap::with_capacity(num_entries);
|
||||
|
||||
for _ in 0..num_entries {
|
||||
let hash = r.read_u64().map(Murmur64::from)?;
|
||||
let hash = Murmur64::from(r.read_u64()?);
|
||||
|
||||
let num_files = r.read_u32()? as usize;
|
||||
let mut files = Vec::with_capacity(num_files);
|
||||
|
@ -194,7 +161,7 @@ impl FromBinary for BundleDatabase {
|
|||
let mut resource_hashes = HashMap::with_capacity(num_hashes);
|
||||
|
||||
for _ in 0..num_hashes {
|
||||
let name = r.read_u64().map(Murmur64::from)?;
|
||||
let name = Murmur64::from(r.read_u64()?);
|
||||
let hash = r.read_u64()?;
|
||||
|
||||
resource_hashes.insert(name, hash);
|
||||
|
@ -204,14 +171,14 @@ impl FromBinary for BundleDatabase {
|
|||
let mut bundle_contents = HashMap::with_capacity(num_contents);
|
||||
|
||||
for _ in 0..num_contents {
|
||||
let hash = r.read_u64().map(Murmur64::from)?;
|
||||
let hash = Murmur64::from(r.read_u64()?);
|
||||
|
||||
let num_files = r.read_u32()? as usize;
|
||||
let mut files = Vec::with_capacity(num_files);
|
||||
|
||||
for _ in 0..num_files {
|
||||
let extension = r.read_u64().map(BundleFileType::from)?;
|
||||
let name = r.read_u64().map(Murmur64::from)?;
|
||||
let extension = BundleFileType::from(r.read_u64()?);
|
||||
let name = Murmur64::from(r.read_u64()?);
|
||||
|
||||
files.push(FileName { extension, name });
|
||||
}
|
||||
|
|
|
@ -5,12 +5,407 @@ use bitflags::bitflags;
|
|||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{eyre, Result};
|
||||
use futures::future::join_all;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::binary::sync::*;
|
||||
use crate::filetype::*;
|
||||
use crate::murmur::{HashGroup, IdString64, Murmur64};
|
||||
|
||||
use super::filetype::BundleFileType;
|
||||
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
|
||||
pub enum BundleFileType {
|
||||
Animation,
|
||||
AnimationCurves,
|
||||
Apb,
|
||||
BakedLighting,
|
||||
Bik,
|
||||
BlendSet,
|
||||
Bones,
|
||||
Chroma,
|
||||
CommonPackage,
|
||||
Config,
|
||||
Crypto,
|
||||
Data,
|
||||
Entity,
|
||||
Flow,
|
||||
Font,
|
||||
Ies,
|
||||
Ini,
|
||||
Input,
|
||||
Ivf,
|
||||
Keys,
|
||||
Level,
|
||||
Lua,
|
||||
Material,
|
||||
Mod,
|
||||
MouseCursor,
|
||||
NavData,
|
||||
NetworkConfig,
|
||||
OddleNet,
|
||||
Package,
|
||||
Particles,
|
||||
PhysicsProperties,
|
||||
RenderConfig,
|
||||
RtPipeline,
|
||||
Scene,
|
||||
Shader,
|
||||
ShaderLibrary,
|
||||
ShaderLibraryGroup,
|
||||
ShadingEnvionmentMapping,
|
||||
ShadingEnvironment,
|
||||
Slug,
|
||||
SlugAlbum,
|
||||
SoundEnvironment,
|
||||
SpuJob,
|
||||
StateMachine,
|
||||
StaticPVS,
|
||||
Strings,
|
||||
SurfaceProperties,
|
||||
Texture,
|
||||
TimpaniBank,
|
||||
TimpaniMaster,
|
||||
Tome,
|
||||
Ugg,
|
||||
Unit,
|
||||
Upb,
|
||||
VectorField,
|
||||
Wav,
|
||||
WwiseBank,
|
||||
WwiseDep,
|
||||
WwiseEvent,
|
||||
WwiseMetadata,
|
||||
WwiseStream,
|
||||
Xml,
|
||||
|
||||
Unknown(Murmur64),
|
||||
}
|
||||
|
||||
impl BundleFileType {
|
||||
pub fn ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::AnimationCurves => String::from("animation_curves"),
|
||||
BundleFileType::Animation => String::from("animation"),
|
||||
BundleFileType::Apb => String::from("apb"),
|
||||
BundleFileType::BakedLighting => String::from("baked_lighting"),
|
||||
BundleFileType::Bik => String::from("bik"),
|
||||
BundleFileType::BlendSet => String::from("blend_set"),
|
||||
BundleFileType::Bones => String::from("bones"),
|
||||
BundleFileType::Chroma => String::from("chroma"),
|
||||
BundleFileType::CommonPackage => String::from("common_package"),
|
||||
BundleFileType::Config => String::from("config"),
|
||||
BundleFileType::Crypto => String::from("crypto"),
|
||||
BundleFileType::Data => String::from("data"),
|
||||
BundleFileType::Entity => String::from("entity"),
|
||||
BundleFileType::Flow => String::from("flow"),
|
||||
BundleFileType::Font => String::from("font"),
|
||||
BundleFileType::Ies => String::from("ies"),
|
||||
BundleFileType::Ini => String::from("ini"),
|
||||
BundleFileType::Input => String::from("input"),
|
||||
BundleFileType::Ivf => String::from("ivf"),
|
||||
BundleFileType::Keys => String::from("keys"),
|
||||
BundleFileType::Level => String::from("level"),
|
||||
BundleFileType::Lua => String::from("lua"),
|
||||
BundleFileType::Material => String::from("material"),
|
||||
BundleFileType::Mod => String::from("mod"),
|
||||
BundleFileType::MouseCursor => String::from("mouse_cursor"),
|
||||
BundleFileType::NavData => String::from("nav_data"),
|
||||
BundleFileType::NetworkConfig => String::from("network_config"),
|
||||
BundleFileType::OddleNet => String::from("oodle_net"),
|
||||
BundleFileType::Package => String::from("package"),
|
||||
BundleFileType::Particles => String::from("particles"),
|
||||
BundleFileType::PhysicsProperties => String::from("physics_properties"),
|
||||
BundleFileType::RenderConfig => String::from("render_config"),
|
||||
BundleFileType::RtPipeline => String::from("rt_pipeline"),
|
||||
BundleFileType::Scene => String::from("scene"),
|
||||
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
|
||||
BundleFileType::ShaderLibrary => String::from("shader_library"),
|
||||
BundleFileType::Shader => String::from("shader"),
|
||||
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
|
||||
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
|
||||
BundleFileType::SlugAlbum => String::from("slug_album"),
|
||||
BundleFileType::Slug => String::from("slug"),
|
||||
BundleFileType::SoundEnvironment => String::from("sound_environment"),
|
||||
BundleFileType::SpuJob => String::from("spu_job"),
|
||||
BundleFileType::StateMachine => String::from("state_machine"),
|
||||
BundleFileType::StaticPVS => String::from("static_pvs"),
|
||||
BundleFileType::Strings => String::from("strings"),
|
||||
BundleFileType::SurfaceProperties => String::from("surface_properties"),
|
||||
BundleFileType::Texture => String::from("texture"),
|
||||
BundleFileType::TimpaniBank => String::from("timpani_bank"),
|
||||
BundleFileType::TimpaniMaster => String::from("timpani_master"),
|
||||
BundleFileType::Tome => String::from("tome"),
|
||||
BundleFileType::Ugg => String::from("ugg"),
|
||||
BundleFileType::Unit => String::from("unit"),
|
||||
BundleFileType::Upb => String::from("upb"),
|
||||
BundleFileType::VectorField => String::from("vector_field"),
|
||||
BundleFileType::Wav => String::from("wav"),
|
||||
BundleFileType::WwiseBank => String::from("wwise_bank"),
|
||||
BundleFileType::WwiseDep => String::from("wwise_dep"),
|
||||
BundleFileType::WwiseEvent => String::from("wwise_event"),
|
||||
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
|
||||
BundleFileType::WwiseStream => String::from("wwise_stream"),
|
||||
BundleFileType::Xml => String::from("xml"),
|
||||
|
||||
BundleFileType::Unknown(s) => format!("{s:016X}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn decompiled_ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::Texture => String::from("dds"),
|
||||
BundleFileType::WwiseBank => String::from("bnk"),
|
||||
BundleFileType::WwiseStream => String::from("ogg"),
|
||||
_ => self.ext_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash(&self) -> Murmur64 {
|
||||
Murmur64::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for BundleFileType {
|
||||
type Err = color_eyre::Report;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let val = match s {
|
||||
"animation_curves" => BundleFileType::AnimationCurves,
|
||||
"animation" => BundleFileType::Animation,
|
||||
"apb" => BundleFileType::Apb,
|
||||
"baked_lighting" => BundleFileType::BakedLighting,
|
||||
"bik" => BundleFileType::Bik,
|
||||
"blend_set" => BundleFileType::BlendSet,
|
||||
"bones" => BundleFileType::Bones,
|
||||
"chroma" => BundleFileType::Chroma,
|
||||
"common_package" => BundleFileType::CommonPackage,
|
||||
"config" => BundleFileType::Config,
|
||||
"crypto" => BundleFileType::Crypto,
|
||||
"data" => BundleFileType::Data,
|
||||
"entity" => BundleFileType::Entity,
|
||||
"flow" => BundleFileType::Flow,
|
||||
"font" => BundleFileType::Font,
|
||||
"ies" => BundleFileType::Ies,
|
||||
"ini" => BundleFileType::Ini,
|
||||
"input" => BundleFileType::Input,
|
||||
"ivf" => BundleFileType::Ivf,
|
||||
"keys" => BundleFileType::Keys,
|
||||
"level" => BundleFileType::Level,
|
||||
"lua" => BundleFileType::Lua,
|
||||
"material" => BundleFileType::Material,
|
||||
"mod" => BundleFileType::Mod,
|
||||
"mouse_cursor" => BundleFileType::MouseCursor,
|
||||
"nav_data" => BundleFileType::NavData,
|
||||
"network_config" => BundleFileType::NetworkConfig,
|
||||
"oodle_net" => BundleFileType::OddleNet,
|
||||
"package" => BundleFileType::Package,
|
||||
"particles" => BundleFileType::Particles,
|
||||
"physics_properties" => BundleFileType::PhysicsProperties,
|
||||
"render_config" => BundleFileType::RenderConfig,
|
||||
"rt_pipeline" => BundleFileType::RtPipeline,
|
||||
"scene" => BundleFileType::Scene,
|
||||
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
|
||||
"shader_library" => BundleFileType::ShaderLibrary,
|
||||
"shader" => BundleFileType::Shader,
|
||||
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
|
||||
"shading_environment" => BundleFileType::ShadingEnvironment,
|
||||
"slug_album" => BundleFileType::SlugAlbum,
|
||||
"slug" => BundleFileType::Slug,
|
||||
"sound_environment" => BundleFileType::SoundEnvironment,
|
||||
"spu_job" => BundleFileType::SpuJob,
|
||||
"state_machine" => BundleFileType::StateMachine,
|
||||
"static_pvs" => BundleFileType::StaticPVS,
|
||||
"strings" => BundleFileType::Strings,
|
||||
"surface_properties" => BundleFileType::SurfaceProperties,
|
||||
"texture" => BundleFileType::Texture,
|
||||
"timpani_bank" => BundleFileType::TimpaniBank,
|
||||
"timpani_master" => BundleFileType::TimpaniMaster,
|
||||
"tome" => BundleFileType::Tome,
|
||||
"ugg" => BundleFileType::Ugg,
|
||||
"unit" => BundleFileType::Unit,
|
||||
"upb" => BundleFileType::Upb,
|
||||
"vector_field" => BundleFileType::VectorField,
|
||||
"wav" => BundleFileType::Wav,
|
||||
"wwise_bank" => BundleFileType::WwiseBank,
|
||||
"wwise_dep" => BundleFileType::WwiseDep,
|
||||
"wwise_event" => BundleFileType::WwiseEvent,
|
||||
"wwise_metadata" => BundleFileType::WwiseMetadata,
|
||||
"wwise_stream" => BundleFileType::WwiseStream,
|
||||
"xml" => BundleFileType::Xml,
|
||||
s => eyre::bail!("Unknown type string '{}'", s),
|
||||
};
|
||||
|
||||
Ok(val)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for BundleFileType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let value = self.ext_name();
|
||||
value.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for BundleFileType {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
Self::from(Into::<u64>::into(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for BundleFileType {
|
||||
fn from(hash: u64) -> BundleFileType {
|
||||
match hash {
|
||||
0x931e336d7646cc26 => BundleFileType::Animation,
|
||||
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
|
||||
0x3eed05ba83af5090 => BundleFileType::Apb,
|
||||
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
|
||||
0xaa5965f03029fa18 => BundleFileType::Bik,
|
||||
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
|
||||
0x18dead01056b72e9 => BundleFileType::Bones,
|
||||
0xb7893adf7567506a => BundleFileType::Chroma,
|
||||
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
|
||||
0x82645835e6b73232 => BundleFileType::Config,
|
||||
0x69108ded1e3e634b => BundleFileType::Crypto,
|
||||
0x8fd0d44d20650b68 => BundleFileType::Data,
|
||||
0x9831ca893b0d087d => BundleFileType::Entity,
|
||||
0x92d3ee038eeb610d => BundleFileType::Flow,
|
||||
0x9efe0a916aae7880 => BundleFileType::Font,
|
||||
0x8f7d5a2c0f967655 => BundleFileType::Ies,
|
||||
0xd526a27da14f1dc5 => BundleFileType::Ini,
|
||||
0x2bbcabe5074ade9e => BundleFileType::Input,
|
||||
0xfa4a8e091a91201e => BundleFileType::Ivf,
|
||||
0xa62f9297dc969e85 => BundleFileType::Keys,
|
||||
0x2a690fd348fe9ac5 => BundleFileType::Level,
|
||||
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
|
||||
0xeac0b497876adedf => BundleFileType::Material,
|
||||
0x3fcdd69156a46417 => BundleFileType::Mod,
|
||||
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
|
||||
0x169de9566953d264 => BundleFileType::NavData,
|
||||
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
|
||||
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
|
||||
0xad9c6d9ed1e5e77a => BundleFileType::Package,
|
||||
0xa8193123526fad64 => BundleFileType::Particles,
|
||||
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
|
||||
0x27862fe24795319c => BundleFileType::RenderConfig,
|
||||
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
|
||||
0x9d0a795bfe818d19 => BundleFileType::Scene,
|
||||
0xcce8d5b5f5ae333f => BundleFileType::Shader,
|
||||
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
|
||||
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
|
||||
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
|
||||
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
|
||||
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
|
||||
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
|
||||
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
|
||||
0xf97af9983c05b950 => BundleFileType::SpuJob,
|
||||
0xa486d4045106165c => BundleFileType::StateMachine,
|
||||
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
|
||||
0x0d972bab10b40fd3 => BundleFileType::Strings,
|
||||
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
|
||||
0xcd4238c6a0c69e32 => BundleFileType::Texture,
|
||||
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
|
||||
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
|
||||
0x19c792357c99f49b => BundleFileType::Tome,
|
||||
0x712d6e3dd1024c9c => BundleFileType::Ugg,
|
||||
0xe0a48d0be9a7453f => BundleFileType::Unit,
|
||||
0xa99510c6e86dd3c2 => BundleFileType::Upb,
|
||||
0xf7505933166d6755 => BundleFileType::VectorField,
|
||||
0x786f65c00a816b19 => BundleFileType::Wav,
|
||||
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
|
||||
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
|
||||
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
|
||||
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
|
||||
0x504b55235d21440e => BundleFileType::WwiseStream,
|
||||
0x76015845a6003765 => BundleFileType::Xml,
|
||||
|
||||
_ => BundleFileType::Unknown(Murmur64::from(hash)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BundleFileType> for u64 {
|
||||
fn from(t: BundleFileType) -> u64 {
|
||||
match t {
|
||||
BundleFileType::Animation => 0x931e336d7646cc26,
|
||||
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
|
||||
BundleFileType::Apb => 0x3eed05ba83af5090,
|
||||
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
|
||||
BundleFileType::Bik => 0xaa5965f03029fa18,
|
||||
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
|
||||
BundleFileType::Bones => 0x18dead01056b72e9,
|
||||
BundleFileType::Chroma => 0xb7893adf7567506a,
|
||||
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
|
||||
BundleFileType::Config => 0x82645835e6b73232,
|
||||
BundleFileType::Crypto => 0x69108ded1e3e634b,
|
||||
BundleFileType::Data => 0x8fd0d44d20650b68,
|
||||
BundleFileType::Entity => 0x9831ca893b0d087d,
|
||||
BundleFileType::Flow => 0x92d3ee038eeb610d,
|
||||
BundleFileType::Font => 0x9efe0a916aae7880,
|
||||
BundleFileType::Ies => 0x8f7d5a2c0f967655,
|
||||
BundleFileType::Ini => 0xd526a27da14f1dc5,
|
||||
BundleFileType::Input => 0x2bbcabe5074ade9e,
|
||||
BundleFileType::Ivf => 0xfa4a8e091a91201e,
|
||||
BundleFileType::Keys => 0xa62f9297dc969e85,
|
||||
BundleFileType::Level => 0x2a690fd348fe9ac5,
|
||||
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
|
||||
BundleFileType::Material => 0xeac0b497876adedf,
|
||||
BundleFileType::Mod => 0x3fcdd69156a46417,
|
||||
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
|
||||
BundleFileType::NavData => 0x169de9566953d264,
|
||||
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
|
||||
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
|
||||
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
|
||||
BundleFileType::Particles => 0xa8193123526fad64,
|
||||
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
|
||||
BundleFileType::RenderConfig => 0x27862fe24795319c,
|
||||
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
|
||||
BundleFileType::Scene => 0x9d0a795bfe818d19,
|
||||
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
|
||||
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
|
||||
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
|
||||
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
|
||||
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
|
||||
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
|
||||
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
|
||||
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
|
||||
BundleFileType::SpuJob => 0xf97af9983c05b950,
|
||||
BundleFileType::StateMachine => 0xa486d4045106165c,
|
||||
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
|
||||
BundleFileType::Strings => 0x0d972bab10b40fd3,
|
||||
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
|
||||
BundleFileType::Texture => 0xcd4238c6a0c69e32,
|
||||
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
|
||||
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
|
||||
BundleFileType::Tome => 0x19c792357c99f49b,
|
||||
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
|
||||
BundleFileType::Unit => 0xe0a48d0be9a7453f,
|
||||
BundleFileType::Upb => 0xa99510c6e86dd3c2,
|
||||
BundleFileType::VectorField => 0xf7505933166d6755,
|
||||
BundleFileType::Wav => 0x786f65c00a816b19,
|
||||
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
|
||||
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
|
||||
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
|
||||
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
|
||||
BundleFileType::WwiseStream => 0x504b55235d21440e,
|
||||
BundleFileType::Xml => 0x76015845a6003765,
|
||||
|
||||
BundleFileType::Unknown(hash) => hash.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<BundleFileType> for Murmur64 {
|
||||
fn from(t: BundleFileType) -> Murmur64 {
|
||||
let hash: u64 = t.into();
|
||||
Murmur64::from(hash)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for BundleFileType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.ext_name())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BundleFileHeader {
|
||||
|
@ -106,7 +501,7 @@ impl BundleFileVariant {
|
|||
}
|
||||
|
||||
bitflags! {
|
||||
#[derive(Default, Clone, Copy, Debug)]
|
||||
#[derive(Default)]
|
||||
pub struct Properties: u32 {
|
||||
const DATA = 0b100;
|
||||
}
|
||||
|
@ -120,7 +515,7 @@ pub struct BundleFile {
|
|||
}
|
||||
|
||||
impl BundleFile {
|
||||
pub fn new(name: impl Into<IdString64>, file_type: BundleFileType) -> Self {
|
||||
pub fn new(name: String, file_type: BundleFileType) -> Self {
|
||||
Self {
|
||||
file_type,
|
||||
name: name.into(),
|
||||
|
@ -252,15 +647,20 @@ impl BundleFile {
|
|||
Ok(w.into_inner())
|
||||
}
|
||||
|
||||
#[tracing::instrument("File::from_sjson", skip(sjson, name), fields(name = %name.display()))]
|
||||
pub async fn from_sjson(
|
||||
name: IdString64,
|
||||
#[tracing::instrument(name = "File::from_sjson", skip(sjson))]
|
||||
pub async fn from_sjson<P, S>(
|
||||
name: String,
|
||||
file_type: BundleFileType,
|
||||
sjson: impl AsRef<str>,
|
||||
root: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<Self> {
|
||||
sjson: S,
|
||||
root: P,
|
||||
) -> Result<Self>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
match file_type {
|
||||
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
|
||||
BundleFileType::Lua => lua::compile(name.clone(), sjson)
|
||||
.wrap_err_with(|| format!("Failed to compile Lua file '{}'", name)),
|
||||
BundleFileType::Unknown(_) => {
|
||||
eyre::bail!("Unknown file type. Cannot compile from SJSON");
|
||||
}
|
||||
|
@ -299,7 +699,10 @@ impl BundleFile {
|
|||
s
|
||||
}
|
||||
|
||||
pub fn matches_name(&self, name: impl Into<IdString64>) -> bool {
|
||||
pub fn matches_name<S>(&self, name: S) -> bool
|
||||
where
|
||||
S: Into<IdString64>,
|
||||
{
|
||||
let name = name.into();
|
||||
if self.name == name {
|
||||
return true;
|
||||
|
|
|
@ -1,400 +0,0 @@
|
|||
use color_eyre::{eyre, Result};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::murmur::Murmur64;
|
||||
|
||||
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
|
||||
pub enum BundleFileType {
|
||||
Animation,
|
||||
AnimationCurves,
|
||||
Apb,
|
||||
BakedLighting,
|
||||
Bik,
|
||||
BlendSet,
|
||||
Bones,
|
||||
Chroma,
|
||||
CommonPackage,
|
||||
Config,
|
||||
Crypto,
|
||||
Data,
|
||||
Entity,
|
||||
Flow,
|
||||
Font,
|
||||
Ies,
|
||||
Ini,
|
||||
Input,
|
||||
Ivf,
|
||||
Keys,
|
||||
Level,
|
||||
Lua,
|
||||
Material,
|
||||
Mod,
|
||||
MouseCursor,
|
||||
NavData,
|
||||
NetworkConfig,
|
||||
OddleNet,
|
||||
Package,
|
||||
Particles,
|
||||
PhysicsProperties,
|
||||
RenderConfig,
|
||||
RtPipeline,
|
||||
Scene,
|
||||
Shader,
|
||||
ShaderLibrary,
|
||||
ShaderLibraryGroup,
|
||||
ShadingEnvionmentMapping,
|
||||
ShadingEnvironment,
|
||||
Slug,
|
||||
SlugAlbum,
|
||||
SoundEnvironment,
|
||||
SpuJob,
|
||||
StateMachine,
|
||||
StaticPVS,
|
||||
Strings,
|
||||
SurfaceProperties,
|
||||
Texture,
|
||||
TimpaniBank,
|
||||
TimpaniMaster,
|
||||
Tome,
|
||||
Ugg,
|
||||
Unit,
|
||||
Upb,
|
||||
VectorField,
|
||||
Wav,
|
||||
WwiseBank,
|
||||
WwiseDep,
|
||||
WwiseEvent,
|
||||
WwiseMetadata,
|
||||
WwiseStream,
|
||||
Xml,
|
||||
|
||||
Unknown(Murmur64),
|
||||
}
|
||||
|
||||
impl BundleFileType {
|
||||
pub fn ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::AnimationCurves => String::from("animation_curves"),
|
||||
BundleFileType::Animation => String::from("animation"),
|
||||
BundleFileType::Apb => String::from("apb"),
|
||||
BundleFileType::BakedLighting => String::from("baked_lighting"),
|
||||
BundleFileType::Bik => String::from("bik"),
|
||||
BundleFileType::BlendSet => String::from("blend_set"),
|
||||
BundleFileType::Bones => String::from("bones"),
|
||||
BundleFileType::Chroma => String::from("chroma"),
|
||||
BundleFileType::CommonPackage => String::from("common_package"),
|
||||
BundleFileType::Config => String::from("config"),
|
||||
BundleFileType::Crypto => String::from("crypto"),
|
||||
BundleFileType::Data => String::from("data"),
|
||||
BundleFileType::Entity => String::from("entity"),
|
||||
BundleFileType::Flow => String::from("flow"),
|
||||
BundleFileType::Font => String::from("font"),
|
||||
BundleFileType::Ies => String::from("ies"),
|
||||
BundleFileType::Ini => String::from("ini"),
|
||||
BundleFileType::Input => String::from("input"),
|
||||
BundleFileType::Ivf => String::from("ivf"),
|
||||
BundleFileType::Keys => String::from("keys"),
|
||||
BundleFileType::Level => String::from("level"),
|
||||
BundleFileType::Lua => String::from("lua"),
|
||||
BundleFileType::Material => String::from("material"),
|
||||
BundleFileType::Mod => String::from("mod"),
|
||||
BundleFileType::MouseCursor => String::from("mouse_cursor"),
|
||||
BundleFileType::NavData => String::from("nav_data"),
|
||||
BundleFileType::NetworkConfig => String::from("network_config"),
|
||||
BundleFileType::OddleNet => String::from("oodle_net"),
|
||||
BundleFileType::Package => String::from("package"),
|
||||
BundleFileType::Particles => String::from("particles"),
|
||||
BundleFileType::PhysicsProperties => String::from("physics_properties"),
|
||||
BundleFileType::RenderConfig => String::from("render_config"),
|
||||
BundleFileType::RtPipeline => String::from("rt_pipeline"),
|
||||
BundleFileType::Scene => String::from("scene"),
|
||||
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
|
||||
BundleFileType::ShaderLibrary => String::from("shader_library"),
|
||||
BundleFileType::Shader => String::from("shader"),
|
||||
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
|
||||
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
|
||||
BundleFileType::SlugAlbum => String::from("slug_album"),
|
||||
BundleFileType::Slug => String::from("slug"),
|
||||
BundleFileType::SoundEnvironment => String::from("sound_environment"),
|
||||
BundleFileType::SpuJob => String::from("spu_job"),
|
||||
BundleFileType::StateMachine => String::from("state_machine"),
|
||||
BundleFileType::StaticPVS => String::from("static_pvs"),
|
||||
BundleFileType::Strings => String::from("strings"),
|
||||
BundleFileType::SurfaceProperties => String::from("surface_properties"),
|
||||
BundleFileType::Texture => String::from("texture"),
|
||||
BundleFileType::TimpaniBank => String::from("timpani_bank"),
|
||||
BundleFileType::TimpaniMaster => String::from("timpani_master"),
|
||||
BundleFileType::Tome => String::from("tome"),
|
||||
BundleFileType::Ugg => String::from("ugg"),
|
||||
BundleFileType::Unit => String::from("unit"),
|
||||
BundleFileType::Upb => String::from("upb"),
|
||||
BundleFileType::VectorField => String::from("vector_field"),
|
||||
BundleFileType::Wav => String::from("wav"),
|
||||
BundleFileType::WwiseBank => String::from("wwise_bank"),
|
||||
BundleFileType::WwiseDep => String::from("wwise_dep"),
|
||||
BundleFileType::WwiseEvent => String::from("wwise_event"),
|
||||
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
|
||||
BundleFileType::WwiseStream => String::from("wwise_stream"),
|
||||
BundleFileType::Xml => String::from("xml"),
|
||||
|
||||
BundleFileType::Unknown(s) => format!("{s:016X}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn decompiled_ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::Texture => String::from("dds"),
|
||||
BundleFileType::WwiseBank => String::from("bnk"),
|
||||
BundleFileType::WwiseStream => String::from("ogg"),
|
||||
_ => self.ext_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash(&self) -> Murmur64 {
|
||||
Murmur64::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for BundleFileType {
|
||||
type Err = color_eyre::Report;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let val = match s {
|
||||
"animation_curves" => BundleFileType::AnimationCurves,
|
||||
"animation" => BundleFileType::Animation,
|
||||
"apb" => BundleFileType::Apb,
|
||||
"baked_lighting" => BundleFileType::BakedLighting,
|
||||
"bik" => BundleFileType::Bik,
|
||||
"blend_set" => BundleFileType::BlendSet,
|
||||
"bones" => BundleFileType::Bones,
|
||||
"chroma" => BundleFileType::Chroma,
|
||||
"common_package" => BundleFileType::CommonPackage,
|
||||
"config" => BundleFileType::Config,
|
||||
"crypto" => BundleFileType::Crypto,
|
||||
"data" => BundleFileType::Data,
|
||||
"entity" => BundleFileType::Entity,
|
||||
"flow" => BundleFileType::Flow,
|
||||
"font" => BundleFileType::Font,
|
||||
"ies" => BundleFileType::Ies,
|
||||
"ini" => BundleFileType::Ini,
|
||||
"input" => BundleFileType::Input,
|
||||
"ivf" => BundleFileType::Ivf,
|
||||
"keys" => BundleFileType::Keys,
|
||||
"level" => BundleFileType::Level,
|
||||
"lua" => BundleFileType::Lua,
|
||||
"material" => BundleFileType::Material,
|
||||
"mod" => BundleFileType::Mod,
|
||||
"mouse_cursor" => BundleFileType::MouseCursor,
|
||||
"nav_data" => BundleFileType::NavData,
|
||||
"network_config" => BundleFileType::NetworkConfig,
|
||||
"oodle_net" => BundleFileType::OddleNet,
|
||||
"package" => BundleFileType::Package,
|
||||
"particles" => BundleFileType::Particles,
|
||||
"physics_properties" => BundleFileType::PhysicsProperties,
|
||||
"render_config" => BundleFileType::RenderConfig,
|
||||
"rt_pipeline" => BundleFileType::RtPipeline,
|
||||
"scene" => BundleFileType::Scene,
|
||||
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
|
||||
"shader_library" => BundleFileType::ShaderLibrary,
|
||||
"shader" => BundleFileType::Shader,
|
||||
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
|
||||
"shading_environment" => BundleFileType::ShadingEnvironment,
|
||||
"slug_album" => BundleFileType::SlugAlbum,
|
||||
"slug" => BundleFileType::Slug,
|
||||
"sound_environment" => BundleFileType::SoundEnvironment,
|
||||
"spu_job" => BundleFileType::SpuJob,
|
||||
"state_machine" => BundleFileType::StateMachine,
|
||||
"static_pvs" => BundleFileType::StaticPVS,
|
||||
"strings" => BundleFileType::Strings,
|
||||
"surface_properties" => BundleFileType::SurfaceProperties,
|
||||
"texture" => BundleFileType::Texture,
|
||||
"timpani_bank" => BundleFileType::TimpaniBank,
|
||||
"timpani_master" => BundleFileType::TimpaniMaster,
|
||||
"tome" => BundleFileType::Tome,
|
||||
"ugg" => BundleFileType::Ugg,
|
||||
"unit" => BundleFileType::Unit,
|
||||
"upb" => BundleFileType::Upb,
|
||||
"vector_field" => BundleFileType::VectorField,
|
||||
"wav" => BundleFileType::Wav,
|
||||
"wwise_bank" => BundleFileType::WwiseBank,
|
||||
"wwise_dep" => BundleFileType::WwiseDep,
|
||||
"wwise_event" => BundleFileType::WwiseEvent,
|
||||
"wwise_metadata" => BundleFileType::WwiseMetadata,
|
||||
"wwise_stream" => BundleFileType::WwiseStream,
|
||||
"xml" => BundleFileType::Xml,
|
||||
s => eyre::bail!("Unknown type string '{}'", s),
|
||||
};
|
||||
|
||||
Ok(val)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for BundleFileType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let value = self.ext_name();
|
||||
value.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for BundleFileType {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
Self::from(Into::<u64>::into(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for BundleFileType {
|
||||
fn from(hash: u64) -> BundleFileType {
|
||||
match hash {
|
||||
0x931e336d7646cc26 => BundleFileType::Animation,
|
||||
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
|
||||
0x3eed05ba83af5090 => BundleFileType::Apb,
|
||||
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
|
||||
0xaa5965f03029fa18 => BundleFileType::Bik,
|
||||
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
|
||||
0x18dead01056b72e9 => BundleFileType::Bones,
|
||||
0xb7893adf7567506a => BundleFileType::Chroma,
|
||||
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
|
||||
0x82645835e6b73232 => BundleFileType::Config,
|
||||
0x69108ded1e3e634b => BundleFileType::Crypto,
|
||||
0x8fd0d44d20650b68 => BundleFileType::Data,
|
||||
0x9831ca893b0d087d => BundleFileType::Entity,
|
||||
0x92d3ee038eeb610d => BundleFileType::Flow,
|
||||
0x9efe0a916aae7880 => BundleFileType::Font,
|
||||
0x8f7d5a2c0f967655 => BundleFileType::Ies,
|
||||
0xd526a27da14f1dc5 => BundleFileType::Ini,
|
||||
0x2bbcabe5074ade9e => BundleFileType::Input,
|
||||
0xfa4a8e091a91201e => BundleFileType::Ivf,
|
||||
0xa62f9297dc969e85 => BundleFileType::Keys,
|
||||
0x2a690fd348fe9ac5 => BundleFileType::Level,
|
||||
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
|
||||
0xeac0b497876adedf => BundleFileType::Material,
|
||||
0x3fcdd69156a46417 => BundleFileType::Mod,
|
||||
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
|
||||
0x169de9566953d264 => BundleFileType::NavData,
|
||||
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
|
||||
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
|
||||
0xad9c6d9ed1e5e77a => BundleFileType::Package,
|
||||
0xa8193123526fad64 => BundleFileType::Particles,
|
||||
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
|
||||
0x27862fe24795319c => BundleFileType::RenderConfig,
|
||||
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
|
||||
0x9d0a795bfe818d19 => BundleFileType::Scene,
|
||||
0xcce8d5b5f5ae333f => BundleFileType::Shader,
|
||||
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
|
||||
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
|
||||
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
|
||||
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
|
||||
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
|
||||
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
|
||||
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
|
||||
0xf97af9983c05b950 => BundleFileType::SpuJob,
|
||||
0xa486d4045106165c => BundleFileType::StateMachine,
|
||||
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
|
||||
0x0d972bab10b40fd3 => BundleFileType::Strings,
|
||||
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
|
||||
0xcd4238c6a0c69e32 => BundleFileType::Texture,
|
||||
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
|
||||
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
|
||||
0x19c792357c99f49b => BundleFileType::Tome,
|
||||
0x712d6e3dd1024c9c => BundleFileType::Ugg,
|
||||
0xe0a48d0be9a7453f => BundleFileType::Unit,
|
||||
0xa99510c6e86dd3c2 => BundleFileType::Upb,
|
||||
0xf7505933166d6755 => BundleFileType::VectorField,
|
||||
0x786f65c00a816b19 => BundleFileType::Wav,
|
||||
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
|
||||
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
|
||||
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
|
||||
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
|
||||
0x504b55235d21440e => BundleFileType::WwiseStream,
|
||||
0x76015845a6003765 => BundleFileType::Xml,
|
||||
|
||||
_ => BundleFileType::Unknown(Murmur64::from(hash)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BundleFileType> for u64 {
|
||||
fn from(t: BundleFileType) -> u64 {
|
||||
match t {
|
||||
BundleFileType::Animation => 0x931e336d7646cc26,
|
||||
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
|
||||
BundleFileType::Apb => 0x3eed05ba83af5090,
|
||||
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
|
||||
BundleFileType::Bik => 0xaa5965f03029fa18,
|
||||
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
|
||||
BundleFileType::Bones => 0x18dead01056b72e9,
|
||||
BundleFileType::Chroma => 0xb7893adf7567506a,
|
||||
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
|
||||
BundleFileType::Config => 0x82645835e6b73232,
|
||||
BundleFileType::Crypto => 0x69108ded1e3e634b,
|
||||
BundleFileType::Data => 0x8fd0d44d20650b68,
|
||||
BundleFileType::Entity => 0x9831ca893b0d087d,
|
||||
BundleFileType::Flow => 0x92d3ee038eeb610d,
|
||||
BundleFileType::Font => 0x9efe0a916aae7880,
|
||||
BundleFileType::Ies => 0x8f7d5a2c0f967655,
|
||||
BundleFileType::Ini => 0xd526a27da14f1dc5,
|
||||
BundleFileType::Input => 0x2bbcabe5074ade9e,
|
||||
BundleFileType::Ivf => 0xfa4a8e091a91201e,
|
||||
BundleFileType::Keys => 0xa62f9297dc969e85,
|
||||
BundleFileType::Level => 0x2a690fd348fe9ac5,
|
||||
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
|
||||
BundleFileType::Material => 0xeac0b497876adedf,
|
||||
BundleFileType::Mod => 0x3fcdd69156a46417,
|
||||
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
|
||||
BundleFileType::NavData => 0x169de9566953d264,
|
||||
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
|
||||
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
|
||||
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
|
||||
BundleFileType::Particles => 0xa8193123526fad64,
|
||||
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
|
||||
BundleFileType::RenderConfig => 0x27862fe24795319c,
|
||||
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
|
||||
BundleFileType::Scene => 0x9d0a795bfe818d19,
|
||||
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
|
||||
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
|
||||
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
|
||||
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
|
||||
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
|
||||
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
|
||||
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
|
||||
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
|
||||
BundleFileType::SpuJob => 0xf97af9983c05b950,
|
||||
BundleFileType::StateMachine => 0xa486d4045106165c,
|
||||
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
|
||||
BundleFileType::Strings => 0x0d972bab10b40fd3,
|
||||
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
|
||||
BundleFileType::Texture => 0xcd4238c6a0c69e32,
|
||||
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
|
||||
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
|
||||
BundleFileType::Tome => 0x19c792357c99f49b,
|
||||
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
|
||||
BundleFileType::Unit => 0xe0a48d0be9a7453f,
|
||||
BundleFileType::Upb => 0xa99510c6e86dd3c2,
|
||||
BundleFileType::VectorField => 0xf7505933166d6755,
|
||||
BundleFileType::Wav => 0x786f65c00a816b19,
|
||||
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
|
||||
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
|
||||
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
|
||||
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
|
||||
BundleFileType::WwiseStream => 0x504b55235d21440e,
|
||||
BundleFileType::Xml => 0x76015845a6003765,
|
||||
|
||||
BundleFileType::Unknown(hash) => hash.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<BundleFileType> for Murmur64 {
|
||||
fn from(t: BundleFileType) -> Murmur64 {
|
||||
let hash: u64 = t.into();
|
||||
Murmur64::from(hash)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for BundleFileType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.ext_name())
|
||||
}
|
||||
}
|
|
@ -12,10 +12,8 @@ use crate::murmur::{HashGroup, IdString64, Murmur64};
|
|||
|
||||
pub(crate) mod database;
|
||||
pub(crate) mod file;
|
||||
pub(crate) mod filetype;
|
||||
|
||||
pub use file::{BundleFile, BundleFileVariant};
|
||||
pub use filetype::BundleFileType;
|
||||
pub use file::{BundleFile, BundleFileType, BundleFileVariant};
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
|
||||
enum BundleFormat {
|
||||
|
@ -229,10 +227,13 @@ impl Bundle {
|
|||
let _enter = span.enter();
|
||||
tracing::trace!(num_files = self.files.len());
|
||||
|
||||
self.files.iter().try_fold(Vec::new(), |mut data, file| {
|
||||
data.append(&mut file.to_binary()?);
|
||||
Ok::<_, Report>(data)
|
||||
})?
|
||||
self.files
|
||||
.iter()
|
||||
.fold(Ok::<Vec<u8>, Report>(Vec::new()), |data, file| {
|
||||
let mut data = data?;
|
||||
data.append(&mut file.to_binary()?);
|
||||
Ok(data)
|
||||
})?
|
||||
};
|
||||
|
||||
// Ceiling division (or division toward infinity) to calculate
|
||||
|
|
|
@ -15,7 +15,6 @@ use tokio::fs;
|
|||
use crate::binary::sync::ReadExt;
|
||||
use crate::binary::sync::WriteExt;
|
||||
use crate::bundle::file::{BundleFileVariant, UserFile};
|
||||
use crate::murmur::IdString64;
|
||||
use crate::{BundleFile, BundleFileType};
|
||||
|
||||
const BITSQUID_LUAJIT_HEADER: u32 = 0x8253461B;
|
||||
|
@ -54,8 +53,8 @@ where
|
|||
|
||||
let mut buf = vec![0u8; length];
|
||||
r.read_exact(&mut buf)?;
|
||||
let mut s =
|
||||
String::from_utf8(buf).wrap_err("Invalid byte sequence for LuaJIT bytecode name")?;
|
||||
let mut s = String::from_utf8(buf)
|
||||
.wrap_err_with(|| format!("Invalid byte sequence for LuaJIT bytecode name"))?;
|
||||
// Remove the leading `@`
|
||||
s.remove(0);
|
||||
s
|
||||
|
@ -93,13 +92,11 @@ where
|
|||
|
||||
let output = cmd.output().wrap_err("Failed to run ljd")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let err = eyre::eyre!(
|
||||
"LJD exited with code {:?}:\n{}",
|
||||
output.status.code(),
|
||||
if !output.stderr.is_empty() {
|
||||
eyre::bail!(
|
||||
"Decompilation failed: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
tracing::error!("Failed to decompile '{}':\n{:?}", name, err);
|
||||
}
|
||||
|
||||
let content = output.stdout;
|
||||
|
@ -118,13 +115,17 @@ where
|
|||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub fn compile(name: impl Into<IdString64>, code: impl AsRef<str>) -> Result<BundleFile> {
|
||||
pub fn compile<S, C>(name: S, code: C) -> Result<BundleFile>
|
||||
where
|
||||
S: Into<String>,
|
||||
C: AsRef<str>,
|
||||
{
|
||||
let name = name.into();
|
||||
let code = code.as_ref();
|
||||
|
||||
tracing::trace!(
|
||||
"Compiling '{}', {} bytes of code",
|
||||
name.display(),
|
||||
name,
|
||||
code.as_bytes().len()
|
||||
);
|
||||
|
||||
|
@ -132,8 +133,8 @@ pub fn compile(name: impl Into<IdString64>, code: impl AsRef<str>) -> Result<Bun
|
|||
let state = lua::luaL_newstate();
|
||||
lua::luaL_openlibs(state);
|
||||
|
||||
let name = CString::new(format!("@{}", name.display()).into_bytes())
|
||||
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name.display()))?;
|
||||
let name = CString::new(format!("@{name}").into_bytes())
|
||||
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name))?;
|
||||
match lua::luaL_loadbuffer(
|
||||
state,
|
||||
code.as_ptr() as _,
|
||||
|
|
|
@ -7,22 +7,13 @@ use std::str::FromStr;
|
|||
use async_recursion::async_recursion;
|
||||
use color_eyre::eyre::{self, Context};
|
||||
use color_eyre::Result;
|
||||
use path_slash::PathBufExt;
|
||||
use tokio::fs;
|
||||
|
||||
use crate::binary::sync::{ReadExt, WriteExt};
|
||||
use crate::bundle::file::UserFile;
|
||||
use crate::bundle::filetype::BundleFileType;
|
||||
use crate::murmur::{HashGroup, IdString64, Murmur64};
|
||||
use crate::bundle::file::{BundleFileType, UserFile};
|
||||
use crate::murmur::{HashGroup, Murmur64};
|
||||
|
||||
/// Resolves a relative path that might contain wildcards into a list of
|
||||
/// paths that exist on disk and match that wildcard.
|
||||
/// This is similar to globbing in Unix shells, but with much less features.
|
||||
///
|
||||
/// The only wilcard character allowed is `*`, and only at the end of the string,
|
||||
/// where it matches all files recursively in that directory.
|
||||
///
|
||||
/// `t` is an optional extension name, that may be used to force a wildcard
|
||||
/// path to only match that file type `t`.
|
||||
#[tracing::instrument]
|
||||
#[async_recursion]
|
||||
async fn resolve_wildcard<P1, P2>(
|
||||
|
@ -99,12 +90,12 @@ where
|
|||
Ok(paths)
|
||||
}
|
||||
|
||||
type PackageType = HashMap<BundleFileType, HashSet<String>>;
|
||||
type PackageType = HashMap<BundleFileType, HashSet<PathBuf>>;
|
||||
type PackageDefinition = HashMap<String, HashSet<String>>;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Package {
|
||||
_name: IdString64,
|
||||
_name: String,
|
||||
_root: PathBuf,
|
||||
inner: PackageType,
|
||||
flags: u8,
|
||||
|
@ -125,9 +116,9 @@ impl DerefMut for Package {
|
|||
}
|
||||
|
||||
impl Package {
|
||||
pub fn new(name: impl Into<IdString64>, root: PathBuf) -> Self {
|
||||
pub fn new(name: String, root: PathBuf) -> Self {
|
||||
Self {
|
||||
_name: name.into(),
|
||||
_name: name,
|
||||
_root: root,
|
||||
inner: Default::default(),
|
||||
flags: 1,
|
||||
|
@ -138,22 +129,17 @@ impl Package {
|
|||
self.values().fold(0, |total, files| total + files.len())
|
||||
}
|
||||
|
||||
pub fn add_file(&mut self, file_type: BundleFileType, name: impl Into<String>) {
|
||||
pub fn add_file<P: Into<PathBuf>>(&mut self, file_type: BundleFileType, name: P) {
|
||||
self.inner.entry(file_type).or_default().insert(name.into());
|
||||
}
|
||||
|
||||
#[tracing::instrument("Package::from_sjson", skip(sjson), fields(sjson_len = sjson.as_ref().len()))]
|
||||
pub async fn from_sjson<P, S>(
|
||||
sjson: S,
|
||||
name: impl Into<IdString64> + std::fmt::Debug,
|
||||
root: P,
|
||||
) -> Result<Self>
|
||||
pub async fn from_sjson<P, S>(sjson: S, name: String, root: P) -> Result<Self>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
let root = root.as_ref();
|
||||
let name = name.into();
|
||||
let definition: PackageDefinition = serde_sjson::from_str(sjson.as_ref())?;
|
||||
let mut inner: PackageType = Default::default();
|
||||
|
||||
|
@ -187,11 +173,7 @@ impl Package {
|
|||
continue;
|
||||
};
|
||||
|
||||
tracing::debug!("Adding file {}", path.display());
|
||||
inner
|
||||
.entry(t)
|
||||
.or_default()
|
||||
.insert(path.display().to_string());
|
||||
inner.entry(t).or_default().insert(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -210,9 +192,11 @@ impl Package {
|
|||
pub fn to_sjson(&self) -> Result<String> {
|
||||
let mut map: PackageDefinition = Default::default();
|
||||
|
||||
for (t, names) in self.iter() {
|
||||
for name in names.iter() {
|
||||
map.entry(t.ext_name()).or_default().insert(name.clone());
|
||||
for (t, paths) in self.iter() {
|
||||
for path in paths.iter() {
|
||||
map.entry(t.ext_name())
|
||||
.or_default()
|
||||
.insert(path.display().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -238,11 +222,11 @@ impl Package {
|
|||
for _ in 0..file_count {
|
||||
let t = BundleFileType::from(r.read_u64()?);
|
||||
let hash = Murmur64::from(r.read_u64()?);
|
||||
let name = ctx.lookup_hash(hash, HashGroup::Filename);
|
||||
let path = ctx.lookup_hash(hash, HashGroup::Filename);
|
||||
inner
|
||||
.entry(t)
|
||||
.or_default()
|
||||
.insert(name.display().to_string());
|
||||
.insert(PathBuf::from(path.display().to_string()));
|
||||
}
|
||||
|
||||
let flags = r.read_u8()?;
|
||||
|
@ -255,7 +239,7 @@ impl Package {
|
|||
|
||||
let pkg = Self {
|
||||
inner,
|
||||
_name: name.into(),
|
||||
_name: name,
|
||||
_root: PathBuf::new(),
|
||||
flags,
|
||||
};
|
||||
|
@ -271,10 +255,12 @@ impl Package {
|
|||
w.write_u32(0x2b)?;
|
||||
w.write_u32(self.values().flatten().count() as u32)?;
|
||||
|
||||
for (t, names) in self.iter() {
|
||||
for name in names.iter() {
|
||||
for (t, paths) in self.iter() {
|
||||
for path in paths.iter() {
|
||||
w.write_u64(t.hash().into())?;
|
||||
w.write_u64(Murmur64::hash(name.as_bytes()).into())?;
|
||||
|
||||
let hash = Murmur64::hash(path.to_slash_lossy().as_bytes());
|
||||
w.write_u64(hash.into())?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -294,11 +280,17 @@ where
|
|||
Ok(vec![UserFile::new(s.into_bytes())])
|
||||
}
|
||||
|
||||
// #[tracing::instrument(skip_all)]
|
||||
// pub fn compile(_ctx: &crate::Context, data: String) -> Result<Vec<u8>> {
|
||||
// let pkg = Package::from_sjson(data)?;
|
||||
// pkg.to_binary()
|
||||
// }
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::bundle::filetype::BundleFileType;
|
||||
use crate::BundleFileType;
|
||||
|
||||
use super::resolve_wildcard;
|
||||
use super::Package;
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
#![feature(test)]
|
||||
|
||||
mod binary;
|
||||
mod bundle;
|
||||
mod context;
|
||||
|
|
|
@ -147,14 +147,14 @@ impl Dictionary {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
|
||||
let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
|
||||
let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));
|
||||
pub fn add(&mut self, value: String, group: HashGroup) {
|
||||
let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64));
|
||||
let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED));
|
||||
|
||||
let entry = Entry {
|
||||
long,
|
||||
short,
|
||||
value: String::from_utf8_lossy(value.as_ref()).to_string(),
|
||||
value,
|
||||
group,
|
||||
};
|
||||
|
||||
|
|
|
@ -1,162 +0,0 @@
|
|||
use std::fmt;
|
||||
|
||||
use serde::{Deserializer, Serializer};
|
||||
|
||||
use super::Murmur32;
|
||||
|
||||
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
|
||||
// entry for every hash in there. So we do want to have the real string available when needed,
|
||||
// but at the same time retain the original hash information for when we don't.
|
||||
// This is especially important when wanting to write back the read bundle, as the hashes need to
|
||||
// stay the same.
|
||||
// The previous system of always turning hashes into strings worked well for the purpose of
|
||||
// displaying hashes, but would have made it very hard to turn a stringyfied hash back into
|
||||
// an actual hash.
|
||||
#[derive(Clone, Debug, Eq)]
|
||||
pub enum IdString32 {
|
||||
Hash(Murmur32),
|
||||
String(String),
|
||||
}
|
||||
|
||||
impl IdString32 {
|
||||
pub fn to_murmur32(&self) -> Murmur32 {
|
||||
match self {
|
||||
Self::Hash(hash) => *hash,
|
||||
Self::String(s) => Murmur32::hash(s.as_bytes()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display(&self) -> IdString32Display {
|
||||
let s = match self {
|
||||
IdString32::Hash(hash) => hash.to_string(),
|
||||
IdString32::String(s) => s.clone(),
|
||||
};
|
||||
|
||||
IdString32Display(s)
|
||||
}
|
||||
|
||||
pub fn is_string(&self) -> bool {
|
||||
match self {
|
||||
IdString32::Hash(_) => false,
|
||||
IdString32::String(_) => true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_hash(&self) -> bool {
|
||||
match self {
|
||||
IdString32::Hash(_) => true,
|
||||
IdString32::String(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for IdString32 {
|
||||
fn from(value: String) -> Self {
|
||||
Self::String(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u32> for IdString32 {
|
||||
fn from(value: u32) -> Self {
|
||||
Self::Hash(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IdString32> for u32 {
|
||||
fn from(value: IdString32) -> Self {
|
||||
value.to_murmur32().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur32> for IdString32 {
|
||||
fn from(value: Murmur32) -> Self {
|
||||
Self::Hash(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IdString32> for Murmur32 {
|
||||
fn from(value: IdString32) -> Self {
|
||||
value.to_murmur32()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for IdString32 {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.to_murmur32() == other.to_murmur32()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for IdString32 {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
state.write_u32(self.to_murmur32().into());
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for IdString32 {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_u32(self.to_murmur32().into())
|
||||
}
|
||||
}
|
||||
|
||||
struct IdString32Visitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for IdString32Visitor {
|
||||
type Value = IdString32;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("an u32 or a string")
|
||||
}
|
||||
|
||||
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString32::Hash(value.into()))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString32::String(v.to_string()))
|
||||
}
|
||||
|
||||
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString32::String(v))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for IdString32 {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
deserializer.deserialize_u32(IdString32Visitor)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IdString32Display(String);
|
||||
|
||||
impl std::fmt::Display for IdString32Display {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::UpperHex for IdString32 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
std::fmt::UpperHex::fmt(&self.to_murmur32(), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::LowerHex for IdString32 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
std::fmt::LowerHex::fmt(&self.to_murmur32(), f)
|
||||
}
|
||||
}
|
|
@ -1,175 +0,0 @@
|
|||
use std::{fmt, path::Path};
|
||||
|
||||
use path_slash::PathExt as _;
|
||||
use serde::{Deserializer, Serializer};
|
||||
|
||||
use super::Murmur64;
|
||||
|
||||
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
|
||||
// entry for every hash in there. So we do want to have the real string available when needed,
|
||||
// but at the same time retain the original hash information for when we don't.
|
||||
// This is especially important when wanting to write back the read bundle, as the hashes need to
|
||||
// stay the same.
|
||||
// The previous system of always turning hashes into strings worked well for the purpose of
|
||||
// displaying hashes, but would have made it very hard to turn a stringyfied hash back into
|
||||
// an actual hash.
|
||||
#[derive(Clone, Debug, Eq)]
|
||||
pub enum IdString64 {
|
||||
Hash(Murmur64),
|
||||
String(String),
|
||||
}
|
||||
|
||||
impl IdString64 {
|
||||
pub fn to_murmur64(&self) -> Murmur64 {
|
||||
match self {
|
||||
Self::Hash(hash) => *hash,
|
||||
Self::String(s) => Murmur64::hash(s.as_bytes()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display(&self) -> IdString64Display {
|
||||
let s = match self {
|
||||
IdString64::Hash(hash) => hash.to_string(),
|
||||
IdString64::String(s) => s.clone(),
|
||||
};
|
||||
|
||||
IdString64Display(s)
|
||||
}
|
||||
|
||||
pub fn is_string(&self) -> bool {
|
||||
match self {
|
||||
IdString64::Hash(_) => false,
|
||||
IdString64::String(_) => true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_hash(&self) -> bool {
|
||||
match self {
|
||||
IdString64::Hash(_) => true,
|
||||
IdString64::String(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
// Would love to have this as a proper `impl From`, but
|
||||
// rustc will complain that it overlaps with the `impl From<Into<String>>`.
|
||||
pub fn from_path(p: impl AsRef<Path>) -> Self {
|
||||
Self::String(p.as_ref().to_slash_lossy().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for IdString64 {
|
||||
fn from(value: String) -> Self {
|
||||
Self::String(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for IdString64 {
|
||||
fn from(value: u64) -> Self {
|
||||
Self::Hash(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for IdString64 {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
Self::Hash(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IdString64> for Murmur64 {
|
||||
fn from(value: IdString64) -> Self {
|
||||
value.to_murmur64()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IdString64> for u64 {
|
||||
fn from(value: IdString64) -> Self {
|
||||
value.to_murmur64().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for IdString64 {
|
||||
fn default() -> Self {
|
||||
Self::Hash(0.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for IdString64 {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.to_murmur64() == other.to_murmur64()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for IdString64 {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
state.write_u64(self.to_murmur64().into());
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for IdString64 {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_u64(self.to_murmur64().into())
|
||||
}
|
||||
}
|
||||
|
||||
struct IdString64Visitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
|
||||
type Value = IdString64;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("an u64 or a string")
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString64::Hash(value.into()))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString64::String(v.to_string()))
|
||||
}
|
||||
|
||||
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString64::String(v))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for IdString64 {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
deserializer.deserialize_u64(IdString64Visitor)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IdString64Display(String);
|
||||
|
||||
impl std::fmt::Display for IdString64Display {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::UpperHex for IdString64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::LowerHex for IdString64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
|
||||
}
|
||||
}
|
|
@ -1,26 +1,389 @@
|
|||
use std::fmt;
|
||||
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{Report, Result};
|
||||
use color_eyre::Report;
|
||||
use serde::de::Visitor;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{Deserializer, Serializer};
|
||||
|
||||
mod dictionary;
|
||||
// Currently unused
|
||||
// mod murmurhash32;
|
||||
mod idstring32;
|
||||
mod idstring64;
|
||||
mod murmurhash64;
|
||||
mod types;
|
||||
mod util;
|
||||
|
||||
pub const SEED: u32 = 0;
|
||||
|
||||
pub use dictionary::{Dictionary, Entry, HashGroup};
|
||||
pub use idstring32::*;
|
||||
pub use idstring64::*;
|
||||
pub use murmurhash64::hash;
|
||||
pub use murmurhash64::hash32;
|
||||
pub use murmurhash64::hash_inverse as inverse;
|
||||
|
||||
pub use types::*;
|
||||
fn _swap_bytes_u32(value: u32) -> u32 {
|
||||
u32::from_le_bytes(value.to_be_bytes())
|
||||
}
|
||||
|
||||
fn _swap_bytes_u64(value: u64) -> u64 {
|
||||
u64::from_le_bytes(value.to_be_bytes())
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct Murmur64(u64);
|
||||
|
||||
impl Murmur64 {
|
||||
pub fn hash<B>(s: B) -> Self
|
||||
where
|
||||
B: AsRef<[u8]>,
|
||||
{
|
||||
hash(s.as_ref(), SEED as u64).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for Murmur64 {
|
||||
fn from(value: u64) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for u64 {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Murmur64 {
|
||||
type Error = Report;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
u64::from_str_radix(value, 16)
|
||||
.map(Self)
|
||||
.wrap_err_with(|| format!("Failed to convert value to Murmur64: {value}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::UpperHex for Murmur64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::UpperHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::LowerHex for Murmur64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::LowerHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Murmur64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::UpperHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Visitor<'de> for Murmur64 {
|
||||
type Value = Self;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(
|
||||
"an usigned 64 bit integer \
|
||||
or a string in hexadecimal format encoding such an integer",
|
||||
)
|
||||
}
|
||||
|
||||
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
let bytes = value.to_le_bytes();
|
||||
Ok(Self::from(u64::from_le_bytes(bytes)))
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(Self::from(value))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match Murmur64::try_from(value) {
|
||||
Ok(hash) => Ok(hash),
|
||||
Err(err) => Err(E::custom(format!(
|
||||
"failed to convert '{value}' to Murmur64: {err}"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Murmur64 {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
deserializer.deserialize_any(Self(0))
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Murmur64 {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(&format!("{self:016X}"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct Murmur32(u32);
|
||||
|
||||
impl Murmur32 {
|
||||
pub fn hash<B>(s: B) -> Self
|
||||
where
|
||||
B: AsRef<[u8]>,
|
||||
{
|
||||
hash32(s.as_ref(), SEED).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u32> for Murmur32 {
|
||||
fn from(value: u32) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur32> for u32 {
|
||||
fn from(value: Murmur32) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Murmur32 {
|
||||
type Error = Report;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
u32::from_str_radix(value, 16)
|
||||
.map(Self)
|
||||
.wrap_err_with(|| format!("Failed to convert value to Murmur32: {value}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::UpperHex for Murmur32 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::UpperHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Murmur32 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::UpperHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Murmur32 {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(&format!("{self:08X}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Visitor<'de> for Murmur32 {
|
||||
type Value = Self;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(
|
||||
"an usigned 32 bit integer \
|
||||
or a string in hexadecimal format encoding such an integer",
|
||||
)
|
||||
}
|
||||
|
||||
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
let bytes = value.to_le_bytes();
|
||||
self.visit_u32(u64::from_le_bytes(bytes) as u32)
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
self.visit_u32(value as u32)
|
||||
}
|
||||
|
||||
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(Self::from(value))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match Murmur32::try_from(value) {
|
||||
Ok(hash) => Ok(hash),
|
||||
Err(err) => Err(E::custom(format!(
|
||||
"failed to convert '{value}' to Murmur32: {err}"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Murmur32 {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
deserializer.deserialize_any(Self(0))
|
||||
}
|
||||
}
|
||||
|
||||
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
|
||||
// entry for every hash in there. So we do want to have the real string available when needed,
|
||||
// but at the same time retain the original hash information for when we don't.
|
||||
// This is especially important when wanting to write back the read bundle, as the hashes need to
|
||||
// stay the same.
|
||||
// The previous system of always turning hashes into strings worked well for the purpose of
|
||||
// displaying hashes, but would have made it very hard to turn a stringyfied hash back into
|
||||
// an actual hash.
|
||||
#[derive(Clone, Debug, Eq)]
|
||||
pub enum IdString64 {
|
||||
Hash(Murmur64),
|
||||
String(String),
|
||||
}
|
||||
|
||||
impl IdString64 {
|
||||
pub fn to_murmur64(&self) -> Murmur64 {
|
||||
match self {
|
||||
Self::Hash(hash) => *hash,
|
||||
Self::String(s) => Murmur64::hash(s.as_bytes()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display(&self) -> IdString64Display {
|
||||
let s = match self {
|
||||
IdString64::Hash(hash) => hash.to_string(),
|
||||
IdString64::String(s) => s.clone(),
|
||||
};
|
||||
|
||||
IdString64Display(s)
|
||||
}
|
||||
|
||||
pub fn is_string(&self) -> bool {
|
||||
match self {
|
||||
IdString64::Hash(_) => false,
|
||||
IdString64::String(_) => true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_hash(&self) -> bool {
|
||||
match self {
|
||||
IdString64::Hash(_) => true,
|
||||
IdString64::String(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Into<String>> From<S> for IdString64 {
|
||||
fn from(value: S) -> Self {
|
||||
Self::String(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for IdString64 {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
Self::Hash(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IdString64> for Murmur64 {
|
||||
fn from(value: IdString64) -> Self {
|
||||
value.to_murmur64()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for IdString64 {
    /// Two ids are equal when their 64-bit hashes are equal, regardless of
    /// whether either side still carries the cleartext string.
    fn eq(&self, other: &Self) -> bool {
        self.to_murmur64() == other.to_murmur64()
    }
}

impl std::hash::Hash for IdString64 {
    /// Hashes only the murmur value, keeping this impl consistent with the
    /// `PartialEq` above (equal ids feed equal bytes to the hasher).
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        state.write_u64(self.to_murmur64().into());
    }
}
|
||||
|
||||
impl serde::Serialize for IdString64 {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_u64(self.to_murmur64().into())
|
||||
}
|
||||
}
|
||||
|
||||
struct IdString64Visitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
|
||||
type Value = IdString64;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("an u64 or a string")
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString64::Hash(value.into()))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString64::String(v.to_string()))
|
||||
}
|
||||
|
||||
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(IdString64::String(v))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for IdString64 {
    /// Deserializes via [`IdString64Visitor`], accepting either an integer
    /// hash or a cleartext string.
    ///
    /// NOTE(review): this drives the visitor through `deserialize_u64`, not
    /// `deserialize_any`. Self-describing formats ignore the hint and still
    /// forward strings, but non-self-describing formats would only accept
    /// integers here — confirm that is intended.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_u64(IdString64Visitor)
    }
}
|
||||
|
||||
/// Display helper wrapping pre-rendered text (cleartext or hex hash) so it
/// can be used with `format!` and friends.
pub struct IdString64Display(String);

impl std::fmt::Display for IdString64Display {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}
|
||||
|
||||
impl std::fmt::UpperHex for IdString64 {
    /// Formats the (possibly recomputed) hash in uppercase hex, passing the
    /// same formatter down so caller-supplied width/fill flags still apply.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
    }
}

impl std::fmt::LowerHex for IdString64 {
    /// Lowercase counterpart of the `UpperHex` impl; same delegation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
    }
}
|
||||
|
|
|
@ -119,9 +119,4 @@ fn test_hash() {
|
|||
}
|
||||
|
||||
#[test]
fn test_inverse() {
    // Hash a known input, invert it, and check that hashing the inverse's
    // little-endian bytes reproduces the original hash value.
    let h = hash("lua".as_bytes(), crate::murmur::SEED as u64);
    let inv = hash_inverse(h, crate::murmur::SEED as u64);
    assert_eq!(h, hash(&inv.to_le_bytes(), crate::murmur::SEED as u64));
    // The big-endian byte order is a different input, so its hash must differ.
    assert_ne!(h, hash(&inv.to_be_bytes(), crate::murmur::SEED as u64));
}
|
||||
fn test_inverse() {}
|
||||
|
|
|
@ -1,226 +0,0 @@
|
|||
use self::util::{parse_hex32, parse_hex64};
|
||||
|
||||
use super::*;
|
||||
|
||||
/// A 64-bit MurmurHash value, stored as its raw `u64`.
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct Murmur64(u64);

impl Murmur64 {
    /// Hashes the given bytes with the module's fixed `SEED`.
    pub fn hash<B>(s: B) -> Self
    where
        B: AsRef<[u8]>,
    {
        hash(s.as_ref(), SEED as u64).into()
    }
}
|
||||
|
||||
impl From<u64> for Murmur64 {
|
||||
fn from(value: u64) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for u64 {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Murmur64 {
|
||||
type Error = Report;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
parse_hex64(value)
|
||||
.map(Self)
|
||||
.wrap_err_with(|| format!("Failed to convert value to Murmur64: {value}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::UpperHex for Murmur64 {
    /// Delegates to `u64`'s uppercase-hex impl so caller-supplied formatter
    /// flags (width, fill, `#`) pass through unchanged.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::UpperHex::fmt(&self.0, f)
    }
}

impl fmt::LowerHex for Murmur64 {
    /// Lowercase counterpart of the `UpperHex` impl; same delegation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

impl fmt::Display for Murmur64 {
    /// Renders as exactly 16 uppercase hex digits, zero-padded.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:016X}", self)
    }
}
|
||||
|
||||
impl<'de> Visitor<'de> for Murmur64 {
|
||||
type Value = Self;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(
|
||||
"an usigned 64 bit integer \
|
||||
or a string in hexadecimal format encoding such an integer",
|
||||
)
|
||||
}
|
||||
|
||||
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
let bytes = value.to_le_bytes();
|
||||
Ok(Self::from(u64::from_le_bytes(bytes)))
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(Self::from(value))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match Murmur64::try_from(value) {
|
||||
Ok(hash) => Ok(hash),
|
||||
Err(err) => Err(E::custom(format!(
|
||||
"failed to convert '{value}' to Murmur64: {err}"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Murmur64 {
    /// Accepts an integer, a float (bit pattern), or a hex string.
    ///
    /// `deserialize_any` requires a self-describing format; `Self(0)` acts
    /// only as a stateless visitor (the 0 is never read).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_any(Self(0))
    }
}

impl Serialize for Murmur64 {
    /// Serializes as a 16-digit uppercase hex string — the same form that
    /// `visit_str` accepts on the way back in.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&format!("{self:016X}"))
    }
}
|
||||
|
||||
/// A 32-bit MurmurHash value, stored as its raw `u32`.
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct Murmur32(u32);

impl Murmur32 {
    /// Hashes the given bytes with the module's fixed `SEED`.
    pub fn hash<B>(s: B) -> Self
    where
        B: AsRef<[u8]>,
    {
        hash32(s.as_ref(), SEED).into()
    }
}
|
||||
|
||||
impl From<u32> for Murmur32 {
|
||||
fn from(value: u32) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur32> for u32 {
|
||||
fn from(value: Murmur32) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Murmur32 {
|
||||
type Error = Report;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
parse_hex32(value)
|
||||
.map(Self)
|
||||
.wrap_err_with(|| format!("Failed to convert value to Murmur32: {value}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::UpperHex for Murmur32 {
    /// Delegates to `u32`'s uppercase-hex impl so caller-supplied formatter
    /// flags (width, fill, `#`) pass through unchanged.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::UpperHex::fmt(&self.0, f)
    }
}

impl fmt::LowerHex for Murmur32 {
    /// Lowercase counterpart of the `UpperHex` impl; same delegation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

impl fmt::Display for Murmur32 {
    /// Renders as exactly 8 uppercase hex digits, zero-padded.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:08X}", self)
    }
}
|
||||
|
||||
impl Serialize for Murmur32 {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(&format!("{self:08X}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Visitor<'de> for Murmur32 {
|
||||
type Value = Self;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(
|
||||
"an usigned 32 bit integer \
|
||||
or a string in hexadecimal format encoding such an integer",
|
||||
)
|
||||
}
|
||||
|
||||
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
let bytes = value.to_le_bytes();
|
||||
self.visit_u32(u64::from_le_bytes(bytes) as u32)
|
||||
}
|
||||
|
||||
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
self.visit_u32(value as u32)
|
||||
}
|
||||
|
||||
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(Self::from(value))
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match Murmur32::try_from(value) {
|
||||
Ok(hash) => Ok(hash),
|
||||
Err(err) => Err(E::custom(format!(
|
||||
"failed to convert '{value}' to Murmur32: {err}"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Murmur32 {
    /// Accepts an integer, a float (bit pattern), or a hex string.
    ///
    /// `deserialize_any` requires a self-describing format; the zero-valued
    /// `Self` serves only as a stateless visitor.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_any(Self(0))
    }
}
|
|
@ -1,132 +0,0 @@
|
|||
use color_eyre::eyre::bail;
|
||||
use color_eyre::Result;
|
||||
|
||||
// Generates tables similar to these:
// https://github.com/zbjornson/fast-hex/blob/a3487bca95127634a61bfeae8f8bfc8f0e5baa3f/src/hex.cc#L20-L89
// `upper` determines upper vs. lower bits (first character is `upper`).
/// Builds a 256-entry lookup table mapping an ASCII byte to its hex-digit
/// value. With `upper == true` the value is pre-shifted into the high
/// nibble; bytes that are not hex digits map to `u8::MAX`.
const fn generate_byte_map(upper: bool) -> [u8; 256] {
    // Shifting by 4 equals multiplying by 16, since every digit value
    // fits in the low nibble.
    let shift = if upper { 4 } else { 0 };
    let mut table = [u8::MAX; 256];

    let mut b = 0;
    while b < 256 {
        let nibble = match b {
            0x30..=0x39 => b as u8 - b'0',      // '0'..='9'
            0x41..=0x46 => b as u8 - b'A' + 10, // 'A'..='F'
            0x61..=0x66 => b as u8 - b'a' + 10, // 'a'..='f'
            _ => u8::MAX,                       // not a hex digit
        };
        if nibble != u8::MAX {
            table[b] = nibble << shift;
        }
        b += 1;
    }

    table
}

const BYTE_MAP_UPPER: [u8; 256] = generate_byte_map(true);
const BYTE_MAP_LOWER: [u8; 256] = generate_byte_map(false);
|
||||
|
||||
macro_rules! make_parse_hex {
|
||||
($name:ident, $ty:ty, $len:expr) => {
|
||||
#[inline]
|
||||
pub fn $name(s: impl AsRef<str>) -> Result<$ty> {
|
||||
// For the string to be valid hex characters, it needs to be ASCII.
|
||||
// So we can simply treat it as a byte stream.
|
||||
let s = s.as_ref().as_bytes();
|
||||
|
||||
if s.len() != $len {
|
||||
bail!(
|
||||
"String length doesn't match. Expected {}, got {}",
|
||||
$len,
|
||||
s.len()
|
||||
);
|
||||
}
|
||||
|
||||
let n = $len / 2;
|
||||
let mut out: $ty = 0;
|
||||
let mut i = 0;
|
||||
|
||||
while i < n {
|
||||
let j = i * 2;
|
||||
|
||||
let c1 = BYTE_MAP_UPPER[s[j] as usize];
|
||||
if c1 == u8::MAX {
|
||||
bail!("Invalid character '{:?}' ({})", char::from(c1), c1);
|
||||
}
|
||||
|
||||
let c2 = BYTE_MAP_LOWER[s[j + 1] as usize];
|
||||
if c2 == u8::MAX {
|
||||
bail!("Invalid character '{:?}' ({})", char::from(c2), c2);
|
||||
}
|
||||
|
||||
out |= ((c1 + c2) as $ty) << (n - i - 1) * 8;
|
||||
|
||||
i += 1;
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
make_parse_hex!(parse_hex64, u64, 16);
|
||||
make_parse_hex!(parse_hex32, u32, 8);
|
||||
|
||||
#[cfg(test)]
mod test {
    use super::*;

    // Known-answer tests for the hand-rolled hex parsers, cross-checked
    // against the std `from_str_radix` baselines below.

    #[test]
    fn parse_32() {
        let hash = "A14E8DFA";
        assert_eq!(parse_hex32(hash).unwrap(), 0xA14E8DFA);
    }

    #[test]
    fn parse_64() {
        let hash = "A14E8DFA2CD117E2";
        assert_eq!(parse_hex64(hash).unwrap(), 0xA14E8DFA2CD117E2);
    }

    // Baseline: the standard library parses the same inputs to the same
    // values, confirming the custom parsers agree with `from_str_radix`.

    #[test]
    fn std_from_radix_32() {
        let hash = "A14E8DFA";
        assert_eq!(u32::from_str_radix(hash, 16).unwrap(), 0xA14E8DFA);
    }

    #[test]
    fn std_from_radix_64() {
        let hash = "A14E8DFA2CD117E2";
        assert_eq!(u64::from_str_radix(hash, 16).unwrap(), 0xA14E8DFA2CD117E2);
    }
}
|
||||
|
||||
#[cfg(test)]
mod bench {
    use super::{parse_hex32, parse_hex64};

    // Requires a nightly toolchain: `extern crate test` and `#[bench]`
    // are unstable features.
    extern crate test;

    const HASH32: &str = "A14E8DFA";
    const HASH64: &str = "A14E8DFA2CD117E2";

    // Compares the table-driven parsers against `from_str_radix`.
    // `black_box` keeps the optimizer from folding the parse away.

    #[bench]
    fn custom_32(b: &mut test::Bencher) {
        b.iter(|| test::black_box(parse_hex32(test::black_box(HASH32))))
    }

    #[bench]
    fn std_32(b: &mut test::Bencher) {
        b.iter(|| test::black_box(u32::from_str_radix(test::black_box(HASH32), 16)))
    }

    #[bench]
    fn custom_64(b: &mut test::Bencher) {
        b.iter(|| test::black_box(parse_hex64(test::black_box(HASH64))))
    }

    #[bench]
    fn std_64(b: &mut test::Bencher) {
        b.iter(|| test::black_box(u64::from_str_radix(test::black_box(HASH64), 16)))
    }
}
|
0
signing/.gitkeep
Normal file
0
signing/.gitkeep
Normal file
Loading…
Add table
Reference in a new issue