Compare commits
No commits in common. "master" and "v0.2.0" have entirely different histories.
125 changed files with 2056 additions and 18522 deletions
|
@ -1,7 +0,0 @@
|
||||||
FROM dtmt-ci-base-linux
|
|
||||||
|
|
||||||
COPY . /src/dtmt
|
|
||||||
COPY --from=dtmt-ci-base-linux /src/*.lib /src/*.so /src/dtmt/lib/oodle/
|
|
||||||
RUN --mount=type=cache,id=cargo-registry,target=/cargo/registry \
|
|
||||||
--mount=type=cache,id=cargo-target,target=/src/dtmt/target \
|
|
||||||
cargo build --release --locked
|
|
|
@ -1,35 +0,0 @@
|
||||||
FROM dtmt-ci-base-msvc
|
|
||||||
|
|
||||||
# Create dummy crates and copy their Cargo.toml, so that dependencies can be cached
|
|
||||||
RUN set -e; \
|
|
||||||
cargo new --bin crates/dtmt; \
|
|
||||||
cargo new --bin crates/dtmm; \
|
|
||||||
cargo new --lib lib/dtmt-shared; \
|
|
||||||
cargo new --lib lib/nexusmods; \
|
|
||||||
cargo new --lib lib/sdk; \
|
|
||||||
cargo new --lib lib/serde_sjson; \
|
|
||||||
cargo new --lib lib/ansi-parser
|
|
||||||
|
|
||||||
COPY Cargo.toml Cargo.lock /src/dtmt/
|
|
||||||
COPY crates/dtmt/Cargo.toml /src/dtmt/crates/dtmt/
|
|
||||||
COPY crates/dtmm/Cargo.toml /src/dtmt/crates/dtmm/
|
|
||||||
COPY lib/dtmt-shared/Cargo.toml /src/dtmt/lib/dtmt-shared/
|
|
||||||
COPY lib/nexusmods/Cargo.toml /src/dtmt/lib/nexusmods/
|
|
||||||
COPY lib/sdk/Cargo.toml /src/dtmt/lib/sdk/
|
|
||||||
COPY lib/serde_sjson/Cargo.toml /src/dtmt/lib/serde_sjson/
|
|
||||||
COPY lib/ansi-parser/Cargo.toml /src/dtmt/lib/ansi-parser/
|
|
||||||
|
|
||||||
# Crates with build scripts cannot be split that way, but they shouldn't change too often
|
|
||||||
COPY lib/luajit2-sys /src/dtmt/lib/luajit2-sys
|
|
||||||
COPY lib/oodle /src/dtmt/lib/oodle
|
|
||||||
# color-eyre needs to be copied, too, then, as it's used by `oodle`
|
|
||||||
COPY lib/color-eyre /src/dtmt/lib/color-eyre
|
|
||||||
COPY --from=dtmt-ci-base-msvc /src/*.lib /src/dtmt/lib/oodle/
|
|
||||||
|
|
||||||
RUN cargo build --release --target x86_64-pc-windows-msvc --locked -Zbuild-std
|
|
||||||
RUN rm -r crates lib
|
|
||||||
|
|
||||||
COPY . /src/dtmt
|
|
||||||
COPY --from=dtmt-ci-base-msvc /src/*.lib /src/dtmt/lib/oodle/
|
|
||||||
|
|
||||||
RUN cargo build --release --target x86_64-pc-windows-msvc --frozen -Zbuild-std
|
|
|
@ -1,138 +0,0 @@
|
||||||
# https://jake-shadle.github.io/xwin/
|
|
||||||
FROM debian:bullseye-slim as xwin
|
|
||||||
|
|
||||||
ARG XWIN_VERSION=0.5.2
|
|
||||||
ARG XWIN_PREFIX="xwin-$XWIN_VERSION-x86_64-unknown-linux-musl"
|
|
||||||
ADD https://github.com/Jake-Shadle/xwin/releases/download/$XWIN_VERSION/$XWIN_PREFIX.tar.gz /root/$XWIN_PREFIX.tar.gz
|
|
||||||
|
|
||||||
RUN set -eux; \
|
|
||||||
apt-get update; \
|
|
||||||
apt-get install --no-install-recommends -y \
|
|
||||||
tar \
|
|
||||||
; \
|
|
||||||
# Install xwin to cargo/bin via github release. Note you could also just use `cargo install xwin`.
|
|
||||||
tar -xzv -f /root/$XWIN_PREFIX.tar.gz -C /usr/bin --strip-components=1 $XWIN_PREFIX/xwin; \
|
|
||||||
apt-get remove -y --auto-remove; \
|
|
||||||
rm -rf \
|
|
||||||
/var/lib/apt/lists/* \
|
|
||||||
/root/$XWIN_PREFIX.tar.gz;
|
|
||||||
|
|
||||||
RUN set -eux; \
|
|
||||||
# Splat the CRT and SDK files to /xwin/crt and /xwin/sdk respectively
|
|
||||||
xwin \
|
|
||||||
--log-level debug \
|
|
||||||
--cache-dir /root/.xwin-cache \
|
|
||||||
--manifest-version 16 \
|
|
||||||
--accept-license \
|
|
||||||
splat \
|
|
||||||
--output /xwin; \
|
|
||||||
# Even though this build step only exists temporary, to copy the
|
|
||||||
# final data out of, it still generates a cache entry on the Docker host.
|
|
||||||
# And to keep that to a minimum, we still delete the stuff we don't need.
|
|
||||||
rm -rf /root/.xwin-cache;
|
|
||||||
|
|
||||||
FROM rust:slim-bullseye as linux
|
|
||||||
|
|
||||||
RUN set -eux; \
|
|
||||||
apt-get update; \
|
|
||||||
apt-get install --no-install-recommends -y \
|
|
||||||
build-essential \
|
|
||||||
cmake \
|
|
||||||
curl \
|
|
||||||
git \
|
|
||||||
gpg \
|
|
||||||
jq \
|
|
||||||
libatk1.0-dev \
|
|
||||||
libclang-13-dev \
|
|
||||||
libglib2.0-dev \
|
|
||||||
libgtk-3-dev \
|
|
||||||
libpango1.0-dev \
|
|
||||||
libssl-dev \
|
|
||||||
libzstd-dev \
|
|
||||||
pkg-config; \
|
|
||||||
apt-get remove -y --auto-remove; \
|
|
||||||
rm -rf /var/lib/apt/lists/*; \
|
|
||||||
rustup default nightly
|
|
||||||
|
|
||||||
WORKDIR /src/dtmt
|
|
||||||
|
|
||||||
COPY lib/oodle/*.so lib/oodle/*.a /src/
|
|
||||||
|
|
||||||
FROM linux as msvc
|
|
||||||
|
|
||||||
ARG LLVM_VERSION=18
|
|
||||||
ENV KEYRINGS /usr/local/share/keyrings
|
|
||||||
|
|
||||||
ADD https://apt.llvm.org/llvm-snapshot.gpg.key /root/llvm-snapshot.gpg.key
|
|
||||||
ADD https://dl.winehq.org/wine-builds/winehq.key /root/winehq.key
|
|
||||||
|
|
||||||
RUN set -eux; \
|
|
||||||
mkdir -p $KEYRINGS; \
|
|
||||||
# clang/lld/llvm
|
|
||||||
gpg --dearmor > $KEYRINGS/llvm.gpg < /root/llvm-snapshot.gpg.key; \
|
|
||||||
# wine
|
|
||||||
gpg --dearmor > $KEYRINGS/winehq.gpg < /root/winehq.key; \
|
|
||||||
echo "deb [signed-by=$KEYRINGS/llvm.gpg] http://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.list; \
|
|
||||||
echo "deb [signed-by=$KEYRINGS/winehq.gpg] https://dl.winehq.org/wine-builds/debian/ bullseye main" > /etc/apt/sources.list.d/winehq.list; \
|
|
||||||
dpkg --add-architecture i386; \
|
|
||||||
apt-get update; \
|
|
||||||
apt-get install --no-install-recommends -y \
|
|
||||||
libclang-${LLVM_VERSION}-dev \
|
|
||||||
gcc-mingw-w64-x86-64 \
|
|
||||||
clang-${LLVM_VERSION} \
|
|
||||||
llvm-${LLVM_VERSION} \
|
|
||||||
lld-${LLVM_VERSION} \
|
|
||||||
winehq-staging \
|
|
||||||
; \
|
|
||||||
# ensure that clang/clang++ are callable directly
|
|
||||||
ln -s clang-${LLVM_VERSION} /usr/bin/clang && ln -s clang /usr/bin/clang++ && ln -s lld-${LLVM_VERSION} /usr/bin/ld.lld; \
|
|
||||||
# We also need to setup symlinks ourselves for the MSVC shims because they aren't in the debian packages
|
|
||||||
ln -s clang-${LLVM_VERSION} /usr/bin/clang-cl && ln -s llvm-ar-${LLVM_VERSION} /usr/bin/llvm-lib && ln -s lld-link-${LLVM_VERSION} /usr/bin/lld-link; \
|
|
||||||
# Verify the symlinks are correct
|
|
||||||
clang++ -v; \
|
|
||||||
ld.lld -v; \
|
|
||||||
# Doesn't have an actual -v/--version flag, but it still exits with 0
|
|
||||||
llvm-lib -v; \
|
|
||||||
clang-cl -v; \
|
|
||||||
lld-link --version; \
|
|
||||||
# Use clang instead of gcc when compiling and linking binaries targeting the host (eg proc macros, build files)
|
|
||||||
update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100; \
|
|
||||||
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 100; \
|
|
||||||
update-alternatives --install /usr/bin/ld ld /usr/bin/ld.lld 100; \
|
|
||||||
rustup target add x86_64-pc-windows-msvc; \
|
|
||||||
rustup component add rust-src; \
|
|
||||||
# Remove unneeded files to reduce image size
|
|
||||||
apt-get remove -y --auto-remove; \
|
|
||||||
rm -rf \
|
|
||||||
/var/lib/apt/lists/* \
|
|
||||||
/root/*.key;
|
|
||||||
|
|
||||||
COPY lib/oodle/*.lib /src
|
|
||||||
COPY --from=xwin /xwin /xwin
|
|
||||||
|
|
||||||
# Note that we're using the full target triple for each variable instead of the
|
|
||||||
# simple CC/CXX/AR shorthands to avoid issues when compiling any C/C++ code for
|
|
||||||
# build dependencies that need to compile and execute in the host environment
|
|
||||||
ENV CC_x86_64_pc_windows_msvc="clang-cl" \
|
|
||||||
CXX_x86_64_pc_windows_msvc="clang-cl" \
|
|
||||||
AR_x86_64_pc_windows_msvc="llvm-lib" \
|
|
||||||
# wine can be quite spammy with log messages and they're generally uninteresting
|
|
||||||
WINEDEBUG="-all" \
|
|
||||||
# Use wine to run test executables
|
|
||||||
CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUNNER="wine" \
|
|
||||||
# Note that we only disable unused-command-line-argument here since clang-cl
|
|
||||||
# doesn't implement all of the options supported by cl, but the ones it doesn't
|
|
||||||
# are _generally_ not interesting.
|
|
||||||
CL_FLAGS="-Wno-unused-command-line-argument -fuse-ld=lld-link /imsvc/xwin/crt/include /imsvc/xwin/sdk/include/ucrt /imsvc/xwin/sdk/include/um /imsvc/xwin/sdk/include/shared" \
|
|
||||||
# Let cargo know what linker to invoke if you haven't already specified it
|
|
||||||
# in a .cargo/config.toml file
|
|
||||||
CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER="lld-link" \
|
|
||||||
CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_RUSTFLAGS="-Lnative=/xwin/crt/lib/x86_64 -Lnative=/xwin/sdk/lib/um/x86_64 -Lnative=/xwin/sdk/lib/ucrt/x86_64"
|
|
||||||
|
|
||||||
# These are separate since docker/podman won't transform environment variables defined in the same ENV block
|
|
||||||
ENV CFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS" \
|
|
||||||
CXXFLAGS_x86_64_pc_windows_msvc="$CL_FLAGS"
|
|
||||||
|
|
||||||
# Run wineboot just to setup the default WINEPREFIX so we don't do it every
|
|
||||||
# container run
|
|
||||||
RUN wine wineboot --init
|
|
|
@ -1,230 +0,0 @@
|
||||||
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config
|
|
||||||
---
|
|
||||||
|
|
||||||
# The actual CI pipeline that is run per branch
|
|
||||||
resource_types:
|
|
||||||
- name: gitea-package
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/gitea-package
|
|
||||||
|
|
||||||
- name: gitea-status
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/gitea-status
|
|
||||||
|
|
||||||
- name: gitea-pr
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/gitea-pr
|
|
||||||
|
|
||||||
|
|
||||||
resources:
|
|
||||||
- name: repo
|
|
||||||
type: git
|
|
||||||
source:
|
|
||||||
uri: http://forgejo:3000/bitsquid_dt/dtmt
|
|
||||||
branch: master
|
|
||||||
|
|
||||||
- name: repo-pr
|
|
||||||
type: gitea-pr
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
owner: ((owner))
|
|
||||||
repo: ((repo))
|
|
||||||
url: https://git.sclu1034.dev
|
|
||||||
|
|
||||||
- name: gitea-package
|
|
||||||
type: gitea-package
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
type: generic
|
|
||||||
name: dtmt
|
|
||||||
|
|
||||||
|
|
||||||
- name: status-build-msvc
|
|
||||||
type: gitea-status
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
repo: dtmt
|
|
||||||
context: build/msvc
|
|
||||||
description: "Build for the target platform: msvc"
|
|
||||||
|
|
||||||
- name: status-build-linux
|
|
||||||
type: gitea-status
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
repo: dtmt
|
|
||||||
context: build/linux
|
|
||||||
description: "Build for the target platform: linux"
|
|
||||||
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- name: set-pipelines
|
|
||||||
plan:
|
|
||||||
- in_parallel:
|
|
||||||
- get: repo-pr
|
|
||||||
trigger: true
|
|
||||||
|
|
||||||
- get: repo
|
|
||||||
|
|
||||||
- load_var: prs
|
|
||||||
file: repo-pr/prs.json
|
|
||||||
|
|
||||||
- across:
|
|
||||||
- var: pr
|
|
||||||
values: ((.:prs))
|
|
||||||
set_pipeline: dtmt-pr
|
|
||||||
file: repo/.ci/pipelines/pr.yml
|
|
||||||
vars:
|
|
||||||
pr: ((.:pr))
|
|
||||||
gitea_api_key: ((gitea_api_key))
|
|
||||||
instance_vars:
|
|
||||||
number: ((.:pr.number))
|
|
||||||
|
|
||||||
|
|
||||||
- name: build-msvc
|
|
||||||
on_success:
|
|
||||||
put: state-success
|
|
||||||
resource: status-build-msvc
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: success
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
on_failure:
|
|
||||||
put: state-failure
|
|
||||||
resource: status-build-msvc
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: failure
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
|
|
||||||
- load_var: git_sha
|
|
||||||
file: repo/.git/ref
|
|
||||||
|
|
||||||
- put: state-pending
|
|
||||||
resource: status-build-msvc
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: pending
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
- task: build
|
|
||||||
file: repo/.ci/tasks/build.yml
|
|
||||||
vars:
|
|
||||||
pr: ""
|
|
||||||
target: msvc
|
|
||||||
|
|
||||||
- load_var: version_number
|
|
||||||
reveal: true
|
|
||||||
file: artifact/version
|
|
||||||
|
|
||||||
- put: package
|
|
||||||
resource: gitea-package
|
|
||||||
no_get: true
|
|
||||||
inputs:
|
|
||||||
- artifact
|
|
||||||
params:
|
|
||||||
version: ((.:version_number))
|
|
||||||
fail_fast: true
|
|
||||||
override: true
|
|
||||||
globs:
|
|
||||||
- artifact/*.exe
|
|
||||||
- artifact/*.exe.sha256
|
|
||||||
|
|
||||||
- put: package
|
|
||||||
resource: gitea-package
|
|
||||||
no_get: true
|
|
||||||
inputs:
|
|
||||||
- artifact
|
|
||||||
params:
|
|
||||||
version: master
|
|
||||||
fail_fast: true
|
|
||||||
override: true
|
|
||||||
globs:
|
|
||||||
- artifact/*.exe
|
|
||||||
- artifact/*.exe.sha256
|
|
||||||
|
|
||||||
- name: build-linux
|
|
||||||
on_success:
|
|
||||||
put: state-success
|
|
||||||
resource: status-build-linux
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: success
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
on_failure:
|
|
||||||
put: state-failure
|
|
||||||
resource: status-build-linux
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: failure
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
|
|
||||||
- load_var: git_sha
|
|
||||||
file: repo/.git/ref
|
|
||||||
|
|
||||||
- put: state-pending
|
|
||||||
resource: status-build-linux
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: pending
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
- task: build
|
|
||||||
file: repo/.ci/tasks/build.yml
|
|
||||||
vars:
|
|
||||||
pr: ""
|
|
||||||
target: linux
|
|
||||||
gitea_url: http://forgejo:3000
|
|
||||||
gitea_api_key: ((gitea_api_key))
|
|
||||||
|
|
||||||
- load_var: version_number
|
|
||||||
reveal: true
|
|
||||||
file: artifact/version
|
|
||||||
|
|
||||||
- put: package
|
|
||||||
resource: gitea-package
|
|
||||||
no_get: true
|
|
||||||
inputs:
|
|
||||||
- artifact
|
|
||||||
params:
|
|
||||||
version: ((.:version_number))
|
|
||||||
fail_fast: true
|
|
||||||
override: true
|
|
||||||
globs:
|
|
||||||
- artifact/dtmt
|
|
||||||
- artifact/dtmm
|
|
||||||
- artifact/dtmm.sha256
|
|
||||||
- artifact/dtmt.sha256
|
|
||||||
|
|
||||||
- put: package
|
|
||||||
resource: gitea-package
|
|
||||||
no_get: true
|
|
||||||
inputs:
|
|
||||||
- artifact
|
|
||||||
params:
|
|
||||||
version: master
|
|
||||||
fail_fast: true
|
|
||||||
override: true
|
|
||||||
globs:
|
|
||||||
- artifact/dtmt
|
|
||||||
- artifact/dtmm
|
|
||||||
- artifact/dtmm.sha256
|
|
||||||
- artifact/dtmt.sha256
|
|
|
@ -1,28 +0,0 @@
|
||||||
---
|
|
||||||
|
|
||||||
# The actual CI pipeline that is run per branch
|
|
||||||
|
|
||||||
resources:
|
|
||||||
- name: repo
|
|
||||||
type: git
|
|
||||||
source:
|
|
||||||
uri: https://git.sclu1034.dev/bitsquid_dt/dtmt
|
|
||||||
branch: ((branch))
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- name: build-msvc
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
- task: build
|
|
||||||
file: repo/.ci/tasks/build.yml
|
|
||||||
vars:
|
|
||||||
target: msvc
|
|
||||||
- name: build-linux
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
- task: build
|
|
||||||
file: repo/.ci/tasks/build.yml
|
|
||||||
vars:
|
|
||||||
target: linux
|
|
|
@ -1,217 +0,0 @@
|
||||||
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/Config
|
|
||||||
---
|
|
||||||
|
|
||||||
# The actual CI pipeline that is run per branch
|
|
||||||
resource_types:
|
|
||||||
- name: gitea-package
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/gitea-package
|
|
||||||
|
|
||||||
- name: gitea-status
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/gitea-status
|
|
||||||
|
|
||||||
|
|
||||||
resources:
|
|
||||||
- name: repo
|
|
||||||
type: git
|
|
||||||
source:
|
|
||||||
uri: http://forgejo:3000/bitsquid_dt/dtmt
|
|
||||||
branch: ((pr.head.ref))
|
|
||||||
|
|
||||||
- name: gitea-package
|
|
||||||
type: gitea-package
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
type: generic
|
|
||||||
name: dtmt
|
|
||||||
|
|
||||||
- name: pr-status-lint-clippy
|
|
||||||
type: gitea-status
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
repo: dtmt
|
|
||||||
context: lint/clippy
|
|
||||||
description: Checking for common mistakes and opportunities for code improvement
|
|
||||||
|
|
||||||
- name: pr-status-build-msvc
|
|
||||||
type: gitea-status
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
repo: dtmt
|
|
||||||
context: build/msvc
|
|
||||||
description: "Build for the target platform: msvc"
|
|
||||||
|
|
||||||
- name: pr-status-build-linux
|
|
||||||
type: gitea-status
|
|
||||||
source:
|
|
||||||
access_token: ((gitea_api_key))
|
|
||||||
url: http://forgejo:3000
|
|
||||||
owner: bitsquid_dt
|
|
||||||
repo: dtmt
|
|
||||||
context: build/linux
|
|
||||||
description: "Build for the target platform: linux"
|
|
||||||
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- name: clippy
|
|
||||||
on_success:
|
|
||||||
put: state-success
|
|
||||||
resource: pr-status-lint-clippy
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: success
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
on_failure:
|
|
||||||
put: state-failure
|
|
||||||
resource: pr-status-lint-clippy
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: failure
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
|
|
||||||
- load_var: git_sha
|
|
||||||
file: repo/.git/ref
|
|
||||||
|
|
||||||
- put: state-pending
|
|
||||||
resource: pr-status-lint-clippy
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: pending
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
- task: check
|
|
||||||
file: repo/.ci/tasks/clippy.yml
|
|
||||||
vars:
|
|
||||||
gitea_api_key: ((gitea_api_key))
|
|
||||||
|
|
||||||
|
|
||||||
- name: build-msvc
|
|
||||||
on_success:
|
|
||||||
put: state-success
|
|
||||||
resource: pr-status-build-msvc
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: success
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
on_failure:
|
|
||||||
put: state-failure
|
|
||||||
resource: pr-status-build-msvc
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: failure
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
|
|
||||||
- load_var: git_sha
|
|
||||||
file: repo/.git/ref
|
|
||||||
|
|
||||||
- put: state-pending
|
|
||||||
resource: pr-status-build-msvc
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: pending
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
- task: build
|
|
||||||
file: repo/.ci/tasks/build.yml
|
|
||||||
vars:
|
|
||||||
target: msvc
|
|
||||||
pr: ((pr))
|
|
||||||
gitea_url: http://forgejo:3000
|
|
||||||
gitea_api_key: ((gitea_api_key))
|
|
||||||
|
|
||||||
- load_var: version_number
|
|
||||||
reveal: true
|
|
||||||
file: artifact/version
|
|
||||||
|
|
||||||
- put: package
|
|
||||||
resource: gitea-package
|
|
||||||
no_get: true
|
|
||||||
inputs:
|
|
||||||
- artifact
|
|
||||||
params:
|
|
||||||
version: ((.:version_number))
|
|
||||||
fail_fast: true
|
|
||||||
override: true
|
|
||||||
globs:
|
|
||||||
- artifact/dtmt
|
|
||||||
- artifact/dtmm
|
|
||||||
- artifact/*.exe
|
|
||||||
- artifact/*.sha256
|
|
||||||
|
|
||||||
- name: build-linux
|
|
||||||
on_success:
|
|
||||||
put: state-success
|
|
||||||
resource: pr-status-build-linux
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: success
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
on_failure:
|
|
||||||
put: state-failure
|
|
||||||
resource: pr-status-build-linux
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: failure
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
plan:
|
|
||||||
- get: repo
|
|
||||||
trigger: true
|
|
||||||
|
|
||||||
- load_var: git_sha
|
|
||||||
file: repo/.git/ref
|
|
||||||
|
|
||||||
- put: state-pending
|
|
||||||
resource: pr-status-build-linux
|
|
||||||
no_get: true
|
|
||||||
params:
|
|
||||||
state: pending
|
|
||||||
sha: ((.:git_sha))
|
|
||||||
|
|
||||||
- task: build
|
|
||||||
file: repo/.ci/tasks/build.yml
|
|
||||||
vars:
|
|
||||||
target: linux
|
|
||||||
pr: ((pr))
|
|
||||||
gitea_url: http://forgejo:3000
|
|
||||||
gitea_api_key: ((gitea_api_key))
|
|
||||||
|
|
||||||
- load_var: version_number
|
|
||||||
reveal: true
|
|
||||||
file: artifact/version
|
|
||||||
|
|
||||||
- put: package
|
|
||||||
resource: gitea-package
|
|
||||||
no_get: true
|
|
||||||
inputs:
|
|
||||||
- artifact
|
|
||||||
params:
|
|
||||||
version: ((.:version_number))
|
|
||||||
fail_fast: true
|
|
||||||
override: true
|
|
||||||
globs:
|
|
||||||
- artifact/dtmt
|
|
||||||
- artifact/dtmm
|
|
||||||
- artifact/*.exe
|
|
||||||
- artifact/*.sha256
|
|
||||||
|
|
|
@ -1,62 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
if [ -n "$OUTPUT" ]; then
|
|
||||||
OUTPUT="$PWD/$OUTPUT"
|
|
||||||
else
|
|
||||||
OUTPUT=$(mktemp -d)
|
|
||||||
fi
|
|
||||||
|
|
||||||
title() {
|
|
||||||
printf "\033[1m%s\033[0m\n" "$1"
|
|
||||||
}
|
|
||||||
|
|
||||||
install_artifact() {
|
|
||||||
install -v -t "$OUTPUT/" "$1"
|
|
||||||
sha256sum "$1" | cut -d' ' -f1 > "$OUTPUT/$(basename "$1").sha256"
|
|
||||||
}
|
|
||||||
|
|
||||||
cd "repo"
|
|
||||||
|
|
||||||
PR=${PR:-}
|
|
||||||
|
|
||||||
if [ -n "$PR" ]; then
|
|
||||||
title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
|
|
||||||
ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$(cat .git/ref || echo "HEAD")" 2>/dev/null || echo 'manual')"
|
|
||||||
elif [ -f ".git/branch"]; then
|
|
||||||
ref=$(cat .git/branch)-$(git rev-parse --short $ref)
|
|
||||||
else
|
|
||||||
ref=$(git rev-parse --short "$(cat .git/ref || echo "HEAD")")
|
|
||||||
fi
|
|
||||||
|
|
||||||
title "Version: '$ref'"
|
|
||||||
echo "$ref" > "$OUTPUT/version"
|
|
||||||
|
|
||||||
case "$TARGET" in
|
|
||||||
msvc)
|
|
||||||
cp /src/*.lib ./lib/oodle/
|
|
||||||
|
|
||||||
title "Building project for target $TARGET"
|
|
||||||
cargo build --color always --locked --release --target x86_64-pc-windows-msvc -Zbuild-std
|
|
||||||
|
|
||||||
title "Install artifacts"
|
|
||||||
install_artifact target/x86_64-pc-windows-msvc/release/dtmt.exe
|
|
||||||
install_artifact target/x86_64-pc-windows-msvc/release/dtmm.exe
|
|
||||||
;;
|
|
||||||
linux)
|
|
||||||
cp /src/*.a ./lib/oodle/
|
|
||||||
|
|
||||||
title "Building project for target $TARGET"
|
|
||||||
cargo build --color always --locked --profile release-lto
|
|
||||||
|
|
||||||
title "Installing artifacts"
|
|
||||||
install_artifact target/release-lto/dtmt
|
|
||||||
install_artifact target/release-lto/dtmm
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo -e "\033[31;1mEnv var 'TARGET' must either be 'msvc' or 'linux'. Got '$TARGET'.\033[0m" >&2
|
|
||||||
exit 1
|
|
||||||
esac
|
|
||||||
|
|
||||||
title "Done"
|
|
|
@ -1,29 +0,0 @@
|
||||||
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig
|
|
||||||
---
|
|
||||||
platform: linux
|
|
||||||
|
|
||||||
image_resource:
|
|
||||||
name: ctmt-bi-base-((target))
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/dtmt-ci-base-((target))
|
|
||||||
tag: latest
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
- name: repo
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
- name: artifact
|
|
||||||
|
|
||||||
caches:
|
|
||||||
- path: repo/target
|
|
||||||
- path: /usr/local/cargo/registry
|
|
||||||
|
|
||||||
params:
|
|
||||||
CI: "true"
|
|
||||||
TARGET: ((target))
|
|
||||||
PR: ((pr))
|
|
||||||
OUTPUT: artifact
|
|
||||||
|
|
||||||
run:
|
|
||||||
path: repo/.ci/tasks/build.sh
|
|
|
@ -1,15 +0,0 @@
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
title() {
|
|
||||||
printf "\033[1m%s\033[0m\n" "$1"
|
|
||||||
}
|
|
||||||
|
|
||||||
title "Install clippy"
|
|
||||||
rustup component add clippy
|
|
||||||
|
|
||||||
title "Run clippy"
|
|
||||||
cargo clippy --color always --no-deps
|
|
||||||
|
|
||||||
title "Done"
|
|
|
@ -1,26 +0,0 @@
|
||||||
# yaml-language-server: $schema=https://raw.githubusercontent.com/cappyzawa/concourse-pipeline-jsonschema/master/concourse_jsonschema.json#/definitions/TaskConfig
|
|
||||||
---
|
|
||||||
platform: linux
|
|
||||||
|
|
||||||
image_resource:
|
|
||||||
name: dtmt-ci-base-linux
|
|
||||||
type: registry-image
|
|
||||||
source:
|
|
||||||
repository: registry.local:5000/dtmt-ci-base-linux
|
|
||||||
tag: latest
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
- name: repo
|
|
||||||
|
|
||||||
caches:
|
|
||||||
- path: repo/target
|
|
||||||
- path: /usr/local/cargo/registry
|
|
||||||
|
|
||||||
params:
|
|
||||||
CI: "true"
|
|
||||||
GITEA_API_KEY: ((gitea_api_key))
|
|
||||||
|
|
||||||
run:
|
|
||||||
path: .ci/tasks/clippy.sh
|
|
||||||
dir: repo
|
|
||||||
|
|
|
@ -1,51 +0,0 @@
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
set -ux
|
|
||||||
|
|
||||||
script="$1"
|
|
||||||
context="$2"
|
|
||||||
desc="$3"
|
|
||||||
|
|
||||||
if [ -z "$script" ]; then
|
|
||||||
echo "No script to run" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "$context" ]; then
|
|
||||||
echo "Missing 'context' for CI status report" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "$REF" ]; then
|
|
||||||
echo "Environment variable 'REF' must be set to a valid Git ref." >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "$GITEA_API_KEY" ]; then
|
|
||||||
echo "Environment variable 'GITEA_API_KEY' must be set." >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
notify() {
|
|
||||||
curl -X 'POST' \
|
|
||||||
-H 'Content-Type: application/json' \
|
|
||||||
-H 'Accept: application/json' \
|
|
||||||
-H "Authorization: token $GITEA_API_KEY" \
|
|
||||||
"https://git.sclu1034.dev/api/v1/repos/bitsquid_dt/dtmt/statuses/$REF" \
|
|
||||||
--data @- <<EOF
|
|
||||||
{
|
|
||||||
"context": "$2",
|
|
||||||
"description": "$3",
|
|
||||||
"state": "$1"
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
}
|
|
||||||
|
|
||||||
notify 'pending' "$context" "$desc"
|
|
||||||
|
|
||||||
if sh "$script"; then
|
|
||||||
notify 'success' "$context" "$desc"
|
|
||||||
else
|
|
||||||
notify 'failure' "$context" "$desc"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
|
@ -1,9 +0,0 @@
|
||||||
target/
|
|
||||||
docs/
|
|
||||||
data/
|
|
||||||
.git/
|
|
||||||
README.adoc
|
|
||||||
CHANGELOG.adoc
|
|
||||||
LICENSE
|
|
||||||
dictionary.csv
|
|
||||||
Justfile
|
|
|
@ -1,5 +1,3 @@
|
||||||
/target
|
/target
|
||||||
*.a
|
liboo2corelinux64.so
|
||||||
*.so
|
oo2core_8_win64.dll
|
||||||
*.dll
|
|
||||||
*.lib
|
|
||||||
|
|
6
.gitattributes
vendored
6
.gitattributes
vendored
|
@ -1,6 +0,0 @@
|
||||||
* text=auto
|
|
||||||
|
|
||||||
*.xcf filter=lfs diff=lfs merge=lfs -text
|
|
||||||
*.ico filter=lfs diff=lfs merge=lfs -text
|
|
||||||
*.png filter=lfs diff=lfs merge=lfs -text
|
|
||||||
*.jpg filter=lfs diff=lfs merge=lfs -text
|
|
6
.gitignore
vendored
6
.gitignore
vendored
|
@ -1,8 +1,6 @@
|
||||||
/target
|
/target
|
||||||
/data
|
/data
|
||||||
.envrc
|
.envrc
|
||||||
*.a
|
liboo2corelinux64.so
|
||||||
*.so
|
oo2core_8_win64.dll
|
||||||
*.dll
|
|
||||||
*.lib
|
|
||||||
dictionary.csv
|
dictionary.csv
|
||||||
|
|
13
.gitmodules
vendored
13
.gitmodules
vendored
|
@ -1,14 +1,3 @@
|
||||||
[submodule "lib/serde_sjson"]
|
[submodule "lib/serde_sjson"]
|
||||||
path = lib/serde_sjson
|
path = lib/serde_sjson
|
||||||
url = https://git.sclu1034.dev/lucas/serde_sjson.git
|
url = git@git.sclu1034.dev:lucas/serde_sjson.git
|
||||||
[submodule "lib/luajit2-sys"]
|
|
||||||
path = lib/luajit2-sys
|
|
||||||
url = https://github.com/sclu1034/luajit2-sys.git
|
|
||||||
[submodule "lib/color-eyre"]
|
|
||||||
path = lib/color-eyre
|
|
||||||
url = https://github.com/sclu1034/color-eyre.git
|
|
||||||
branch = "fork"
|
|
||||||
[submodule "lib/ansi-parser"]
|
|
||||||
path = lib/ansi-parser
|
|
||||||
url = https://gitlab.com/lschwiderski/ansi-parser.git
|
|
||||||
branch = "issue/outdated-nom"
|
|
||||||
|
|
15
.renovaterc
15
.renovaterc
|
@ -1,15 +0,0 @@
|
||||||
{
|
|
||||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
|
||||||
"extends": [
|
|
||||||
"config:recommended",
|
|
||||||
":combinePatchMinorReleases",
|
|
||||||
":enableVulnerabilityAlerts",
|
|
||||||
":rebaseStalePrs"
|
|
||||||
],
|
|
||||||
"prConcurrentLimit": 10,
|
|
||||||
"branchPrefix": "renovate/",
|
|
||||||
"baseBranches": [
|
|
||||||
"$default",
|
|
||||||
"/^release\\/.*/"
|
|
||||||
]
|
|
||||||
}
|
|
|
@ -2,45 +2,6 @@
|
||||||
|
|
||||||
== [Unreleased]
|
== [Unreleased]
|
||||||
|
|
||||||
=== Added
|
|
||||||
|
|
||||||
- dtmt: split `build` into `build` and `package`
|
|
||||||
- dtmt: implement deploying built bundles
|
|
||||||
- dtmm: indicate when a deployment is necessary
|
|
||||||
- dtmm: check for Steam game update before deployment
|
|
||||||
- dtmm: remove unused bundles from previous deployment
|
|
||||||
- dtmm: show dialog for critical errors
|
|
||||||
- dtmm: check mod order before deployment
|
|
||||||
- dtmt: add mod dependencies to config
|
|
||||||
- dtmm: match mods to Nexus and check for updates
|
|
||||||
- dtmt: add utility to migrate mod projects
|
|
||||||
- dtmm: reset dtkit-patch installations
|
|
||||||
- sdk: implement decompiling Lua files
|
|
||||||
- dtmm: fetch cover image for Nexus mods
|
|
||||||
- dtmm: fetch file version for Nexus mods
|
|
||||||
- dtmm: handle `nxm://` URIs via IPC and import the corresponding mod
|
|
||||||
- dtmm: Add button to open mod on nexusmods.com
|
|
||||||
- dtmt: Implement commands to list bundles and contents
|
|
||||||
- dtmt: Implement command to search for files
|
|
||||||
|
|
||||||
=== Fixed
|
|
||||||
|
|
||||||
- all: force unix path separators for engine values
|
|
||||||
- dtmt: fix extracing files with non-flattened file names
|
|
||||||
- oodle: fix static linking
|
|
||||||
|
|
||||||
== 2023-03-01
|
|
||||||
|
|
||||||
=== Added
|
|
||||||
|
|
||||||
- dtmm: build MVP mod manager GUI and mod loading facilities
|
|
||||||
- dtmt: show status after adding dictionary entries
|
|
||||||
- dtmt: implement building mod bundles
|
|
||||||
- dtmt: implement command to create mod project from template
|
|
||||||
- sdk: statically link Oodle
|
|
||||||
- sdk: implement bundle database handling
|
|
||||||
- sdk: statically link LuaJIT
|
|
||||||
|
|
||||||
== [v0.2.0] - 2022-12-28
|
== [v0.2.0] - 2022-12-28
|
||||||
|
|
||||||
=== Added
|
=== Added
|
||||||
|
|
4101
Cargo.lock
generated
4101
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
74
Cargo.toml
74
Cargo.toml
|
@ -1,79 +1,7 @@
|
||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = ["crates/*", "lib/*"]
|
||||||
"crates/dtmt",
|
|
||||||
"crates/dtmm",
|
|
||||||
"lib/dtmt-shared",
|
|
||||||
"lib/oodle",
|
|
||||||
"lib/sdk",
|
|
||||||
"lib/serde_sjson",
|
|
||||||
"lib/luajit2-sys",
|
|
||||||
"lib/color-eyre",
|
|
||||||
]
|
|
||||||
exclude = ["lib/color-eyre"]
|
|
||||||
|
|
||||||
[workspace.dependencies]
|
|
||||||
ansi-parser = "0.9.1"
|
|
||||||
ansi_term = "0.12.1"
|
|
||||||
async-recursion = "1.0.5"
|
|
||||||
bincode = "1.3.3"
|
|
||||||
bitflags = "2.5.0"
|
|
||||||
byteorder = "1.4.3"
|
|
||||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
|
|
||||||
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
|
|
||||||
color-eyre = { path = "lib/color-eyre" }
|
|
||||||
colors-transform = "0.2.11"
|
|
||||||
confy = "0.6.1"
|
|
||||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
|
||||||
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
|
|
||||||
druid-widget-nursery = "0.1"
|
|
||||||
dtmt-shared = { path = "lib/dtmt-shared" }
|
|
||||||
fastrand = "2.1.0"
|
|
||||||
futures = "0.3.25"
|
|
||||||
futures-util = "0.3.24"
|
|
||||||
glob = "0.3.0"
|
|
||||||
interprocess = "2.1.0"
|
|
||||||
lazy_static = "1.4.0"
|
|
||||||
luajit2-sys = { path = "lib/luajit2-sys" }
|
|
||||||
minijinja = { version = "2.0.1", default-features = false }
|
|
||||||
nanorand = "0.7.0"
|
|
||||||
nexusmods = { path = "lib/nexusmods" }
|
|
||||||
notify = "8.0.0"
|
|
||||||
oodle = { path = "lib/oodle" }
|
|
||||||
open = "5.0.1"
|
|
||||||
path-clean = "1.0.1"
|
|
||||||
path-slash = "0.2.1"
|
|
||||||
pin-project-lite = "0.2.9"
|
|
||||||
promptly = "0.3.1"
|
|
||||||
sdk = { path = "lib/sdk" }
|
|
||||||
serde = { version = "1.0.152", features = ["derive", "rc"] }
|
|
||||||
serde_sjson = { path = "lib/serde_sjson" }
|
|
||||||
steamlocate = "2.0.0-beta.2"
|
|
||||||
strip-ansi-escapes = "0.2.0"
|
|
||||||
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
|
|
||||||
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
|
||||||
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
|
|
||||||
tracing = { version = "0.1.37", features = ["async-await"] }
|
|
||||||
tracing-error = "0.2.0"
|
|
||||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
|
||||||
usvg = "0.25.0"
|
|
||||||
zip = { version = "2.1.3", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
|
|
||||||
|
|
||||||
[profile.dev.package.backtrace]
|
|
||||||
opt-level = 3
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
strip = "debuginfo"
|
strip = "debuginfo"
|
||||||
|
|
||||||
# The MSVC toolchain cannot handle LTO properly. Some symbol related to
|
|
||||||
# panic unwind would always be missing.
|
|
||||||
# So we use a separate profile for when we can compile with LTO.
|
|
||||||
[profile.release-lto]
|
|
||||||
inherits = "release"
|
|
||||||
lto = true
|
lto = true
|
||||||
|
|
||||||
[profile.perf]
|
|
||||||
inherits = "release"
|
|
||||||
strip = false
|
|
||||||
lto = true
|
|
||||||
debug = "line-tables-only"
|
|
||||||
|
|
59
Justfile
59
Justfile
|
@ -1,59 +0,0 @@
|
||||||
set positional-arguments
|
|
||||||
|
|
||||||
fly_target := "main"
|
|
||||||
|
|
||||||
build-perf-dtmt:
|
|
||||||
cargo build --profile perf --bin dtmt
|
|
||||||
|
|
||||||
perf-dtmt *args='': build-perf-dtmt
|
|
||||||
perf record --call-graph dwarf ./target/perf/dtmt "$@"
|
|
||||||
|
|
||||||
ci-build: ci-build-msvc ci-build-linux
|
|
||||||
|
|
||||||
ci-build-msvc:
|
|
||||||
docker run --rm -ti --user $(id -u) -v ./:/src/dtmt dtmt-ci-base-msvc cargo --color always build --release --target x86_64-pc-windows-msvc --locked -Zbuild-std
|
|
||||||
|
|
||||||
ci-build-linux:
|
|
||||||
docker run --rm -ti --user $(id -u) -v ./:/src/dtmt dtmt-ci-base-linux cargo --color always build --profile release-lto --locked
|
|
||||||
|
|
||||||
build-image: build-image-msvc build-image-linux
|
|
||||||
|
|
||||||
build-image-msvc:
|
|
||||||
docker build -f .ci/Dockerfile.msvc .
|
|
||||||
|
|
||||||
build-image-linux:
|
|
||||||
docker build -f .ci/Dockerfile.linux .
|
|
||||||
|
|
||||||
ci-image:
|
|
||||||
# The MSVC image depends on the Linux image. So by building that first,
|
|
||||||
# we actually build both, and cache them, so that "building" the
|
|
||||||
# Linux image afterwards merely needs to pull the cache.
|
|
||||||
docker build --target msvc -t dtmt-ci-base-msvc -f .ci/image/Dockerfile .
|
|
||||||
docker build --target linux -t dtmt-ci-base-linux -f .ci/image/Dockerfile .
|
|
||||||
docker tag dtmt-ci-base-msvc registry.sclu1034.dev/dtmt-ci-base-msvc
|
|
||||||
docker tag dtmt-ci-base-linux registry.sclu1034.dev/dtmt-ci-base-linux
|
|
||||||
docker push registry.sclu1034.dev/dtmt-ci-base-msvc
|
|
||||||
docker push registry.sclu1034.dev/dtmt-ci-base-linux
|
|
||||||
|
|
||||||
set-base-pipeline:
|
|
||||||
fly -t {{fly_target}} set-pipeline \
|
|
||||||
--pipeline dtmt \
|
|
||||||
--config .ci/pipelines/base.yml \
|
|
||||||
-v gitea_api_key=${GITEA_API_KEY} \
|
|
||||||
-v owner=bitsquid_dt \
|
|
||||||
-v repo=dtmt
|
|
||||||
|
|
||||||
set-pr-pipeline pr:
|
|
||||||
curl \
|
|
||||||
-H "Authorization: ${GITEA_API_KEY}" \
|
|
||||||
-H 'Accept: application/json' \
|
|
||||||
'https://git.sclu1034.dev/api/v1/repos/bitsquid_dt/dtmt/pulls/{{pr}}' \
|
|
||||||
| yq -y '.' - > 'pr-{{pr}}.yaml'
|
|
||||||
fly -t main set-pipeline \
|
|
||||||
--pipeline dtmt-pr \
|
|
||||||
--config .ci/pipelines/pr.yml \
|
|
||||||
-v gitea_api_key=${GITEA_API_KEY} \
|
|
||||||
-i number={{pr}} \
|
|
||||||
-y branch="$(yq -y '.head.ref' 'pr-{{pr}}.yaml')" \
|
|
||||||
-y pr="$(cat 'pr-{{pr}}.yaml')"
|
|
||||||
|
|
16
README.adoc
16
README.adoc
|
@ -10,18 +10,4 @@
|
||||||
:tip-caption: :bulb:
|
:tip-caption: :bulb:
|
||||||
:warning-caption: :warning:
|
:warning-caption: :warning:
|
||||||
|
|
||||||
A set of tools to use and develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_.
|
A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_.
|
||||||
|
|
||||||
== Darktide Mod Manager (DTMM)
|
|
||||||
|
|
||||||
DTMM is a GUI application to install and manage mods for the game.
|
|
||||||
|
|
||||||
image::docs/screenshots/dtmm.png[dtmm main view]
|
|
||||||
|
|
||||||
Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmm[crates/dtmm] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki].
|
|
||||||
|
|
||||||
== Darktide Mod Tools (DTMT)
|
|
||||||
|
|
||||||
DTMT is a CLI application providing various commands that aid in developing mods for the game.
|
|
||||||
|
|
||||||
Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmt[crates/dtmt] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki].
|
|
||||||
|
|
|
@ -1,49 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "dtmm"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
authors = ["Lucas Schwiderski <lucas@lschwiderski.de>"]
|
|
||||||
description = "DTMM is a GUI application to install and manage mods for the game."
|
|
||||||
documentation = "https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki"
|
|
||||||
repository = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
|
|
||||||
homepage = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
|
|
||||||
license-file = "LICENSE"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
ansi-parser = { workspace = true }
|
|
||||||
async-recursion = { workspace = true }
|
|
||||||
bincode = { workspace = true }
|
|
||||||
bitflags = { workspace = true }
|
|
||||||
clap = { workspace = true }
|
|
||||||
color-eyre = { workspace = true }
|
|
||||||
colors-transform = { workspace = true }
|
|
||||||
confy = { workspace = true }
|
|
||||||
druid = { workspace = true }
|
|
||||||
druid-widget-nursery = { workspace = true }
|
|
||||||
dtmt-shared = { workspace = true }
|
|
||||||
futures = { workspace = true }
|
|
||||||
interprocess = { workspace = true }
|
|
||||||
lazy_static = { workspace = true }
|
|
||||||
luajit2-sys = { workspace = true }
|
|
||||||
minijinja = { workspace = true }
|
|
||||||
nexusmods = { workspace = true }
|
|
||||||
oodle = { workspace = true }
|
|
||||||
open = { workspace = true }
|
|
||||||
path-slash = { workspace = true }
|
|
||||||
sdk = { workspace = true }
|
|
||||||
serde = { workspace = true }
|
|
||||||
serde_sjson = { workspace = true }
|
|
||||||
strip-ansi-escapes = { workspace = true }
|
|
||||||
time = { workspace = true }
|
|
||||||
tokio = { workspace = true }
|
|
||||||
tokio-stream = { workspace = true }
|
|
||||||
tracing = { workspace = true }
|
|
||||||
tracing-error = { workspace = true }
|
|
||||||
tracing-subscriber = { workspace = true }
|
|
||||||
usvg = { workspace = true }
|
|
||||||
zip = { workspace = true }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
winres = "0.1.12"
|
|
|
@ -1,16 +0,0 @@
|
||||||
= Darktide Mod Manager (DTMM)
|
|
||||||
:idprefix:
|
|
||||||
:idseparator:
|
|
||||||
:toc: macro
|
|
||||||
:toclevels: 1
|
|
||||||
:!toc-title:
|
|
||||||
:caution-caption: :fire:
|
|
||||||
:important-caption: :exclamtion:
|
|
||||||
:note-caption: :paperclip:
|
|
||||||
:tip-caption: :bulb:
|
|
||||||
:warning-caption: :warning:
|
|
||||||
|
|
||||||
DTMM is a GUI application to install and manage mods for the game.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
BIN
crates/dtmm/assets/DTMM_logo.xcf
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo.xcf
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_256.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_256.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_48.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_48.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_64.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_64.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_border.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_border.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_faint_glow.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_faint_glow.png
(Stored with Git LFS)
Binary file not shown.
BIN
crates/dtmm/assets/DTMM_logo_small.png
(Stored with Git LFS)
BIN
crates/dtmm/assets/DTMM_logo_small.png
(Stored with Git LFS)
Binary file not shown.
|
@ -1,11 +0,0 @@
|
||||||
[Desktop Entry]
|
|
||||||
Name=DTMM
|
|
||||||
GenericName=Mod Manager
|
|
||||||
Comment=A graphical mod manager for Warhammer 40,000: Darktide
|
|
||||||
Exec=dtmm %u
|
|
||||||
Type=Application
|
|
||||||
Keywords=Mod;
|
|
||||||
StartupNotify=true
|
|
||||||
Categories=Utility;
|
|
||||||
MimeType=x-scheme-handler/nxm;
|
|
||||||
Icon=dtmm
|
|
BIN
crates/dtmm/assets/dtmm.ico
(Stored with Git LFS)
BIN
crates/dtmm/assets/dtmm.ico
(Stored with Git LFS)
Binary file not shown.
|
@ -1,70 +0,0 @@
|
||||||
local StateGame = require("scripts/game_states/state_game")
|
|
||||||
local StateSplash = require("scripts/game_states/game/state_splash")
|
|
||||||
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
|
|
||||||
|
|
||||||
local function hook(obj, fn_name, cb)
|
|
||||||
local orig = obj[fn_name]
|
|
||||||
|
|
||||||
obj[fn_name] = function(...)
|
|
||||||
return cb(orig, ...)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
function init(mod_data, boot_gui)
|
|
||||||
local ModLoader = require("scripts/mods/mod_loader")
|
|
||||||
local mod_loader = ModLoader:new(mod_data, boot_gui)
|
|
||||||
|
|
||||||
-- The mod loader needs to remain active during game play, to
|
|
||||||
-- enable reloads
|
|
||||||
hook(StateGame, "update", function(func, dt, ...)
|
|
||||||
mod_loader:update(dt)
|
|
||||||
return func(dt, ...)
|
|
||||||
end)
|
|
||||||
|
|
||||||
-- Skip splash view
|
|
||||||
hook(StateSplash, "on_enter", function(func, self, ...)
|
|
||||||
local result = func(self, ...)
|
|
||||||
|
|
||||||
self._should_skip = true
|
|
||||||
self._continue = true
|
|
||||||
|
|
||||||
return result
|
|
||||||
end)
|
|
||||||
|
|
||||||
-- Trigger state change events
|
|
||||||
hook(GameStateMachine, "_change_state", function(func, self, ...)
|
|
||||||
local old_state = self._state
|
|
||||||
local old_state_name = old_state and self:current_state_name()
|
|
||||||
|
|
||||||
if old_state_name then
|
|
||||||
mod_loader:on_game_state_changed("exit", old_state_name, old_state)
|
|
||||||
end
|
|
||||||
|
|
||||||
local result = func(self, ...)
|
|
||||||
|
|
||||||
local new_state = self._state
|
|
||||||
local new_state_name = new_state and self:current_state_name()
|
|
||||||
|
|
||||||
if new_state_name then
|
|
||||||
mod_loader:on_game_state_changed("enter", new_state_name, new_state)
|
|
||||||
end
|
|
||||||
|
|
||||||
return result
|
|
||||||
end)
|
|
||||||
|
|
||||||
-- Trigger ending state change event
|
|
||||||
hook(GameStateMachine, "destroy", function(func, self, ...)
|
|
||||||
local old_state = self._state
|
|
||||||
local old_state_name = old_state and self:current_state_name()
|
|
||||||
|
|
||||||
if old_state_name then
|
|
||||||
mod_loader:on_game_state_changed("exit", old_state_name)
|
|
||||||
end
|
|
||||||
|
|
||||||
return func(self, ...)
|
|
||||||
end)
|
|
||||||
|
|
||||||
return mod_loader
|
|
||||||
end
|
|
||||||
|
|
||||||
return init
|
|
|
@ -1,28 +0,0 @@
|
||||||
return {
|
|
||||||
{% for mod in mods %}
|
|
||||||
{
|
|
||||||
id = "{{ mod.id }}",
|
|
||||||
name = "{{ mod.name }}",
|
|
||||||
bundled = {{ mod.bundled }},
|
|
||||||
version = {{ mod.version }},
|
|
||||||
packages = {
|
|
||||||
{% for pkg in mod.packages %}
|
|
||||||
"{{ pkg }}",
|
|
||||||
{% endfor %}
|
|
||||||
},
|
|
||||||
run = function()
|
|
||||||
{% if mod.data is none %}
|
|
||||||
return dofile("{{ mod.init }}")
|
|
||||||
{% else %}
|
|
||||||
new_mod("{{ mod.id }}", {
|
|
||||||
mod_script = "{{ mod.init }}",
|
|
||||||
mod_data = "{{ mod.data }}",
|
|
||||||
{% if not mod.localization is none %}
|
|
||||||
mod_localization = "{{ mod.localization }}",
|
|
||||||
{% endif %}
|
|
||||||
})
|
|
||||||
{% endif %}
|
|
||||||
end,
|
|
||||||
},
|
|
||||||
{% endfor %}
|
|
||||||
}
|
|
|
@ -1,412 +0,0 @@
|
||||||
-- Copyright on this file is owned by Fatshark.
|
|
||||||
-- It is extracted, used and modified with permission only for
|
|
||||||
-- the purpose of loading mods within Warhammer 40,000: Darktide.
|
|
||||||
local ModLoader = class("ModLoader")
|
|
||||||
|
|
||||||
local table_unpack = table.unpack or unpack
|
|
||||||
local table_pack = table.pack or pack
|
|
||||||
|
|
||||||
local ScriptGui = require("scripts/foundation/utilities/script_gui")
|
|
||||||
|
|
||||||
local FONT_MATERIAL = "content/ui/fonts/arial"
|
|
||||||
|
|
||||||
local LOG_LEVELS = {
|
|
||||||
spew = 4,
|
|
||||||
info = 3,
|
|
||||||
warning = 2,
|
|
||||||
error = 1
|
|
||||||
}
|
|
||||||
local DEFAULT_SETTINGS = {
|
|
||||||
log_level = LOG_LEVELS.error,
|
|
||||||
developer_mode = false
|
|
||||||
}
|
|
||||||
|
|
||||||
local Keyboard = Keyboard
|
|
||||||
local BUTTON_INDEX_R = Keyboard.button_index("r")
|
|
||||||
local BUTTON_INDEX_LEFT_SHIFT = Keyboard.button_index("left shift")
|
|
||||||
local BUTTON_INDEX_LEFT_CTRL = Keyboard.button_index("left ctrl")
|
|
||||||
|
|
||||||
ModLoader.init = function(self, mod_data, boot_gui)
|
|
||||||
table.dump(mod_data, nil, 5, function(...) Log.info("ModLoader", ...) end)
|
|
||||||
|
|
||||||
self._mod_data = mod_data
|
|
||||||
self._gui = boot_gui
|
|
||||||
|
|
||||||
self._settings = Application.user_setting("mod_settings") or DEFAULT_SETTINGS
|
|
||||||
|
|
||||||
self._mods = {}
|
|
||||||
self._num_mods = nil
|
|
||||||
self._chat_print_buffer = {}
|
|
||||||
self._reload_data = {}
|
|
||||||
self._ui_time = 0
|
|
||||||
|
|
||||||
self._state = "scanning"
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.developer_mode_enabled = function(self)
|
|
||||||
return self._settings.developer_mode
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.set_developer_mode = function(self, enabled)
|
|
||||||
self._settings.developer_mode = enabled
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._draw_state_to_gui = function(self, gui, dt)
|
|
||||||
local state = self._state
|
|
||||||
local t = self._ui_time + dt
|
|
||||||
self._ui_time = t
|
|
||||||
local status_str = "Loading mods"
|
|
||||||
|
|
||||||
if state == "scanning" then
|
|
||||||
status_str = "Scanning for mods"
|
|
||||||
elseif state == "loading" or state == "initializing" then
|
|
||||||
local mod = self._mods[self._mod_load_index]
|
|
||||||
status_str = string.format("Loading mod %q", mod.name)
|
|
||||||
end
|
|
||||||
|
|
||||||
local msg = status_str .. string.rep(".", (2 * t) % 4)
|
|
||||||
ScriptGui.text(gui, msg, FONT_MATERIAL, 25, Vector3(20, 30, 1), Color.white())
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.remove_gui = function(self)
|
|
||||||
self._gui = nil
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.mod_data = function(self, id)
|
|
||||||
-- Since this primarily exists for DMF,
|
|
||||||
-- we can optimize the search for its use case of looking for the
|
|
||||||
-- mod currently being loaded
|
|
||||||
local mod_data = self._mods[self._mod_load_index]
|
|
||||||
|
|
||||||
if mod_data.id ~= id then
|
|
||||||
mod_data = nil
|
|
||||||
|
|
||||||
for _, v in ipairs(self._mods) do
|
|
||||||
if v.id == id then
|
|
||||||
mod_data = v
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
return mod_data
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._check_reload = function()
|
|
||||||
return Keyboard.pressed(BUTTON_INDEX_R) and
|
|
||||||
Keyboard.button(BUTTON_INDEX_LEFT_SHIFT) +
|
|
||||||
Keyboard.button(BUTTON_INDEX_LEFT_CTRL) == 2
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.update = function(self, dt)
|
|
||||||
local chat_print_buffer = self._chat_print_buffer
|
|
||||||
local num_delayed_prints = #chat_print_buffer
|
|
||||||
|
|
||||||
if num_delayed_prints > 0 and Managers.chat then
|
|
||||||
for i = 1, num_delayed_prints, 1 do
|
|
||||||
-- TODO: Use new chat system
|
|
||||||
-- Managers.chat:add_local_system_message(1, chat_print_buffer[i], true)
|
|
||||||
|
|
||||||
chat_print_buffer[i] = nil
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
local old_state = self._state
|
|
||||||
|
|
||||||
if self._settings.developer_mode and self:_check_reload() then
|
|
||||||
self._reload_requested = true
|
|
||||||
end
|
|
||||||
|
|
||||||
if self._reload_requested and old_state == "done" then
|
|
||||||
self:_reload_mods()
|
|
||||||
end
|
|
||||||
|
|
||||||
if old_state == "done" then
|
|
||||||
self:_run_callbacks("update", dt)
|
|
||||||
elseif old_state == "scanning" then
|
|
||||||
Log.info("ModLoader", "Scanning for mods")
|
|
||||||
self:_build_mod_table()
|
|
||||||
|
|
||||||
self._state = self:_load_mod(1)
|
|
||||||
self._ui_time = 0
|
|
||||||
elseif old_state == "loading" then
|
|
||||||
local handle = self._loading_resource_handle
|
|
||||||
|
|
||||||
if ResourcePackage.has_loaded(handle) then
|
|
||||||
ResourcePackage.flush(handle)
|
|
||||||
|
|
||||||
local mod = self._mods[self._mod_load_index]
|
|
||||||
local next_index = mod.package_index + 1
|
|
||||||
local mod_data = mod.data
|
|
||||||
|
|
||||||
if next_index <= #mod_data.packages then
|
|
||||||
self:_load_package(mod, next_index)
|
|
||||||
else
|
|
||||||
self._state = "initializing"
|
|
||||||
end
|
|
||||||
end
|
|
||||||
elseif old_state == "initializing" then
|
|
||||||
local mod = self._mods[self._mod_load_index]
|
|
||||||
local mod_data = mod.data
|
|
||||||
|
|
||||||
Log.info("ModLoader", "Initializing mod %q", mod.name)
|
|
||||||
|
|
||||||
mod.state = "running"
|
|
||||||
local ok, object = xpcall(mod_data.run, function(err)
|
|
||||||
if type(err) == "string" then
|
|
||||||
return err .. "\n" .. Script.callstack()
|
|
||||||
else
|
|
||||||
return err
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
|
|
||||||
if not ok then
|
|
||||||
if object.error then
|
|
||||||
object = string.format(
|
|
||||||
"%s\n<<Lua Stack>>\n%s\n<</Lua Stack>>\n<<Lua Locals>>\n%s\n<</Lua Locals>>\n<<Lua Self>>\n%s\n<</Lua Self>>",
|
|
||||||
object.error, object.traceback, object.locals, object.self)
|
|
||||||
end
|
|
||||||
|
|
||||||
Log.error("ModLoader", "Failed 'run' for %q: %s", mod.name, object)
|
|
||||||
end
|
|
||||||
|
|
||||||
mod.object = object or {}
|
|
||||||
|
|
||||||
self:_run_callback(mod, "init", self._reload_data[mod.id])
|
|
||||||
|
|
||||||
Log.info("ModLoader", "Finished loading %q", mod.name)
|
|
||||||
|
|
||||||
self._state = self:_load_mod(self._mod_load_index + 1)
|
|
||||||
end
|
|
||||||
|
|
||||||
local gui = self._gui
|
|
||||||
if gui then
|
|
||||||
self:_draw_state_to_gui(gui, dt)
|
|
||||||
end
|
|
||||||
|
|
||||||
if old_state ~= self._state then
|
|
||||||
Log.info("ModLoader", "%s -> %s", old_state, self._state)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.all_mods_loaded = function(self)
|
|
||||||
return self._state == "done"
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.destroy = function(self)
|
|
||||||
self:_run_callbacks("on_destroy")
|
|
||||||
self:unload_all_mods()
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._run_callbacks = function(self, callback_name, ...)
|
|
||||||
for i = 1, self._num_mods, 1 do
|
|
||||||
local mod = self._mods[i]
|
|
||||||
|
|
||||||
if mod and not mod.callbacks_disabled then
|
|
||||||
self:_run_callback(mod, callback_name, ...)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._run_callback = function(self, mod, callback_name, ...)
|
|
||||||
local object = mod.object
|
|
||||||
local cb = object[callback_name]
|
|
||||||
|
|
||||||
if not cb then
|
|
||||||
return
|
|
||||||
end
|
|
||||||
|
|
||||||
local args = table_pack(...)
|
|
||||||
|
|
||||||
local success, val = xpcall(
|
|
||||||
function() return cb(object, table_unpack(args)) end,
|
|
||||||
function(err)
|
|
||||||
if type(err) == "string" then
|
|
||||||
return err .. "\n" .. Script.callstack()
|
|
||||||
else
|
|
||||||
return err
|
|
||||||
end
|
|
||||||
end
|
|
||||||
)
|
|
||||||
|
|
||||||
if success then
|
|
||||||
return val
|
|
||||||
else
|
|
||||||
Log.error("ModLoader", "Failed to run callback %q for mod %q with id %q. Disabling callbacks until reload.",
|
|
||||||
callback_name, mod.name, mod.id)
|
|
||||||
if val.error then
|
|
||||||
Log.error("ModLoader",
|
|
||||||
"Error: %s\n<<Lua Stack>>\n%s<</Lua Stack>>\n<<Lua Locals>>\n%s<</Lua Locals>>\n<<Lua Self>>\n%s<</Lua Self>>",
|
|
||||||
val.error, val.traceback, val.locals, val.self)
|
|
||||||
else
|
|
||||||
Log.error("ModLoader", "Error: %s", val or "[unknown error]")
|
|
||||||
end
|
|
||||||
|
|
||||||
mod.callbacks_disabled = true
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._start_scan = function(self)
|
|
||||||
Log.info("ModLoader", "Starting mod scan")
|
|
||||||
self._state = "scanning"
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._build_mod_table = function(self)
|
|
||||||
fassert(table.is_empty(self._mods), "Trying to add mods to non-empty mod table")
|
|
||||||
|
|
||||||
for i, mod_data in ipairs(self._mod_data) do
|
|
||||||
Log.info(
|
|
||||||
"ModLoader",
|
|
||||||
"mods[%d] = id=%q | name=%q | version=%q | bundled=%s",
|
|
||||||
i,
|
|
||||||
mod_data.id,
|
|
||||||
mod_data.name,
|
|
||||||
mod_data.version,
|
|
||||||
tostring(mod_data.bundled)
|
|
||||||
)
|
|
||||||
|
|
||||||
self._mods[i] = {
|
|
||||||
id = mod_data.id,
|
|
||||||
state = "not_loaded",
|
|
||||||
callbacks_disabled = false,
|
|
||||||
name = mod_data.name,
|
|
||||||
loaded_packages = {},
|
|
||||||
packages = mod_data.packages,
|
|
||||||
data = mod_data,
|
|
||||||
bundled = mod_data.bundled or false,
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
self._num_mods = #self._mods
|
|
||||||
|
|
||||||
Log.info("ModLoader", "Found %i mods", self._num_mods)
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._load_mod = function(self, index)
|
|
||||||
self._ui_time = 0
|
|
||||||
local mods = self._mods
|
|
||||||
local mod = mods[index]
|
|
||||||
|
|
||||||
if not mod then
|
|
||||||
table.clear(self._reload_data)
|
|
||||||
|
|
||||||
return "done"
|
|
||||||
end
|
|
||||||
|
|
||||||
Log.info("ModLoader", "Loading mod %q", mod.id)
|
|
||||||
|
|
||||||
mod.state = "loading"
|
|
||||||
|
|
||||||
Crashify.print_property(string.format("Mod:%s", mod.name), true)
|
|
||||||
|
|
||||||
self._mod_load_index = index
|
|
||||||
|
|
||||||
if mod.bundled and mod.packages[1] then
|
|
||||||
self:_load_package(mod, 1)
|
|
||||||
return "loading"
|
|
||||||
else
|
|
||||||
return "initializing"
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._load_package = function(self, mod, index)
|
|
||||||
mod.package_index = index
|
|
||||||
local package_name = mod.packages[index]
|
|
||||||
|
|
||||||
if not package_name then
|
|
||||||
return
|
|
||||||
end
|
|
||||||
|
|
||||||
Log.info("ModLoader", "Loading package %q", package_name)
|
|
||||||
|
|
||||||
local resource_handle = Application.resource_package(package_name)
|
|
||||||
self._loading_resource_handle = resource_handle
|
|
||||||
|
|
||||||
ResourcePackage.load(resource_handle)
|
|
||||||
|
|
||||||
table.insert(mod.loaded_packages, resource_handle)
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.unload_all_mods = function(self)
|
|
||||||
if self._state ~= "done" then
|
|
||||||
Log.error("ModLoader", "Mods can't be unloaded, mod state is not \"done\". current: %q", self._state)
|
|
||||||
|
|
||||||
return
|
|
||||||
end
|
|
||||||
|
|
||||||
Log.info("ModLoader", "Unload all mod packages")
|
|
||||||
|
|
||||||
for i = self._num_mods, 1, -1 do
|
|
||||||
local mod = self._mods[i]
|
|
||||||
|
|
||||||
if mod then
|
|
||||||
self:unload_mod(i)
|
|
||||||
end
|
|
||||||
|
|
||||||
self._mods[i] = nil
|
|
||||||
end
|
|
||||||
|
|
||||||
self._num_mods = nil
|
|
||||||
self._state = "unloaded"
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.unload_mod = function(self, index)
|
|
||||||
local mod = self._mods[index]
|
|
||||||
|
|
||||||
if mod then
|
|
||||||
Log.info("ModLoader", "Unloading %q.", mod.name)
|
|
||||||
|
|
||||||
for _, handle in ipairs(mod.loaded_packages) do
|
|
||||||
ResourcePackage.unload(handle)
|
|
||||||
Application.release_resource_package(handle)
|
|
||||||
end
|
|
||||||
|
|
||||||
mod.state = "not_loaded"
|
|
||||||
else
|
|
||||||
Log.error("ModLoader", "Mod index %i can't be unloaded, has not been loaded", index)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader._reload_mods = function(self)
|
|
||||||
Log.info("ModLoader", "reloading mods")
|
|
||||||
|
|
||||||
for i = 1, self._num_mods, 1 do
|
|
||||||
local mod = self._mods[i]
|
|
||||||
|
|
||||||
if mod and mod.state == "running" then
|
|
||||||
Log.info("ModLoader", "reloading %s", mod.name)
|
|
||||||
|
|
||||||
self._reload_data[mod.id] = self:_run_callback(mod, "on_reload")
|
|
||||||
else
|
|
||||||
Log.info("ModLoader", "not reloading mod, state: %s", mod.state)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
self:unload_all_mods()
|
|
||||||
self:_start_scan()
|
|
||||||
|
|
||||||
self._reload_requested = false
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.on_game_state_changed = function(self, status, state_name, state_object)
|
|
||||||
if self._state == "done" then
|
|
||||||
self:_run_callbacks("on_game_state_changed", status, state_name, state_object)
|
|
||||||
else
|
|
||||||
Log.warning("ModLoader", "Ignored on_game_state_changed call due to being in state %q", self._state)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ModLoader.print = function(self, level, str, ...)
|
|
||||||
local f = Log[level]
|
|
||||||
if f then
|
|
||||||
f("ModLoader", str, ...)
|
|
||||||
else
|
|
||||||
local message = string.format("[ModLoader][" .. level .. "] " .. str, ...)
|
|
||||||
local log_level = LOG_LEVELS[level] or 99
|
|
||||||
|
|
||||||
if log_level <= 2 then
|
|
||||||
print(message)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
return ModLoader
|
|
|
@ -1,216 +0,0 @@
|
||||||
local _G = _G
|
|
||||||
local rawget = rawget
|
|
||||||
local rawset = rawset
|
|
||||||
|
|
||||||
local log = function(category, format, ...)
|
|
||||||
local Log = rawget(_G, "Log")
|
|
||||||
if Log then
|
|
||||||
Log.info(category, format, ...)
|
|
||||||
else
|
|
||||||
print(string.format("[%s] %s", category or "", string.format(format or "", ...)))
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
log("mod_main", "Initializing mods...")
|
|
||||||
log("mod_main", "[DTMM] Deployment data:\n{{ deployment_info }}")
|
|
||||||
|
|
||||||
local require_store = {}
|
|
||||||
|
|
||||||
-- This token is treated as a string template and filled by DTMM during deployment.
|
|
||||||
-- This allows hiding unsafe I/O functions behind a setting.
|
|
||||||
-- When not replaced, it's also a valid table definition, thereby degrading gracefully.
|
|
||||||
local is_io_enabled = {{ is_io_enabled }} -- luacheck: ignore 113
|
|
||||||
local lua_libs = {
|
|
||||||
debug = debug,
|
|
||||||
os = {
|
|
||||||
date = os.date,
|
|
||||||
time = os.time,
|
|
||||||
clock = os.clock,
|
|
||||||
getenv = os.getenv,
|
|
||||||
difftime = os.difftime,
|
|
||||||
},
|
|
||||||
load = load,
|
|
||||||
loadfile = loadfile,
|
|
||||||
loadstring = loadstring,
|
|
||||||
}
|
|
||||||
|
|
||||||
if is_io_enabled then
|
|
||||||
lua_libs.io = io
|
|
||||||
lua_libs.os = os
|
|
||||||
lua_libs.ffi = require("ffi")
|
|
||||||
end
|
|
||||||
|
|
||||||
Mods = {
|
|
||||||
-- Keep a backup of certain system libraries before
|
|
||||||
-- Fatshark's code scrubs them.
|
|
||||||
-- The loader can then decide to pass them on to mods, or ignore them
|
|
||||||
lua = setmetatable({}, { __index = lua_libs }),
|
|
||||||
require_store = require_store,
|
|
||||||
original_require = require,
|
|
||||||
}
|
|
||||||
|
|
||||||
local can_insert = function(filepath, new_result)
|
|
||||||
local store = require_store[filepath]
|
|
||||||
if not store or #store then
|
|
||||||
return true
|
|
||||||
end
|
|
||||||
|
|
||||||
if store[#store] ~= new_result then
|
|
||||||
return true
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
local original_require = require
|
|
||||||
require = function(filepath, ...)
|
|
||||||
local result = original_require(filepath, ...)
|
|
||||||
if result and type(result) == "table" then
|
|
||||||
if can_insert(filepath, result) then
|
|
||||||
require_store[filepath] = require_store[filepath] or {}
|
|
||||||
local store = require_store[filepath]
|
|
||||||
|
|
||||||
table.insert(store, result)
|
|
||||||
|
|
||||||
if Mods.hook then
|
|
||||||
Mods.hook.enable_by_file(filepath, #store)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
return result
|
|
||||||
end
|
|
||||||
|
|
||||||
require("scripts/boot_init")
|
|
||||||
require("scripts/foundation/utilities/class")
|
|
||||||
|
|
||||||
-- The `__index` metamethod maps a proper identifier `CLASS.MyClassName` to the
|
|
||||||
-- stringified version of the key: `"MyClassName"`.
|
|
||||||
-- This allows using LuaCheck for the stringified class names in hook parameters.
|
|
||||||
_G.CLASS = setmetatable({}, {
|
|
||||||
__index = function(_, key)
|
|
||||||
return key
|
|
||||||
end
|
|
||||||
})
|
|
||||||
|
|
||||||
local original_class = class
|
|
||||||
class = function(class_name, super_name, ...)
|
|
||||||
local result = original_class(class_name, super_name, ...)
|
|
||||||
if not rawget(_G, class_name) then
|
|
||||||
rawset(_G, class_name, result)
|
|
||||||
end
|
|
||||||
if not rawget(_G.CLASS, class_name) then
|
|
||||||
rawset(_G.CLASS, class_name, result)
|
|
||||||
end
|
|
||||||
return result
|
|
||||||
end
|
|
||||||
|
|
||||||
require("scripts/main")
|
|
||||||
log("mod_main", "'scripts/main' loaded")
|
|
||||||
|
|
||||||
-- We need to inject two states into two different state machines:
|
|
||||||
-- First, we inject one into the `"Main"` state machine at a specific location, so that we're
|
|
||||||
-- still early in the process, but right after `StateRequireScripts` where most game files
|
|
||||||
-- are already available to `require` and hook.
|
|
||||||
-- This is where the `ModLoader` is created initially.
|
|
||||||
-- Then, we inject into the very first position of the `"Game"` state machine. This runs right
|
|
||||||
-- after `StateGame._init_managers`, at which point all the parts needed for DMF and other mods
|
|
||||||
-- have been initialized.
|
|
||||||
-- This is where `ModLoader` will finally start loading mods.
|
|
||||||
local function patch_mod_loading_state()
|
|
||||||
local StateBootLoadDML = class("StateBootLoadDML", "StateBootSubStateBase")
|
|
||||||
local StateGameLoadMods = class("StateGameLoadMods")
|
|
||||||
|
|
||||||
StateBootLoadDML.on_enter = function(self, parent, params)
|
|
||||||
log("StateBootLoadDML", "Entered")
|
|
||||||
StateBootLoadDML.super.on_enter(self, parent, params)
|
|
||||||
|
|
||||||
local state_params = self:_state_params()
|
|
||||||
local package_manager = state_params.package_manager
|
|
||||||
|
|
||||||
self._package_manager = package_manager
|
|
||||||
self._package_handles = {
|
|
||||||
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadDML", nil),
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
StateBootLoadDML._state_update = function(self, _)
|
|
||||||
local package_manager = self._package_manager
|
|
||||||
|
|
||||||
if package_manager:update() then
|
|
||||||
local mod_data = require("scripts/mods/mod_data")
|
|
||||||
|
|
||||||
local create_mod_loader = require("scripts/mods/init")
|
|
||||||
local mod_loader = create_mod_loader(mod_data)
|
|
||||||
|
|
||||||
Managers.mod = mod_loader
|
|
||||||
|
|
||||||
log("StateBootLoadDML", "DML loaded, exiting")
|
|
||||||
return true, false
|
|
||||||
end
|
|
||||||
|
|
||||||
return false, false
|
|
||||||
end
|
|
||||||
|
|
||||||
|
|
||||||
function StateGameLoadMods:on_enter(_, params)
|
|
||||||
log("StateGameLoadMods", "Entered")
|
|
||||||
self._next_state = require("scripts/game_states/game/state_splash")
|
|
||||||
self._next_state_params = params
|
|
||||||
end
|
|
||||||
|
|
||||||
function StateGameLoadMods:update(_)
|
|
||||||
-- We're relying on the fact that DML internally makes sure
|
|
||||||
-- that `Managers.mod:update()` is being called appropriately.
|
|
||||||
-- The implementation as of this writing is to hook `StateGame.update`.
|
|
||||||
if Managers.mod:all_mods_loaded() then
|
|
||||||
Log.info("StateGameLoadMods", "Mods loaded, exiting")
|
|
||||||
return self._next_state, self._next_state_params
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
|
|
||||||
local GameStateMachine_init = GameStateMachine.init
|
|
||||||
GameStateMachine.init = function(self, parent, start_state, params, creation_context, state_change_callbacks, name)
|
|
||||||
if name == "Main" then
|
|
||||||
log("mod_main", "Injecting StateBootLoadDML")
|
|
||||||
|
|
||||||
-- Hardcoded position after `StateRequireScripts`.
|
|
||||||
-- We need to wait until then to even begin most of our stuff,
|
|
||||||
-- so that most of the game's core systems are at least loaded and can be hooked,
|
|
||||||
-- even if they aren't running, yet.
|
|
||||||
local pos = 4
|
|
||||||
table.insert(params.states, pos, {
|
|
||||||
StateBootLoadDML,
|
|
||||||
{
|
|
||||||
package_manager = params.package_manager,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
|
|
||||||
elseif name == "Game" then
|
|
||||||
log("mod_main", "Injection StateGameLoadMods")
|
|
||||||
-- The second time around, we want to be the first, so we pass our own
|
|
||||||
-- 'start_state'.
|
|
||||||
-- We can't just have the state machine be initialized and then change its `_next_state`, as by the end of
|
|
||||||
-- `init`, a bunch of stuff will already be initialized.
|
|
||||||
GameStateMachine_init(self, parent, StateGameLoadMods, params, creation_context, state_change_callbacks, name)
|
|
||||||
-- And since we're done now, we can revert the function to its original
|
|
||||||
GameStateMachine.init = GameStateMachine_init
|
|
||||||
else
|
|
||||||
-- In all other cases, simply call the original
|
|
||||||
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
-- Override `init` to run our injection
|
|
||||||
function init()
|
|
||||||
patch_mod_loading_state()
|
|
||||||
|
|
||||||
-- As requested by Fatshark
|
|
||||||
local StateRequireScripts = require("scripts/game_states/boot/state_require_scripts")
|
|
||||||
StateRequireScripts._get_is_modded = function() return true end
|
|
||||||
|
|
||||||
Main:init()
|
|
||||||
end
|
|
||||||
|
|
||||||
-- vim: ft=lua
|
|
|
@ -1,21 +0,0 @@
|
||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2020-2023 Paweł Kuna
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
|
@ -1,8 +0,0 @@
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-alert-circle" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
|
|
||||||
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
|
|
||||||
<path d="M3 12a9 9 0 1 0 18 0a9 9 0 0 0 -18 0" />
|
|
||||||
<path d="M12 8v4" />
|
|
||||||
<path d="M12 16h.01" />
|
|
||||||
</svg>
|
|
||||||
|
|
||||||
|
|
Before Width: | Height: | Size: 396 B |
|
@ -1,8 +0,0 @@
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-alert-triangle" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
|
|
||||||
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
|
|
||||||
<path d="M10.24 3.957l-8.422 14.06a1.989 1.989 0 0 0 1.7 2.983h16.845a1.989 1.989 0 0 0 1.7 -2.983l-8.423 -14.06a1.989 1.989 0 0 0 -3.4 0z" />
|
|
||||||
<path d="M12 9v4" />
|
|
||||||
<path d="M12 17h.01" />
|
|
||||||
</svg>
|
|
||||||
|
|
||||||
|
|
Before Width: | Height: | Size: 491 B |
|
@ -1,8 +0,0 @@
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-cloud-download" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
|
|
||||||
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
|
|
||||||
<path d="M19 18a3.5 3.5 0 0 0 0 -7h-1a5 4.5 0 0 0 -11 -2a4.6 4.4 0 0 0 -2.1 8.4" />
|
|
||||||
<path d="M12 13l0 9" />
|
|
||||||
<path d="M9 19l3 3l3 -3" />
|
|
||||||
</svg>
|
|
||||||
|
|
||||||
|
|
Before Width: | Height: | Size: 439 B |
|
@ -1,7 +0,0 @@
|
||||||
fn main() {
|
|
||||||
if cfg!(target_os = "windows") {
|
|
||||||
let mut res = winres::WindowsResource::new();
|
|
||||||
res.set_icon("assets/dtmm.ico");
|
|
||||||
res.compile().unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,49 +0,0 @@
|
||||||
= Notes
|
|
||||||
|
|
||||||
== Layout
|
|
||||||
|
|
||||||
- top bar:
|
|
||||||
- left aligned: a tab bar with "mods", "settings", "about"
|
|
||||||
- right aligned: a button to start the game
|
|
||||||
- in the future: center aligned a dropdown to select profiles, and button to edit them
|
|
||||||
- main view:
|
|
||||||
- left side: list view of mods
|
|
||||||
- right side: details pane and buttons
|
|
||||||
- always visible, first mod in list is selected by default
|
|
||||||
- buttons:
|
|
||||||
- add mod
|
|
||||||
- deploy mods
|
|
||||||
- remove selected mod
|
|
||||||
- enable/disable (text changes based on state)
|
|
||||||
|
|
||||||
== Mod list
|
|
||||||
|
|
||||||
- name
|
|
||||||
- description?
|
|
||||||
- image?
|
|
||||||
- click to get details pane?
|
|
||||||
|
|
||||||
== Managing mods
|
|
||||||
|
|
||||||
- for each mod in the list, have a checkbox
|
|
||||||
- need a button to remove mods
|
|
||||||
- need a button to add mods from downloaded files
|
|
||||||
- search
|
|
||||||
|
|
||||||
== Misc
|
|
||||||
|
|
||||||
- settings
|
|
||||||
- open mod storage
|
|
||||||
|
|
||||||
== Managing the game
|
|
||||||
|
|
||||||
- deploy mods
|
|
||||||
-
|
|
||||||
|
|
||||||
== Preparing the game
|
|
||||||
|
|
||||||
- click "Install mods" to prepare the game files with the enabled mods
|
|
||||||
|
|
||||||
== Playing the game
|
|
||||||
|
|
||||||
- if overlay file systems are used, the game has to be started through the mod manager
|
|
|
@ -1,303 +0,0 @@
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::io::ErrorKind;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::eyre::{self, Context};
|
|
||||||
use color_eyre::{Help, Report, Result};
|
|
||||||
use druid::im::Vector;
|
|
||||||
use druid::ImageBuf;
|
|
||||||
use dtmt_shared::ModConfig;
|
|
||||||
use nexusmods::Api as NexusApi;
|
|
||||||
use tokio::fs::{self, DirEntry, File};
|
|
||||||
use tokio_stream::wrappers::ReadDirStream;
|
|
||||||
use tokio_stream::StreamExt;
|
|
||||||
|
|
||||||
use crate::state::{ActionState, InitialLoadResult, ModInfo, ModOrder, NexusInfo, PackageInfo};
|
|
||||||
use crate::util;
|
|
||||||
use crate::util::config::{ConfigSerialize, LoadOrderEntry};
|
|
||||||
|
|
||||||
use super::read_sjson_file;
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(state))]
|
|
||||||
pub(crate) async fn delete_mod(state: ActionState, info: &ModInfo) -> Result<()> {
|
|
||||||
let mod_dir = state.mod_dir.join(&info.id);
|
|
||||||
fs::remove_dir_all(&mod_dir)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to remove directory {}", mod_dir.display()))?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(state))]
|
|
||||||
pub(crate) async fn save_settings(state: ActionState) -> Result<()> {
|
|
||||||
let cfg = ConfigSerialize::from(&state);
|
|
||||||
|
|
||||||
tracing::info!("Saving settings to '{}'", state.config_path.display());
|
|
||||||
tracing::debug!(?cfg);
|
|
||||||
|
|
||||||
let data = serde_sjson::to_string(&cfg).wrap_err("Failed to serialize config")?;
|
|
||||||
|
|
||||||
fs::write(state.config_path.as_ref(), &data)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to write config to '{}'",
|
|
||||||
state.config_path.display()
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all,fields(
|
|
||||||
name = ?res.as_ref().map(|entry| entry.file_name())
|
|
||||||
))]
|
|
||||||
async fn read_mod_dir_entry(res: Result<DirEntry>) -> Result<ModInfo> {
|
|
||||||
let entry = res?;
|
|
||||||
let config_path = entry.path().join("dtmt.cfg");
|
|
||||||
let nexus_path = entry.path().join("nexus.sjson");
|
|
||||||
let index_path = entry.path().join("files.sjson");
|
|
||||||
|
|
||||||
let cfg: ModConfig = read_sjson_file(&config_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read mod config '{}'", config_path.display()))?;
|
|
||||||
|
|
||||||
let nexus: Option<NexusInfo> = match read_sjson_file(&nexus_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read Nexus info '{}'", nexus_path.display()))
|
|
||||||
{
|
|
||||||
Ok(nexus) => Some(nexus),
|
|
||||||
Err(err) if err.is::<std::io::Error>() => match err.downcast_ref::<std::io::Error>() {
|
|
||||||
Some(err) if err.kind() == std::io::ErrorKind::NotFound => None,
|
|
||||||
_ => return Err(err),
|
|
||||||
},
|
|
||||||
Err(err) => return Err(err),
|
|
||||||
};
|
|
||||||
|
|
||||||
let files: HashMap<String, Vec<String>> = if cfg.bundled {
|
|
||||||
read_sjson_file(&index_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read file index '{}'", index_path.display()))?
|
|
||||||
} else {
|
|
||||||
Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let image = if let Some(path) = &cfg.image {
|
|
||||||
let path = entry.path().join(path);
|
|
||||||
if let Ok(data) = fs::read(&path).await {
|
|
||||||
// Druid somehow doesn't return an error compatible with eyre, here.
|
|
||||||
// So we have to wrap through `Display` manually.
|
|
||||||
let img = match ImageBuf::from_data(&data) {
|
|
||||||
Ok(img) => img,
|
|
||||||
Err(err) => {
|
|
||||||
let err = Report::msg(err.to_string());
|
|
||||||
return Err(err)
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!("Failed to import image file '{}'", path.display())
|
|
||||||
})
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Supported formats are: PNG, JPEG, Bitmap and WebP".to_string()
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Some(img)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let packages = files
|
|
||||||
.into_iter()
|
|
||||||
.map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect())))
|
|
||||||
.collect();
|
|
||||||
let info = ModInfo::new(cfg, packages, image, nexus);
|
|
||||||
Ok(info)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(mod_order))]
|
|
||||||
pub(crate) async fn load_mods<'a, P, S>(mod_dir: P, mod_order: S) -> Result<Vector<Arc<ModInfo>>>
|
|
||||||
where
|
|
||||||
S: Iterator<Item = &'a LoadOrderEntry>,
|
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
|
||||||
let mod_dir = mod_dir.as_ref();
|
|
||||||
let read_dir = match fs::read_dir(mod_dir).await {
|
|
||||||
Ok(read_dir) => read_dir,
|
|
||||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
|
||||||
return Ok(Vector::new());
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
return Err(err)
|
|
||||||
.wrap_err_with(|| format!("Failed to open directory '{}'", mod_dir.display()));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let stream = ReadDirStream::new(read_dir)
|
|
||||||
.map(|res| res.wrap_err("Failed to read dir entry"))
|
|
||||||
.then(read_mod_dir_entry);
|
|
||||||
tokio::pin!(stream);
|
|
||||||
|
|
||||||
let mut mods: HashMap<String, ModInfo> = HashMap::new();
|
|
||||||
|
|
||||||
while let Some(res) = stream.next().await {
|
|
||||||
let info = res?;
|
|
||||||
mods.insert(info.id.clone(), info);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mods = mod_order
|
|
||||||
.filter_map(|entry| {
|
|
||||||
if let Some(mut info) = mods.remove(&entry.id) {
|
|
||||||
info.enabled = entry.enabled;
|
|
||||||
Some(Arc::new(info))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
Ok(mods)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn check_mod_order(state: &ActionState) -> Result<()> {
|
|
||||||
if tracing::enabled!(tracing::Level::DEBUG) {
|
|
||||||
let order = state
|
|
||||||
.mods
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.filter(|(_, i)| i.enabled)
|
|
||||||
.fold(String::new(), |mut s, (i, info)| {
|
|
||||||
s.push_str(&format!("{}: {} - {}\n", i, info.id, info.name));
|
|
||||||
s
|
|
||||||
});
|
|
||||||
|
|
||||||
tracing::debug!("Mod order:\n{}", order);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (i, mod_info) in state.mods.iter().enumerate().filter(|(_, i)| i.enabled) {
|
|
||||||
for dep in &mod_info.depends {
|
|
||||||
let dep_info = state.mods.iter().enumerate().find(|(_, m)| m.id == dep.id);
|
|
||||||
|
|
||||||
match dep_info {
|
|
||||||
Some((_, dep_info)) if !dep_info.enabled => {
|
|
||||||
eyre::bail!(
|
|
||||||
"Dependency '{}' ({}) must be enabled.",
|
|
||||||
dep_info.name,
|
|
||||||
dep.id
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Some((j, dep_info)) if dep.order == ModOrder::Before && j >= i => {
|
|
||||||
eyre::bail!(
|
|
||||||
"Dependency '{}' ({}) must be loaded before '{}'",
|
|
||||||
dep_info.name,
|
|
||||||
dep.id,
|
|
||||||
mod_info.name
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Some((j, dep_info)) if dep.order == ModOrder::After && j <= i => {
|
|
||||||
eyre::bail!(
|
|
||||||
"Dependency '{}' ({}) must be loaded after '{}'",
|
|
||||||
dep_info.name,
|
|
||||||
dep.id,
|
|
||||||
mod_info.name
|
|
||||||
);
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
eyre::bail!(
|
|
||||||
"Missing dependency '{}' for mod '{}'",
|
|
||||||
dep.id,
|
|
||||||
mod_info.name
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Some(_) => {
|
|
||||||
// All good
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(info, api), fields(id = info.id, name = info.name, version = info.version))]
|
|
||||||
async fn check_mod_update(info: Arc<ModInfo>, api: Arc<NexusApi>) -> Result<Option<ModInfo>> {
|
|
||||||
let Some(nexus) = &info.nexus else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
let updated_info = api
|
|
||||||
.mods_id(nexus.id)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to query mod {} from Nexus", nexus.id))?;
|
|
||||||
|
|
||||||
let mut info = Arc::unwrap_or_clone(info);
|
|
||||||
info.nexus = Some(NexusInfo::from(updated_info));
|
|
||||||
|
|
||||||
Ok(Some(info))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(state))]
|
|
||||||
pub(crate) async fn check_updates(state: ActionState) -> Result<Vec<ModInfo>> {
|
|
||||||
if state.nexus_api_key.is_empty() {
|
|
||||||
eyre::bail!("Nexus API key not set. Cannot check for updates.");
|
|
||||||
}
|
|
||||||
|
|
||||||
let api = NexusApi::new(state.nexus_api_key.to_string())
|
|
||||||
.wrap_err("Failed to initialize Nexus API")?;
|
|
||||||
let api = Arc::new(api);
|
|
||||||
|
|
||||||
let tasks = state
|
|
||||||
.mods
|
|
||||||
.iter()
|
|
||||||
.map(|info| check_mod_update(info.clone(), api.clone()));
|
|
||||||
|
|
||||||
let results = futures::future::join_all(tasks).await;
|
|
||||||
let updates = results
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|res| match res {
|
|
||||||
Ok(info) => info,
|
|
||||||
Err(err) => {
|
|
||||||
tracing::error!("{:?}", err);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Ok(updates)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) async fn load_initial(path: PathBuf, is_default: bool) -> Result<InitialLoadResult> {
|
|
||||||
let config = util::config::read_config(path, is_default)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to read config file")?;
|
|
||||||
|
|
||||||
// Create or truncate the log file
|
|
||||||
let log_path = config.data_dir.join("dtmm.log");
|
|
||||||
tokio::spawn(async move {
|
|
||||||
let _ = File::create(&log_path).await;
|
|
||||||
tracing::debug!("Truncated log file");
|
|
||||||
});
|
|
||||||
|
|
||||||
let game_info = tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to spawn task to collect Steam game info")?;
|
|
||||||
|
|
||||||
let game_info = match game_info {
|
|
||||||
Ok(game_info) => game_info,
|
|
||||||
Err(err) => {
|
|
||||||
tracing::error!("Failed to collect game info: {:?}", err);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if config.game_dir.is_none() && game_info.is_none() {
|
|
||||||
tracing::error!("No Game Directory set. Head to the 'Settings' tab to set it manually",);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mod_dir = config.data_dir.join("mods");
|
|
||||||
let mods = load_mods(mod_dir, config.mod_order.iter())
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to load mods")?;
|
|
||||||
|
|
||||||
Ok((config, mods))
|
|
||||||
}
|
|
|
@ -1,816 +0,0 @@
|
||||||
use std::io::{Cursor, ErrorKind};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::str::FromStr;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::eyre::Context;
|
|
||||||
use color_eyre::{eyre, Help, Report, Result};
|
|
||||||
use futures::StreamExt;
|
|
||||||
use futures::{stream, TryStreamExt};
|
|
||||||
use minijinja::Environment;
|
|
||||||
use sdk::filetype::lua;
|
|
||||||
use sdk::filetype::package::Package;
|
|
||||||
use sdk::murmur::Murmur64;
|
|
||||||
use sdk::{
|
|
||||||
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
|
|
||||||
};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use time::OffsetDateTime;
|
|
||||||
use tokio::fs::{self, DirEntry};
|
|
||||||
use tokio::io::AsyncWriteExt;
|
|
||||||
use tracing::Instrument;
|
|
||||||
|
|
||||||
use super::read_sjson_file;
|
|
||||||
use crate::controller::app::check_mod_order;
|
|
||||||
use crate::state::{ActionState, PackageInfo};
|
|
||||||
|
|
||||||
pub const MOD_BUNDLE_NAME: &str = "packages/mods";
|
|
||||||
pub const BOOT_BUNDLE_NAME: &str = "packages/boot";
|
|
||||||
pub const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
|
|
||||||
pub const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
|
|
||||||
pub const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
|
|
||||||
pub const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini";
|
|
||||||
pub const DEPLOYMENT_DATA_PATH: &str = "dtmm-deployment.sjson";
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct DeploymentData {
|
|
||||||
pub bundles: Vec<String>,
|
|
||||||
pub mod_folders: Vec<String>,
|
|
||||||
#[serde(with = "time::serde::iso8601")]
|
|
||||||
pub timestamp: OffsetDateTime,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
|
|
||||||
where
|
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
|
||||||
let path = path.as_ref();
|
|
||||||
let backup_path = {
|
|
||||||
let mut p = PathBuf::from(path);
|
|
||||||
let ext = if let Some(ext) = p.extension() {
|
|
||||||
ext.to_string_lossy().to_string() + ".bak"
|
|
||||||
} else {
|
|
||||||
String::from("bak")
|
|
||||||
};
|
|
||||||
p.set_extension(ext);
|
|
||||||
p
|
|
||||||
};
|
|
||||||
|
|
||||||
let file_name = path
|
|
||||||
.file_name()
|
|
||||||
.map(|s| s.to_string_lossy().to_string())
|
|
||||||
.unwrap_or_else(|| String::from("file"));
|
|
||||||
|
|
||||||
let bin = match fs::read(&backup_path).await {
|
|
||||||
Ok(bin) => bin,
|
|
||||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
|
||||||
// TODO: This doesn't need to be awaited here, yet.
|
|
||||||
// I only need to make sure it has finished before writing the changed bundle.
|
|
||||||
tracing::debug!(
|
|
||||||
"Backup does not exist. Backing up original {} to '{}'",
|
|
||||||
file_name,
|
|
||||||
backup_path.display()
|
|
||||||
);
|
|
||||||
fs::copy(path, &backup_path).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to back up {} '{}' to '{}'",
|
|
||||||
file_name,
|
|
||||||
path.display(),
|
|
||||||
backup_path.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
tracing::debug!("Reading {} from original '{}'", file_name, path.display());
|
|
||||||
fs::read(path).await.wrap_err_with(|| {
|
|
||||||
format!("Failed to read {} file: {}", file_name, path.display())
|
|
||||||
})?
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
return Err(err).wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to read {} from backup '{}'",
|
|
||||||
file_name,
|
|
||||||
backup_path.display()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
Ok(bin)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
async fn patch_game_settings(state: Arc<ActionState>) -> Result<()> {
|
|
||||||
let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH);
|
|
||||||
|
|
||||||
let settings = read_file_with_backup(&settings_path)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to read settings.ini")?;
|
|
||||||
let settings = String::from_utf8(settings).wrap_err("Settings.ini is not valid UTF-8")?;
|
|
||||||
|
|
||||||
let mut f = fs::File::create(&settings_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to open {}", settings_path.display()))?;
|
|
||||||
|
|
||||||
let Some(i) = settings.find("boot_script =") else {
|
|
||||||
eyre::bail!("couldn't find 'boot_script' field");
|
|
||||||
};
|
|
||||||
|
|
||||||
f.write_all(settings[0..i].as_bytes()).await?;
|
|
||||||
f.write_all(b"boot_script = \"scripts/mod_main\"").await?;
|
|
||||||
|
|
||||||
let Some(j) = settings[i..].find('\n') else {
|
|
||||||
eyre::bail!("couldn't find end of 'boot_script' field");
|
|
||||||
};
|
|
||||||
|
|
||||||
f.write_all(settings[(i + j)..].as_bytes()).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(package = info.name))]
|
|
||||||
fn make_package(info: &PackageInfo) -> Result<Package> {
|
|
||||||
let mut pkg = Package::new(info.name.clone(), PathBuf::new());
|
|
||||||
|
|
||||||
for f in &info.files {
|
|
||||||
let mut it = f.rsplit('.');
|
|
||||||
let file_type = it
|
|
||||||
.next()
|
|
||||||
.ok_or_else(|| eyre::eyre!("missing file extension"))
|
|
||||||
.and_then(BundleFileType::from_str)
|
|
||||||
.wrap_err("Invalid file name in package info")?;
|
|
||||||
let name: String = it.collect();
|
|
||||||
pkg.add_file(file_type, name);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(pkg)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn copy_recursive(
|
|
||||||
from: impl Into<PathBuf> + std::fmt::Debug,
|
|
||||||
to: impl AsRef<Path> + std::fmt::Debug,
|
|
||||||
) -> Result<()> {
|
|
||||||
let to = to.as_ref();
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn handle_dir(from: PathBuf) -> Result<Vec<(bool, DirEntry)>> {
|
|
||||||
let mut dir = fs::read_dir(&from)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to read directory")?;
|
|
||||||
let mut entries = Vec::new();
|
|
||||||
|
|
||||||
while let Some(entry) = dir.next_entry().await? {
|
|
||||||
let meta = entry.metadata().await.wrap_err_with(|| {
|
|
||||||
format!("Failed to get metadata for '{}'", entry.path().display())
|
|
||||||
})?;
|
|
||||||
entries.push((meta.is_dir(), entry));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(entries)
|
|
||||||
}
|
|
||||||
|
|
||||||
let base = from.into();
|
|
||||||
stream::unfold(vec![base.clone()], |mut state| async {
|
|
||||||
let from = state.pop()?;
|
|
||||||
let inner = match handle_dir(from).await {
|
|
||||||
Ok(entries) => {
|
|
||||||
for (is_dir, entry) in &entries {
|
|
||||||
if *is_dir {
|
|
||||||
state.push(entry.path());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
stream::iter(entries).map(Ok).left_stream()
|
|
||||||
}
|
|
||||||
Err(e) => stream::once(async { Err(e) }).right_stream(),
|
|
||||||
};
|
|
||||||
|
|
||||||
Some((inner, state))
|
|
||||||
})
|
|
||||||
.flatten()
|
|
||||||
.try_for_each(|(is_dir, entry)| {
|
|
||||||
let path = entry.path();
|
|
||||||
let dest = path
|
|
||||||
.strip_prefix(&base)
|
|
||||||
.map(|suffix| to.join(suffix))
|
|
||||||
.expect("all entries are relative to the directory we are walking");
|
|
||||||
|
|
||||||
async move {
|
|
||||||
if is_dir {
|
|
||||||
tracing::trace!("Creating directory '{}'", dest.display());
|
|
||||||
// Instead of trying to filter "already exists" errors out explicitly,
|
|
||||||
// we just ignore all. It'll fail eventually with the next copy operation.
|
|
||||||
let _ = fs::create_dir(&dest).await;
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
tracing::trace!("Copying file '{}' -> '{}'", path.display(), dest.display());
|
|
||||||
fs::copy(&path, &dest).await.map(|_| ()).wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to copy file '{}' -> '{}'",
|
|
||||||
path.display(),
|
|
||||||
dest.display()
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(state))]
|
|
||||||
async fn copy_mod_folders(state: Arc<ActionState>) -> Result<Vec<String>> {
|
|
||||||
let game_dir = Arc::clone(&state.game_dir);
|
|
||||||
|
|
||||||
let mut tasks = Vec::new();
|
|
||||||
|
|
||||||
for mod_info in state.mods.iter().filter(|m| m.enabled && !m.bundled) {
|
|
||||||
let span = tracing::trace_span!("copying legacy mod", name = mod_info.name);
|
|
||||||
let _enter = span.enter();
|
|
||||||
|
|
||||||
let mod_id = mod_info.id.clone();
|
|
||||||
let mod_dir = Arc::clone(&state.mod_dir);
|
|
||||||
let game_dir = Arc::clone(&game_dir);
|
|
||||||
|
|
||||||
let task = async move {
|
|
||||||
let from = mod_dir.join(&mod_id);
|
|
||||||
let to = game_dir.join("mods").join(&mod_id);
|
|
||||||
|
|
||||||
tracing::debug!(from = %from.display(), to = %to.display(), "Copying legacy mod '{}'", mod_id);
|
|
||||||
let _ = fs::create_dir_all(&to).await;
|
|
||||||
copy_recursive(&from, &to).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to copy legacy mod from '{}' to '{}'",
|
|
||||||
from.display(),
|
|
||||||
to.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok::<_, Report>(mod_id)
|
|
||||||
};
|
|
||||||
tasks.push(task);
|
|
||||||
}
|
|
||||||
|
|
||||||
let ids = futures::future::try_join_all(tasks).await?;
|
|
||||||
Ok(ids)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct TemplateDataMod {
|
|
||||||
id: String,
|
|
||||||
name: String,
|
|
||||||
bundled: bool,
|
|
||||||
version: String,
|
|
||||||
init: String,
|
|
||||||
data: Option<String>,
|
|
||||||
localization: Option<String>,
|
|
||||||
packages: Vec<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut env = Environment::new();
|
|
||||||
env.set_trim_blocks(true);
|
|
||||||
env.set_lstrip_blocks(true);
|
|
||||||
env.add_template("mod_data.lua", include_str!("../../assets/mod_data.lua.j2"))
|
|
||||||
.wrap_err("Failed to compile template for `mod_data.lua`")?;
|
|
||||||
let tmpl = env
|
|
||||||
.get_template("mod_data.lua")
|
|
||||||
.wrap_err("Failed to get template `mod_data.lua`")?;
|
|
||||||
|
|
||||||
let data: Vec<TemplateDataMod> = state
|
|
||||||
.mods
|
|
||||||
.iter()
|
|
||||||
.filter_map(|m| {
|
|
||||||
if !m.enabled {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(TemplateDataMod {
|
|
||||||
id: m.id.clone(),
|
|
||||||
name: m.name.clone(),
|
|
||||||
bundled: m.bundled,
|
|
||||||
version: m.version.clone(),
|
|
||||||
init: m.resources.init.to_string_lossy().to_string(),
|
|
||||||
data: m
|
|
||||||
.resources
|
|
||||||
.data
|
|
||||||
.as_ref()
|
|
||||||
.map(|p| p.to_string_lossy().to_string()),
|
|
||||||
localization: m
|
|
||||||
.resources
|
|
||||||
.localization
|
|
||||||
.as_ref()
|
|
||||||
.map(|p| p.to_string_lossy().to_string()),
|
|
||||||
packages: m.packages.iter().map(|p| p.name.clone()).collect(),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let lua = tmpl
|
|
||||||
.render(minijinja::context!(mods => data))
|
|
||||||
.wrap_err("Failed to render template `mod_data.lua`")?;
|
|
||||||
|
|
||||||
tracing::debug!("mod_data.lua:\n{}", lua);
|
|
||||||
|
|
||||||
Ok(lua)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Builds the combined mod bundle and deploys each bundled mod's own bundle
/// files into the game's `bundle/` directory.
///
/// The mod bundle receives the generated `mod_data.lua`, the static loader
/// scripts, and one package file per mod package. Per-mod bundle files are
/// copied concurrently (at most 10 at a time). Returns every bundle that was
/// written, so the caller can register them in the bundle database.
#[tracing::instrument(skip_all)]
async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
    let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string());
    let mut tasks = Vec::new();

    // Shared so each copy task below can hold a cheap clone of the path.
    let bundle_dir = Arc::new(state.game_dir.join("bundle"));

    let mut bundles = Vec::new();

    // Compiles a Lua source string and stores it in the mod bundle.
    // Captures `mod_bundle` mutably, so it must not be held across the
    // loop below that also touches `mod_bundle` directly.
    let mut add_lua_asset = |name: &str, data: &str| {
        let span = tracing::info_span!("Compiling Lua", name, data_len = data.len());
        let _enter = span.enter();

        let file = lua::compile(name.to_string(), data).wrap_err("Failed to compile Lua")?;

        mod_bundle.add_file(file);

        Ok::<_, Report>(())
    };

    // The generated mod list plus the two static loader scripts.
    build_mod_data_lua(state.clone())
        .wrap_err("Failed to build 'mod_data.lua'")
        .and_then(|data| add_lua_asset(MOD_DATA_SCRIPT, &data))?;
    add_lua_asset("scripts/mods/init", include_str!("../../assets/init.lua"))?;
    add_lua_asset(
        "scripts/mods/mod_loader",
        include_str!("../../assets/mod_loader.lua"),
    )?;

    tracing::trace!("Preparing tasks to deploy bundle files");

    // Only bundled mods ship their own bundle files; legacy (non-bundled)
    // mods are handled by the folder-copy path elsewhere.
    for mod_info in state.mods.iter().filter(|m| m.enabled && m.bundled) {
        let span = tracing::trace_span!("building mod packages", name = mod_info.name);
        let _enter = span.enter();

        let mod_dir = state.mod_dir.join(&mod_info.id);
        for pkg_info in &mod_info.packages {
            let span = tracing::trace_span!("building package", name = pkg_info.name);
            let _enter = span.enter();

            tracing::trace!(
                "Building package {} for mod {}",
                pkg_info.name,
                mod_info.name
            );

            // Serialize the package definition and add it to the mod bundle.
            let pkg = make_package(pkg_info).wrap_err("Failed to make package")?;
            let mut variant = BundleFileVariant::new();
            let bin = pkg
                .to_binary()
                .wrap_err("Failed to serialize package to binary")?;
            variant.set_data(bin);
            let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package);
            file.add_variant(variant);

            tracing::trace!(
                "Compiled package {} for mod {}",
                pkg_info.name,
                mod_info.name
            );

            mod_bundle.add_file(file);

            // On disk, bundles are named by the 16-hex-digit Murmur64 hash
            // of their package name.
            let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name));
            let src = mod_dir.join(&bundle_name);
            let dest = bundle_dir.join(&bundle_name);
            let pkg_name = pkg_info.name.clone();
            let mod_name = mod_info.name.clone();

            // Explicitly drop the guard, so that we can move the span
            // into the async operation
            drop(_enter);

            let ctx = state.ctx.clone();

            let task = async move {
                // Parse the bundle first so a corrupt file fails the
                // deployment before anything is copied.
                let bundle = {
                    let bin = fs::read(&src).await.wrap_err_with(|| {
                        format!("Failed to read bundle file '{}'", src.display())
                    })?;
                    let name = Bundle::get_name_from_path(&ctx, &src);
                    Bundle::from_binary(&ctx, name, bin)
                        .wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?
                };

                tracing::debug!(
                    src = %src.display(),
                    dest = %dest.display(),
                    "Copying bundle '{}' for mod '{}'",
                    pkg_name,
                    mod_name,
                );
                // We attempt to remove any previous file, so that the hard link can be created.
                // We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
                // may be possible despite an error here, or the error will be reported by it anyways.
                // TODO: There is a chance that we delete an actual game bundle, but with 64bit
                // hashes, it's low enough for now, and the setup required to detect
                // "game bundle vs mod bundle" is non-trivial.
                let _ = fs::remove_file(&dest).await;
                fs::copy(&src, &dest).await.wrap_err_with(|| {
                    format!(
                        "Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
                        src.display(),
                        dest.display()
                    )
                })?;

                Ok::<Bundle, color_eyre::Report>(bundle)
            }
            .instrument(span);

            tasks.push(task);
        }
    }

    tracing::debug!("Copying {} mod bundles", tasks.len());

    // Run the copy tasks with bounded concurrency to avoid opening an
    // unbounded number of files at once.
    let mut tasks = stream::iter(tasks).buffer_unordered(10);

    while let Some(res) = tasks.next().await {
        let bundle = res?;
        bundles.push(bundle);
    }

    // Finally, write the combined mod bundle itself, named after the
    // Murmur64 hash of its name like every other bundle.
    {
        let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64()));
        tracing::trace!("Writing mod bundle to '{}'", path.display());
        fs::write(&path, mod_bundle.to_binary()?)
            .await
            .wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
    }

    bundles.push(mod_bundle);

    Ok(bundles)
}
|
|
||||||
|
|
||||||
/// Patches the game's boot bundle so the mod loader runs at startup.
///
/// The boot bundle is read through `read_file_with_backup` (creating a
/// pristine backup on first run), gains a package file referencing every mod
/// package plus the mod-data script, and the rendered `mod_main.lua` boot
/// script. The patched bundle is written back in place and returned so it
/// can be added to the bundle database.
///
/// `deployment_info` is the serialized SJSON deployment state; it is
/// embedded into the boot script as an escaped Lua string literal.
#[tracing::instrument(skip_all)]
async fn patch_boot_bundle(
    state: Arc<ActionState>,
    deployment_info: &String,
) -> Result<Vec<Bundle>> {
    let bundle_dir = Arc::new(state.game_dir.join("bundle"));
    // On disk, the boot bundle is named by the hex Murmur64 hash of its name.
    let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));

    let mut bundles = Vec::with_capacity(2);

    let mut boot_bundle = async {
        let bin = read_file_with_backup(&bundle_path)
            .await
            .wrap_err("Failed to read boot bundle")?;

        Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin)
            .wrap_err("Failed to parse boot bundle")
    }
    .instrument(tracing::trace_span!("read boot bundle"))
    .await
    .wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;

    {
        tracing::trace!("Adding mod package file to boot bundle");
        let span = tracing::trace_span!("create mod package file");
        let _enter = span.enter();

        // A package that references every mod's packages, so the engine
        // knows to load them alongside the boot bundle.
        let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new());

        for mod_info in &state.mods {
            for pkg_info in &mod_info.packages {
                pkg.add_file(BundleFileType::Package, &pkg_info.name);
            }
        }

        pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT);

        let mut variant = BundleFileVariant::new();
        variant.set_data(pkg.to_binary()?);
        let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package);
        f.add_variant(variant);

        boot_bundle.add_file(f);
    }

    {
        let span = tracing::debug_span!("Importing mod main script");
        let _enter = span.enter();

        // Render `mod_main.lua` from its Jinja template; trimming keeps the
        // output free of stray blank lines from template control blocks.
        let mut env = Environment::new();
        env.set_trim_blocks(true);
        env.set_lstrip_blocks(true);
        env.add_template("mod_main.lua", include_str!("../../assets/mod_main.lua.j2"))
            .wrap_err("Failed to compile template for `mod_main.lua`")?;
        let tmpl = env
            .get_template("mod_main.lua")
            .wrap_err("Failed to get template `mod_main.lua`")?;

        let is_io_enabled = if state.is_io_enabled { "true" } else { "false" };
        // Escape quotes and newlines so the SJSON blob survives being
        // embedded in a double-quoted Lua string literal.
        let deployment_info = deployment_info.replace("\"", "\\\"").replace("\n", "\\n");
        let lua = tmpl
            .render(minijinja::context!(is_io_enabled => is_io_enabled, deployment_info => deployment_info))
            .wrap_err("Failed to render template `mod_main.lua`")?;

        tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua);
        let file = lua::compile(MOD_BOOT_SCRIPT.to_string(), lua)
            .wrap_err("Failed to compile mod main Lua file")?;

        boot_bundle.add_file(file);
    }

    // Write the patched bundle back over the original (the pristine copy is
    // preserved by the backup made above).
    async {
        let bin = boot_bundle
            .to_binary()
            .wrap_err("Failed to serialize boot bundle")?;
        fs::write(&bundle_path, bin)
            .await
            .wrap_err_with(|| format!("Failed to write main bundle: {}", bundle_path.display()))
    }
    .instrument(tracing::trace_span!("write boot bundle"))
    .await?;

    bundles.push(boot_bundle);

    Ok(bundles)
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
|
|
||||||
async fn patch_bundle_database<B>(state: Arc<ActionState>, bundles: B) -> Result<()>
|
|
||||||
where
|
|
||||||
B: AsRef<[Bundle]>,
|
|
||||||
{
|
|
||||||
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
|
|
||||||
let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME);
|
|
||||||
|
|
||||||
let mut db = {
|
|
||||||
let bin = read_file_with_backup(&database_path)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to read bundle database")?;
|
|
||||||
let mut r = Cursor::new(bin);
|
|
||||||
let db = BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?;
|
|
||||||
tracing::trace!("Finished parsing bundle database");
|
|
||||||
db
|
|
||||||
};
|
|
||||||
|
|
||||||
for bundle in bundles.as_ref() {
|
|
||||||
tracing::trace!("Adding '{}' to bundle database", bundle.name().display());
|
|
||||||
db.add_bundle(bundle);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let bin = db
|
|
||||||
.to_binary()
|
|
||||||
.wrap_err("Failed to serialize bundle database")?;
|
|
||||||
fs::write(&database_path, bin).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"failed to write bundle database to '{}'",
|
|
||||||
database_path.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
|
|
||||||
fn build_deployment_data(
|
|
||||||
bundles: impl AsRef<[Bundle]>,
|
|
||||||
mod_folders: impl AsRef<[String]>,
|
|
||||||
) -> Result<String> {
|
|
||||||
let info = DeploymentData {
|
|
||||||
timestamp: OffsetDateTime::now_utc(),
|
|
||||||
bundles: bundles
|
|
||||||
.as_ref()
|
|
||||||
.iter()
|
|
||||||
.map(|bundle| format!("{:x}", bundle.name().to_murmur64()))
|
|
||||||
.collect(),
|
|
||||||
// TODO:
|
|
||||||
mod_folders: mod_folders
|
|
||||||
.as_ref()
|
|
||||||
.iter()
|
|
||||||
.map(|folder| folder.clone())
|
|
||||||
.collect(),
|
|
||||||
};
|
|
||||||
serde_sjson::to_string(&info).wrap_err("Failed to serizalize deployment data")
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Top-level mod deployment: builds and installs everything into the game
/// directory.
///
/// Steps, in order:
/// 1. Refuse to run over a dtkit-patch-based installation.
/// 2. Gather game info and any previous deployment's data (concurrently).
/// 3. If the game updated since the last deployment, re-create the backups
///    of the bundle database and boot bundle from the (updated) originals.
/// 4. Copy legacy mod folders, build mod bundles, patch the boot bundle.
/// 5. Remove bundles/folders left over from the previous deployment that are
///    no longer part of this one.
/// 6. Patch game settings and the bundle database, then persist the new
///    deployment data.
#[tracing::instrument(skip_all, fields(
    game_dir = %state.game_dir.display(),
    mods = state.mods.len()
))]
pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
    let state = Arc::new(state);
    let bundle_dir = state.game_dir.join("bundle");
    let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()));

    // A `<boot bundle>.patch_999` file marks a dtkit-patch installation,
    // which this deployment scheme cannot safely coexist with.
    if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999")))
        .await
        .is_ok()
    {
        let err = eyre::eyre!("Found dtkit-patch-based mod installation.");
        return Err(err)
            .with_suggestion(|| {
                "If you're a mod author and saved projects directly in 'mods/', \
                use DTMT to migrate them to the new project structure."
                    .to_string()
            })
            .with_suggestion(|| {
                "Click 'Reset Game' to remove the previous mod installation.".to_string()
            });
    }

    // The first branch only validates that the bundle directory exists;
    // its success value is discarded.
    let (_, game_info, deployment_info) = tokio::try_join!(
        async {
            fs::metadata(&bundle_dir)
                .await
                .wrap_err("Failed to open game bundle directory")
                .with_suggestion(|| "Double-check 'Game Directory' in the Settings tab.")
        },
        async {
            tokio::task::spawn_blocking(dtmt_shared::collect_game_info)
                .await
                .map_err(Report::new)
        },
        async {
            // A missing deployment-data file just means this is the first
            // deployment; any other error is fatal.
            let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
            match read_sjson_file::<_, DeploymentData>(&path).await {
                Ok(data) => Ok(Some(data)),
                Err(err) => {
                    if let Some(err) = err.downcast_ref::<std::io::Error>()
                        && err.kind() == ErrorKind::NotFound
                    {
                        Ok(None)
                    } else {
                        Err(err).wrap_err(format!(
                            "Failed to read deployment data from: {}",
                            path.display()
                        ))
                    }
                }
            }
        }
    )
    .wrap_err("Failed to gather deployment information")?;

    // Failing to collect game info is non-fatal; deployment continues
    // without the game-update check below.
    let game_info = match game_info {
        Ok(game_info) => game_info,
        Err(err) => {
            tracing::error!("Failed to collect game info: {:#?}", err);
            None
        }
    };

    tracing::debug!(?game_info, ?deployment_info);

    if let Some(game_info) = game_info {
        if deployment_info
            .as_ref()
            .map(|i| game_info.last_updated > i.timestamp)
            .unwrap_or(false)
        {
            tracing::warn!(
                "Game was updated since last mod deployment. \
                Attempting to reconcile game files."
            );

            // The game update replaced our patched files with fresh ones, so
            // the current on-disk versions are pristine again: re-create the
            // backups from them before we re-patch.
            tokio::try_join!(
                async {
                    let path = bundle_dir.join(BUNDLE_DATABASE_NAME);
                    let backup_path = path.with_extension("data.bak");

                    fs::copy(&path, &backup_path)
                        .await
                        .wrap_err("Failed to re-create backup for bundle database.")
                },
                async {
                    let path = bundle_dir.join(boot_bundle_path);
                    let backup_path = path.with_extension("bak");

                    fs::copy(&path, &backup_path)
                        .await
                        .wrap_err("Failed to re-create backup for boot bundle")
                }
            )
            .with_suggestion(|| {
                "Reset the game using 'Reset Game', then verify game files.".to_string()
            })?;

            tracing::info!(
                "Successfully re-created game file backups. \
                Continuing mod deployment."
            );
        }
    }

    check_mod_order(&state)?;

    tracing::info!(
        "Deploying {} mods to '{}'.",
        state.mods.iter().filter(|i| i.enabled).count(),
        bundle_dir.display()
    );

    tracing::info!("Copy legacy mod folders");
    let mod_folders = copy_mod_folders(state.clone())
        .await
        .wrap_err("Failed to copy mod folders")?;

    tracing::info!("Build mod bundles");
    let mut bundles = build_bundles(state.clone())
        .await
        .wrap_err("Failed to build mod bundles")?;

    let new_deployment_info = build_deployment_data(&bundles, &mod_folders)
        .wrap_err("Failed to build new deployment data")?;

    tracing::info!("Patch boot bundle");
    let mut boot_bundles = patch_boot_bundle(state.clone(), &new_deployment_info)
        .await
        .wrap_err("Failed to patch boot bundle")?;
    bundles.append(&mut boot_bundles);

    if let Some(info) = &deployment_info {
        let bundle_dir = Arc::new(bundle_dir);
        // Remove bundles from the previous deployment that don't match the current one.
        // I.e. mods that used to be installed/enabled but aren't anymore.
        {
            let tasks = info.bundles.iter().cloned().filter_map(|file_name| {
                // NOTE(review): `build_deployment_data` records names via
                // `format!("{:x}", bundle.name().to_murmur64())`, but this
                // compares against `format!("{:016x}", b2.name())` — confirm
                // both produce the same zero-padded 16-digit form, or stale
                // bundles may never match and never be cleaned up.
                let is_being_deployed = bundles.iter().any(|b2| {
                    let name = format!("{:016x}", b2.name());
                    file_name == name
                });

                if !is_being_deployed {
                    let bundle_dir = bundle_dir.clone();
                    let task = async move {
                        let path = bundle_dir.join(&file_name);

                        tracing::debug!("Removing unused bundle '{}'", file_name);

                        // Failure to delete a stale bundle is logged but does
                        // not abort the deployment.
                        if let Err(err) = fs::remove_file(&path).await.wrap_err_with(|| {
                            format!("Failed to remove unused bundle '{}'", path.display())
                        }) {
                            tracing::error!("{:?}", err);
                        }
                    };
                    Some(task)
                } else {
                    None
                }
            });

            futures::future::join_all(tasks).await;
        }

        // Do the same thing for mod folders
        {
            let tasks = info.mod_folders.iter().filter_map(|mod_id| {
                let is_being_deployed = mod_folders.iter().any(|id| id == mod_id);

                if !is_being_deployed {
                    // NOTE(review): this removes from `<game>/bundle/mods/`,
                    // while `copy_mod_folders` appears to copy legacy mods to
                    // `<game>/mods/` — verify both sides use the same root.
                    let path = bundle_dir.join("mods").join(mod_id);
                    tracing::debug!("Removing unused mod folder '{}'", path.display());

                    let task = async move {
                        if let Err(err) = fs::remove_dir_all(&path).await.wrap_err_with(|| {
                            format!("Failed to remove unused legacy mod '{}'", path.display())
                        }) {
                            tracing::error!("{:?}", err);
                        }
                    };

                    Some(task)
                } else {
                    None
                }
            });
            futures::future::join_all(tasks).await;
        }
    }

    tracing::info!("Patch game settings");
    patch_game_settings(state.clone())
        .await
        .wrap_err("Failed to patch game settings")?;

    tracing::info!("Patching bundle database");
    patch_bundle_database(state.clone(), &bundles)
        .await
        .wrap_err("Failed to patch bundle database")?;

    tracing::info!("Writing deployment data");
    {
        let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
        fs::write(&path, &new_deployment_info)
            .await
            .wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
    }

    tracing::info!("Finished deploying mods");
    Ok(())
}
|
|
|
@ -1,259 +0,0 @@
|
||||||
use std::io::{self, ErrorKind};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::eyre::Context;
|
|
||||||
use color_eyre::{eyre, Result};
|
|
||||||
use sdk::murmur::Murmur64;
|
|
||||||
use tokio::fs::{self};
|
|
||||||
use tokio::io::AsyncWriteExt;
|
|
||||||
|
|
||||||
use crate::controller::deploy::{
|
|
||||||
DeploymentData, BOOT_BUNDLE_NAME, BUNDLE_DATABASE_NAME, DEPLOYMENT_DATA_PATH,
|
|
||||||
};
|
|
||||||
use crate::state::ActionState;
|
|
||||||
|
|
||||||
use super::deploy::SETTINGS_FILE_PATH;
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
|
|
||||||
where
|
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
|
||||||
let path = path.as_ref();
|
|
||||||
let backup_path = {
|
|
||||||
let mut p = PathBuf::from(path);
|
|
||||||
let ext = if let Some(ext) = p.extension() {
|
|
||||||
ext.to_string_lossy().to_string() + ".bak"
|
|
||||||
} else {
|
|
||||||
String::from("bak")
|
|
||||||
};
|
|
||||||
p.set_extension(ext);
|
|
||||||
p
|
|
||||||
};
|
|
||||||
|
|
||||||
let file_name = path
|
|
||||||
.file_name()
|
|
||||||
.map(|s| s.to_string_lossy().to_string())
|
|
||||||
.unwrap_or_else(|| String::from("file"));
|
|
||||||
|
|
||||||
let bin = match fs::read(&backup_path).await {
|
|
||||||
Ok(bin) => bin,
|
|
||||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
|
||||||
// TODO: This doesn't need to be awaited here, yet.
|
|
||||||
// I only need to make sure it has finished before writing the changed bundle.
|
|
||||||
tracing::debug!(
|
|
||||||
"Backup does not exist. Backing up original {} to '{}'",
|
|
||||||
file_name,
|
|
||||||
backup_path.display()
|
|
||||||
);
|
|
||||||
fs::copy(path, &backup_path).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to back up {} '{}' to '{}'",
|
|
||||||
file_name,
|
|
||||||
path.display(),
|
|
||||||
backup_path.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
tracing::debug!("Reading {} from original '{}'", file_name, path.display());
|
|
||||||
fs::read(path).await.wrap_err_with(|| {
|
|
||||||
format!("Failed to read {} file: {}", file_name, path.display())
|
|
||||||
})?
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
return Err(err).wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to read {} from backup '{}'",
|
|
||||||
file_name,
|
|
||||||
backup_path.display()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
Ok(bin)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
async fn patch_game_settings(state: Arc<ActionState>) -> Result<()> {
|
|
||||||
let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH);
|
|
||||||
|
|
||||||
let settings = read_file_with_backup(&settings_path)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to read settings.ini")?;
|
|
||||||
let settings = String::from_utf8(settings).wrap_err("Settings.ini is not valid UTF-8")?;
|
|
||||||
|
|
||||||
let mut f = fs::File::create(&settings_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to open {}", settings_path.display()))?;
|
|
||||||
|
|
||||||
let Some(i) = settings.find("boot_script =") else {
|
|
||||||
eyre::bail!("couldn't find 'boot_script' field");
|
|
||||||
};
|
|
||||||
|
|
||||||
f.write_all(settings[0..i].as_bytes()).await?;
|
|
||||||
f.write_all(b"boot_script = \"scripts/mod_main\"").await?;
|
|
||||||
|
|
||||||
let Some(j) = settings[i..].find('\n') else {
|
|
||||||
eyre::bail!("couldn't find end of 'boot_script' field");
|
|
||||||
};
|
|
||||||
|
|
||||||
f.write_all(settings[(i + j)..].as_bytes()).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
async fn reset_dtkit_patch(state: ActionState) -> Result<()> {
|
|
||||||
let bundle_dir = state.game_dir.join("bundle");
|
|
||||||
|
|
||||||
{
|
|
||||||
let path = bundle_dir.join(BUNDLE_DATABASE_NAME);
|
|
||||||
let backup_path = path.with_extension("data.bak");
|
|
||||||
fs::rename(&backup_path, &path).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to move bundle database backup '{}' -> '{}'",
|
|
||||||
backup_path.display(),
|
|
||||||
path.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
tracing::trace!("Reverted bundle database from backup");
|
|
||||||
}
|
|
||||||
|
|
||||||
for path in [
|
|
||||||
bundle_dir.join(format!(
|
|
||||||
"{:016x}.patch_999",
|
|
||||||
Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())
|
|
||||||
)),
|
|
||||||
state.game_dir.join("binaries/mod_loader"),
|
|
||||||
state.game_dir.join("toggle_darktide_mods.bat"),
|
|
||||||
state.game_dir.join("README.md"),
|
|
||||||
] {
|
|
||||||
match fs::remove_file(&path).await {
|
|
||||||
Ok(_) => tracing::trace!("Removed file '{}'", path.display()),
|
|
||||||
Err(err) if err.kind() != io::ErrorKind::NotFound => {
|
|
||||||
tracing::error!("Failed to remove file '{}': {}", path.display(), err)
|
|
||||||
}
|
|
||||||
Err(_) => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// We deliberately skip the `mods/` directory here.
|
|
||||||
// Many modders did their development right in there, and as people are prone to not read
|
|
||||||
// error messages and guides in full, there is bound to be someone who would have
|
|
||||||
// deleted all their source code if this removed the `mods/` folder.
|
|
||||||
for path in [state.game_dir.join("tools")] {
|
|
||||||
match fs::remove_dir_all(&path).await {
|
|
||||||
Ok(_) => tracing::trace!("Removed directory '{}'", path.display()),
|
|
||||||
Err(err) if err.kind() != io::ErrorKind::NotFound => {
|
|
||||||
tracing::error!("Failed to remove directory '{}': {}", path.display(), err)
|
|
||||||
}
|
|
||||||
Err(_) => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tracing::info!("Removed dtkit-patch-based mod installation.");
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Reverts a mod deployment, restoring the game's original files.
///
/// Handles three situations:
/// - A dtkit-patch installation is detected and delegated to
///   `reset_dtkit_patch`.
/// - No deployment data exists: nothing to do.
/// - Otherwise: deployed bundles are deleted, the bundle database, boot
///   bundle and settings file are restored from their `.bak` copies, and the
///   deployment data file is removed. Restore failures are logged per file
///   but do not abort the reset.
#[tracing::instrument(skip(state))]
pub(crate) async fn reset_mod_deployment(state: ActionState) -> Result<()> {
    // On disk, the boot bundle is named by the 16-hex-digit Murmur64 hash
    // of its name.
    let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()));
    // Files that have a `.bak` sibling to restore from.
    let paths = [BUNDLE_DATABASE_NAME, &boot_bundle_path, SETTINGS_FILE_PATH];
    let bundle_dir = state.game_dir.join("bundle");

    tracing::info!("Resetting mod deployment in {}", bundle_dir.display());

    if fs::metadata(bundle_dir.join(format!("{boot_bundle_path}.patch_999")))
        .await
        .is_ok()
    {
        tracing::info!("Found dtkit-patch-based mod installation. Removing.");
        return reset_dtkit_patch(state).await;
    }

    tracing::debug!("Reading mod deployment");

    let info: DeploymentData = {
        let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
        let data = match fs::read(&path).await {
            Ok(data) => data,
            // No deployment data means nothing was deployed.
            Err(err) if err.kind() == ErrorKind::NotFound => {
                tracing::info!("No deployment to reset");
                return Ok(());
            }
            Err(err) => {
                return Err(err).wrap_err_with(|| {
                    format!("Failed to read deployment info at '{}'", path.display())
                });
            }
        };

        let data = String::from_utf8(data).wrap_err("Invalid UTF8 in deployment data")?;

        serde_sjson::from_str(&data).wrap_err("Invalid SJSON in deployment data")?
    };

    // Delete every bundle the previous deployment wrote. Already-missing
    // files are fine; other failures are logged and skipped.
    for name in info.bundles {
        let path = bundle_dir.join(name);

        match fs::remove_file(&path).await {
            Ok(_) => {}
            Err(err) if err.kind() == ErrorKind::NotFound => {}
            Err(err) => {
                tracing::error!("Failed to remove '{}': {:?}", path.display(), err);
            }
        };
    }

    // Restore each patched file from its backup, then delete the backup.
    for p in paths {
        let path = bundle_dir.join(p);
        let backup = bundle_dir.join(&format!("{}.bak", p));

        // Grouped in an async block so one `?`-chain covers both steps and
        // a failure can be reported per file without aborting the loop.
        let res = async {
            tracing::debug!(
                "Copying from backup: {} -> {}",
                backup.display(),
                path.display()
            );

            fs::copy(&backup, &path)
                .await
                .wrap_err_with(|| format!("Failed to copy from '{}'", backup.display()))?;

            tracing::debug!("Deleting backup: {}", backup.display());

            match fs::remove_file(&backup).await {
                Ok(_) => Ok(()),
                Err(err) if err.kind() == ErrorKind::NotFound => Ok(()),
                Err(err) => {
                    Err(err).wrap_err_with(|| format!("Failed to remove '{}'", backup.display()))
                }
            }
        }
        .await;

        if let Err(err) = res {
            tracing::error!(
                "Failed to restore '{}' from backup. You may need to verify game files. Error: {:?}",
                &p,
                err
            );
        }
    }

    // Finally, drop the deployment data itself so a later reset is a no-op.
    {
        let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
        if let Err(err) = fs::remove_file(&path).await {
            tracing::error!(
                "Failed to remove deployment data '{}': {:?}",
                path.display(),
                err
            );
        }
    }

    tracing::info!("Reset finished");

    Ok(())
}
|
|
|
@ -1,584 +0,0 @@
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::ffi::CStr;
|
|
||||||
use std::io::{Cursor, Read, Seek, Write};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::eyre::{self, Context};
|
|
||||||
use color_eyre::{Help, Report, Result};
|
|
||||||
use druid::im::Vector;
|
|
||||||
use druid::{FileInfo, ImageBuf};
|
|
||||||
use dtmt_shared::{ModConfig, ModConfigResources};
|
|
||||||
use luajit2_sys as lua;
|
|
||||||
use nexusmods::Api as NexusApi;
|
|
||||||
use tokio::fs;
|
|
||||||
use zip::ZipArchive;
|
|
||||||
|
|
||||||
use crate::state::{ActionState, ModInfo, NexusInfo, PackageInfo};
|
|
||||||
|
|
||||||
fn find_archive_file<R: Read + Seek>(
|
|
||||||
archive: &ZipArchive<R>,
|
|
||||||
name: impl AsRef<str>,
|
|
||||||
) -> Option<String> {
|
|
||||||
let path = archive
|
|
||||||
.file_names()
|
|
||||||
.find(|path| path.ends_with(name.as_ref()))
|
|
||||||
.map(|s| s.to_string());
|
|
||||||
path
|
|
||||||
}
|
|
||||||
|
|
||||||
fn image_data_to_buffer(data: impl AsRef<[u8]>) -> Result<ImageBuf> {
|
|
||||||
// Druid somehow doesn't return an error compatible with eyre, here.
|
|
||||||
// So we have to wrap through `Display` manually.
|
|
||||||
ImageBuf::from_data(data.as_ref()).map_err(|err| {
|
|
||||||
Report::msg(err.to_string())
|
|
||||||
.wrap_err("Invalid image data")
|
|
||||||
.suggestion("Supported formats are: PNG, JPEG, Bitmap and WebP")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Runs the content of a `.mod` file to extract what data we can get
// from legacy mods.
// 1. Create a global function `new_mod` that stores
//    the relevant bits in global variables.
// 2. Run the `.mod` file, which will return a table.
// 3. Run the `run` function from that table.
// 4. Access the global variables from #1.
//
// Returns the mod's ID and the script/data/localization resource paths.
//
// NOTE(review): when one of the `?` operators below propagates an error,
// `lua::lua_close` is not called for that path, so the Lua state leaks.
// TODO confirm whether that is acceptable for this short-lived parse.
#[tracing::instrument]
fn parse_mod_id_file(data: &str) -> Result<(String, ModConfigResources)> {
    tracing::debug!("Parsing mod file:\n{}", data);

    let ret = unsafe {
        let state = lua::luaL_newstate();
        lua::luaL_openlibs(state);

        // Lua prelude: a stub `fassert` plus a `new_mod` that captures the
        // values the `.mod` file passes in via global variables, so they can
        // be read back out after the script has run. NUL-terminated because
        // `luaL_loadstring` expects a C string.
        let run = b"
            function fassert() end
            function new_mod(id, resources)
                _G.id = id
                _G.script = resources.mod_script
                _G.data = resources.mod_data
                _G.localization = resources.mod_localization
            end
        \0";
        // Compile the prelude. On error, the message is on top of the stack.
        match lua::luaL_loadstring(state, run.as_ptr() as _) as u32 {
            lua::LUA_OK => {}
            lua::LUA_ERRSYNTAX => {
                let err = lua::lua_tostring(state, -1);
                let err = CStr::from_ptr(err).to_string_lossy().to_string();

                lua::lua_close(state);

                eyre::bail!("Invalid syntax: {}", err);
            }
            lua::LUA_ERRMEM => {
                lua::lua_close(state);
                eyre::bail!("Failed to allocate sufficient memory to create `new_mod`")
            }
            _ => unreachable!(),
        }

        // Execute the prelude so `fassert`/`new_mod` become globals.
        match lua::lua_pcall(state, 0, 0, 0) as u32 {
            lua::LUA_OK => {}
            lua::LUA_ERRRUN => {
                let err = lua::lua_tostring(state, -1);
                let err = CStr::from_ptr(err).to_string_lossy().to_string();

                lua::lua_close(state);

                eyre::bail!("Failed to run buffer: {}", err);
            }
            lua::LUA_ERRMEM => {
                lua::lua_close(state);
                eyre::bail!("Failed to allocate sufficient memory to run buffer")
            }
            // We don't use an error handler function, so this should be unreachable
            lua::LUA_ERRERR => unreachable!(),
            _ => unreachable!(),
        }

        // Compile the `.mod` file itself; `name` is the chunk name used in
        // Lua error messages.
        let name = b".mod\0";
        match lua::luaL_loadbuffer(
            state,
            data.as_ptr() as _,
            data.len() as _,
            name.as_ptr() as _,
        ) as u32
        {
            lua::LUA_OK => {}
            lua::LUA_ERRSYNTAX => {
                let err = lua::lua_tostring(state, -1);
                let err = CStr::from_ptr(err).to_string_lossy().to_string();

                lua::lua_close(state);

                eyre::bail!("Invalid syntax: {}", err);
            }
            lua::LUA_ERRMEM => {
                lua::lua_close(state);
                eyre::bail!("Failed to allocate sufficient memory to load `.mod` file buffer")
            }
            _ => unreachable!(),
        }

        // Execute the `.mod` file, expecting one return value (the mod table).
        match lua::lua_pcall(state, 0, 1, 0) as u32 {
            lua::LUA_OK => {}
            lua::LUA_ERRRUN => {
                let err = lua::lua_tostring(state, -1);
                let err = CStr::from_ptr(err).to_string_lossy().to_string();

                lua::lua_close(state);

                eyre::bail!("Failed to run `.mod` file: {}", err);
            }
            lua::LUA_ERRMEM => {
                lua::lua_close(state);
                eyre::bail!("Failed to allocate sufficient memory to run `.mod` file")
            }
            // We don't use an error handler function, so this should be unreachable
            lua::LUA_ERRERR => unreachable!(),
            _ => unreachable!(),
        }

        // Fetch `run` from the returned table (at stack index -2 after the
        // key is pushed) and call it; it is expected to call `new_mod`.
        let key = b"run\0";
        lua::lua_pushstring(state, key.as_ptr() as _);
        lua::lua_gettable(state, -2);

        match lua::lua_pcall(state, 0, 0, 0) as u32 {
            lua::LUA_OK => {}
            lua::LUA_ERRRUN => {
                let err = lua::lua_tostring(state, -1);
                let err = CStr::from_ptr(err).to_string_lossy().to_string();

                lua::lua_close(state);

                eyre::bail!("Failed to run `.mod.run`: {}", err);
            }
            lua::LUA_ERRMEM => {
                lua::lua_close(state);
                eyre::bail!("Failed to allocate sufficient memory to run `.mod.run`")
            }
            // We don't use an error handler function, so this should be unreachable
            lua::LUA_ERRERR => unreachable!(),
            _ => unreachable!(),
        }

        // Reads a global string by NUL-terminated name.
        // Returns Ok(None) for `nil`, Err for non-string values.
        let get_global = |state, key: &[u8]| {
            lua::lua_getglobal(state, key.as_ptr() as _);

            if lua::lua_isnil(state, -1) != 0 {
                // NOTE(review): the nil value is left on the stack here;
                // harmless since the state is closed shortly after.
                return Ok(None);
            }

            let s = lua::lua_tostring(state, -1);

            if s.is_null() {
                eyre::bail!("Expected string, got NULL");
            }

            let ret = CStr::from_ptr(s).to_string_lossy().to_string();
            lua::lua_pop(state, 1);
            Ok(Some(ret))
        };

        // `id` and `script` are mandatory; `data`/`localization` optional.
        let mod_id = get_global(state, b"id\0")
            .and_then(|s| s.ok_or_else(|| eyre::eyre!("Got `nil`")))
            .wrap_err("Failed to get `id`")?;

        let resources = ModConfigResources {
            init: get_global(state, b"script\0")
                .and_then(|s| s.map(PathBuf::from).ok_or_else(|| eyre::eyre!("Got `nil`")))
                .wrap_err("Failed to get `script`.")?,
            data: get_global(state, b"data\0")
                .wrap_err("Failed to get `data`.")?
                .map(PathBuf::from),
            localization: get_global(state, b"localization\0")
                .wrap_err("Failed to get `localization`")?
                .map(PathBuf::from),
        };

        lua::lua_close(state);

        (mod_id, resources)
    };

    Ok(ret)
}
|
|
||||||
|
|
||||||
// Extracts the mod configuration from the mod archive.
// This may either be a proper `dtmt.cfg`, or the legacy `<mod_name>.mod` ID file.
//
// It also returns the directory where this file was found, used as root path. This
// allows flexibility in what the directory structure is exactly, since many people
// still end up creating tarbombs and Nexus does its own re-packaging.
#[tracing::instrument(skip(archive))]
fn extract_mod_config<R: Read + Seek>(archive: &mut ZipArchive<R>) -> Result<(ModConfig, String)> {
    // First, look for a legacy `.mod` ID file and parse it if present.
    let legacy_mod_data = if let Some(name) = find_archive_file(archive, ".mod") {
        let (mod_id, resources) = {
            let mut f = archive
                .by_name(&name)
                .wrap_err("Failed to read `.mod` file from archive")?;

            let mut buf = Vec::with_capacity(f.size() as usize);
            f.read_to_end(&mut buf)
                .wrap_err("Failed to read `.mod` file from archive")?;

            let data = String::from_utf8(buf).wrap_err("`.mod` file is not valid UTF-8")?;
            parse_mod_id_file(&data)
                .wrap_err("Invalid `.mod` file")
                .note(
                    "The `.mod` file's `run` function may not contain any additional logic \
                    besides the default.",
                )
                .suggestion("Contact the mod author to fix this.")?
        };

        // The directory containing the `.mod` file becomes the mod root.
        let root = if let Some(index) = name.rfind('/') {
            name[..index].to_string()
        } else {
            String::new()
        };

        Some((mod_id, resources, root))
    } else {
        None
    };

    tracing::debug!(?legacy_mod_data);

    // Then look for a modern `dtmt.cfg`. If both exist, the config wins but
    // is cross-checked against and merged with the legacy data.
    if let Some(name) = find_archive_file(archive, "dtmt.cfg") {
        let mut f = archive
            .by_name(&name)
            .wrap_err("Failed to read mod config from archive")?;

        let mut buf = Vec::with_capacity(f.size() as usize);
        f.read_to_end(&mut buf)
            .wrap_err("Failed to read mod config from archive")?;

        let data = String::from_utf8(buf).wrap_err("Mod config is not valid UTF-8")?;

        let mut cfg: ModConfig = serde_sjson::from_str(&data)
            .wrap_err("Failed to deserialize mod config")
            .suggestion("Contact the mod author to fix this.")?;

        if let Some((mod_id, resources, root)) = legacy_mod_data {
            // The two ID sources must agree, otherwise the package is broken.
            if cfg.id != mod_id {
                let err = eyre::eyre!("Mod ID in `dtmt.cfg` does not match mod ID in `.mod` file");
                return Err(err).suggestion("Contact the mod author to fix this.");
            }

            // Resource paths come from the `.mod` file in the legacy case.
            cfg.resources = resources;

            // Enforce that packages are skipped
            cfg.bundled = false;
            cfg.packages = vec![];

            Ok((cfg, root))
        } else {
            // Pure modern mod: the root is the directory holding `dtmt.cfg`.
            let root = name
                .strip_suffix("dtmt.cfg")
                .expect("String must end with that suffix")
                .to_string();

            Ok((cfg, root))
        }
    } else if let Some((mod_id, resources, root)) = legacy_mod_data {
        // Legacy-only mod: synthesize a minimal config from the `.mod` data.
        let cfg = ModConfig {
            bundled: false,
            dir: PathBuf::new(),
            id: mod_id.clone(),
            name: mod_id,
            summary: "A mod for the game Warhammer 40,000: Darktide".into(),
            version: "N/A".into(),
            description: None,
            author: None,
            image: None,
            categories: Vec::new(),
            packages: Vec::new(),
            resources,
            depends: Vec::new(),
            name_overrides: Default::default(),
        };

        Ok((cfg, root))
    } else {
        // Neither file present: not a usable mod archive.
        eyre::bail!(
            "Mod needs a config file or `.mod` file. \
            Please get in touch with the author to provide a properly packaged mod."
        );
    }
}
|
|
||||||
|
|
||||||
/// Extracts a bundled mod archive into `dest` and builds the list of
/// packages it provides.
///
/// The archive must contain a `files.sjson` index mapping package names to
/// the files they contain; the whole archive is then extracted as-is.
///
/// NOTE(review): the `root` parameter is not used in this function — the
/// archive is extracted without re-rooting. TODO confirm bundled archives
/// are always packaged with the correct layout.
#[tracing::instrument(skip(archive))]
fn extract_bundled_mod<R: Read + Seek>(
    archive: &mut ZipArchive<R>,
    root: String,
    dest: impl AsRef<Path> + std::fmt::Debug,
) -> Result<Vector<Arc<PackageInfo>>> {
    // Locate and deserialize the `files.sjson` package index.
    let files: HashMap<String, Vec<String>> = {
        let name = archive
            .file_names()
            .find(|name| name.ends_with("files.sjson"))
            .map(|s| s.to_string())
            .ok_or_else(|| eyre::eyre!("archive does not contain file index"))?;

        let mut f = archive
            .by_name(&name)
            .wrap_err("Failed to read file index from archive")?;
        let mut buf = Vec::with_capacity(f.size() as usize);
        f.read_to_end(&mut buf)
            .wrap_err("Failed to read file index from archive")?;

        let data = String::from_utf8(buf).wrap_err("File index is not valid UTF-8")?;
        serde_sjson::from_str(&data).wrap_err("Failed to deserialize file index")?
    };

    tracing::trace!(?files);

    let dest = dest.as_ref();
    tracing::trace!("Extracting mod archive to {}", dest.display());
    archive
        .extract(dest)
        .wrap_err_with(|| format!("Failed to extract archive to {}", dest.display()))?;

    // Convert the index entries into shared `PackageInfo` values.
    let packages = files
        .into_iter()
        .map(|(name, files)| Arc::new(PackageInfo::new(name, files.into_iter().collect())))
        .collect();

    tracing::trace!(?packages);

    Ok(packages)
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(archive))]
|
|
||||||
fn extract_legacy_mod<R: Read + Seek>(
|
|
||||||
archive: &mut ZipArchive<R>,
|
|
||||||
root: String,
|
|
||||||
dest: impl Into<PathBuf> + std::fmt::Debug,
|
|
||||||
) -> Result<()> {
|
|
||||||
let dest = dest.into();
|
|
||||||
let file_count = archive.len();
|
|
||||||
|
|
||||||
for i in 0..file_count {
|
|
||||||
let mut f = archive
|
|
||||||
.by_index(i)
|
|
||||||
.wrap_err_with(|| format!("Failed to get file at index {}", i))?;
|
|
||||||
|
|
||||||
let Some(name) = f.enclosed_name().map(|p| p.to_path_buf()) else {
|
|
||||||
let err = eyre::eyre!("File name in archive is not a safe path value.").suggestion(
|
|
||||||
"Only use well-known applications to create the ZIP archive, \
|
|
||||||
and don't create paths that point outside the archive directory.",
|
|
||||||
);
|
|
||||||
return Err(err);
|
|
||||||
};
|
|
||||||
|
|
||||||
let Ok(suffix) = name.strip_prefix(&root) else {
|
|
||||||
tracing::warn!(
|
|
||||||
"Skipping file outside of the mod root directory: {}",
|
|
||||||
name.display()
|
|
||||||
);
|
|
||||||
continue;
|
|
||||||
};
|
|
||||||
let name = dest.join(suffix);
|
|
||||||
|
|
||||||
if f.is_dir() {
|
|
||||||
// The majority of errors will actually be "X already exists".
|
|
||||||
// But rather than filter them invidually, we just ignore all of them.
|
|
||||||
// If there is a legitimate error of "couldn't create X", it will eventually fail when
|
|
||||||
// we try to put a file in there.
|
|
||||||
tracing::trace!("Creating directory '{}'", name.display());
|
|
||||||
let _ = std::fs::create_dir_all(&name);
|
|
||||||
} else {
|
|
||||||
let mut buf = Vec::with_capacity(f.size() as usize);
|
|
||||||
f.read_to_end(&mut buf)
|
|
||||||
.wrap_err_with(|| format!("Failed to read file '{}'", name.display()))?;
|
|
||||||
|
|
||||||
tracing::trace!("Writing file '{}'", name.display());
|
|
||||||
let mut out = std::fs::OpenOptions::new()
|
|
||||||
.write(true)
|
|
||||||
.create(true)
|
|
||||||
.open(&name)
|
|
||||||
.wrap_err_with(|| format!("Failed to open file '{}'", name.display()))?;
|
|
||||||
|
|
||||||
out.write_all(&buf)
|
|
||||||
.wrap_err_with(|| format!("Failed to write to '{}'", name.display()))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Imports a mod from a local archive file.
///
/// If the file name matches the Nexus download naming scheme and an API key
/// is configured, mod metadata and the file's version are fetched from
/// Nexus and attached to the import.
#[tracing::instrument(skip(state))]
pub(crate) async fn import_from_file(state: ActionState, info: FileInfo) -> Result<ModInfo> {
    let data = fs::read(&info.path)
        .await
        .wrap_err_with(|| format!("Failed to read file {}", info.path.display()))?;

    // Try to recognize a Nexus-style file name (`<name>-<id>-<version>-<timestamp>`).
    let nexus = if let Some((_, id, version, timestamp)) = info
        .path
        .file_name()
        .and_then(|s| s.to_str())
        .and_then(NexusApi::parse_file_name)
    {
        if !state.nexus_api_key.is_empty() {
            let api = NexusApi::new(state.nexus_api_key.to_string())?;
            let mod_info = api
                .mods_id(id)
                .await
                .wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?;

            // Prefer the authoritative version from the API; on failure,
            // fall back to the `version` parsed from the file name (the
            // outer binding deliberately shadowed here).
            let version = match api.file_version(id, timestamp).await {
                Ok(version) => version,
                Err(err) => {
                    let err = Report::new(err);
                    tracing::warn!(
                        "Failed to fetch version for Nexus download. \
                        Falling back to file name:\n{:?}",
                        err
                    );
                    version
                }
            };

            let info = NexusInfo::from(mod_info);
            tracing::debug!(version, ?info);

            Some((info, version))
        } else {
            // No API key configured: import without Nexus metadata.
            None
        }
    } else {
        None
    };

    tracing::trace!(?nexus);

    import_mod(state, nexus, data).await
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(state))]
|
|
||||||
pub(crate) async fn import_from_nxm(state: ActionState, uri: String) -> Result<ModInfo> {
|
|
||||||
let url = uri
|
|
||||||
.parse()
|
|
||||||
.wrap_err_with(|| format!("Invalid Uri '{}'", uri))?;
|
|
||||||
|
|
||||||
let api = NexusApi::new(state.nexus_api_key.to_string())?;
|
|
||||||
let (mod_info, file_info, data) = api
|
|
||||||
.handle_nxm(url)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to download mod from NXM uri '{}'", uri))?;
|
|
||||||
|
|
||||||
let nexus = NexusInfo::from(mod_info);
|
|
||||||
import_mod(state, Some((nexus, file_info.version)), data).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Imports a mod from in-memory ZIP archive bytes.
///
/// Extracts the mod configuration, creates the mod's directory, resolves a
/// preview image (from the archive or, failing that, from Nexus), extracts
/// the mod files (bundled or legacy layout), and persists Nexus metadata.
///
/// `nexus` optionally carries `(NexusInfo, version)` from the download step.
#[tracing::instrument(skip(state, data), fields(data = data.len()))]
pub(crate) async fn import_mod(
    state: ActionState,
    nexus: Option<(NexusInfo, String)>,
    data: Vec<u8>,
) -> Result<ModInfo> {
    let data = Cursor::new(data);
    let mut archive = ZipArchive::new(data).wrap_err("Failed to open ZIP archive")?;

    // Only build the (potentially large) listing string when it will be logged.
    if tracing::enabled!(tracing::Level::DEBUG) {
        let names = archive.file_names().fold(String::new(), |mut s, name| {
            s.push('\n');
            s.push_str(name);
            s
        });
        tracing::debug!("Archive contents:{}", names);
    }

    let (mut mod_cfg, root) =
        extract_mod_config(&mut archive).wrap_err("Failed to extract mod configuration")?;
    tracing::info!("Importing mod {} ({})", mod_cfg.name, mod_cfg.id);

    // Each mod gets its own directory under the configured mod dir.
    let mod_dir = state.data_dir.join(state.mod_dir.as_ref());
    let dest = mod_dir.join(&mod_cfg.id);
    tracing::trace!("Creating mods directory {}", dest.display());
    fs::create_dir_all(&dest)
        .await
        .wrap_err_with(|| format!("Failed to create data directory '{}'", dest.display()))?;

    // Resolve the preview image: prefer one configured in the archive,
    // otherwise fall back to the Nexus picture, otherwise none.
    let image = if let Some(path) = &mod_cfg.image {
        let name = archive
            .file_names()
            .find(|name| name.ends_with(&path.display().to_string()))
            .map(|s| s.to_string())
            .ok_or_else(|| eyre::eyre!("archive does not contain configured image file"))?;

        let mut f = archive
            .by_name(&name)
            .wrap_err("Failed to read image file from archive")?;
        let mut buf = Vec::with_capacity(f.size() as usize);
        f.read_to_end(&mut buf)
            .wrap_err("Failed to read file index from archive")?;

        let img = image_data_to_buffer(buf)?;
        Some(img)
    } else if let Some((nexus, _)) = &nexus {
        let api = NexusApi::new(state.nexus_api_key.to_string())?;
        let url = nexus.picture_url.as_ref();
        let data = api
            .picture(url)
            .await
            .wrap_err_with(|| format!("Failed to download Nexus image from '{}'", url))?;

        let img = image_data_to_buffer(&data)?;

        // Cache the downloaded picture next to the mod. A failed write is
        // only logged: the image is still usable for this session.
        let name = "image.bin";
        let path = dest.join(name);
        match fs::write(&path, &data).await {
            Ok(_) => {
                mod_cfg.image = Some(name.into());
                Some(img)
            }
            Err(err) => {
                let err = Report::new(err).wrap_err(format!(
                    "Failed to write Nexus picture to file '{}'",
                    path.display()
                ));
                tracing::error!("{:?}", err);
                None
            }
        }
    } else {
        None
    };

    tracing::trace!(?image);
    tracing::debug!(root, ?mod_cfg);

    // Bundled mods are extracted wholesale and report their packages;
    // legacy mods are re-rooted into `dest` and get a `dtmt.cfg` written.
    let packages = if mod_cfg.bundled {
        extract_bundled_mod(&mut archive, root, &mod_dir).wrap_err("Failed to extract mod")?
    } else {
        extract_legacy_mod(&mut archive, root, &dest).wrap_err("Failed to extract legacy mod")?;

        if let Some((_, version)) = &nexus {
            // We use the version number stored in the `ModInfo` to compare against the `NexusInfo`
            // for version checks. So for this one, we can't actually rely on merely shadowing,
            // like with the other fields.
            mod_cfg.version = version.clone();
        }

        let data = serde_sjson::to_string(&mod_cfg).wrap_err("Failed to serialize mod config")?;
        fs::write(dest.join("dtmt.cfg"), &data)
            .await
            .wrap_err("Failed to write mod config")?;

        // Legacy mods have no packages.
        Default::default()
    };

    // Persist the Nexus metadata so later sessions can do update checks.
    if let Some((nexus, _)) = &nexus {
        let data = serde_sjson::to_string(nexus).wrap_err("Failed to serialize Nexus info")?;
        let path = dest.join("nexus.sjson");
        fs::write(&path, data.as_bytes())
            .await
            .wrap_err_with(|| format!("Failed to write Nexus info to '{}'", path.display()))?;
    }

    let info = ModInfo::new(mod_cfg, packages, image, nexus.map(|(info, _)| info));
    Ok(info)
}
|
|
|
@ -1,25 +0,0 @@
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use color_eyre::{eyre::Context, Result};
|
|
||||||
use serde::Deserialize;
|
|
||||||
use tokio::fs;
|
|
||||||
|
|
||||||
pub mod app;
|
|
||||||
pub mod deploy;
|
|
||||||
pub mod game;
|
|
||||||
pub mod import;
|
|
||||||
pub mod worker;
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn read_sjson_file<P, T>(path: P) -> Result<T>
|
|
||||||
where
|
|
||||||
T: for<'a> Deserialize<'a>,
|
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
|
||||||
let path = path.as_ref();
|
|
||||||
let buf = fs::read(path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
|
||||||
let data = String::from_utf8(buf).wrap_err("Invalid UTF8")?;
|
|
||||||
serde_sjson::from_str(&data).wrap_err("Failed to deserialize SJSON")
|
|
||||||
}
|
|
|
@ -1,246 +0,0 @@
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::eyre::Context;
|
|
||||||
use color_eyre::Help;
|
|
||||||
use color_eyre::Report;
|
|
||||||
use color_eyre::Result;
|
|
||||||
use druid::{ExtEventSink, SingleUse, Target};
|
|
||||||
use tokio::fs::OpenOptions;
|
|
||||||
use tokio::io::AsyncWriteExt;
|
|
||||||
use tokio::runtime::Runtime;
|
|
||||||
|
|
||||||
use tokio::sync::mpsc::UnboundedReceiver;
|
|
||||||
use tokio::sync::RwLock;
|
|
||||||
|
|
||||||
use crate::controller::app::*;
|
|
||||||
use crate::controller::deploy::deploy_mods;
|
|
||||||
use crate::controller::game::*;
|
|
||||||
use crate::controller::import::*;
|
|
||||||
use crate::state::AsyncAction;
|
|
||||||
use crate::state::ACTION_FINISH_CHECK_UPDATE;
|
|
||||||
use crate::state::ACTION_FINISH_LOAD_INITIAL;
|
|
||||||
use crate::state::ACTION_FINISH_SAVE_SETTINGS;
|
|
||||||
use crate::state::ACTION_SHOW_ERROR_DIALOG;
|
|
||||||
use crate::state::{
|
|
||||||
ACTION_FINISH_ADD_MOD, ACTION_FINISH_DELETE_SELECTED_MOD, ACTION_FINISH_DEPLOY,
|
|
||||||
ACTION_FINISH_RESET_DEPLOYMENT, ACTION_LOG,
|
|
||||||
};
|
|
||||||
|
|
||||||
async fn send_error(sink: Arc<RwLock<ExtEventSink>>, err: Report) {
|
|
||||||
sink.write()
|
|
||||||
.await
|
|
||||||
.submit_command(ACTION_SHOW_ERROR_DIALOG, SingleUse::new(err), Target::Auto)
|
|
||||||
.expect("failed to send command");
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Drains the async action queue and dispatches each action to a freshly
/// spawned Tokio task.
///
/// Every arm reports failures via `send_error` and, where applicable,
/// notifies the UI of completion through a druid command. Runs until the
/// sending side of `action_queue` is closed.
async fn handle_action(
    event_sink: Arc<RwLock<ExtEventSink>>,
    action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
) {
    while let Some(action) = action_queue.write().await.recv().await {
        // Log actions are too chatty to echo back into the debug log.
        if cfg!(debug_assertions) && !matches!(action, AsyncAction::Log(_)) {
            tracing::debug!(?action);
        }

        // Each spawned task gets its own handle to the event sink.
        // NOTE(review): the returned `JoinHandle`s are dropped, so task
        // panics are not observed here.
        let event_sink = event_sink.clone();
        match action {
            // Deploy all enabled mods, then signal completion to the UI.
            AsyncAction::DeployMods(state) => tokio::spawn(async move {
                if let Err(err) = deploy_mods(state).await.wrap_err("Failed to deploy mods") {
                    tracing::error!("{:?}", err);
                    send_error(event_sink.clone(), err).await;
                }

                event_sink
                    .write()
                    .await
                    .submit_command(ACTION_FINISH_DEPLOY, (), Target::Auto)
                    .expect("failed to send command");
            }),
            // Import a mod from a local file; only signals the UI on success.
            AsyncAction::AddMod(state, info) => tokio::spawn(async move {
                match import_from_file(state, info)
                    .await
                    .wrap_err("Failed to import mod")
                {
                    Ok(mod_info) => {
                        event_sink
                            .write()
                            .await
                            .submit_command(
                                ACTION_FINISH_ADD_MOD,
                                SingleUse::new(Arc::new(mod_info)),
                                Target::Auto,
                            )
                            .expect("failed to send command");
                    }
                    Err(err) => {
                        tracing::error!("{:?}", err);
                        send_error(event_sink.clone(), err).await;
                    }
                }
            }),
            // Delete a mod's files; the UI is notified even when deletion
            // failed, so the list entry is removed either way.
            AsyncAction::DeleteMod(state, info) => tokio::spawn(async move {
                let mod_dir = state.mod_dir.join(&info.id);
                if let Err(err) = delete_mod(state, &info)
                    .await
                    .wrap_err("Failed to delete mod files")
                    .with_suggestion(|| {
                        format!("Clean the folder '{}' manually", mod_dir.display())
                    })
                {
                    tracing::error!("{:?}", err);
                    send_error(event_sink.clone(), err).await;
                }

                event_sink
                    .write()
                    .await
                    .submit_command(
                        ACTION_FINISH_DELETE_SELECTED_MOD,
                        SingleUse::new(info),
                        Target::Auto,
                    )
                    .expect("failed to send command");
            }),
            // Restore the game files to their un-modded state.
            AsyncAction::ResetDeployment(state) => tokio::spawn(async move {
                if let Err(err) = reset_mod_deployment(state)
                    .await
                    .wrap_err("Failed to reset mod deployment")
                {
                    tracing::error!("{:?}", err);
                    send_error(event_sink.clone(), err).await;
                }

                event_sink
                    .write()
                    .await
                    .submit_command(ACTION_FINISH_RESET_DEPLOYMENT, (), Target::Auto)
                    .expect("failed to send command");
            }),
            // Persist the application settings to disk.
            AsyncAction::SaveSettings(state) => tokio::spawn(async move {
                if let Err(err) = save_settings(state)
                    .await
                    .wrap_err("Failed to save settings")
                {
                    tracing::error!("{:?}", err);
                    send_error(event_sink.clone(), err).await;
                }

                event_sink
                    .write()
                    .await
                    .submit_command(ACTION_FINISH_SAVE_SETTINGS, (), Target::Auto)
                    .expect("failed to send command");
            }),
            // Check for mod updates; on failure an empty update list is
            // reported so the UI still finishes the check.
            AsyncAction::CheckUpdates(state) => tokio::spawn(async move {
                let updates = match check_updates(state)
                    .await
                    .wrap_err("Failed to check for updates")
                {
                    Ok(updates) => updates,
                    Err(err) => {
                        tracing::error!("{:?}", err);
                        send_error(event_sink.clone(), err).await;
                        vec![]
                    }
                };

                event_sink
                    .write()
                    .await
                    .submit_command(
                        ACTION_FINISH_CHECK_UPDATE,
                        SingleUse::new(updates),
                        Target::Auto,
                    )
                    .expect("failed to send command");
            }),
            // Load config and mod library at startup; `None` is delivered on
            // failure so the UI can fall back to defaults.
            AsyncAction::LoadInitial((path, is_default)) => tokio::spawn(async move {
                let data = match load_initial(path, is_default)
                    .await
                    .wrap_err("Failed to load initial application data")
                {
                    Ok(data) => Some(data),
                    Err(err) => {
                        tracing::error!("{:?}", err);
                        send_error(event_sink.clone(), err).await;
                        None
                    }
                };

                event_sink
                    .write()
                    .await
                    .submit_command(
                        ACTION_FINISH_LOAD_INITIAL,
                        SingleUse::new(data),
                        Target::Auto,
                    )
                    .expect("failed to send command");
            }),
            // Best-effort append of a log line to `dtmm.log`; failures to
            // open or write are deliberately ignored.
            AsyncAction::Log((state, line)) => tokio::spawn(async move {
                if let Ok(mut f) = OpenOptions::new()
                    .append(true)
                    .open(state.data_dir.join("dtmm.log"))
                    .await
                {
                    let _ = f.write_all(&line).await;
                }
            }),
            // Import a mod from an `nxm://` URI handed over by a second
            // process instance; only signals the UI on success.
            AsyncAction::NxmDownload(state, uri) => tokio::spawn(async move {
                match import_from_nxm(state, uri)
                    .await
                    .wrap_err("Failed to handle NXM URI")
                {
                    Ok(mod_info) => {
                        event_sink
                            .write()
                            .await
                            .submit_command(
                                ACTION_FINISH_ADD_MOD,
                                SingleUse::new(Arc::new(mod_info)),
                                Target::Auto,
                            )
                            .expect("failed to send command");
                    }
                    Err(err) => {
                        tracing::error!("{:?}", err);
                        send_error(event_sink.clone(), err).await;
                    }
                }
            }),
        };
    }
}
|
|
||||||
|
|
||||||
async fn handle_log(
|
|
||||||
event_sink: Arc<RwLock<ExtEventSink>>,
|
|
||||||
log_queue: Arc<RwLock<UnboundedReceiver<Vec<u8>>>>,
|
|
||||||
) {
|
|
||||||
while let Some(line) = log_queue.write().await.recv().await {
|
|
||||||
let event_sink = event_sink.clone();
|
|
||||||
event_sink
|
|
||||||
.write()
|
|
||||||
.await
|
|
||||||
.submit_command(ACTION_LOG, SingleUse::new(line), Target::Auto)
|
|
||||||
.expect("failed to send command");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Entry point for the background worker thread.
///
/// Builds a Tokio runtime and drives the action and log handlers
/// concurrently on it.
///
/// # Errors
/// Fails only if the Tokio runtime cannot be created.
///
/// NOTE(review): the `loop` never breaks, so `block_on` never returns and
/// the trailing `Ok(())` is unreachable in practice; the thread lives for
/// the lifetime of the process. Once both channels close, the `select!`
/// re-polls immediately-completing futures in a tight loop — TODO confirm
/// whether a shutdown path is intended here.
pub(crate) fn work_thread(
    event_sink: Arc<RwLock<ExtEventSink>>,
    action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
    log_queue: Arc<RwLock<UnboundedReceiver<Vec<u8>>>>,
) -> Result<()> {
    let rt = Runtime::new()?;

    rt.block_on(async {
        loop {
            tokio::select! {
                _ = handle_action(event_sink.clone(), action_queue.clone()) => {},
                _ = handle_log(event_sink.clone(), log_queue.clone()) => {},
            }
        }
    });

    Ok(())
}
|
|
|
@ -1,207 +0,0 @@
|
||||||
#![recursion_limit = "256"]
|
|
||||||
#![feature(let_chains)]
|
|
||||||
#![feature(iterator_try_collect)]
|
|
||||||
#![windows_subsystem = "windows"]
|
|
||||||
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use clap::parser::ValueSource;
|
|
||||||
use clap::{command, value_parser, Arg};
|
|
||||||
use color_eyre::eyre::{self, Context};
|
|
||||||
use color_eyre::{Report, Result, Section};
|
|
||||||
use druid::AppLauncher;
|
|
||||||
use interprocess::local_socket::{prelude::*, GenericNamespaced, ListenerOptions};
|
|
||||||
use tokio::sync::RwLock;
|
|
||||||
|
|
||||||
use crate::controller::worker::work_thread;
|
|
||||||
use crate::state::{AsyncAction, ACTION_HANDLE_NXM};
|
|
||||||
use crate::state::{Delegate, State};
|
|
||||||
use crate::ui::theme;
|
|
||||||
use crate::util::log::LogLevel;
|
|
||||||
|
|
||||||
mod controller;
|
|
||||||
mod state;
|
|
||||||
mod util {
|
|
||||||
pub mod ansi;
|
|
||||||
pub mod config;
|
|
||||||
pub mod log;
|
|
||||||
}
|
|
||||||
mod ui;
|
|
||||||
|
|
||||||
// As explained in https://docs.rs/interprocess/2.1.0/interprocess/local_socket/struct.Name.html
|
|
||||||
// namespaces are supported on both platforms we care about: Windows and Linux.
|
|
||||||
const IPC_ADDRESS: &str = "dtmm.sock";
|
|
||||||
|
|
||||||
/// Hands an `nxm://` download URI over to the already-running main DTMM
/// process via the local IPC socket, then waits for an acknowledgement.
///
/// Used when this binary is invoked as the system's `nxm://` protocol
/// handler while the main window is open in another process.
///
/// # Errors
/// Fails when the IPC connection cannot be established (main window not
/// running) or when sending/receiving the bincode-framed messages fails.
#[tracing::instrument]
fn notify_nxm_download(
    uri: impl AsRef<str> + std::fmt::Debug,
    level: Option<LogLevel>,
) -> Result<()> {
    // This short-lived process logs to the console only (no UI log channel).
    util::log::create_tracing_subscriber(level, None);

    tracing::debug!("Received Uri '{}', sending to main process.", uri.as_ref());

    let mut stream = LocalSocketStream::connect(
        IPC_ADDRESS
            .to_ns_name::<GenericNamespaced>()
            .expect("Invalid socket name"),
    )
    .wrap_err_with(|| format!("Failed to connect to '{}'", IPC_ADDRESS))
    .suggestion("Make sure the main window is open.")?;

    tracing::debug!("Connected to main process at '{}'", IPC_ADDRESS);

    bincode::serialize_into(&mut stream, uri.as_ref()).wrap_err("Failed to send URI")?;

    // We don't really care what the message is, we just need an acknowledgement.
    let _: String = bincode::deserialize_from(&mut stream).wrap_err("Failed to receive reply")?;

    tracing::info!(
        "Notified DTMM with uri '{}'. Check the main window.",
        uri.as_ref()
    );
    Ok(())
}
|
|
||||||
|
|
||||||
/// Application entry point.
///
/// Parses CLI arguments, then either (a) forwards an `nxm://` URI to an
/// already-running instance and exits, or (b) starts the full UI: spawns the
/// IPC listener thread for incoming NXM downloads, spawns the async work
/// thread, and launches the druid window.
#[tracing::instrument]
fn main() -> Result<()> {
    color_eyre::install()?;

    let default_config_path = util::config::get_default_config_path();

    tracing::trace!(default_config_path = %default_config_path.display());

    let matches = command!()
        .arg(
            Arg::new("config")
                .long("config")
                .short('c')
                .help("Path to the config file")
                .value_parser(value_parser!(PathBuf))
                .default_value(default_config_path.to_string_lossy().to_string()),
        )
        .arg(
            Arg::new("log-level")
                .long("log-level")
                .help("The maximum level of log events to print")
                .value_parser(value_parser!(LogLevel))
                .default_value("info"),
        )
        .arg(
            // Positional argument: presence switches to protocol-handler mode.
            Arg::new("nxm")
                .help("An `nxm://` URI to download")
                .required(false),
        )
        .get_matches();

    // `None` means "use the default level"; only an explicit flag overrides it.
    let level = if matches.value_source("log-level") == Some(ValueSource::DefaultValue) {
        None
    } else {
        matches.get_one::<LogLevel>("log-level").cloned()
    };

    // Protocol-handler mode: hand the URI to the running instance and exit.
    if let Some(uri) = matches.get_one::<String>("nxm") {
        return notify_nxm_download(uri, level).wrap_err("Failed to send NXM Uri to main window.");
    }

    // UI mode: log lines are also funneled through a channel into the in-app log view.
    let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
    util::log::create_tracing_subscriber(level, Some(log_tx));

    // Channel feeding `AsyncAction`s from the UI delegate to the work thread.
    let (action_tx, action_rx) = tokio::sync::mpsc::unbounded_channel();

    let config_path = matches
        .get_one::<PathBuf>("config")
        .cloned()
        .expect("argument has default value");
    let is_config_default = matches.value_source("config") == Some(ValueSource::DefaultValue);
    // Queue the initial config/mod load before the UI even starts.
    if action_tx
        .send(AsyncAction::LoadInitial((config_path, is_config_default)))
        .is_err()
    {
        let err = eyre::eyre!("Failed to send action");
        return Err(err);
    }

    let launcher = AppLauncher::with_window(ui::window::main::new())
        .delegate(Delegate::new(action_tx))
        .configure_env(theme::set_theme_env);

    let event_sink = launcher.get_external_handle();

    {
        // NOTE(review): `info_span!(IPC_ADDRESS, "nxm-socket")` puts the socket
        // name where the span name is expected — looks inverted; confirm intent.
        let span = tracing::info_span!(IPC_ADDRESS, "nxm-socket");
        let _guard = span.enter();

        let event_sink = event_sink.clone();
        // Bind the local socket *before* spawning, so a failure surfaces here.
        let server = ListenerOptions::new()
            .name(
                IPC_ADDRESS
                    .to_ns_name::<GenericNamespaced>()
                    .expect("Invalid socket name"),
            )
            .create_sync()
            .wrap_err("Failed to create IPC listener")?;

        tracing::debug!("IPC server listening on '{}'", IPC_ADDRESS);

        // Drop the guard here, so that we can re-enter the same span in the thread.
        drop(_guard);

        // Listener thread: accepts connections from protocol-handler processes,
        // reads one bincode-encoded URI per connection, forwards it to the UI
        // via ACTION_HANDLE_NXM, and replies "Ok"/"Error" as an ack.
        std::thread::Builder::new()
            .name("nxm-socket".into())
            .spawn(move || {
                let _guard = span.enter();

                loop {
                    let res = server.accept().wrap_err_with(|| {
                        format!("IPC server failed to listen on '{}'", IPC_ADDRESS)
                    });

                    match res {
                        Ok(mut stream) => {
                            let res = bincode::deserialize_from(&mut stream)
                                .wrap_err("Failed to read message")
                                .and_then(|uri: String| {
                                    tracing::trace!(uri, "Received NXM uri");

                                    event_sink
                                        .submit_command(ACTION_HANDLE_NXM, uri, druid::Target::Auto)
                                        .wrap_err("Failed to start NXM download")
                                });
                            match res {
                                Ok(()) => {
                                    // Ack failures are ignored: the client may already be gone.
                                    let _ = bincode::serialize_into(&mut stream, "Ok");
                                }
                                Err(err) => {
                                    tracing::error!("{:?}", err);
                                    let _ = bincode::serialize_into(&mut stream, "Error");
                                }
                            }
                        }
                        Err(err) => {
                            tracing::error!("Failed to receive client connection: {:?}", err)
                        }
                    }
                }
            })
            .wrap_err("Failed to create thread")?;
    }

    // Work thread: runs the async action loop; restarted on error so a single
    // failed action cannot kill background processing for the app's lifetime.
    std::thread::Builder::new()
        .name("work-thread".into())
        .spawn(move || {
            // Wrapped in Arc<RwLock> so each restart iteration can hand out fresh clones.
            let event_sink = Arc::new(RwLock::new(event_sink));
            let action_rx = Arc::new(RwLock::new(action_rx));
            let log_rx = Arc::new(RwLock::new(log_rx));
            loop {
                if let Err(err) = work_thread(event_sink.clone(), action_rx.clone(), log_rx.clone())
                {
                    tracing::error!("Work thread failed, restarting: {:?}", err);
                }
            }
        })
        .wrap_err("Failed to create thread")?;

    // Blocks until the UI exits.
    launcher.launch(State::new()).map_err(Report::new)
}
|
|
|
@ -1,276 +0,0 @@
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use druid::im::{HashMap, Vector};
|
|
||||||
use druid::text::RichText;
|
|
||||||
use druid::{Data, ImageBuf, Lens, WindowHandle, WindowId};
|
|
||||||
use dtmt_shared::ModConfig;
|
|
||||||
use nexusmods::Mod as NexusMod;
|
|
||||||
|
|
||||||
use super::SelectedModLens;
|
|
||||||
|
|
||||||
/// Which top-level screen the main window currently shows.
#[derive(Copy, Clone, Data, Debug, PartialEq)]
pub(crate) enum View {
    /// The mod list / management screen.
    Mods,
    /// The application settings screen.
    Settings,
}
|
|
||||||
|
|
||||||
impl Default for View {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::Mods
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A bundle package contributed by a mod, with the files it contains.
#[derive(Clone, Data, Debug, PartialEq)]
pub struct PackageInfo {
    // Package name as referenced by the game engine.
    pub name: String,
    // File paths contained in this package.
    pub files: Vector<String>,
}
|
|
||||||
|
|
||||||
impl PackageInfo {
|
|
||||||
pub fn new(name: String, files: Vector<String>) -> Self {
|
|
||||||
Self { name, files }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Paths to the Lua/resource entry points a mod ships.
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct ModResourceInfo {
    // Required init script path.
    pub init: PathBuf,
    // Optional data script path.
    pub data: Option<PathBuf>,
    // Optional localization script path.
    pub localization: Option<PathBuf>,
}
|
|
||||||
|
|
||||||
/// Load-order relation of a dependency relative to the depending mod.
#[derive(Clone, Data, Debug, PartialEq)]
pub(crate) enum ModOrder {
    Before,
    After,
}
|
|
||||||
|
|
||||||
/// A dependency on another mod, by ID, with its required load order.
#[derive(Clone, Data, Debug, PartialEq)]
pub(crate) struct ModDependency {
    pub id: String,
    pub order: ModOrder,
}
|
|
||||||
|
|
||||||
impl From<dtmt_shared::ModDependency> for ModDependency {
|
|
||||||
fn from(value: dtmt_shared::ModDependency) -> Self {
|
|
||||||
match value {
|
|
||||||
dtmt_shared::ModDependency::ID(id) => ModDependency {
|
|
||||||
id,
|
|
||||||
order: ModOrder::Before,
|
|
||||||
},
|
|
||||||
dtmt_shared::ModDependency::Config { id, order } => ModDependency {
|
|
||||||
id,
|
|
||||||
order: match order {
|
|
||||||
dtmt_shared::ModOrder::Before => ModOrder::Before,
|
|
||||||
dtmt_shared::ModOrder::After => ModOrder::After,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Nexus Mods metadata cached for a mod; a flattened, serializable
/// counterpart of `nexusmods::Mod` (timestamps as Unix seconds, large
/// strings behind `Arc` for cheap `Data` clones).
#[derive(Clone, Data, Debug, Lens, serde::Serialize, serde::Deserialize)]
pub(crate) struct NexusInfo {
    pub author: String,
    pub category_id: u64,
    // Unix timestamp (seconds).
    pub created_timestamp: i64,
    pub description: Arc<String>,
    // Nexus mod ID.
    pub id: u64,
    pub name: String,
    pub picture_url: Arc<String>,
    pub summary: Arc<String>,
    pub uid: u64,
    // Unix timestamp (seconds).
    pub updated_timestamp: i64,
    pub uploaded_by: String,
    pub version: String,
}
|
|
||||||
|
|
||||||
impl From<NexusMod> for NexusInfo {
|
|
||||||
fn from(value: NexusMod) -> Self {
|
|
||||||
Self {
|
|
||||||
author: value.author,
|
|
||||||
category_id: value.category_id,
|
|
||||||
created_timestamp: value.created_timestamp.unix_timestamp(),
|
|
||||||
description: Arc::new(value.description),
|
|
||||||
id: value.mod_id,
|
|
||||||
name: value.name,
|
|
||||||
picture_url: Arc::new(value.picture_url.into()),
|
|
||||||
summary: Arc::new(value.summary),
|
|
||||||
uid: value.uid,
|
|
||||||
updated_timestamp: value.updated_timestamp.unix_timestamp(),
|
|
||||||
uploaded_by: value.uploaded_by,
|
|
||||||
version: value.version,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Everything the UI knows about one installed mod: config-file metadata,
/// runtime toggles, and optional Nexus info.
#[derive(Clone, Data, Lens)]
pub(crate) struct ModInfo {
    pub id: String,
    pub name: String,
    pub summary: Arc<String>,
    pub description: Option<Arc<String>>,
    pub categories: Vector<String>,
    pub author: Option<String>,
    // Preview image shown in the details pane.
    pub image: Option<ImageBuf>,
    pub version: String,
    // Whether the user has enabled this mod for deployment.
    pub enabled: bool,
    pub depends: Vector<ModDependency>,
    pub bundled: bool,
    // Excluded from `Data` comparison and lensing: large and not user-editable.
    #[lens(ignore)]
    #[data(ignore)]
    pub packages: Vector<Arc<PackageInfo>>,
    #[lens(ignore)]
    #[data(ignore)]
    pub resources: ModResourceInfo,
    #[data(ignore)]
    pub nexus: Option<NexusInfo>,
}
|
|
||||||
|
|
||||||
impl std::fmt::Debug for ModInfo {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.debug_struct("ModInfo")
|
|
||||||
.field("id", &self.id)
|
|
||||||
.field("name", &self.name)
|
|
||||||
.field("summary", &self.summary)
|
|
||||||
.field(
|
|
||||||
"description",
|
|
||||||
&(match &self.description {
|
|
||||||
Some(desc) => format!("Some(String[0..{}])", desc.len()),
|
|
||||||
None => "None".to_string(),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.field("categories", &self.categories)
|
|
||||||
.field("author", &self.author)
|
|
||||||
.field(
|
|
||||||
"image",
|
|
||||||
&(match &self.image {
|
|
||||||
Some(image) => format!("Some(ImageBuf[{}x{}])", image.width(), image.height()),
|
|
||||||
None => "None".to_string(),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.field("version", &self.version)
|
|
||||||
.field("enabled", &self.enabled)
|
|
||||||
.field("packages", &format!("Vec[0..{}]", self.packages.len()))
|
|
||||||
.field("resources", &self.resources)
|
|
||||||
.field("depends", &self.depends)
|
|
||||||
.field("bundled", &self.bundled)
|
|
||||||
.field("nexus", &self.nexus)
|
|
||||||
.finish()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ModInfo {
|
|
||||||
pub fn new(
|
|
||||||
cfg: ModConfig,
|
|
||||||
packages: Vector<Arc<PackageInfo>>,
|
|
||||||
image: Option<ImageBuf>,
|
|
||||||
nexus: Option<NexusInfo>,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
id: cfg.id,
|
|
||||||
name: cfg.name,
|
|
||||||
summary: Arc::new(cfg.summary),
|
|
||||||
description: cfg.description.map(Arc::new),
|
|
||||||
author: cfg.author,
|
|
||||||
version: cfg.version,
|
|
||||||
enabled: false,
|
|
||||||
packages,
|
|
||||||
bundled: cfg.bundled,
|
|
||||||
image,
|
|
||||||
categories: cfg.categories.into_iter().collect(),
|
|
||||||
resources: ModResourceInfo {
|
|
||||||
init: cfg.resources.init,
|
|
||||||
data: cfg.resources.data,
|
|
||||||
localization: cfg.resources.localization,
|
|
||||||
},
|
|
||||||
depends: cfg.depends.into_iter().map(ModDependency::from).collect(),
|
|
||||||
nexus,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Root druid application state for the main window.
#[derive(Clone, Data, Lens)]
pub(crate) struct State {
    pub current_view: View,
    pub mods: Vector<Arc<ModInfo>>,
    // Index into `mods`; `None` means no selection.
    pub selected_mod_index: Option<usize>,
    // True when the mod list changed since the last deployment.
    pub dirty: bool,
    // In-flight background operation flags, toggled by the delegate.
    pub is_deployment_in_progress: bool,
    pub is_reset_in_progress: bool,
    pub is_save_in_progress: bool,
    // A save was requested while one was already running; re-trigger on finish.
    pub is_next_save_pending: bool,
    pub is_update_in_progress: bool,
    pub is_io_enabled: bool,
    pub game_dir: Arc<PathBuf>,
    pub data_dir: Arc<PathBuf>,
    pub nexus_api_key: Arc<String>,
    // Rendered log lines shown in the in-app console.
    pub log: Vector<RichText>,
    // True, when the initial loading of configuration and mods is still in progress
    pub loading: bool,

    // Excluded from `Data`/`Lens`: not UI-diffable.
    #[lens(ignore)]
    #[data(ignore)]
    pub config_path: Arc<PathBuf>,
    #[lens(ignore)]
    #[data(ignore)]
    pub windows: HashMap<WindowId, WindowHandle>,
    #[lens(ignore)]
    #[data(ignore)]
    pub ctx: Arc<sdk::Context>,
}
|
|
||||||
|
|
||||||
impl State {
|
|
||||||
#[allow(non_upper_case_globals)]
|
|
||||||
pub const selected_mod: SelectedModLens = SelectedModLens;
|
|
||||||
|
|
||||||
pub fn new() -> Self {
|
|
||||||
let ctx = sdk::Context::new();
|
|
||||||
|
|
||||||
Self {
|
|
||||||
ctx: Arc::new(ctx),
|
|
||||||
current_view: View::default(),
|
|
||||||
mods: Vector::new(),
|
|
||||||
selected_mod_index: None,
|
|
||||||
dirty: false,
|
|
||||||
is_deployment_in_progress: false,
|
|
||||||
is_reset_in_progress: false,
|
|
||||||
is_save_in_progress: false,
|
|
||||||
is_next_save_pending: false,
|
|
||||||
is_update_in_progress: false,
|
|
||||||
is_io_enabled: false,
|
|
||||||
config_path: Arc::new(PathBuf::new()),
|
|
||||||
game_dir: Arc::new(PathBuf::new()),
|
|
||||||
data_dir: Arc::new(PathBuf::new()),
|
|
||||||
nexus_api_key: Arc::new(String::new()),
|
|
||||||
log: Vector::new(),
|
|
||||||
windows: HashMap::new(),
|
|
||||||
loading: true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn select_mod(&mut self, index: usize) {
|
|
||||||
self.selected_mod_index = Some(index);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_mod(&mut self, info: Arc<ModInfo>) {
|
|
||||||
if let Some(pos) = self.mods.iter().position(|i| i.id == info.id) {
|
|
||||||
self.mods.set(pos, info);
|
|
||||||
self.selected_mod_index = Some(pos);
|
|
||||||
} else {
|
|
||||||
self.mods.push_back(info);
|
|
||||||
self.selected_mod_index = Some(self.mods.len() - 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn can_move_mod_down(&self) -> bool {
|
|
||||||
self.selected_mod_index
|
|
||||||
.map(|i| i < (self.mods.len().saturating_sub(1)))
|
|
||||||
.unwrap_or(false)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn can_move_mod_up(&self) -> bool {
|
|
||||||
self.selected_mod_index.map(|i| i > 0).unwrap_or(false)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,475 +0,0 @@
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::Report;
|
|
||||||
use druid::im::Vector;
|
|
||||||
use druid::{
|
|
||||||
AppDelegate, Command, DelegateCtx, Env, FileInfo, Handled, Selector, SingleUse, Target,
|
|
||||||
WindowHandle, WindowId,
|
|
||||||
};
|
|
||||||
use tokio::sync::mpsc::UnboundedSender;
|
|
||||||
|
|
||||||
use crate::ui::window;
|
|
||||||
use crate::util::ansi::ansi_to_rich_text;
|
|
||||||
use crate::util::config::Config;
|
|
||||||
|
|
||||||
use super::{ModInfo, State};
|
|
||||||
|
|
||||||
// druid command selectors exchanged between the UI and the delegate.
// Payload conventions: `SingleUse<T>` for move-only payloads, bare types for
// cloneable ones. Start/finish pairs bracket async operations handled on the
// work thread.

// Mod-list selection and ordering.
pub(crate) const ACTION_SELECT_MOD: Selector<usize> = Selector::new("dtmm.action.select-mod");
pub(crate) const ACTION_SELECTED_MOD_UP: Selector = Selector::new("dtmm.action.selected-mod-up");
pub(crate) const ACTION_SELECTED_MOD_DOWN: Selector =
    Selector::new("dtmm.action.selected-mod-down");
// NOTE(review): typo "srart" in the selector id below. Harmless as long as
// every use goes through this constant, but worth fixing in a dedicated change.
pub(crate) const ACTION_START_DELETE_SELECTED_MOD: Selector<SingleUse<Arc<ModInfo>>> =
    Selector::new("dtmm.action.srart-delete-selected-mod");
pub(crate) const ACTION_FINISH_DELETE_SELECTED_MOD: Selector<SingleUse<Arc<ModInfo>>> =
    Selector::new("dtmm.action.finish-delete-selected-mod");

// Deployment of the enabled mod set into the game directory.
pub(crate) const ACTION_START_DEPLOY: Selector = Selector::new("dtmm.action.start-deploy");
pub(crate) const ACTION_FINISH_DEPLOY: Selector = Selector::new("dtmm.action.finish-deploy");

pub(crate) const ACTION_START_RESET_DEPLOYMENT: Selector =
    Selector::new("dtmm.action.start-reset-deployment");
pub(crate) const ACTION_FINISH_RESET_DEPLOYMENT: Selector =
    Selector::new("dtmm.action.finish-reset-deployment");

// Mod installation, including `nxm://` downloads forwarded over IPC.
pub(crate) const ACTION_HANDLE_NXM: Selector<String> = Selector::new("dtmm.action.handle-nxm");
pub(crate) const ACTION_ADD_MOD: Selector<FileInfo> = Selector::new("dtmm.action.add-mod");
pub(crate) const ACTION_FINISH_ADD_MOD: Selector<SingleUse<Arc<ModInfo>>> =
    Selector::new("dtmm.action.finish-add-mod");

// A raw log line to append to the in-app console.
pub(crate) const ACTION_LOG: Selector<SingleUse<Vec<u8>>> = Selector::new("dtmm.action.log");

pub(crate) const ACTION_START_SAVE_SETTINGS: Selector =
    Selector::new("dtmm.action.start-save-settings");
pub(crate) const ACTION_FINISH_SAVE_SETTINGS: Selector =
    Selector::new("dtmm.action.finish-save-settings");

// Nexus update check; the finish payload carries refreshed mod infos.
pub(crate) const ACTION_START_CHECK_UPDATE: Selector =
    Selector::new("dtmm.action.start-check-update");
pub(crate) const ACTION_FINISH_CHECK_UPDATE: Selector<SingleUse<Vec<ModInfo>>> =
    Selector::new("dtmm.action.finish-check-update");

pub(crate) const ACTION_SET_DIRTY: Selector = Selector::new("dtmm.action.set-dirty");

pub(crate) const ACTION_SHOW_ERROR_DIALOG: Selector<SingleUse<Report>> =
    Selector::new("dtmm.action.show-error-dialog");

pub(crate) const ACTION_SET_WINDOW_HANDLE: Selector<SingleUse<(WindowId, WindowHandle)>> =
    Selector::new("dtmm.action.set-window-handle");

// `None` payload means the initial load failed; the UI leaves defaults in place.
pub(crate) type InitialLoadResult = (Config, Vector<Arc<ModInfo>>);
pub(crate) const ACTION_FINISH_LOAD_INITIAL: Selector<SingleUse<Option<InitialLoadResult>>> =
    Selector::new("dtmm.action.finish-load-initial");

pub(crate) const ACTION_OPEN_LINK: Selector<Arc<String>> = Selector::new("dtmm.action.open-link");
|
|
||||||
|
|
||||||
// A sub-selection of `State`'s fields that are required in `AsyncAction`s and that are
// `Send + Sync`
pub(crate) struct ActionState {
    pub mods: Vector<Arc<ModInfo>>,
    pub game_dir: Arc<PathBuf>,
    pub data_dir: Arc<PathBuf>,
    // Derived: `data_dir`/mods (see `From<State>`).
    pub mod_dir: Arc<PathBuf>,
    pub config_path: Arc<PathBuf>,
    pub ctx: Arc<sdk::Context>,
    pub nexus_api_key: Arc<String>,
    pub is_io_enabled: bool,
}
|
|
||||||
|
|
||||||
impl From<State> for ActionState {
|
|
||||||
fn from(state: State) -> Self {
|
|
||||||
Self {
|
|
||||||
mods: state.mods,
|
|
||||||
game_dir: state.game_dir,
|
|
||||||
mod_dir: Arc::new(state.data_dir.join("mods")),
|
|
||||||
data_dir: state.data_dir,
|
|
||||||
config_path: state.config_path,
|
|
||||||
ctx: state.ctx,
|
|
||||||
nexus_api_key: state.nexus_api_key,
|
|
||||||
is_io_enabled: state.is_io_enabled,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Work items sent from the UI delegate to the background work thread.
/// Each variant carries the state snapshot it needs (`ActionState`).
pub(crate) enum AsyncAction {
    DeployMods(ActionState),
    ResetDeployment(ActionState),
    AddMod(ActionState, FileInfo),
    DeleteMod(ActionState, Arc<ModInfo>),
    SaveSettings(ActionState),
    CheckUpdates(ActionState),
    // (config path, whether it is the default path)
    LoadInitial((PathBuf, bool)),
    // (state, raw log line bytes)
    Log((ActionState, Vec<u8>)),
    // (state, `nxm://` URI)
    NxmDownload(ActionState, String),
}
|
|
||||||
|
|
||||||
impl std::fmt::Debug for AsyncAction {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
AsyncAction::DeployMods(_) => write!(f, "AsyncAction::DeployMods(_state)"),
|
|
||||||
AsyncAction::ResetDeployment(_) => write!(f, "AsyncAction::ResetDeployment(_state)"),
|
|
||||||
AsyncAction::AddMod(_, info) => write!(f, "AsyncAction::AddMod(_state, {:?})", info),
|
|
||||||
AsyncAction::DeleteMod(_, info) => {
|
|
||||||
write!(f, "AsyncAction::DeleteMod(_state, {:?})", info)
|
|
||||||
}
|
|
||||||
AsyncAction::SaveSettings(_) => write!(f, "AsyncAction::SaveSettings(_state)"),
|
|
||||||
AsyncAction::CheckUpdates(_) => write!(f, "AsyncAction::CheckUpdates(_state)"),
|
|
||||||
AsyncAction::LoadInitial((path, is_default)) => write!(
|
|
||||||
f,
|
|
||||||
"AsyncAction::LoadInitial(({:?}, {:?}))",
|
|
||||||
path, is_default
|
|
||||||
),
|
|
||||||
AsyncAction::Log(_) => write!(f, "AsyncAction::Log(_)"),
|
|
||||||
AsyncAction::NxmDownload(_, uri) => {
|
|
||||||
write!(f, "AsyncAction::NxmDownload(_state, {})", uri)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// druid `AppDelegate` that routes UI commands to the async work thread.
pub(crate) struct Delegate {
    // Channel into the work thread's action loop.
    sender: UnboundedSender<AsyncAction>,
}
|
|
||||||
|
|
||||||
impl Delegate {
|
|
||||||
pub fn new(sender: UnboundedSender<AsyncAction>) -> Self {
|
|
||||||
Self { sender }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AppDelegate<State> for Delegate {
|
|
||||||
#[tracing::instrument(name = "Delegate", skip_all)]
|
|
||||||
fn command(
|
|
||||||
&mut self,
|
|
||||||
ctx: &mut DelegateCtx,
|
|
||||||
_target: Target,
|
|
||||||
cmd: &Command,
|
|
||||||
state: &mut State,
|
|
||||||
_env: &Env,
|
|
||||||
) -> Handled {
|
|
||||||
if cfg!(debug_assertions) && !cmd.is(ACTION_LOG) {
|
|
||||||
tracing::trace!(?cmd);
|
|
||||||
}
|
|
||||||
|
|
||||||
match cmd {
|
|
||||||
cmd if cmd.is(ACTION_START_DEPLOY) => {
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::DeployMods(state.clone().into()))
|
|
||||||
.is_ok()
|
|
||||||
{
|
|
||||||
state.is_deployment_in_progress = true;
|
|
||||||
} else {
|
|
||||||
tracing::error!("Failed to queue action to deploy mods");
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_DEPLOY) => {
|
|
||||||
state.is_deployment_in_progress = false;
|
|
||||||
state.dirty = false;
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_START_RESET_DEPLOYMENT) => {
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::ResetDeployment(state.clone().into()))
|
|
||||||
.is_ok()
|
|
||||||
{
|
|
||||||
state.is_reset_in_progress = true;
|
|
||||||
} else {
|
|
||||||
tracing::error!("Failed to queue action to reset mod deployment");
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_RESET_DEPLOYMENT) => {
|
|
||||||
state.is_reset_in_progress = false;
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_SELECT_MOD) => {
|
|
||||||
let index = cmd
|
|
||||||
.get(ACTION_SELECT_MOD)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
state.select_mod(*index);
|
|
||||||
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_SELECTED_MOD_UP) => {
|
|
||||||
let Some(i) = state.selected_mod_index else {
|
|
||||||
return Handled::No;
|
|
||||||
};
|
|
||||||
|
|
||||||
let len = state.mods.len();
|
|
||||||
if len == 0 || i == 0 {
|
|
||||||
return Handled::No;
|
|
||||||
}
|
|
||||||
|
|
||||||
state.mods.swap(i, i - 1);
|
|
||||||
state.selected_mod_index = Some(i - 1);
|
|
||||||
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_SELECTED_MOD_DOWN) => {
|
|
||||||
let Some(i) = state.selected_mod_index else {
|
|
||||||
return Handled::No;
|
|
||||||
};
|
|
||||||
|
|
||||||
let len = state.mods.len();
|
|
||||||
if len == 0 || i == usize::MAX || i >= len - 1 {
|
|
||||||
return Handled::No;
|
|
||||||
}
|
|
||||||
|
|
||||||
state.mods.swap(i, i + 1);
|
|
||||||
state.selected_mod_index = Some(i + 1);
|
|
||||||
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_START_DELETE_SELECTED_MOD) => {
|
|
||||||
let info = cmd
|
|
||||||
.get(ACTION_START_DELETE_SELECTED_MOD)
|
|
||||||
.and_then(SingleUse::take)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::DeleteMod(state.clone().into(), info))
|
|
||||||
.is_err()
|
|
||||||
{
|
|
||||||
tracing::error!("Failed to queue action to deploy mods");
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_DELETE_SELECTED_MOD) => {
|
|
||||||
let info = cmd
|
|
||||||
.get(ACTION_FINISH_DELETE_SELECTED_MOD)
|
|
||||||
.and_then(SingleUse::take)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
let found = state.mods.iter().enumerate().find(|(_, i)| i.id == info.id);
|
|
||||||
let Some((index, _)) = found else {
|
|
||||||
return Handled::No;
|
|
||||||
};
|
|
||||||
|
|
||||||
state.mods.remove(index);
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_HANDLE_NXM) => {
|
|
||||||
let uri = cmd
|
|
||||||
.get(ACTION_HANDLE_NXM)
|
|
||||||
.expect("command type match but didn't contain the expected value");
|
|
||||||
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::NxmDownload(state.clone().into(), uri.clone()))
|
|
||||||
.is_err()
|
|
||||||
{
|
|
||||||
tracing::error!("Failed to queue action to download NXM mod");
|
|
||||||
}
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_ADD_MOD) => {
|
|
||||||
let info = cmd
|
|
||||||
.get(ACTION_ADD_MOD)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::AddMod(state.clone().into(), info.clone()))
|
|
||||||
.is_err()
|
|
||||||
{
|
|
||||||
tracing::error!("Failed to queue action to add mod");
|
|
||||||
}
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_ADD_MOD) => {
|
|
||||||
let info = cmd
|
|
||||||
.get(ACTION_FINISH_ADD_MOD)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if let Some(info) = info.take() {
|
|
||||||
state.add_mod(info);
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_LOG) => {
|
|
||||||
let line = cmd
|
|
||||||
.get(ACTION_LOG)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if let Some(line) = line.take() {
|
|
||||||
{
|
|
||||||
let line = String::from_utf8_lossy(&line);
|
|
||||||
state.log.push_back(ansi_to_rich_text(line.trim()));
|
|
||||||
}
|
|
||||||
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::Log((state.clone().into(), line)))
|
|
||||||
.is_err()
|
|
||||||
{
|
|
||||||
tracing::error!("Failed to queue action to add mod");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_START_SAVE_SETTINGS) => {
|
|
||||||
if state.is_save_in_progress {
|
|
||||||
state.is_next_save_pending = true;
|
|
||||||
} else if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::SaveSettings(state.clone().into()))
|
|
||||||
.is_ok()
|
|
||||||
{
|
|
||||||
state.is_save_in_progress = true;
|
|
||||||
} else {
|
|
||||||
tracing::error!("Failed to queue action to save settings");
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_SAVE_SETTINGS) => {
|
|
||||||
tracing::trace!(
|
|
||||||
in_progress = state.is_save_in_progress,
|
|
||||||
next_pending = state.is_next_save_pending,
|
|
||||||
"Finished saving settings",
|
|
||||||
);
|
|
||||||
state.is_save_in_progress = false;
|
|
||||||
|
|
||||||
if state.is_next_save_pending {
|
|
||||||
state.is_next_save_pending = false;
|
|
||||||
ctx.submit_command(ACTION_START_SAVE_SETTINGS);
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_SET_DIRTY) => {
|
|
||||||
state.dirty = true;
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_SHOW_ERROR_DIALOG) => {
|
|
||||||
let err = cmd
|
|
||||||
.get(ACTION_SHOW_ERROR_DIALOG)
|
|
||||||
.and_then(SingleUse::take)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
let window = state
|
|
||||||
.windows
|
|
||||||
.get(&window::main::WINDOW_ID)
|
|
||||||
.expect("root window does not exist");
|
|
||||||
|
|
||||||
let dialog = window::dialog::error::<State>(err, window.clone());
|
|
||||||
ctx.new_window(dialog);
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_SET_WINDOW_HANDLE) => {
|
|
||||||
let (id, handle) = cmd
|
|
||||||
.get(ACTION_SET_WINDOW_HANDLE)
|
|
||||||
.and_then(SingleUse::take)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
state.windows.insert(id, handle);
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_START_CHECK_UPDATE) => {
|
|
||||||
if self
|
|
||||||
.sender
|
|
||||||
.send(AsyncAction::CheckUpdates(state.clone().into()))
|
|
||||||
.is_ok()
|
|
||||||
{
|
|
||||||
state.is_update_in_progress = true;
|
|
||||||
} else {
|
|
||||||
tracing::error!("Failed to queue action to check updates");
|
|
||||||
}
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_CHECK_UPDATE) => {
|
|
||||||
let mut updates = cmd
|
|
||||||
.get(ACTION_FINISH_CHECK_UPDATE)
|
|
||||||
.and_then(SingleUse::take)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if tracing::enabled!(tracing::Level::DEBUG) {
|
|
||||||
let mods: Vec<_> = updates
|
|
||||||
.iter()
|
|
||||||
.map(|info| {
|
|
||||||
format!(
|
|
||||||
"{}: {} -> {:?}",
|
|
||||||
info.name,
|
|
||||||
info.version,
|
|
||||||
info.nexus.as_ref().map(|n| &n.version)
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
tracing::info!("Mod updates:\n{}", mods.join("\n"));
|
|
||||||
}
|
|
||||||
|
|
||||||
for mod_info in state.mods.iter_mut() {
|
|
||||||
if let Some(index) = updates.iter().position(|i2| i2.id == mod_info.id) {
|
|
||||||
let update = updates.swap_remove(index);
|
|
||||||
*mod_info = Arc::new(update);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
state.is_update_in_progress = false;
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_FINISH_LOAD_INITIAL) => {
|
|
||||||
let data = cmd
|
|
||||||
.get(ACTION_FINISH_LOAD_INITIAL)
|
|
||||||
.and_then(SingleUse::take)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if let Some((config, mods)) = data {
|
|
||||||
state.mods = mods;
|
|
||||||
state.config_path = Arc::new(config.path);
|
|
||||||
state.data_dir = Arc::new(config.data_dir);
|
|
||||||
state.game_dir = Arc::new(config.game_dir.unwrap_or_default());
|
|
||||||
state.nexus_api_key = Arc::new(config.nexus_api_key.unwrap_or_default());
|
|
||||||
state.is_io_enabled = config.unsafe_io;
|
|
||||||
}
|
|
||||||
|
|
||||||
state.loading = false;
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
cmd if cmd.is(ACTION_OPEN_LINK) => {
|
|
||||||
let url = cmd
|
|
||||||
.get(ACTION_OPEN_LINK)
|
|
||||||
.expect("command type matched but didn't contain the expected value");
|
|
||||||
|
|
||||||
if let Err(err) = open::that_detached(Arc::as_ref(url)) {
|
|
||||||
tracing::error!(
|
|
||||||
"{:?}",
|
|
||||||
Report::new(err).wrap_err(format!("Failed to open url '{}'", url))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
Handled::Yes
|
|
||||||
}
|
|
||||||
_ => Handled::No,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn window_added(
|
|
||||||
&mut self,
|
|
||||||
id: WindowId,
|
|
||||||
handle: WindowHandle,
|
|
||||||
data: &mut State,
|
|
||||||
_: &Env,
|
|
||||||
_: &mut DelegateCtx,
|
|
||||||
) {
|
|
||||||
data.windows.insert(id, handle);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn window_removed(&mut self, id: WindowId, data: &mut State, _: &Env, _: &mut DelegateCtx) {
|
|
||||||
data.windows.remove(&id);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,124 +0,0 @@
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use druid::im::Vector;
|
|
||||||
use druid::{Data, Lens};
|
|
||||||
|
|
||||||
use super::{ModInfo, NexusInfo, State};
|
|
||||||
|
|
||||||
/// Lens from `State` to the currently selected mod (`selected_mod_index` into `mods`).
pub(crate) struct SelectedModLens;
|
|
||||||
|
|
||||||
impl Lens<State, Option<Arc<ModInfo>>> for SelectedModLens {
|
|
||||||
#[tracing::instrument(name = "SelectedModLens::with", skip_all)]
|
|
||||||
fn with<V, F: FnOnce(&Option<Arc<ModInfo>>) -> V>(&self, data: &State, f: F) -> V {
|
|
||||||
let info = data
|
|
||||||
.selected_mod_index
|
|
||||||
.and_then(|i| data.mods.get(i).cloned());
|
|
||||||
|
|
||||||
f(&info)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(name = "SelectedModLens::with_mut", skip_all)]
|
|
||||||
fn with_mut<V, F: FnOnce(&mut Option<Arc<ModInfo>>) -> V>(&self, data: &mut State, f: F) -> V {
|
|
||||||
match data.selected_mod_index {
|
|
||||||
Some(i) => {
|
|
||||||
let mut info = data.mods.get_mut(i).cloned();
|
|
||||||
let ret = f(&mut info);
|
|
||||||
|
|
||||||
if let Some(new) = info {
|
|
||||||
// TODO: Figure out a way to check for equality and
|
|
||||||
// only update when needed
|
|
||||||
data.mods.set(i, new);
|
|
||||||
} else {
|
|
||||||
data.selected_mod_index = None;
|
|
||||||
}
|
|
||||||
|
|
||||||
ret
|
|
||||||
}
|
|
||||||
None => f(&mut None),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A Lens that maps an `im::Vector<T>` to `im::Vector<(usize, T)>`,
/// where each element in the destination vector includes its index in the
/// source vector.
#[allow(dead_code)]
pub(crate) struct IndexedVectorLens;
|
|
||||||
|
|
||||||
impl<T: Data> Lens<Vector<T>, Vector<(usize, T)>> for IndexedVectorLens {
|
|
||||||
#[tracing::instrument(name = "IndexedVectorLens::with", skip_all)]
|
|
||||||
fn with<V, F: FnOnce(&Vector<(usize, T)>) -> V>(&self, values: &Vector<T>, f: F) -> V {
|
|
||||||
let indexed = values
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(i, val)| (i, val.clone()))
|
|
||||||
.collect();
|
|
||||||
f(&indexed)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(name = "IndexedVectorLens::with_mut", skip_all)]
|
|
||||||
fn with_mut<V, F: FnOnce(&mut Vector<(usize, T)>) -> V>(
|
|
||||||
&self,
|
|
||||||
values: &mut Vector<T>,
|
|
||||||
f: F,
|
|
||||||
) -> V {
|
|
||||||
let mut indexed = values
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(i, val)| (i, val.clone()))
|
|
||||||
.collect();
|
|
||||||
let ret = f(&mut indexed);
|
|
||||||
|
|
||||||
*values = indexed.into_iter().map(|(_i, val)| val).collect();
|
|
||||||
|
|
||||||
ret
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A Lens that first checks a key in a mod's `NexusInfo`, then falls back to
|
|
||||||
/// the regular one.
|
|
||||||
pub(crate) struct NexusInfoLens<T, L, R>
|
|
||||||
where
|
|
||||||
L: Lens<NexusInfo, T>,
|
|
||||||
R: Lens<ModInfo, T>,
|
|
||||||
{
|
|
||||||
value: L,
|
|
||||||
fallback: R,
|
|
||||||
_marker: std::marker::PhantomData<T>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data, L, R> NexusInfoLens<T, L, R>
|
|
||||||
where
|
|
||||||
L: Lens<NexusInfo, T>,
|
|
||||||
R: Lens<ModInfo, T>,
|
|
||||||
{
|
|
||||||
pub fn new(value: L, fallback: R) -> Self {
|
|
||||||
Self {
|
|
||||||
value,
|
|
||||||
fallback,
|
|
||||||
_marker: std::marker::PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data, L, R> Lens<ModInfo, T> for NexusInfoLens<T, L, R>
|
|
||||||
where
|
|
||||||
L: Lens<NexusInfo, T>,
|
|
||||||
R: Lens<ModInfo, T>,
|
|
||||||
{
|
|
||||||
fn with<V, F: FnOnce(&T) -> V>(&self, data: &ModInfo, f: F) -> V {
|
|
||||||
if let Some(nexus) = &data.nexus {
|
|
||||||
self.value.with(nexus, f)
|
|
||||||
} else {
|
|
||||||
self.fallback.with(data, f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn with_mut<V, F: FnOnce(&mut T) -> V>(&self, data: &mut ModInfo, f: F) -> V {
|
|
||||||
if let Some(nexus) = &mut data.nexus {
|
|
||||||
self.value.with_mut(nexus, f)
|
|
||||||
} else {
|
|
||||||
self.fallback.with_mut(data, f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,7 +0,0 @@
|
||||||
mod data;
|
|
||||||
mod delegate;
|
|
||||||
mod lens;
|
|
||||||
|
|
||||||
pub(crate) use data::*;
|
|
||||||
pub(crate) use delegate::*;
|
|
||||||
pub(crate) use lens::*;
|
|
|
@ -1,6 +0,0 @@
|
||||||
pub mod theme;
|
|
||||||
pub mod widget;
|
|
||||||
pub mod window {
|
|
||||||
pub mod dialog;
|
|
||||||
pub mod main;
|
|
||||||
}
|
|
|
@ -1,87 +0,0 @@
|
||||||
use colors_transform::Color as _;
|
|
||||||
use colors_transform::Rgb;
|
|
||||||
use druid::Color;
|
|
||||||
|
|
||||||
pub use gruvbox_dark::*;
|
|
||||||
|
|
||||||
/// Declares a `pub const` `druid::Color`.
///
/// Accepts RGBA components, RGB components, or an existing `Color`
/// expression to alias under a new name.
macro_rules! make_color {
    ($name:ident, $r:literal, $g:literal, $b:literal, $a:literal) => {
        pub const $name: Color = Color::rgba8($r, $g, $b, $a);
    };
    ($name:ident, $r:literal, $g:literal, $b:literal) => {
        pub const $name: Color = Color::rgb8($r, $g, $b);
    };
    ($name:ident, $col:expr) => {
        pub const $name: Color = $col;
    };
}
|
|
||||||
|
|
||||||
make_color!(TOP_BAR_BACKGROUND_COLOR, COLOR_BG1);
|
|
||||||
make_color!(LINK_COLOR, COLOR_ACCENT);
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub mod gruvbox_dark {
|
|
||||||
use druid::Color;
|
|
||||||
|
|
||||||
make_color!(COLOR_BG0_H, 0x1d, 0x20, 0x21);
|
|
||||||
make_color!(COLOR_BG0_S, 0x32, 0x20, 0x2f);
|
|
||||||
make_color!(COLOR_BG0, 0x28, 0x28, 0x28);
|
|
||||||
make_color!(COLOR_BG1, 0x3c, 0x38, 0x36);
|
|
||||||
make_color!(COLOR_BG2, 0x50, 0x49, 0x45);
|
|
||||||
make_color!(COLOR_BG3, 0x66, 0x5c, 0x54);
|
|
||||||
make_color!(COLOR_BG4, 0x7c, 0x6f, 0x64);
|
|
||||||
|
|
||||||
make_color!(COLOR_FG0, 0xfb, 0xf1, 0xc7);
|
|
||||||
make_color!(COLOR_FG1, 0xeb, 0xdb, 0xb2);
|
|
||||||
make_color!(COLOR_FG2, 0xd5, 0xc4, 0xa1);
|
|
||||||
make_color!(COLOR_FG3, 0xbd, 0xae, 0x93);
|
|
||||||
make_color!(COLOR_FG4, 0xa8, 0x99, 0x84);
|
|
||||||
|
|
||||||
make_color!(COLOR_BG, COLOR_BG0);
|
|
||||||
make_color!(COLOR_GRAY_LIGHT, 0x92, 0x83, 0x74);
|
|
||||||
|
|
||||||
make_color!(COLOR_RED_DARK, 0xcc, 0x24, 0x1d);
|
|
||||||
make_color!(COLOR_RED_LIGHT, 0xfb, 0x49, 0x34);
|
|
||||||
|
|
||||||
make_color!(COLOR_GREEN_DARK, 0x98, 0x97, 0x1a);
|
|
||||||
make_color!(COLOR_GREEN_LIGHT, 0xb8, 0xbb, 0x26);
|
|
||||||
|
|
||||||
make_color!(COLOR_YELLOW_DARK, 0xd7, 0x99, 0x21);
|
|
||||||
make_color!(COLOR_YELLOW_LIGHT, 0xfa, 0xbd, 0x2f);
|
|
||||||
|
|
||||||
make_color!(COLOR_BLUE_DARK, 0x45, 0x85, 0x88);
|
|
||||||
make_color!(COLOR_BLUE_LIGHT, 0x83, 0xa5, 0x98);
|
|
||||||
|
|
||||||
make_color!(COLOR_PURPLE_DARK, 0xb1, 0x26, 0x86);
|
|
||||||
make_color!(COLOR_PURPLE_LIGHT, 0xd3, 0x86, 0x9b);
|
|
||||||
|
|
||||||
make_color!(COLOR_AQUA_DARK, 0x68, 0x9d, 0x6a);
|
|
||||||
make_color!(COLOR_AQUA_LIGHT, 0x8e, 0xc0, 0x7c);
|
|
||||||
|
|
||||||
make_color!(COLOR_GRAY_DARK, 0xa8, 0x99, 0x84);
|
|
||||||
make_color!(COLOR_FG, COLOR_FG1);
|
|
||||||
|
|
||||||
make_color!(COLOR_ORANGE_DARK, 0xd6, 0x5d, 0x0e);
|
|
||||||
make_color!(COLOR_ORANGE_LIGHT, 0xfe, 0x80, 0x19);
|
|
||||||
|
|
||||||
make_color!(COLOR_ACCENT, COLOR_BLUE_LIGHT);
|
|
||||||
make_color!(COLOR_ACCENT_FG, COLOR_BG0_H);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Extension methods for `druid::Color`.
pub trait ColorExt {
    /// Returns a darkened copy of the color, keeping alpha unchanged.
    fn darken(&self, fac: f32) -> Self;
}
|
|
||||||
|
|
||||||
impl ColorExt for Color {
|
|
||||||
fn darken(&self, fac: f32) -> Self {
|
|
||||||
let (r, g, b, a) = self.as_rgba();
|
|
||||||
let rgb = Rgb::from(r as f32, g as f32, b as f32);
|
|
||||||
let rgb = rgb.lighten(-1. * fac);
|
|
||||||
Self::rgba(
|
|
||||||
rgb.get_red() as f64,
|
|
||||||
rgb.get_green() as f64,
|
|
||||||
rgb.get_blue() as f64,
|
|
||||||
a,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,41 +0,0 @@
|
||||||
use druid::Color;
|
|
||||||
use usvg::{
|
|
||||||
Error, Fill, LineCap, LineJoin, NodeKind, NonZeroPositiveF64, Options, Paint, Stroke, Tree,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub static ALERT_CIRCLE: &str = include_str!("../../../assets/tabler-icons/alert-circle.svg");
|
|
||||||
pub static CLOUD_DOWNLOAD: &str = include_str!("../../../assets/tabler-icons/cloud-download.svg");
|
|
||||||
|
|
||||||
pub fn parse_svg(svg: &str) -> Result<Tree, Error> {
|
|
||||||
let opt = Options::default();
|
|
||||||
Tree::from_str(svg, &opt.to_ref())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn recolor_icon(tree: Tree, stroke: bool, color: Color) -> Tree {
|
|
||||||
let (red, green, blue, _) = color.as_rgba8();
|
|
||||||
|
|
||||||
let mut children = tree.root.children();
|
|
||||||
// The first element is always some kind of background placeholder
|
|
||||||
children.next();
|
|
||||||
|
|
||||||
for node in children {
|
|
||||||
if let NodeKind::Path(ref mut path) = *node.borrow_mut() {
|
|
||||||
if stroke {
|
|
||||||
path.stroke = Some(Stroke {
|
|
||||||
paint: Paint::Color(usvg::Color { red, green, blue }),
|
|
||||||
width: NonZeroPositiveF64::new(2.).expect("the value is not zero"),
|
|
||||||
linecap: LineCap::Round,
|
|
||||||
linejoin: LineJoin::Round,
|
|
||||||
..Default::default()
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
path.fill = Some(Fill {
|
|
||||||
paint: Paint::Color(usvg::Color { red, green, blue }),
|
|
||||||
..Default::default()
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tree
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
use druid::{Color, Insets, Key};
|
|
||||||
|
|
||||||
pub const KEY_BUTTON_BG: Key<Color> = Key::new("dtmm.button.bg");
|
|
||||||
pub const KEY_BUTTON_BG_HOT: Key<Color> = Key::new("dtmm.button.bg-hot");
|
|
||||||
pub const KEY_BUTTON_BG_ACTIVE: Key<Color> = Key::new("dtmm.button.bg-active");
|
|
||||||
pub const KEY_BUTTON_BG_DISABLED: Key<Color> = Key::new("dtmm.button.bg-disabled");
|
|
||||||
|
|
||||||
pub const KEY_BUTTON_FG: Key<Color> = Key::new("dtmm.button.fg");
|
|
||||||
pub const KEY_BUTTON_FG_DISABLED: Key<Color> = Key::new("dtmm.button.fg-disabled");
|
|
||||||
|
|
||||||
pub const KEY_BUTTON_PADDING: Key<Insets> = Key::new("dtmm.button.padding");
|
|
||||||
|
|
||||||
pub const KEY_MOD_LIST_ITEM_BG_COLOR: Key<Color> = Key::new("dtmm.mod-list.item.background-color");
|
|
|
@ -1,33 +0,0 @@
|
||||||
use druid::{Env, Insets};
|
|
||||||
|
|
||||||
use crate::state::State;
|
|
||||||
|
|
||||||
mod colors;
|
|
||||||
pub mod icons;
|
|
||||||
pub mod keys;
|
|
||||||
|
|
||||||
pub use colors::*;
|
|
||||||
|
|
||||||
pub const TOP_BAR_INSETS: Insets = Insets::uniform(5.0);
|
|
||||||
pub const DISABLED_ALPHA: f64 = 0.65;
|
|
||||||
|
|
||||||
pub(crate) fn set_theme_env(env: &mut Env, _: &State) {
|
|
||||||
env.set(druid::theme::TEXT_COLOR, COLOR_FG);
|
|
||||||
env.set(druid::theme::SCROLLBAR_COLOR, COLOR_FG);
|
|
||||||
env.set(druid::theme::BORDER_LIGHT, COLOR_FG);
|
|
||||||
env.set(druid::theme::BUTTON_BORDER_RADIUS, 2.);
|
|
||||||
|
|
||||||
env.set(keys::KEY_BUTTON_BG, COLOR_ACCENT);
|
|
||||||
env.set(keys::KEY_BUTTON_BG_HOT, COLOR_ACCENT.darken(0.03));
|
|
||||||
env.set(keys::KEY_BUTTON_BG_ACTIVE, COLOR_ACCENT.darken(0.1));
|
|
||||||
env.set(
|
|
||||||
keys::KEY_BUTTON_BG_DISABLED,
|
|
||||||
COLOR_ACCENT.with_alpha(DISABLED_ALPHA),
|
|
||||||
);
|
|
||||||
env.set(keys::KEY_BUTTON_FG, COLOR_ACCENT_FG);
|
|
||||||
env.set(
|
|
||||||
keys::KEY_BUTTON_FG_DISABLED,
|
|
||||||
COLOR_ACCENT_FG.with_alpha(DISABLED_ALPHA),
|
|
||||||
);
|
|
||||||
env.set(keys::KEY_BUTTON_PADDING, Insets::uniform_xy(8., 2.));
|
|
||||||
}
|
|
|
@ -1,197 +0,0 @@
|
||||||
use druid::kurbo::Line;
|
|
||||||
use druid::widget::prelude::*;
|
|
||||||
use druid::{Color, KeyOrValue, Point, WidgetPod};
|
|
||||||
|
|
||||||
pub struct Border<T> {
|
|
||||||
inner: WidgetPod<T, Box<dyn Widget<T>>>,
|
|
||||||
color: BorderColor,
|
|
||||||
width: BorderWidths,
|
|
||||||
// corner_radius: KeyOrValue<RoundedRectRadii>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Border<T> {
|
|
||||||
pub fn new(inner: impl Widget<T> + 'static) -> Self {
|
|
||||||
let inner = WidgetPod::new(inner).boxed();
|
|
||||||
Self {
|
|
||||||
inner,
|
|
||||||
color: Color::TRANSPARENT.into(),
|
|
||||||
width: 0f64.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_color(&mut self, color: impl Into<KeyOrValue<Color>>) {
|
|
||||||
self.color = BorderColor::Uniform(color.into());
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_color(mut self, color: impl Into<KeyOrValue<Color>>) -> Self {
|
|
||||||
self.set_color(color);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_bottom_border(&mut self, width: impl Into<KeyOrValue<f64>>) {
|
|
||||||
self.width.bottom = width.into();
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_bottom_border(mut self, width: impl Into<KeyOrValue<f64>>) -> Self {
|
|
||||||
self.set_bottom_border(width);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_top_border(&mut self, width: impl Into<KeyOrValue<f64>>) {
|
|
||||||
self.width.top = width.into();
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_top_border(mut self, width: impl Into<KeyOrValue<f64>>) -> Self {
|
|
||||||
self.set_top_border(width);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Widget<T> for Border<T> {
|
|
||||||
fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) {
|
|
||||||
self.inner.event(ctx, event, data, env)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
|
|
||||||
self.inner.lifecycle(ctx, event, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) {
|
|
||||||
self.inner.update(ctx, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
|
|
||||||
bc.debug_check("Border");
|
|
||||||
|
|
||||||
let (left, top, right, bottom) = self.width.resolve(env);
|
|
||||||
|
|
||||||
let inner_bc = bc.shrink((left + right, top + bottom));
|
|
||||||
let inner_size = self.inner.layout(ctx, &inner_bc, data, env);
|
|
||||||
|
|
||||||
let origin = Point::new(left, top);
|
|
||||||
self.inner.set_origin(ctx, origin);
|
|
||||||
|
|
||||||
let size = Size::new(
|
|
||||||
inner_size.width + left + right,
|
|
||||||
inner_size.height + top + bottom,
|
|
||||||
);
|
|
||||||
|
|
||||||
let insets = self.inner.compute_parent_paint_insets(size);
|
|
||||||
ctx.set_paint_insets(insets);
|
|
||||||
|
|
||||||
let baseline_offset = self.inner.baseline_offset();
|
|
||||||
if baseline_offset > 0. {
|
|
||||||
ctx.set_baseline_offset(baseline_offset + bottom);
|
|
||||||
}
|
|
||||||
|
|
||||||
size
|
|
||||||
}
|
|
||||||
|
|
||||||
fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
|
|
||||||
let size = ctx.size();
|
|
||||||
let (left, top, right, bottom) = self.width.resolve(env);
|
|
||||||
let (col_left, col_top, col_right, col_bottom) = self.color.resolve(env);
|
|
||||||
|
|
||||||
self.inner.paint(ctx, data, env);
|
|
||||||
|
|
||||||
// There's probably a more elegant way to create the various `Line`s, but this works for now.
|
|
||||||
// The important bit is to move each line inwards by half each side's border width. Otherwise
|
|
||||||
// it would draw hald of the border outside of the widget's boundary.
|
|
||||||
|
|
||||||
if left > 0. {
|
|
||||||
ctx.stroke(
|
|
||||||
Line::new((left / 2., top / 2.), (left / 2., size.height)),
|
|
||||||
&col_left,
|
|
||||||
left,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if top > 0. {
|
|
||||||
ctx.stroke(
|
|
||||||
Line::new((left / 2., top / 2.), (size.width - (right / 2.), top / 2.)),
|
|
||||||
&col_top,
|
|
||||||
top,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if right > 0. {
|
|
||||||
ctx.stroke(
|
|
||||||
Line::new(
|
|
||||||
(size.width - (right / 2.), top / 2.),
|
|
||||||
(size.width - (right / 2.), size.height - (bottom / 2.)),
|
|
||||||
),
|
|
||||||
&col_right,
|
|
||||||
right,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if bottom > 0. {
|
|
||||||
ctx.stroke(
|
|
||||||
Line::new(
|
|
||||||
(left / 2., size.height - (bottom / 2.)),
|
|
||||||
(size.width - (right / 2.), size.height - (bottom / 2.)),
|
|
||||||
),
|
|
||||||
&col_bottom,
|
|
||||||
bottom,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum BorderColor {
|
|
||||||
Uniform(KeyOrValue<Color>),
|
|
||||||
// Individual {
|
|
||||||
// left: KeyOrValue<Color>,
|
|
||||||
// top: KeyOrValue<Color>,
|
|
||||||
// right: KeyOrValue<Color>,
|
|
||||||
// bottom: KeyOrValue<Color>,
|
|
||||||
// },
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BorderColor {
|
|
||||||
pub fn resolve(&self, env: &Env) -> (Color, Color, Color, Color) {
|
|
||||||
match self {
|
|
||||||
Self::Uniform(val) => {
|
|
||||||
let color = val.resolve(env);
|
|
||||||
(color, color, color, color)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Color> for BorderColor {
    /// A bare `Color` becomes a uniform border color.
    fn from(value: Color) -> Self {
        Self::Uniform(value.into())
    }
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct BorderWidths {
|
|
||||||
pub left: KeyOrValue<f64>,
|
|
||||||
pub top: KeyOrValue<f64>,
|
|
||||||
pub right: KeyOrValue<f64>,
|
|
||||||
pub bottom: KeyOrValue<f64>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<f64> for BorderWidths {
|
|
||||||
fn from(value: f64) -> Self {
|
|
||||||
Self {
|
|
||||||
left: value.into(),
|
|
||||||
top: value.into(),
|
|
||||||
right: value.into(),
|
|
||||||
bottom: value.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BorderWidths {
|
|
||||||
pub fn resolve(&self, env: &Env) -> (f64, f64, f64, f64) {
|
|
||||||
(
|
|
||||||
self.left.resolve(env),
|
|
||||||
self.top.resolve(env),
|
|
||||||
self.right.resolve(env),
|
|
||||||
self.bottom.resolve(env),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,113 +0,0 @@
|
||||||
use druid::widget::prelude::*;
|
|
||||||
use druid::widget::{Click, ControllerHost, Label, LabelText};
|
|
||||||
use druid::WidgetPod;
|
|
||||||
use druid::{Affine, WidgetExt};
|
|
||||||
|
|
||||||
use crate::ui::theme;
|
|
||||||
|
|
||||||
pub struct Button<T> {
|
|
||||||
inner: WidgetPod<T, Box<dyn Widget<T>>>,
|
|
||||||
inner_size: Size,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Button<T> {
|
|
||||||
pub fn new(inner: impl Widget<T> + 'static) -> Self {
|
|
||||||
let inner = inner.env_scope(|env, _| {
|
|
||||||
env.set(
|
|
||||||
druid::theme::TEXT_COLOR,
|
|
||||||
env.get(theme::keys::KEY_BUTTON_FG),
|
|
||||||
);
|
|
||||||
env.set(
|
|
||||||
druid::theme::DISABLED_TEXT_COLOR,
|
|
||||||
env.get(theme::keys::KEY_BUTTON_FG_DISABLED),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
let inner = WidgetPod::new(inner).boxed();
|
|
||||||
Self {
|
|
||||||
inner,
|
|
||||||
inner_size: Size::ZERO,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_label(text: impl Into<LabelText<T>>) -> Self {
|
|
||||||
let inner = Label::new(text);
|
|
||||||
Self::new(inner)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn on_click(
|
|
||||||
self,
|
|
||||||
f: impl Fn(&mut EventCtx, &mut T, &Env) + 'static,
|
|
||||||
) -> ControllerHost<Self, Click<T>> {
|
|
||||||
ControllerHost::new(self, Click::new(f))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Widget<T> for Button<T> {
|
|
||||||
fn event(&mut self, ctx: &mut EventCtx, event: &Event, _: &mut T, _: &Env) {
|
|
||||||
match event {
|
|
||||||
Event::MouseDown(_) if !ctx.is_disabled() => {
|
|
||||||
ctx.set_active(true);
|
|
||||||
ctx.request_paint();
|
|
||||||
}
|
|
||||||
Event::MouseUp(_) => {
|
|
||||||
if ctx.is_active() && !ctx.is_disabled() {
|
|
||||||
ctx.request_paint();
|
|
||||||
}
|
|
||||||
ctx.set_active(false);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
|
|
||||||
if let LifeCycle::HotChanged(_) | LifeCycle::DisabledChanged(_) = event {
|
|
||||||
ctx.request_paint();
|
|
||||||
}
|
|
||||||
self.inner.lifecycle(ctx, event, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) {
|
|
||||||
self.inner.update(ctx, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
|
|
||||||
bc.debug_check("Button");
|
|
||||||
|
|
||||||
let padding = env.get(theme::keys::KEY_BUTTON_PADDING).size();
|
|
||||||
let inner_bc = bc.shrink(padding).loosen();
|
|
||||||
|
|
||||||
self.inner_size = self.inner.layout(ctx, &inner_bc, data, env);
|
|
||||||
|
|
||||||
bc.constrain(Size::new(
|
|
||||||
self.inner_size.width + padding.width,
|
|
||||||
self.inner_size.height + padding.height,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
|
|
||||||
let size = ctx.size();
|
|
||||||
|
|
||||||
let bg_color = if ctx.is_disabled() {
|
|
||||||
env.get(theme::keys::KEY_BUTTON_BG_DISABLED)
|
|
||||||
} else if ctx.is_hot() {
|
|
||||||
env.get(theme::keys::KEY_BUTTON_BG_HOT)
|
|
||||||
} else if ctx.is_active() {
|
|
||||||
env.get(theme::keys::KEY_BUTTON_BG_ACTIVE)
|
|
||||||
} else {
|
|
||||||
env.get(theme::keys::KEY_BUTTON_BG)
|
|
||||||
};
|
|
||||||
|
|
||||||
ctx.fill(
|
|
||||||
size.to_rect()
|
|
||||||
.to_rounded_rect(env.get(druid::theme::BUTTON_BORDER_RADIUS)),
|
|
||||||
&bg_color,
|
|
||||||
);
|
|
||||||
|
|
||||||
let inner_pos = (size.to_vec2() - self.inner_size.to_vec2()) / 2.;
|
|
||||||
|
|
||||||
ctx.with_save(|ctx| {
|
|
||||||
ctx.transform(Affine::translate(inner_pos));
|
|
||||||
self.inner.paint(ctx, data, env);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,136 +0,0 @@
|
||||||
use druid::widget::{Button, Controller, Image, Scroll};
|
|
||||||
use druid::{
|
|
||||||
Data, Env, Event, EventCtx, ImageBuf, LifeCycle, LifeCycleCtx, Rect, UpdateCtx, Widget,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::state::{State, ACTION_SET_DIRTY, ACTION_START_SAVE_SETTINGS};
|
|
||||||
|
|
||||||
/// Controller that keeps the wrapped `Button` permanently disabled
/// (re-disables it on every event and update pass).
pub struct DisabledButtonController;
|
|
||||||
|
|
||||||
impl<T: Data> Controller<T, Button<T>> for DisabledButtonController {
|
|
||||||
fn event(
|
|
||||||
&mut self,
|
|
||||||
child: &mut Button<T>,
|
|
||||||
ctx: &mut EventCtx,
|
|
||||||
event: &Event,
|
|
||||||
data: &mut T,
|
|
||||||
env: &Env,
|
|
||||||
) {
|
|
||||||
if !ctx.is_disabled() {
|
|
||||||
ctx.set_disabled(true);
|
|
||||||
ctx.request_paint();
|
|
||||||
}
|
|
||||||
child.event(ctx, event, data, env)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update(
|
|
||||||
&mut self,
|
|
||||||
child: &mut Button<T>,
|
|
||||||
ctx: &mut UpdateCtx,
|
|
||||||
old_data: &T,
|
|
||||||
data: &T,
|
|
||||||
env: &Env,
|
|
||||||
) {
|
|
||||||
if !ctx.is_disabled() {
|
|
||||||
ctx.set_disabled(true);
|
|
||||||
ctx.request_paint();
|
|
||||||
}
|
|
||||||
child.update(ctx, old_data, data, env)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Controller that keeps a `Scroll` widget pinned to the end of its content
/// whenever the data updates.
pub struct AutoScrollController;
|
|
||||||
|
|
||||||
impl<T: Data, W: Widget<T>> Controller<T, Scroll<T, W>> for AutoScrollController {
|
|
||||||
fn update(
|
|
||||||
&mut self,
|
|
||||||
child: &mut Scroll<T, W>,
|
|
||||||
ctx: &mut UpdateCtx,
|
|
||||||
old_data: &T,
|
|
||||||
data: &T,
|
|
||||||
env: &Env,
|
|
||||||
) {
|
|
||||||
child.update(ctx, old_data, data, env);
|
|
||||||
|
|
||||||
if !ctx.is_disabled() {
|
|
||||||
let size = child.child_size();
|
|
||||||
let end_region = Rect::new(size.width - 1., size.height - 1., size.width, size.height);
|
|
||||||
child.scroll_to(ctx, end_region);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Expands to `!Data::same(a.f1, b.f1) || !Data::same(a.f2, b.f2) || …`,
/// i.e. evaluates to `true` when any of the listed fields differs between
/// the two values.
macro_rules! compare_state_fields {
    ($old:ident, $new:ident, $($field:ident),+) => {
        $(!Data::same(&$old.$field, &$new.$field)) || +
    }
}
|
|
||||||
|
|
||||||
/// A controller that tracks state changes for certain fields and submits commands to handle them.
|
|
||||||
// Unit struct; all behavior lives in the `Controller` impl below.
pub struct DirtyStateController;
|
|
||||||
|
|
||||||
impl<W: Widget<State>> Controller<State, W> for DirtyStateController {
|
|
||||||
fn update(
|
|
||||||
&mut self,
|
|
||||||
child: &mut W,
|
|
||||||
ctx: &mut UpdateCtx,
|
|
||||||
old_data: &State,
|
|
||||||
data: &State,
|
|
||||||
env: &Env,
|
|
||||||
) {
|
|
||||||
// Only start tracking changes after the initial load has finished
|
|
||||||
if old_data.loading == data.loading {
|
|
||||||
if compare_state_fields!(
|
|
||||||
old_data,
|
|
||||||
data,
|
|
||||||
mods,
|
|
||||||
game_dir,
|
|
||||||
data_dir,
|
|
||||||
nexus_api_key,
|
|
||||||
is_io_enabled
|
|
||||||
) {
|
|
||||||
ctx.submit_command(ACTION_START_SAVE_SETTINGS);
|
|
||||||
}
|
|
||||||
|
|
||||||
if compare_state_fields!(old_data, data, mods, game_dir, is_io_enabled) {
|
|
||||||
ctx.submit_command(ACTION_SET_DIRTY);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
child.update(ctx, old_data, data, env)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Controller that copies `ImageBuf` data into an `Image` widget when the
/// widget is added and whenever the buffer changes.
pub struct ImageLensController;
|
|
||||||
|
|
||||||
impl Controller<ImageBuf, Image> for ImageLensController {
|
|
||||||
fn lifecycle(
|
|
||||||
&mut self,
|
|
||||||
widget: &mut Image,
|
|
||||||
ctx: &mut LifeCycleCtx,
|
|
||||||
event: &LifeCycle,
|
|
||||||
data: &ImageBuf,
|
|
||||||
env: &Env,
|
|
||||||
) {
|
|
||||||
if let LifeCycle::WidgetAdded = event {
|
|
||||||
widget.set_image_data(data.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
widget.lifecycle(ctx, event, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update(
|
|
||||||
&mut self,
|
|
||||||
widget: &mut Image,
|
|
||||||
ctx: &mut UpdateCtx,
|
|
||||||
old_data: &ImageBuf,
|
|
||||||
data: &ImageBuf,
|
|
||||||
env: &Env,
|
|
||||||
) {
|
|
||||||
if !Data::same(old_data, data) {
|
|
||||||
widget.set_image_data(data.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
widget.update(ctx, old_data, data, env);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,63 +0,0 @@
|
||||||
use std::f64::INFINITY;
|
|
||||||
|
|
||||||
use druid::widget::prelude::*;
|
|
||||||
use druid::{Point, WidgetPod};
|
|
||||||
|
|
||||||
pub struct FillContainer<T> {
|
|
||||||
child: WidgetPod<T, Box<dyn Widget<T>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> FillContainer<T> {
|
|
||||||
pub fn new(child: impl Widget<T> + 'static) -> Self {
|
|
||||||
Self {
|
|
||||||
child: WidgetPod::new(child).boxed(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Widget<T> for FillContainer<T> {
|
|
||||||
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
|
|
||||||
fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) {
|
|
||||||
self.child.event(ctx, event, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
|
|
||||||
fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
|
|
||||||
self.child.lifecycle(ctx, event, data, env)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
|
|
||||||
fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) {
|
|
||||||
self.child.update(ctx, data, env);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
|
|
||||||
fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
|
|
||||||
bc.debug_check("FillContainer");
|
|
||||||
|
|
||||||
let child_size = self.child.layout(ctx, bc, data, env);
|
|
||||||
|
|
||||||
let w = if bc.is_width_bounded() {
|
|
||||||
INFINITY
|
|
||||||
} else {
|
|
||||||
child_size.width
|
|
||||||
};
|
|
||||||
|
|
||||||
let h = if bc.is_height_bounded() {
|
|
||||||
INFINITY
|
|
||||||
} else {
|
|
||||||
child_size.height
|
|
||||||
};
|
|
||||||
|
|
||||||
let my_size = bc.constrain(Size::new(w, h));
|
|
||||||
|
|
||||||
self.child.set_origin(ctx, Point::new(0.0, 0.0));
|
|
||||||
tracing::trace!("Computed layout: size={}", my_size);
|
|
||||||
my_size
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
|
|
||||||
fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
|
|
||||||
self.child.paint(ctx, data, env);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,35 +0,0 @@
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use druid::text::Formatter;
|
|
||||||
|
|
||||||
pub mod border;
|
|
||||||
pub mod button;
|
|
||||||
pub mod controller;
|
|
||||||
|
|
||||||
// Formats `Arc<PathBuf>` values for text boxes: displays the path as a
// string and accepts any input verbatim.
pub(crate) struct PathBufFormatter;

impl PathBufFormatter {
    pub fn new() -> Self {
        Self {}
    }
}
|
|
||||||
|
|
||||||
impl Formatter<Arc<PathBuf>> for PathBufFormatter {
|
|
||||||
fn format(&self, value: &Arc<PathBuf>) -> String {
|
|
||||||
value.display().to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn validate_partial_input(
|
|
||||||
&self,
|
|
||||||
_input: &str,
|
|
||||||
_sel: &druid::text::Selection,
|
|
||||||
) -> druid::text::Validation {
|
|
||||||
druid::text::Validation::success()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn value(&self, input: &str) -> Result<Arc<PathBuf>, druid::text::ValidationError> {
|
|
||||||
let p = PathBuf::from(input);
|
|
||||||
Ok(Arc::new(p))
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,73 +0,0 @@
|
||||||
use druid::widget::{Controller, Flex};
|
|
||||||
use druid::{Data, Widget};
|
|
||||||
|
|
||||||
pub struct TableSelect<T> {
|
|
||||||
widget: Flex<T>,
|
|
||||||
controller: TableSelectController<T>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> TableSelect<T> {
|
|
||||||
pub fn new(values: impl IntoIterator<Item = (impl Widget<T> + 'static)>) -> Self {
|
|
||||||
todo!();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Widget<T> for TableSelect<T> {
|
|
||||||
fn event(
|
|
||||||
&mut self,
|
|
||||||
ctx: &mut druid::EventCtx,
|
|
||||||
event: &druid::Event,
|
|
||||||
data: &mut T,
|
|
||||||
env: &druid::Env,
|
|
||||||
) {
|
|
||||||
todo!()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn lifecycle(
|
|
||||||
&mut self,
|
|
||||||
ctx: &mut druid::LifeCycleCtx,
|
|
||||||
event: &druid::LifeCycle,
|
|
||||||
data: &T,
|
|
||||||
env: &druid::Env,
|
|
||||||
) {
|
|
||||||
todo!()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update(&mut self, ctx: &mut druid::UpdateCtx, old_data: &T, data: &T, env: &druid::Env) {
|
|
||||||
todo!()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn layout(
|
|
||||||
&mut self,
|
|
||||||
ctx: &mut druid::LayoutCtx,
|
|
||||||
bc: &druid::BoxConstraints,
|
|
||||||
data: &T,
|
|
||||||
env: &druid::Env,
|
|
||||||
) -> druid::Size {
|
|
||||||
todo!()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn paint(&mut self, ctx: &mut druid::PaintCtx, data: &T, env: &druid::Env) {
|
|
||||||
todo!()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct TableSelectController<T> {
|
|
||||||
inner: T,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> TableSelectController<T> {}
|
|
||||||
|
|
||||||
impl<T: Data> Controller<T, Flex<T>> for TableSelectController<T> {}
|
|
||||||
|
|
||||||
pub struct TableItem<T> {
|
|
||||||
inner: dyn Widget<T>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> TableItem<T> {
|
|
||||||
pub fn new(inner: impl Widget<T>) -> Self {
|
|
||||||
todo!();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Data> Widget<T> for TableItem<T> {}
|
|
|
@ -1,91 +0,0 @@
|
||||||
use color_eyre::{Handler, HelpInfo, Report};
|
|
||||||
use druid::widget::{CrossAxisAlignment, Flex, Label, LineBreaking};
|
|
||||||
use druid::{Data, WidgetExt, WindowDesc, WindowHandle};
|
|
||||||
|
|
||||||
use crate::ui::theme;
|
|
||||||
use crate::ui::widget::button::Button;
|
|
||||||
|
|
||||||
const WINDOW_SIZE: (f64, f64) = (600., 250.);
|
|
||||||
|
|
||||||
/// Show an error dialog.
|
|
||||||
/// The title and message are extracted from the error chain in the given `Report`.
|
|
||||||
/// Show an error dialog.
/// The title and message are extracted from the error chain in the given `Report`.
///
/// Suggestions attached via `color_eyre`'s `HelpInfo` sections are rendered
/// below the message. Returns a `WindowDesc` the caller must still open;
/// `_parent` is currently unused.
pub fn error<T: Data>(err: Report, _parent: WindowHandle) -> WindowDesc<T> {
    let (title, msg) = {
        let count = err.chain().count();

        if count == 1 {
            // If there is only one error, that's all we can show.
            (
                String::from("An error occurred!"),
                err.root_cause().to_string(),
            )
        } else {
            let first = err.chain().next().unwrap();
            let root = err.root_cause();

            // If there is more than one error in the chain we want to show
            // - The first one: This will describe the overall operation that failed
            // - The root cause: The actual thing that failed (e.g. 'No such file or directory')
            // - The one before the root cause: With diligent `wrap_err` usage, this will provide
            //   context to the root cause (e.g. the file name we failed to access)
            //
            // If there are only two errors, the first one is also the context to the root cause.
            if count > 2 {
                // The second to last one, the context to the root cause
                let context = err.chain().nth(count - 2).unwrap();

                (format!("{first}!"), format!("{}: {}", context, root))
            } else {
                ("An error occurred!".to_string(), format!("{}: {}", first, root))
            }
        }
    };

    let title = Label::new(title)
        .with_text_size(24.)
        .with_text_color(theme::COLOR_RED_LIGHT);
    let text = Label::new(msg).with_line_break_mode(LineBreaking::WordWrap);

    // Closing the dialog is the only available action.
    let button = Button::with_label("Ok")
        .on_click(|ctx, _, _| {
            ctx.window().close();
        })
        .align_right();

    let mut widget = Flex::column()
        .cross_axis_alignment(CrossAxisAlignment::Start)
        .with_child(title)
        .with_default_spacer()
        .with_child(text);

    // Append any suggestions that color-eyre collected on the report's handler.
    if let Some(handler) = err.handler().downcast_ref::<Handler>() {
        let mut first = true;
        for section in handler.sections() {
            if let HelpInfo::Suggestion(data, _) = section {
                // Only insert the spacer once, before the first suggestion.
                if first {
                    widget.add_default_spacer();
                    first = false;
                }

                let w = Flex::row()
                    .cross_axis_alignment(CrossAxisAlignment::Start)
                    .with_child(Label::new("Suggestion:").with_text_color(theme::COLOR_GREEN_LIGHT))
                    .with_spacer(2.)
                    .with_child(
                        Label::new(data.to_string()).with_line_break_mode(LineBreaking::WordWrap),
                    );

                widget.add_child(w);
            }
        }
    }

    let widget = widget.with_flex_spacer(1.).with_child(button).padding(10.);

    WindowDesc::new(widget)
        .title("Critical Error")
        .show_titlebar(true)
        .with_min_size(WINDOW_SIZE)
        .set_always_on_top(true)
        .resizable(false)
}
|
|
|
@ -1,536 +0,0 @@
|
||||||
use std::str::FromStr;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use druid::im::Vector;
|
|
||||||
use druid::text::RichTextBuilder;
|
|
||||||
use druid::widget::{
|
|
||||||
Checkbox, CrossAxisAlignment, Either, Flex, Image, Label, LineBreaking, List,
|
|
||||||
MainAxisAlignment, Maybe, Scroll, SizedBox, Split, Svg, SvgData, TextBox, ViewSwitcher,
|
|
||||||
};
|
|
||||||
use druid::{lens, Env};
|
|
||||||
use druid::{
|
|
||||||
Color, FileDialogOptions, FileSpec, FontDescriptor, FontFamily, LensExt, SingleUse, Widget,
|
|
||||||
WidgetExt, WindowDesc, WindowId,
|
|
||||||
};
|
|
||||||
use druid::{Data, ImageBuf, LifeCycleCtx};
|
|
||||||
use druid_widget_nursery::WidgetExt as _;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
|
|
||||||
use crate::state::{
|
|
||||||
ModInfo, NexusInfo, NexusInfoLens, State, View, ACTION_ADD_MOD, ACTION_OPEN_LINK,
|
|
||||||
ACTION_SELECTED_MOD_DOWN, ACTION_SELECTED_MOD_UP, ACTION_SELECT_MOD, ACTION_SET_WINDOW_HANDLE,
|
|
||||||
ACTION_START_CHECK_UPDATE, ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY,
|
|
||||||
ACTION_START_RESET_DEPLOYMENT,
|
|
||||||
};
|
|
||||||
use crate::ui::theme::{self, ColorExt, COLOR_GREEN_LIGHT};
|
|
||||||
use crate::ui::widget::border::Border;
|
|
||||||
use crate::ui::widget::button::Button;
|
|
||||||
use crate::ui::widget::controller::{
|
|
||||||
AutoScrollController, DirtyStateController, ImageLensController,
|
|
||||||
};
|
|
||||||
use crate::ui::widget::PathBufFormatter;
|
|
||||||
|
|
||||||
lazy_static! {
    // Stable identifier for the main window, used to route commands and to
    // register the window handle in application state.
    pub static ref WINDOW_ID: WindowId = WindowId::next();
}
|
|
||||||
|
|
||||||
/// Main window title.
const TITLE: &str = "Darktide Mod Manager";
/// Initial main-window size (width, height) in px.
const WINDOW_SIZE: (f64, f64) = (1080., 720.);
/// Minimum width of the mod-details pane in the split view.
const MOD_DETAILS_MIN_WIDTH: f64 = 325.;
|
|
||||||
|
|
||||||
pub(crate) fn new() -> WindowDesc<State> {
|
|
||||||
WindowDesc::new(build_window())
|
|
||||||
.title(TITLE)
|
|
||||||
.window_size(WINDOW_SIZE)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Build the bar across the top of the main window: view switchers on the
/// left (Mods / Settings) and action buttons on the right (update check,
/// deploy, reset).
fn build_top_bar() -> impl Widget<State> {
    let mods_button = Button::with_label("Mods")
        .on_click(|_ctx, state: &mut State, _env| state.current_view = View::Mods);

    let settings_button =
        Button::with_label("Settings").on_click(|_ctx, state: &mut State, _env| {
            state.current_view = View::Settings;
        });

    let check_update_button = {
        let make_button = || {
            Button::with_label("Check for updates").on_click(|ctx, _: &mut State, _| {
                ctx.submit_command(ACTION_START_CHECK_UPDATE);
            })
        };

        // Without an API key the button is permanently disabled and explains
        // why via a tooltip; otherwise it is disabled only while a check runs.
        Either::new(
            |data, _| data.nexus_api_key.is_empty(),
            make_button()
                .tooltip(|_: &State, _: &Env| "A Nexus API key is required")
                .disabled_if(|_, _| true),
            make_button().disabled_if(|data, _| data.is_update_in_progress),
        )
    };

    let deploy_button = {
        let icon = Svg::new(SvgData::from_str(theme::icons::ALERT_CIRCLE).expect("invalid SVG"))
            .fix_height(druid::theme::TEXT_SIZE_NORMAL);

        // Show an alert icon next to the label while there are undeployed changes.
        let inner = Either::new(
            |state: &State, _| state.dirty,
            Flex::row()
                .with_child(icon)
                .with_spacer(3.)
                .with_child(Label::new("Deploy Mods")),
            Label::new("Deploy Mods"),
        );
        Button::new(inner)
            .on_click(|ctx, _state: &mut State, _env| {
                ctx.submit_command(ACTION_START_DEPLOY);
            })
            .disabled_if(|data, _| data.is_deployment_in_progress || data.is_reset_in_progress)
    };

    let reset_button = Button::with_label("Reset Game")
        .on_click(|ctx, _state: &mut State, _env| {
            ctx.submit_command(ACTION_START_RESET_DEPLOYMENT);
        })
        .disabled_if(|data, _| data.is_deployment_in_progress || data.is_reset_in_progress);

    let bar = Flex::row()
        .must_fill_main_axis(true)
        .main_axis_alignment(MainAxisAlignment::SpaceBetween)
        .with_child(
            Flex::row()
                .with_child(mods_button)
                .with_default_spacer()
                .with_child(settings_button),
        )
        .with_child(
            Flex::row()
                .with_child(check_update_button)
                .with_default_spacer()
                .with_child(deploy_button)
                .with_default_spacer()
                .with_child(reset_button),
        )
        .padding(theme::TOP_BAR_INSETS)
        .background(theme::TOP_BAR_BACKGROUND_COLOR);

    // Separate the bar from the content below with a thin bottom border.
    Border::new(bar)
        .with_color(theme::COLOR_FG2)
        .with_bottom_border(1.)
}
|
|
||||||
|
|
||||||
/// Build the scrollable mod list (left pane of the Mods view).
///
/// Each row shows an enable-checkbox, the mod's name, and its version
/// (with a download icon when Nexus reports a newer version). The list is
/// lensed onto `(index, ModInfo, is_selected)` tuples derived from `State`.
fn build_mod_list() -> impl Widget<State> {
    let list = List::new(|| {
        // The checkbox toggles `ModInfo::enabled`; its colors are re-themed
        // per row depending on the selection state.
        let checkbox = Checkbox::new("")
            .env_scope(|env, selected| {
                env.set(druid::theme::BORDER_DARK, theme::COLOR_BG3);
                env.set(druid::theme::BORDER_LIGHT, theme::COLOR_BG3);
                env.set(druid::theme::TEXT_COLOR, theme::COLOR_ACCENT_FG);

                if *selected {
                    env.set(druid::theme::BACKGROUND_DARK, theme::COLOR_ACCENT);
                    env.set(druid::theme::BACKGROUND_LIGHT, theme::COLOR_ACCENT);
                } else {
                    env.set(druid::theme::BACKGROUND_DARK, Color::TRANSPARENT);
                    env.set(druid::theme::BACKGROUND_LIGHT, Color::TRANSPARENT);
                }
            })
            .lens(lens!((usize, Arc<ModInfo>, bool), 1).then(ModInfo::enabled.in_arc()));

        // Prefer the Nexus-provided name over the local one when available.
        let name = Label::dynamic(|info: &Arc<ModInfo>, _| {
            info.nexus
                .as_ref()
                .map(|n| n.name.clone())
                .unwrap_or_else(|| info.name.clone())
        })
        .lens(lens!((usize, Arc<ModInfo>, bool), 1));

        let version = {
            let icon = {
                let tree =
                    theme::icons::parse_svg(theme::icons::CLOUD_DOWNLOAD).expect("invalid SVG");

                let tree = theme::icons::recolor_icon(tree, true, COLOR_GREEN_LIGHT);

                Svg::new(tree).fix_height(druid::theme::TEXT_SIZE_NORMAL)
            };

            // Show a download icon next to the version when the local version
            // differs from the one Nexus reports.
            Either::new(
                |info, _| {
                    info.nexus
                        .as_ref()
                        .map(|n| info.version != n.version)
                        .unwrap_or(false)
                },
                Flex::row()
                    .with_child(icon)
                    .with_spacer(3.)
                    .with_child(Label::raw().lens(ModInfo::version.in_arc())),
                Label::raw().lens(ModInfo::version.in_arc()),
            )
            .lens(lens!((usize, Arc<ModInfo>, bool), 1))
        };

        let fields = Flex::row()
            .must_fill_main_axis(true)
            .main_axis_alignment(MainAxisAlignment::SpaceBetween)
            .with_child(name)
            .with_child(version);

        // Row container: clicking anywhere selects the mod; background
        // alternates per row and highlights the selection.
        Flex::row()
            .must_fill_main_axis(true)
            .with_child(checkbox)
            .with_flex_child(fields, 1.)
            .padding((5.0, 4.0))
            .background(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR)
            .on_click(|ctx, (i, _, _), _env| ctx.submit_command(ACTION_SELECT_MOD.with(*i)))
            .env_scope(|env, (i, _, selected)| {
                if *selected {
                    env.set(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR, theme::COLOR_ACCENT);
                    env.set(
                        druid::theme::TEXT_COLOR,
                        theme::COLOR_ACCENT_FG.darken(0.05),
                    );
                } else {
                    env.set(druid::theme::TEXT_COLOR, theme::COLOR_FG);

                    // Zebra-stripe unselected rows.
                    if (i % 2) == 1 {
                        env.set(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR, theme::COLOR_BG1);
                    } else {
                        env.set(theme::keys::KEY_MOD_LIST_ITEM_BG_COLOR, theme::COLOR_BG);
                    }
                }
            })
    });

    // Map `State` to a vector of (index, info, selected) tuples on the way in,
    // and write back only rows that actually changed on the way out.
    let scroll = Scroll::new(list).vertical().lens(lens::Identity.map(
        |state: &State| {
            state
                .mods
                .iter()
                .enumerate()
                .map(|(i, val)| (i, val.clone(), Some(i) == state.selected_mod_index))
                .collect::<Vector<_>>()
        },
        |state, infos| {
            infos.into_iter().for_each(|(i, new, _)| {
                if !Data::same(&state.mods.get(i).cloned(), &Some(new.clone())) {
                    state.mods.set(i, new);
                }
            });
        },
    ));

    Flex::column()
        .must_fill_main_axis(true)
        .with_child(Flex::row())
        .with_flex_child(scroll, 1.0)
}
|
|
||||||
|
|
||||||
/// Build the button rows of the details pane: move up/down (reordering the
/// load order) on top, toggle/add/delete below. Buttons are disabled when
/// their action is not applicable to the current selection.
fn build_mod_details_buttons() -> impl Widget<State> {
    let button_move_up = Button::with_label("Move Up")
        .on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_UP))
        .disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_up());

    let button_move_down = Button::with_label("Move Down")
        .on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_DOWN))
        .disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_down());

    // Label flips between "Enable Mod"/"Disable Mod" based on the selected
    // mod's current state; with no selection a disabled placeholder is shown.
    let button_toggle_mod = Maybe::new(
        || {
            let inner = Label::dynamic(|enabled, _env| {
                if *enabled {
                    "Disable Mod".into()
                } else {
                    "Enable Mod".into()
                }
            });
            Button::new(inner)
                .on_click(|_ctx, enabled: &mut bool, _env| {
                    *enabled = !(*enabled);
                })
                .lens(ModInfo::enabled.in_arc())
        },
        // TODO: Gray out
        || Button::with_label("Enable Mod"),
    )
    .disabled_if(|info: &Option<Arc<ModInfo>>, _env: &druid::Env| info.is_none())
    .lens(State::selected_mod);

    // Opens a native file dialog restricted to .zip archives; the chosen file
    // is delivered via ACTION_ADD_MOD.
    let button_add_mod = Button::with_label("Add Mod").on_click(|ctx, _state: &mut State, _env| {
        let zip = FileSpec::new("Zip file", &["zip"]);
        let opts = FileDialogOptions::new()
            .allowed_types(vec![zip])
            .default_type(zip)
            .name_label("Mod Archive")
            .title("Choose a mod to add")
            .accept_command(ACTION_ADD_MOD);
        ctx.submit_command(druid::commands::SHOW_OPEN_PANEL.with(opts))
    });

    let button_delete_mod = Button::with_label("Delete Mod")
        .on_click(|ctx, data: &mut Option<Arc<ModInfo>>, _env| {
            if let Some(info) = data {
                ctx.submit_command(
                    ACTION_START_DELETE_SELECTED_MOD.with(SingleUse::new(info.clone())),
                );
            }
        })
        .disabled_if(|info: &Option<Arc<ModInfo>>, _env: &druid::Env| info.is_none())
        .lens(State::selected_mod);

    Flex::column()
        .cross_axis_alignment(CrossAxisAlignment::Center)
        .with_child(
            Flex::row()
                .main_axis_alignment(MainAxisAlignment::End)
                .with_child(button_move_up)
                .with_default_spacer()
                .with_child(button_move_down),
        )
        .with_default_spacer()
        .with_child(
            Flex::row()
                .main_axis_alignment(MainAxisAlignment::End)
                .with_child(button_toggle_mod)
                .with_default_spacer()
                .with_child(button_add_mod)
                .with_default_spacer()
                .with_child(button_delete_mod),
        )
        .expand_width()
}
|
|
||||||
|
|
||||||
/// Build the informational part of the details pane for the selected mod:
/// header image, name, summary, Nexus link (when the mod has Nexus data),
/// version/author line, and category list. Shows an empty column when no
/// mod is selected.
fn build_mod_details_info() -> impl Widget<State> {
    Maybe::new(
        || {
            // Nexus-provided fields take precedence over local ones via NexusInfoLens.
            let name = Label::raw()
                .with_text_size(24.)
                // Force the label to take up the entire details' pane width,
                // so that we can center-align it.
                .expand_width()
                .lens(NexusInfoLens::new(NexusInfo::name, ModInfo::name).in_arc());
            let summary = Label::raw()
                .with_line_break_mode(LineBreaking::WordWrap)
                .lens(NexusInfoLens::new(NexusInfo::summary, ModInfo::summary).in_arc());

            // "Version: X[, by AUTHOR]" — author is optional from both sources.
            let version_line = Label::dynamic(|info: &Arc<ModInfo>, _| {
                let author = info
                    .nexus
                    .as_ref()
                    .map(|n| &n.author)
                    .or(info.author.as_ref());

                if let Some(author) = &author {
                    format!("Version: {}, by {author}", info.version)
                } else {
                    format!("Version: {}", info.version)
                }
            });

            let categories = Label::dynamic(|info: &Arc<ModInfo>, _| {
                if info.categories.is_empty() {
                    String::from("Uncategorized")
                } else {
                    // Build "Category: a, b, c" by folding over the list.
                    info.categories.iter().enumerate().fold(
                        String::from("Category: "),
                        |mut s, (i, category)| {
                            if i > 0 {
                                s.push_str(", ");
                            }
                            s.push_str(category);
                            s
                        },
                    )
                }
            });

            // Only rendered when the mod carries Nexus metadata; builds a
            // clickable rich-text link to the mod's Nexus page.
            let nexus_link = Maybe::or_empty(|| {
                let link = Label::raw().lens(NexusInfo::id.map(
                    |id| {
                        let url = format!("https://nexusmods.com/warhammer40kdarktide/mods/{}", id);
                        let mut builder = RichTextBuilder::new();
                        builder
                            .push("Open on Nexusmods")
                            .underline(true)
                            .text_color(theme::LINK_COLOR)
                            .link(ACTION_OPEN_LINK.with(Arc::new(url)));
                        builder.build()
                    },
                    // Read-only lens: writes are discarded.
                    |_, _| {},
                ));
                Flex::column()
                    .cross_axis_alignment(CrossAxisAlignment::Start)
                    .main_axis_alignment(MainAxisAlignment::Start)
                    .with_child(link)
                    .with_spacer(4.)
            })
            .lens(ModInfo::nexus.in_arc());

            let details = Flex::column()
                .cross_axis_alignment(CrossAxisAlignment::Start)
                .main_axis_alignment(MainAxisAlignment::Start)
                .with_child(name)
                .with_spacer(4.)
                .with_child(summary)
                .with_spacer(4.)
                .with_child(nexus_link)
                .with_child(version_line)
                .with_spacer(4.)
                .with_child(categories)
                .padding((4., 4.));

            // The image widget's content is supplied by ImageLensController.
            let image =
                Maybe::or_empty(|| Image::new(ImageBuf::empty()).controller(ImageLensController))
                    .lens(ModInfo::image.in_arc());

            Flex::column()
                .main_axis_alignment(MainAxisAlignment::Start)
                .must_fill_main_axis(true)
                .cross_axis_alignment(CrossAxisAlignment::Start)
                .with_child(image)
                .with_child(details)
        },
        Flex::column,
    )
    .lens(State::selected_mod)
}
|
|
||||||
|
|
||||||
fn build_mod_details() -> impl Widget<State> {
|
|
||||||
Flex::column()
|
|
||||||
.must_fill_main_axis(true)
|
|
||||||
.cross_axis_alignment(CrossAxisAlignment::Start)
|
|
||||||
.main_axis_alignment(MainAxisAlignment::SpaceBetween)
|
|
||||||
.with_flex_child(build_mod_details_info(), 1.0)
|
|
||||||
.with_child(build_mod_details_buttons().padding((4., 4., 4., 8.)))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_view_mods() -> impl Widget<State> {
|
|
||||||
Split::columns(build_mod_list(), build_mod_details())
|
|
||||||
.split_point(0.75)
|
|
||||||
.min_size(0.0, MOD_DETAILS_MIN_WIDTH)
|
|
||||||
.solid_bar(true)
|
|
||||||
.bar_size(2.0)
|
|
||||||
.draggable(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The "Settings" view: data directory, game directory, unsafe-I/O toggle,
/// and Nexus API key, laid out as labeled rows in a fixed-width column.
fn build_view_settings() -> impl Widget<State> {
    let data_dir_setting = Flex::row()
        .must_fill_main_axis(true)
        .main_axis_alignment(MainAxisAlignment::Start)
        .with_child(Label::new("Data Directory:"))
        .with_default_spacer()
        .with_flex_child(
            // PathBufFormatter validates/converts the text into a PathBuf.
            TextBox::new()
                .with_formatter(PathBufFormatter::new())
                .expand_width()
                .lens(State::data_dir),
            1.,
        )
        .expand_width();

    let game_dir_setting = Flex::row()
        .must_fill_main_axis(true)
        .main_axis_alignment(MainAxisAlignment::Start)
        .with_child(Label::new("Game Directory:"))
        .with_default_spacer()
        .with_flex_child(
            TextBox::new()
                .with_formatter(PathBufFormatter::new())
                .expand_width()
                .lens(State::game_dir),
            1.,
        )
        .expand_width();

    let nexus_apy_key_setting = Flex::row()
        .must_fill_main_axis(true)
        .main_axis_alignment(MainAxisAlignment::Start)
        .with_child(Label::new("Nexus API Key:"))
        .with_default_spacer()
        .with_flex_child(TextBox::new().expand_width().lens(State::nexus_api_key), 1.)
        .expand_width();

    // Checkbox label mirrors the current value; a tooltip warns about the
    // security implications of enabling unrestricted I/O for mods.
    let io_setting = Flex::row()
        .must_fill_main_axis(true)
        .main_axis_alignment(MainAxisAlignment::Start)
        .with_child(Label::new("Enable unsafe I/O:"))
        .with_default_spacer()
        .with_child(Checkbox::from_label(Label::dynamic(
            |enabled: &bool, _: &Env| {
                if *enabled {
                    "Enabled".into()
                } else {
                    "Disabled".into()
                }
            },
        )))
        .lens(State::is_io_enabled)
        .tooltip(|_: &State, _: &Env| {
            "Enabling this gives ANY mod full access to your files \
            and the ability to load arbitrary software libraries.\n\
            Only enable this if it is crucial for a mod's functionality, \
            and you are sure none of the ones you have installed are malicious."
        })
        .expand_width();

    let content = Flex::column()
        .must_fill_main_axis(true)
        .cross_axis_alignment(CrossAxisAlignment::Start)
        .with_child(data_dir_setting)
        .with_default_spacer()
        .with_child(game_dir_setting)
        .with_default_spacer()
        .with_child(io_setting)
        .with_default_spacer()
        .with_child(nexus_apy_key_setting);

    SizedBox::new(content)
        .width(800.)
        .expand_height()
        .padding(5.)
}
|
|
||||||
|
|
||||||
fn build_main() -> impl Widget<State> {
|
|
||||||
ViewSwitcher::new(
|
|
||||||
|state: &State, _| state.current_view,
|
|
||||||
|selector, _, _| match selector {
|
|
||||||
View::Mods => Box::new(build_view_mods()),
|
|
||||||
View::Settings => Box::new(build_view_settings()),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_log_view() -> impl Widget<State> {
|
|
||||||
let list = List::new(|| {
|
|
||||||
Label::raw()
|
|
||||||
.with_font(FontDescriptor::new(FontFamily::MONOSPACE))
|
|
||||||
.with_line_break_mode(LineBreaking::WordWrap)
|
|
||||||
})
|
|
||||||
.lens(State::log)
|
|
||||||
.padding(4.)
|
|
||||||
.scroll()
|
|
||||||
.vertical()
|
|
||||||
.controller(AutoScrollController);
|
|
||||||
|
|
||||||
let inner = Border::new(list)
|
|
||||||
.with_color(theme::COLOR_FG2)
|
|
||||||
.with_top_border(1.);
|
|
||||||
|
|
||||||
SizedBox::new(inner).expand_width().height(128.0)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Compose the root widget: top bar, switchable main view, and log panel.
/// On first mount, registers this window's handle with the application state
/// so background tasks can address it by `WINDOW_ID`.
fn build_window() -> impl Widget<State> {
    // TODO: Add borders between the sections
    Flex::column()
        .must_fill_main_axis(true)
        .with_child(build_top_bar())
        .with_flex_child(build_main(), 1.0)
        .with_child(build_log_view())
        .controller(DirtyStateController)
        .on_added(|_, ctx: &mut LifeCycleCtx, _, _| {
            ctx.submit_command(
                ACTION_SET_WINDOW_HANDLE.with(SingleUse::new((*WINDOW_ID, ctx.window().clone()))),
            );
        })
}
|
|
|
@ -1,92 +0,0 @@
|
||||||
use ansi_parser::{AnsiParser, AnsiSequence, Output};
|
|
||||||
use druid::text::{RichText, RichTextBuilder};
|
|
||||||
use druid::{Color, FontStyle, FontWeight};
|
|
||||||
|
|
||||||
use crate::ui::theme;
|
|
||||||
|
|
||||||
/// Accumulated SGR text attributes while walking an ANSI escape stream.
#[derive(Default, Debug)]
struct TextState {
    // Current foreground color; `None` means the default color.
    color: Option<Color>,
    // SGR 2 (faint). Tracked but never applied when styling text runs.
    dim: bool,
    bold: bool,
    underline: bool,
    strikethrough: bool,
    italic: bool,
}
|
|
||||||
|
|
||||||
pub fn ansi_to_rich_text(input: &str) -> RichText {
|
|
||||||
let mut builder = RichTextBuilder::new();
|
|
||||||
|
|
||||||
let mut state = TextState::default();
|
|
||||||
|
|
||||||
for token in input.ansi_parse() {
|
|
||||||
match token {
|
|
||||||
Output::TextBlock(text) => {
|
|
||||||
let mut attr = builder.push(text);
|
|
||||||
attr.underline(state.underline);
|
|
||||||
attr.strikethrough(state.strikethrough);
|
|
||||||
|
|
||||||
if state.bold {
|
|
||||||
attr.weight(FontWeight::BOLD);
|
|
||||||
}
|
|
||||||
|
|
||||||
if state.italic {
|
|
||||||
attr.style(FontStyle::Italic);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(color) = state.color {
|
|
||||||
attr.text_color(color);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Output::Escape(AnsiSequence::SetGraphicsMode(values)) => {
|
|
||||||
for v in values {
|
|
||||||
match v {
|
|
||||||
0 => {
|
|
||||||
state = Default::default();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
1 => state.bold = true,
|
|
||||||
2 => state.dim = true,
|
|
||||||
3 => state.italic = true,
|
|
||||||
4 => state.underline = true,
|
|
||||||
9 => state.strikethrough = true,
|
|
||||||
22 => {
|
|
||||||
state.bold = false;
|
|
||||||
state.dim = false;
|
|
||||||
}
|
|
||||||
23 => state.italic = false,
|
|
||||||
24 => state.underline = false,
|
|
||||||
29 => state.underline = false,
|
|
||||||
30..=40 | 90..=100 => {
|
|
||||||
let mut col = v - 30;
|
|
||||||
if col > 9 {
|
|
||||||
state.bold = true;
|
|
||||||
col -= 60;
|
|
||||||
}
|
|
||||||
|
|
||||||
state.color = match col {
|
|
||||||
// This escape code is usually called 'black', but is actually used
|
|
||||||
// as "foreground color", in regards to light themes.
|
|
||||||
0 => Some(theme::COLOR_FG),
|
|
||||||
1 => Some(theme::COLOR_RED_LIGHT),
|
|
||||||
2 => Some(theme::COLOR_GREEN_LIGHT),
|
|
||||||
3 => Some(theme::COLOR_YELLOW_LIGHT),
|
|
||||||
4 => Some(theme::COLOR_BLUE_LIGHT),
|
|
||||||
5 => Some(theme::COLOR_PURPLE_LIGHT),
|
|
||||||
6 => Some(theme::COLOR_AQUA_LIGHT),
|
|
||||||
// Similarly, 'white' is the background color
|
|
||||||
7 => Some(theme::COLOR_BG),
|
|
||||||
9 => None,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Output::Escape(_) => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
builder.build()
|
|
||||||
}
|
|
|
@ -1,179 +0,0 @@
|
||||||
use std::io::ErrorKind;
|
|
||||||
use std::path::Path;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use color_eyre::{eyre::Context, Result};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use tokio::fs;
|
|
||||||
|
|
||||||
use crate::state::{ActionState, ModInfo};
|
|
||||||
|
|
||||||
/// Borrowed view of a mod's load-order entry, used when serializing the
/// config back to disk without cloning the underlying strings.
#[derive(Clone, Debug, Serialize)]
pub(crate) struct LoadOrderEntrySerialize<'a> {
    pub id: &'a String,
    pub enabled: bool,
}
|
|
||||||
|
|
||||||
impl<'a> From<&'a ModInfo> for LoadOrderEntrySerialize<'a> {
    // Borrow the id and copy the enabled flag from a `ModInfo`.
    fn from(info: &'a ModInfo) -> Self {
        Self {
            id: &info.id,
            enabled: info.enabled,
        }
    }
}
|
|
||||||
|
|
||||||
/// Borrowed, serialization-only view of the application settings that get
/// persisted to the config file.
#[derive(Debug, Serialize)]
pub(crate) struct ConfigSerialize<'a> {
    game_dir: &'a Path,
    data_dir: &'a Path,
    nexus_api_key: &'a String,
    // Mods in their current list order.
    mod_order: Vec<LoadOrderEntrySerialize<'a>>,
    unsafe_io: bool,
}
|
|
||||||
|
|
||||||
impl<'a> From<&'a ActionState> for ConfigSerialize<'a> {
    // Build the borrowed serialization view from the live application state.
    fn from(state: &'a ActionState) -> Self {
        Self {
            game_dir: &state.game_dir,
            data_dir: &state.data_dir,
            nexus_api_key: &state.nexus_api_key,
            unsafe_io: state.is_io_enabled,
            // Project each Arc<ModInfo> into a borrowed load-order entry.
            mod_order: state
                .mods
                .iter()
                .map(Arc::as_ref)
                .map(LoadOrderEntrySerialize::from)
                .collect(),
        }
    }
}
|
|
||||||
|
|
||||||
/// Owned load-order entry as stored in the config file.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct LoadOrderEntry {
    pub id: String,
    pub enabled: bool,
}
|
|
||||||
|
|
||||||
/// Application configuration, read from and written to the `dtmm.cfg` file.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct Config {
    // Where this config was loaded from; not persisted into the file itself.
    #[serde(skip)]
    pub path: PathBuf,
    #[serde(default = "get_default_data_dir")]
    pub data_dir: PathBuf,
    pub game_dir: Option<PathBuf>,
    // Whether mods get unrestricted file I/O; defaults to `false`.
    #[serde(default)]
    pub unsafe_io: bool,
    pub nexus_api_key: Option<String>,
    #[serde(default)]
    pub mod_order: Vec<LoadOrderEntry>,
}
|
|
||||||
|
|
||||||
/// Determine the default config-file location on non-Windows systems:
/// `$XDG_CONFIG_HOME/dtmm/dtmm.cfg`, falling back to `$HOME/.config`, with
/// `$HOME` reconstructed from `$USER` as a last resort.
///
/// Panics if neither `HOME` nor `USER` is set.
#[cfg(not(target_os = "windows"))]
pub fn get_default_config_path() -> PathBuf {
    // Fixed: the XDG Base Directory spec defines `XDG_CONFIG_HOME`;
    // `XDG_CONFIG_DIR` (used previously) is not a spec variable.
    let config_dir = std::env::var("XDG_CONFIG_HOME").unwrap_or_else(|_| {
        let home = std::env::var("HOME").unwrap_or_else(|_| {
            let user = std::env::var("USER").expect("user env variable not set");
            format!("/home/{user}")
        });
        format!("{home}/.config")
    });

    PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg")
}
|
|
||||||
|
|
||||||
/// Determine the default config-file location on Windows:
/// `%APPDATA%\dtmm\dtmm.cfg`. Panics if `APPDATA` is not set.
#[cfg(target_os = "windows")]
pub fn get_default_config_path() -> PathBuf {
    let config_dir = std::env::var("APPDATA").expect("appdata env var not set");
    PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg")
}
|
|
||||||
|
|
||||||
/// Determine the default data directory on non-Windows systems:
/// `$XDG_DATA_HOME/dtmm`, falling back to `$HOME/.local/share`, with `$HOME`
/// reconstructed from `$USER` as a last resort.
///
/// Panics if neither `HOME` nor `USER` is set.
#[cfg(not(target_os = "windows"))]
pub fn get_default_data_dir() -> PathBuf {
    // Fixed: the XDG Base Directory spec defines `XDG_DATA_HOME`;
    // `XDG_DATA_DIR` (used previously) is not a spec variable.
    let data_dir = std::env::var("XDG_DATA_HOME").unwrap_or_else(|_| {
        let home = std::env::var("HOME").unwrap_or_else(|_| {
            let user = std::env::var("USER").expect("user env variable not set");
            format!("/home/{user}")
        });
        format!("{home}/.local/share")
    });

    PathBuf::from(data_dir).join("dtmm")
}
|
|
||||||
|
|
||||||
/// Determine the default data directory on Windows:
/// `%LOCALAPPDATA%\dtmm`. Panics if `LOCALAPPDATA` is not set.
#[cfg(target_os = "windows")]
pub fn get_default_data_dir() -> PathBuf {
    let data_dir = std::env::var("LOCALAPPDATA").expect("appdata env var not set");
    PathBuf::from(data_dir).join("dtmm")
}
|
|
||||||
|
|
||||||
/// Read and parse the SJSON config file at `path`.
///
/// If the file does not exist AND `is_default` is true (i.e. the user did not
/// explicitly name a config file), a default config is created at the default
/// location, written to disk, and returned. A missing explicitly-given file,
/// invalid UTF-8, invalid SJSON, or any other I/O failure is an error.
#[tracing::instrument]
pub(crate) async fn read_config<P>(path: P, is_default: bool) -> Result<Config>
where
    P: Into<PathBuf> + std::fmt::Debug,
{
    let path = path.into();
    let default_path = get_default_config_path();

    match fs::read(&path).await {
        Ok(data) => {
            let data = String::from_utf8(data).wrap_err_with(|| {
                format!("Config file '{}' contains invalid UTF-8", path.display())
            })?;
            let mut cfg: Config = serde_sjson::from_str(&data)
                .wrap_err_with(|| format!("Invalid config file {}", path.display()))?;

            // `path` is #[serde(skip)], so record where the file actually came from.
            cfg.path = path;

            tracing::debug!("Read config file '{}': {:?}", cfg.path.display(), cfg);

            Ok(cfg)
        }
        Err(err) if err.kind() == ErrorKind::NotFound => {
            // A missing file is only recoverable when the default path was used.
            if !is_default {
                return Err(err)
                    .wrap_err_with(|| format!("Failed to read config file {}", path.display()))?;
            }

            tracing::debug!(
                "Config file not found at '{}', creating default.",
                path.display()
            );

            {
                let parent = default_path
                    .parent()
                    .expect("a file path always has a parent directory");
                fs::create_dir_all(parent).await.wrap_err_with(|| {
                    format!("Failed to create directories {}", parent.display())
                })?;
            }

            let config = Config {
                path: default_path,
                data_dir: get_default_data_dir(),
                game_dir: None,
                nexus_api_key: None,
                mod_order: Vec::new(),
                unsafe_io: false,
            };

            // Persist the freshly created default so the next run finds it.
            {
                let data = serde_sjson::to_string(&config)
                    .wrap_err("Failed to serialize default config value")?;
                fs::write(&config.path, data).await.wrap_err_with(|| {
                    format!(
                        "Failed to write default config to {}",
                        config.path.display()
                    )
                })?;
            }

            Ok(config)
        }
        Err(err) => {
            Err(err).wrap_err_with(|| format!("Failed to read config file {}", path.display()))
        }
    }
}
|
|
|
@ -1,95 +0,0 @@
|
||||||
use clap::ValueEnum;
|
|
||||||
use tokio::sync::mpsc::UnboundedSender;
|
|
||||||
use tracing_error::ErrorLayer;
|
|
||||||
use tracing_subscriber::filter::FilterFn;
|
|
||||||
use tracing_subscriber::fmt;
|
|
||||||
use tracing_subscriber::fmt::format::debug_fn;
|
|
||||||
use tracing_subscriber::layer::SubscriberExt;
|
|
||||||
use tracing_subscriber::prelude::*;
|
|
||||||
use tracing_subscriber::EnvFilter;
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, ValueEnum)]
|
|
||||||
pub enum LogLevel {
|
|
||||||
Trace,
|
|
||||||
Debug,
|
|
||||||
Info,
|
|
||||||
Warn,
|
|
||||||
Error,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<LogLevel> for EnvFilter {
|
|
||||||
fn from(level: LogLevel) -> Self {
|
|
||||||
let filter = match level {
|
|
||||||
LogLevel::Trace => "error,dtmm=trace,sdk=trace",
|
|
||||||
LogLevel::Debug => "error,dtmm=debug,sdk=debug",
|
|
||||||
LogLevel::Info => "error,dtmm=info",
|
|
||||||
LogLevel::Warn => "error,dtmm=warn",
|
|
||||||
LogLevel::Error => "error",
|
|
||||||
};
|
|
||||||
EnvFilter::new(filter)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct ChannelWriter {
|
|
||||||
tx: UnboundedSender<Vec<u8>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ChannelWriter {
|
|
||||||
pub fn new(tx: UnboundedSender<Vec<u8>>) -> Self {
|
|
||||||
Self { tx }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::io::Write for ChannelWriter {
|
|
||||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
|
||||||
let tx = self.tx.clone();
|
|
||||||
// The `send` errors when the receiving end has closed.
|
|
||||||
// But there's not much we can do at that point, so we just ignore it.
|
|
||||||
let _ = tx.send(buf.to_vec());
|
|
||||||
|
|
||||||
Ok(buf.len())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn flush(&mut self) -> std::io::Result<()> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_tracing_subscriber(level: Option<LogLevel>, tx: Option<UnboundedSender<Vec<u8>>>) {
|
|
||||||
let mut env_layer = if let Some(level) = level {
|
|
||||||
EnvFilter::from(level)
|
|
||||||
} else if cfg!(debug_assertions) {
|
|
||||||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"))
|
|
||||||
} else {
|
|
||||||
EnvFilter::new("error,dtmm=info")
|
|
||||||
};
|
|
||||||
|
|
||||||
// The internal implementation of Druid's GTK file dialog turns
|
|
||||||
// cancelling the dialog into an error. The, also internal, wrapper
|
|
||||||
// then logs and swallows the error.
|
|
||||||
// Therefore, as a consumer of the library, we don't have any way
|
|
||||||
// to customize this behavior, and instead have to filter out the
|
|
||||||
// tracing event.
|
|
||||||
env_layer = env_layer.add_directive(
|
|
||||||
"druid_shell::backend::gtk::window=off"
|
|
||||||
.parse()
|
|
||||||
.expect("Invalid env filter directive"),
|
|
||||||
);
|
|
||||||
|
|
||||||
let stdout_layer = fmt::layer().pretty();
|
|
||||||
|
|
||||||
let channel_layer = tx.map(|tx| {
|
|
||||||
fmt::layer()
|
|
||||||
.event_format(dtmt_shared::Formatter)
|
|
||||||
.fmt_fields(debug_fn(dtmt_shared::format_fields))
|
|
||||||
.with_writer(move || ChannelWriter::new(tx.clone()))
|
|
||||||
.with_filter(FilterFn::new(dtmt_shared::filter_fields))
|
|
||||||
});
|
|
||||||
|
|
||||||
tracing_subscriber::registry()
|
|
||||||
.with(env_layer)
|
|
||||||
.with(channel_layer)
|
|
||||||
.with(stdout_layer)
|
|
||||||
.with(ErrorLayer::new(fmt::format::Pretty::default()))
|
|
||||||
.init();
|
|
||||||
}
|
|
|
@ -1,43 +1,27 @@
|
||||||
[package]
|
[package]
|
||||||
name = "dtmt"
|
name = "dtmt"
|
||||||
version = "0.3.0"
|
version = "0.2.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
async-recursion = { workspace = true }
|
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] }
|
||||||
clap = { workspace = true }
|
color-eyre = "0.6.2"
|
||||||
cli-table = { workspace = true }
|
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||||
color-eyre = { workspace = true }
|
sdk = { path = "../../lib/sdk", version = "0.2.0" }
|
||||||
confy = { workspace = true }
|
futures = "0.3.25"
|
||||||
csv-async = { workspace = true }
|
futures-util = "0.3.24"
|
||||||
dtmt-shared = { workspace = true }
|
glob = "0.3.0"
|
||||||
futures = { workspace = true }
|
libloading = "0.7.4"
|
||||||
futures-util = { workspace = true }
|
nanorand = "0.7.0"
|
||||||
glob = { workspace = true }
|
pin-project-lite = "0.2.9"
|
||||||
luajit2-sys = { workspace = true }
|
serde = { version = "1.0.147", features = ["derive"] }
|
||||||
minijinja = { workspace = true }
|
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||||
nanorand = { workspace = true }
|
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||||
notify = { workspace = true }
|
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
|
||||||
oodle = { workspace = true }
|
tracing = { version = "0.1.37", features = ["async-await"] }
|
||||||
path-clean = { workspace = true }
|
tracing-error = "0.2.0"
|
||||||
path-slash = { workspace = true }
|
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||||
pin-project-lite = { workspace = true }
|
confy = "0.5.1"
|
||||||
promptly = { workspace = true }
|
|
||||||
sdk = { workspace = true }
|
|
||||||
serde = { workspace = true }
|
|
||||||
serde_sjson = { workspace = true }
|
|
||||||
tokio = { workspace = true }
|
|
||||||
tokio-stream = { workspace = true }
|
|
||||||
tracing = { workspace = true }
|
|
||||||
tracing-error = { workspace = true }
|
|
||||||
tracing-subscriber = { workspace = true }
|
|
||||||
zip = { workspace = true }
|
|
||||||
|
|
||||||
# Cannot be a workspace dependencies when it's optional
|
|
||||||
shlex = { version = "1.2.0", optional = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tempfile = "3.3.0"
|
tempfile = "3.3.0"
|
||||||
|
|
||||||
[features]
|
|
||||||
shlex-bench = ["dep:shlex"]
|
|
||||||
|
|
|
@ -1,32 +0,0 @@
|
||||||
= Darktide Mod Tools (DTMT)
|
|
||||||
:idprefix:
|
|
||||||
:idseparator:
|
|
||||||
:toc: macro
|
|
||||||
:toclevels: 1
|
|
||||||
:!toc-title:
|
|
||||||
:caution-caption: :fire:
|
|
||||||
:important-caption: :exclamtion:
|
|
||||||
:note-caption: :paperclip:
|
|
||||||
:tip-caption: :bulb:
|
|
||||||
:warning-caption: :warning:
|
|
||||||
|
|
||||||
A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_.
|
|
||||||
|
|
||||||
== Quickstart
|
|
||||||
|
|
||||||
1. Head to the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] and download the `dtmt` binary for your platform.
|
|
||||||
2. Place the binary and `dictionary.csv` next to each other.
|
|
||||||
3. Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`.
|
|
||||||
4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference].
|
|
||||||
|
|
||||||
== Runtime dependencies
|
|
||||||
|
|
||||||
The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd.
|
|
||||||
|
|
||||||
A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environmental variable.
|
|
||||||
|
|
||||||
== Building
|
|
||||||
|
|
||||||
1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system.
|
|
||||||
2. Download or clone this source code. Make sure to include the submodules in `lib/`.
|
|
||||||
3. Run `cargo build`.
|
|
|
@ -1,414 +1,24 @@
|
||||||
use std::collections::{HashMap, HashSet};
|
use std::path::PathBuf;
|
||||||
use std::ops::Deref;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||||
use color_eyre::eyre::{self, Context, Result};
|
use color_eyre::eyre::Result;
|
||||||
use color_eyre::{Help, Report};
|
use tokio::sync::RwLock;
|
||||||
use dtmt_shared::ModConfig;
|
|
||||||
use futures::future::try_join_all;
|
|
||||||
use futures::StreamExt;
|
|
||||||
use path_slash::PathExt;
|
|
||||||
use sdk::filetype::package::Package;
|
|
||||||
use sdk::murmur::IdString64;
|
|
||||||
use sdk::{Bundle, BundleFile};
|
|
||||||
use tokio::fs::{self, File};
|
|
||||||
use tokio::io::AsyncReadExt;
|
|
||||||
use tokio::sync::Mutex;
|
|
||||||
|
|
||||||
const PROJECT_CONFIG_NAME: &str = "dtmt.cfg";
|
pub(crate) fn _command_definition() -> Command {
|
||||||
|
Command::new("build").about("Build a project").arg(
|
||||||
type FileIndexMap = HashMap<String, HashSet<String>>;
|
Arg::new("directory")
|
||||||
|
.required(false)
|
||||||
pub(crate) fn command_definition() -> Command {
|
.default_value(".")
|
||||||
Command::new("build")
|
.value_parser(value_parser!(PathBuf))
|
||||||
.about("Build a project")
|
.help(
|
||||||
.arg(
|
"The path to the project to build. \
|
||||||
Arg::new("directory")
|
If omitted, the current working directory is used.",
|
||||||
.required(false)
|
),
|
||||||
.value_parser(value_parser!(PathBuf))
|
)
|
||||||
.help(
|
|
||||||
"The path to the project to build. \
|
|
||||||
If omitted, dtmt will search from the current working directory upward.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("out")
|
|
||||||
.long("out")
|
|
||||||
.short('o')
|
|
||||||
.default_value("out")
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help("The directory to write output files to."),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("deploy")
|
|
||||||
.long("deploy")
|
|
||||||
.short('d')
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"If the path to the game (without the trailing '/bundle') is specified, \
|
|
||||||
deploy the newly built bundles. \
|
|
||||||
This will not adjust the bundle database or package files, so if files are \
|
|
||||||
added or removed, you will have to import into DTMM and re-deploy there.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
|
|
||||||
let (path, mut file) = if let Some(path) = dir {
|
|
||||||
let file = File::open(&path.join(PROJECT_CONFIG_NAME))
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to open file: {}", path.display()))
|
|
||||||
.with_suggestion(|| {
|
|
||||||
format!(
|
|
||||||
"Make sure the file at '{}' exists and is readable",
|
|
||||||
path.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
(path, file)
|
|
||||||
} else {
|
|
||||||
let mut dir = std::env::current_dir()?;
|
|
||||||
loop {
|
|
||||||
let path = dir.join(PROJECT_CONFIG_NAME);
|
|
||||||
match File::open(&path).await {
|
|
||||||
Ok(file) => break (dir, file),
|
|
||||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
|
||||||
if let Some(parent) = dir.parent() {
|
|
||||||
// TODO: Re-write with recursion to avoid allocating the `PathBuf`.
|
|
||||||
dir = parent.to_path_buf();
|
|
||||||
} else {
|
|
||||||
eyre::bail!("Could not find project root");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
let err = Report::new(err)
|
|
||||||
.wrap_err(format!("Failed to open file: {}", path.display()));
|
|
||||||
return Err(err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut buf = String::new();
|
|
||||||
file.read_to_string(&mut buf)
|
|
||||||
.await
|
|
||||||
.wrap_err("Invalid UTF-8")?;
|
|
||||||
|
|
||||||
let mut cfg: ModConfig =
|
|
||||||
serde_sjson::from_str(&buf).wrap_err("Failed to deserialize mod config")?;
|
|
||||||
cfg.dir = path;
|
|
||||||
Ok(cfg)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over the paths in the given `Package` and
|
|
||||||
/// compile each file by its file type.
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
|
|
||||||
let root = Arc::new(&cfg.dir);
|
|
||||||
let name_overrides = &cfg.name_overrides;
|
|
||||||
|
|
||||||
let tasks = pkg
|
|
||||||
.iter()
|
|
||||||
.flat_map(|(file_type, names)| {
|
|
||||||
names.iter().map(|name| {
|
|
||||||
(
|
|
||||||
*file_type,
|
|
||||||
name,
|
|
||||||
// Cloning the `Arc` here solves the issue that in the next `.map`, I need to
|
|
||||||
// `move` the closure parameters, but can't `move` `root` before it was cloned.
|
|
||||||
root.clone(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.map(|(file_type, name, root)| async move {
|
|
||||||
let path = PathBuf::from(name);
|
|
||||||
let sjson = fs::read_to_string(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
|
||||||
|
|
||||||
let name = path.with_extension("").to_slash_lossy().to_string();
|
|
||||||
let name = if let Some(new_name) = name_overrides.get(&name) {
|
|
||||||
let new_name = match u64::from_str_radix(new_name, 16) {
|
|
||||||
Ok(hash) => IdString64::from(hash),
|
|
||||||
Err(_) => IdString64::from(new_name.clone()),
|
|
||||||
};
|
|
||||||
tracing::info!("Overriding '{}' -> '{}'", name, new_name.display());
|
|
||||||
new_name
|
|
||||||
} else {
|
|
||||||
IdString64::from(name.clone())
|
|
||||||
};
|
|
||||||
BundleFile::from_sjson(name, file_type, sjson, root.as_ref()).await
|
|
||||||
});
|
|
||||||
|
|
||||||
let results = futures::stream::iter(tasks)
|
|
||||||
.buffer_unordered(10)
|
|
||||||
.collect::<Vec<Result<BundleFile>>>()
|
|
||||||
.await;
|
|
||||||
|
|
||||||
results.into_iter().collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read a `.package` file, collect the referenced files
|
|
||||||
/// and compile all of them into a bundle.
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn build_package(
|
|
||||||
cfg: &ModConfig,
|
|
||||||
package: impl AsRef<Path> + std::fmt::Debug,
|
|
||||||
) -> Result<Bundle> {
|
|
||||||
let root = &cfg.dir;
|
|
||||||
let package = package.as_ref();
|
|
||||||
|
|
||||||
let mut path = root.join(package);
|
|
||||||
path.set_extension("package");
|
|
||||||
let sjson = fs::read_to_string(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read file {}", path.display()))?;
|
|
||||||
|
|
||||||
let pkg_name = package.to_slash_lossy().to_string();
|
|
||||||
let pkg = Package::from_sjson(sjson, pkg_name.clone(), root)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Invalid package file {}", &pkg_name))?;
|
|
||||||
|
|
||||||
let files = compile_package_files(&pkg, cfg).await?;
|
|
||||||
let mut bundle = Bundle::new(pkg_name);
|
|
||||||
for file in files {
|
|
||||||
bundle.add_file(file);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(bundle)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Cleans the path of internal parent (`../`) or self (`./`) components,
|
|
||||||
/// and ensures that it is relative.
|
|
||||||
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
|
|
||||||
let path = path.as_ref();
|
|
||||||
|
|
||||||
if path.is_absolute() || path.has_root() {
|
|
||||||
let err = eyre::eyre!("Path is absolute: {}", path.display());
|
|
||||||
return Err(err).with_suggestion(|| "Specify a relative file path.".to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
let path = path_clean::clean(path);
|
|
||||||
|
|
||||||
if path.starts_with("..") {
|
|
||||||
eyre::bail!("path starts with a parent component: {}", path.display());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
pub(crate) async fn read_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
|
|
||||||
let mut cfg = find_project_config(dir).await?;
|
|
||||||
|
|
||||||
if let Some(path) = cfg.image {
|
|
||||||
let path = normalize_file_path(path)
|
|
||||||
.wrap_err("Invalid config field 'image'")
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Specify a file path relative to and child path of the \
|
|
||||||
directory where 'dtmt.cfg' is."
|
|
||||||
.to_string()
|
|
||||||
})?;
|
|
||||||
cfg.image = Some(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
cfg.resources.init = normalize_file_path(cfg.resources.init)
|
|
||||||
.wrap_err("Invalid config field 'resources.init'")
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Specify a file path relative to and child path of the \
|
|
||||||
directory where 'dtmt.cfg' is."
|
|
||||||
.to_string()
|
|
||||||
})
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Use 'dtmt new' in a separate directory to generate \
|
|
||||||
a valid mod template."
|
|
||||||
.to_string()
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if let Some(path) = cfg.resources.data {
|
|
||||||
let path = normalize_file_path(path)
|
|
||||||
.wrap_err("Invalid config field 'resources.data'")
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Specify a file path relative to and child path of the \
|
|
||||||
directory where 'dtmt.cfg' is."
|
|
||||||
.to_string()
|
|
||||||
})
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Use 'dtmt new' in a separate directory to generate \
|
|
||||||
a valid mod template."
|
|
||||||
.to_string()
|
|
||||||
})?;
|
|
||||||
cfg.resources.data = Some(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(path) = cfg.resources.localization {
|
|
||||||
let path = normalize_file_path(path)
|
|
||||||
.wrap_err("Invalid config field 'resources.localization'")
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Specify a file path relative to and child path of the \
|
|
||||||
directory where 'dtmt.cfg' is."
|
|
||||||
.to_string()
|
|
||||||
})
|
|
||||||
.with_suggestion(|| {
|
|
||||||
"Use 'dtmt new' in a separate directory to generate \
|
|
||||||
a valid mod template."
|
|
||||||
.to_string()
|
|
||||||
})?;
|
|
||||||
cfg.resources.localization = Some(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(cfg)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
pub(crate) async fn build<P>(
|
|
||||||
cfg: &ModConfig,
|
|
||||||
out_path: impl AsRef<Path> + std::fmt::Debug,
|
|
||||||
game_dir: Arc<Option<P>>,
|
|
||||||
) -> Result<()>
|
|
||||||
where
|
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
|
||||||
let out_path = out_path.as_ref();
|
|
||||||
|
|
||||||
fs::create_dir_all(out_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to create output directory '{}'", out_path.display()))?;
|
|
||||||
|
|
||||||
let file_map = Arc::new(Mutex::new(FileIndexMap::new()));
|
|
||||||
|
|
||||||
let tasks = cfg
|
|
||||||
.packages
|
|
||||||
.iter()
|
|
||||||
// The closure below would capture the `Arc`s before they could be cloned,
|
|
||||||
// so instead we need to clone them in a non-move block and inject them
|
|
||||||
// via parameters.
|
|
||||||
.map(|path| (path, cfg.clone(), file_map.clone(), game_dir.clone()))
|
|
||||||
.map(|(path, cfg, file_map, game_dir)| async move {
|
|
||||||
if path.extension().is_some() {
|
|
||||||
eyre::bail!(
|
|
||||||
"Package name must be specified without file extension: {}",
|
|
||||||
path.display()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let bundle = build_package(&cfg, path).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to build package '{}' at '{}'",
|
|
||||||
path.display(),
|
|
||||||
cfg.dir.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let bundle_name = match bundle.name() {
|
|
||||||
IdString64::Hash(_) => {
|
|
||||||
eyre::bail!("bundle name must be known as string. got hash")
|
|
||||||
}
|
|
||||||
IdString64::String(s) => s.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
{
|
|
||||||
let mut file_map = file_map.lock().await;
|
|
||||||
let map_entry = file_map.entry(bundle_name).or_default();
|
|
||||||
|
|
||||||
for file in bundle.files() {
|
|
||||||
map_entry.insert(file.name(false, None));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let name = bundle.name().to_murmur64().to_string().to_ascii_lowercase();
|
|
||||||
let path = out_path.join(&name);
|
|
||||||
let data = bundle.to_binary()?;
|
|
||||||
|
|
||||||
tracing::trace!(
|
|
||||||
"Writing bundle {} to '{}'",
|
|
||||||
bundle.name().display(),
|
|
||||||
path.display()
|
|
||||||
);
|
|
||||||
fs::write(&path, &data)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
|
|
||||||
|
|
||||||
if let Some(game_dir) = game_dir.as_ref() {
|
|
||||||
let path = game_dir.as_ref().join(&name);
|
|
||||||
|
|
||||||
tracing::trace!(
|
|
||||||
"Deploying bundle {} to '{}'",
|
|
||||||
bundle.name().display(),
|
|
||||||
path.display()
|
|
||||||
);
|
|
||||||
fs::write(&path, &data)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to write bundle to '{}'", path.display()))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
try_join_all(tasks)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to build mod bundles")?;
|
|
||||||
|
|
||||||
{
|
|
||||||
let path = out_path.join("files.sjson");
|
|
||||||
tracing::trace!(path = %path.display(), "Writing file index");
|
|
||||||
let file_map = file_map.lock().await;
|
|
||||||
let data = serde_sjson::to_string(file_map.deref())?;
|
|
||||||
fs::write(&path, data)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to write file index to '{}'", path.display()))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(img_path) = &cfg.image {
|
|
||||||
let path = cfg.dir.join(img_path);
|
|
||||||
let dest = out_path.join(img_path);
|
|
||||||
|
|
||||||
tracing::trace!(src = %path.display(), dest = %dest.display(), "Copying image file");
|
|
||||||
|
|
||||||
if let Some(parent) = dest.parent() {
|
|
||||||
fs::create_dir_all(&parent)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to create directory '{}'", parent.display()))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
fs::copy(&path, &dest).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to copy image from '{}' to '{}'",
|
|
||||||
path.display(),
|
|
||||||
dest.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
|
|
||||||
tracing::info!("Compiled bundles written to '{}'", out_path.display());
|
|
||||||
|
|
||||||
if let Some(game_dir) = game_dir.as_ref() {
|
|
||||||
tracing::info!("Deployed bundles to '{}'", game_dir.as_ref().display());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(_ctx: Arc<RwLock<sdk::Context>>, _matches: &ArgMatches) -> Result<()> {
|
||||||
let cfg = read_project_config(matches.get_one::<PathBuf>("directory").cloned()).await?;
|
unimplemented!()
|
||||||
|
|
||||||
let game_dir = matches
|
|
||||||
.get_one::<PathBuf>("deploy")
|
|
||||||
.map(|p| p.join("bundle"));
|
|
||||||
|
|
||||||
let out_path = matches
|
|
||||||
.get_one::<PathBuf>("out")
|
|
||||||
.expect("parameter should have default value");
|
|
||||||
|
|
||||||
tracing::debug!(?cfg, ?game_dir, ?out_path);
|
|
||||||
|
|
||||||
let game_dir = Arc::new(game_dir);
|
|
||||||
|
|
||||||
build(&cfg, out_path, game_dir).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,174 +0,0 @@
|
||||||
use std::{io::Cursor, path::PathBuf};
|
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
|
||||||
use color_eyre::{eyre::Context as _, Result};
|
|
||||||
use sdk::murmur::{HashGroup, IdString64, Murmur64};
|
|
||||||
use sdk::{BundleDatabase, FromBinary as _};
|
|
||||||
use tokio::fs;
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
|
||||||
Command::new("db")
|
|
||||||
.about("Various operations regarding `bundle_database.data`.")
|
|
||||||
.subcommand_required(true)
|
|
||||||
.subcommand(
|
|
||||||
Command::new("list-files")
|
|
||||||
.about("List bundle contents")
|
|
||||||
.arg(
|
|
||||||
Arg::new("database")
|
|
||||||
.required(true)
|
|
||||||
.help("Path to the bundle database")
|
|
||||||
.value_parser(value_parser!(PathBuf)),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("bundle")
|
|
||||||
.help("The bundle name. If omitted, all bundles will be listed.")
|
|
||||||
.required(false),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.subcommand(
|
|
||||||
Command::new("list-bundles").about("List bundles").arg(
|
|
||||||
Arg::new("database")
|
|
||||||
.required(true)
|
|
||||||
.help("Path to the bundle database")
|
|
||||||
.value_parser(value_parser!(PathBuf)),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.subcommand(
|
|
||||||
Command::new("find-file")
|
|
||||||
.about("Find the bundle a file belongs to")
|
|
||||||
.arg(
|
|
||||||
Arg::new("database")
|
|
||||||
.required(true)
|
|
||||||
.help("Path to the bundle database")
|
|
||||||
.value_parser(value_parser!(PathBuf)),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("file-name")
|
|
||||||
.required(true)
|
|
||||||
.help("Name of the file. May be a hash in hex representation or a string"),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|
||||||
let Some((op, sub_matches)) = matches.subcommand() else {
|
|
||||||
unreachable!("clap is configured to require a subcommand");
|
|
||||||
};
|
|
||||||
|
|
||||||
let database = {
|
|
||||||
let path = sub_matches
|
|
||||||
.get_one::<PathBuf>("database")
|
|
||||||
.expect("argument is required");
|
|
||||||
|
|
||||||
let binary = fs::read(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
|
||||||
|
|
||||||
let mut r = Cursor::new(binary);
|
|
||||||
|
|
||||||
BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?
|
|
||||||
};
|
|
||||||
|
|
||||||
match op {
|
|
||||||
"list-files" => {
|
|
||||||
let index = database.files();
|
|
||||||
|
|
||||||
if let Some(bundle) = sub_matches.get_one::<String>("bundle") {
|
|
||||||
let hash = u64::from_str_radix(bundle, 16)
|
|
||||||
.map(Murmur64::from)
|
|
||||||
.wrap_err("Invalid hex sequence")?;
|
|
||||||
|
|
||||||
if let Some(files) = index.get(&hash) {
|
|
||||||
for file in files {
|
|
||||||
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
|
|
||||||
let extension = file.extension.ext_name();
|
|
||||||
println!("{}.{}", name.display(), extension);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
tracing::info!("Bundle {} not found in the database", bundle);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for (bundle_hash, files) in index.iter() {
|
|
||||||
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
|
|
||||||
|
|
||||||
match bundle_name {
|
|
||||||
IdString64::String(name) => {
|
|
||||||
println!("{:016x} {}", bundle_hash, name);
|
|
||||||
}
|
|
||||||
IdString64::Hash(hash) => {
|
|
||||||
println!("{:016x}", hash);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for file in files {
|
|
||||||
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
|
|
||||||
let extension = file.extension.ext_name();
|
|
||||||
|
|
||||||
match name {
|
|
||||||
IdString64::String(name) => {
|
|
||||||
println!("\t{:016x}.{:<12} {}", file.name, extension, name);
|
|
||||||
}
|
|
||||||
IdString64::Hash(hash) => {
|
|
||||||
println!("\t{:016x}.{}", hash, extension);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
"list-bundles" => {
|
|
||||||
for bundle_hash in database.bundles().keys() {
|
|
||||||
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
|
|
||||||
|
|
||||||
match bundle_name {
|
|
||||||
IdString64::String(name) => {
|
|
||||||
println!("{:016x} {}", bundle_hash, name);
|
|
||||||
}
|
|
||||||
IdString64::Hash(hash) => {
|
|
||||||
println!("{:016x}", hash);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
"find-file" => {
|
|
||||||
let name = sub_matches
|
|
||||||
.get_one::<String>("file-name")
|
|
||||||
.expect("required argument");
|
|
||||||
let name = match u64::from_str_radix(name, 16).map(Murmur64::from) {
|
|
||||||
Ok(hash) => hash,
|
|
||||||
Err(_) => Murmur64::hash(name),
|
|
||||||
};
|
|
||||||
|
|
||||||
let bundles = database.files().iter().filter_map(|(bundle_hash, files)| {
|
|
||||||
if files.iter().any(|file| file.name == name) {
|
|
||||||
Some(bundle_hash)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut found = false;
|
|
||||||
|
|
||||||
for bundle in bundles {
|
|
||||||
found = true;
|
|
||||||
println!("{:016x}", bundle);
|
|
||||||
}
|
|
||||||
|
|
||||||
if !found {
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
_ => unreachable!(
|
|
||||||
"clap is configured to require a subcommand, and they're all handled above"
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,10 +1,13 @@
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||||
use color_eyre::eyre::Result;
|
use color_eyre::eyre::Result;
|
||||||
|
|
||||||
use sdk::decompress;
|
use sdk::decompress;
|
||||||
use tokio::fs;
|
use tokio::fs::{self, File};
|
||||||
|
use tokio::io::BufReader;
|
||||||
|
use tokio::sync::RwLock;
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
pub(crate) fn command_definition() -> Command {
|
||||||
Command::new("decompress")
|
Command::new("decompress")
|
||||||
|
@ -34,20 +37,24 @@ pub(crate) fn command_definition() -> Command {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip(ctx))]
|
#[tracing::instrument(skip(ctx))]
|
||||||
async fn decompress_bundle<P1, P2>(ctx: &sdk::Context, bundle: P1, destination: P2) -> Result<()>
|
async fn decompress_bundle<P1, P2>(
|
||||||
|
ctx: Arc<RwLock<sdk::Context>>,
|
||||||
|
bundle: P1,
|
||||||
|
destination: P2,
|
||||||
|
) -> Result<()>
|
||||||
where
|
where
|
||||||
P1: AsRef<Path> + std::fmt::Debug,
|
P1: AsRef<Path> + std::fmt::Debug,
|
||||||
P2: AsRef<Path> + std::fmt::Debug,
|
P2: AsRef<Path> + std::fmt::Debug,
|
||||||
{
|
{
|
||||||
let binary = fs::read(bundle).await?;
|
let in_file = File::open(bundle).await?;
|
||||||
let data = decompress(ctx, binary)?;
|
let out_file = File::create(destination).await?;
|
||||||
fs::write(destination, &data).await?;
|
|
||||||
|
|
||||||
Ok(())
|
// A `BufWriter` does not help here, as we're mostly just out chunks.
|
||||||
|
decompress(ctx, BufReader::new(in_file), out_file).await
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
let bundle = matches
|
let bundle = matches
|
||||||
.get_one::<PathBuf>("bundle")
|
.get_one::<PathBuf>("bundle")
|
||||||
.expect("required argument 'bundle' is missing");
|
.expect("required argument 'bundle' is missing");
|
||||||
|
@ -63,8 +70,8 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||||
let name = bundle.file_name();
|
let name = bundle.file_name();
|
||||||
|
|
||||||
if is_dir && name.is_some() {
|
if is_dir && name.is_some() {
|
||||||
decompress_bundle(&ctx, bundle, out_path.join(name.unwrap())).await
|
decompress_bundle(ctx, bundle, out_path.join(name.unwrap())).await
|
||||||
} else {
|
} else {
|
||||||
decompress_bundle(&ctx, bundle, out_path).await
|
decompress_bundle(ctx, bundle, out_path).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,20 +1,17 @@
|
||||||
use std::ffi::OsStr;
|
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
||||||
use color_eyre::eyre::{self, bail, Context, Result};
|
use color_eyre::eyre::{self, Context, Result};
|
||||||
use color_eyre::{Help, Report};
|
use color_eyre::{Help, Report, SectionExt};
|
||||||
use futures::future::try_join_all;
|
use futures::future::try_join_all;
|
||||||
use futures::StreamExt;
|
use futures::{StreamExt, TryFutureExt};
|
||||||
use glob::Pattern;
|
use glob::Pattern;
|
||||||
use sdk::{Bundle, BundleFile, CmdLine};
|
use sdk::{Bundle, BundleFile};
|
||||||
use tokio::fs;
|
use tokio::{fs, sync::RwLock};
|
||||||
|
|
||||||
use crate::cmd::util::resolve_bundle_paths;
|
use crate::cmd::util::resolve_bundle_paths;
|
||||||
use crate::shell_parse::ShellParser;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
|
fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
|
||||||
match Pattern::new(s) {
|
match Pattern::new(s) {
|
||||||
Ok(p) => Ok(p),
|
Ok(p) => Ok(p),
|
||||||
|
@ -22,7 +19,6 @@ fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn flatten_name(s: &str) -> String {
|
fn flatten_name(s: &str) -> String {
|
||||||
s.replace('/', "_")
|
s.replace('/', "_")
|
||||||
}
|
}
|
||||||
|
@ -37,7 +33,7 @@ pub(crate) fn command_definition() -> Command {
|
||||||
.value_parser(value_parser!(PathBuf))
|
.value_parser(value_parser!(PathBuf))
|
||||||
.help(
|
.help(
|
||||||
"Path to the bundle(s) to read. If this points to a directory instead \
|
"Path to the bundle(s) to read. If this points to a directory instead \
|
||||||
of a file, all files in that directory will be checked.",
|
of a file, all files in that directory will be checked.",
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.arg(
|
.arg(
|
||||||
|
@ -93,102 +89,49 @@ pub(crate) fn command_definition() -> Command {
|
||||||
Arg::new("ljd")
|
Arg::new("ljd")
|
||||||
.long("ljd")
|
.long("ljd")
|
||||||
.help(
|
.help(
|
||||||
"A custom command line to execute ljd with. It is treated as follows:\n\
|
"Path to a custom ljd executable. If not set, \
|
||||||
* if the argument is a valid path to an existing file:\n\
|
`ljd` will be called from PATH.",
|
||||||
** if the file is called 'main.py', it is assumed that 'python.exe' \
|
|
||||||
exists in PATH to execute this with.\n\
|
|
||||||
** otherwise it is treated as an executable\n\
|
|
||||||
* if it's a single word, it's treated as an executable in PATH\n\
|
|
||||||
* otherwise it is treated as a command line template.\n\
|
|
||||||
In any case, the application being run must accept ljd's flags '-c' and '-f'.",
|
|
||||||
)
|
)
|
||||||
.default_value("ljd"),
|
.default_value("ljd"),
|
||||||
)
|
)
|
||||||
// .arg(
|
.arg(
|
||||||
// Arg::new("revorb")
|
Arg::new("revorb")
|
||||||
// .long("revorb")
|
.long("revorb")
|
||||||
// .help(
|
.help(
|
||||||
// "Path to a custom revorb executable. If not set, \
|
"Path to a custom revorb executable. If not set, \
|
||||||
// `revorb` will be called from PATH.",
|
`revorb` will be called from PATH.",
|
||||||
// )
|
)
|
||||||
// .default_value("revorb"),
|
.default_value("revorb"),
|
||||||
// )
|
)
|
||||||
// .arg(
|
.arg(
|
||||||
// Arg::new("ww2ogg")
|
Arg::new("ww2ogg")
|
||||||
// .long("ww2ogg")
|
.long("ww2ogg")
|
||||||
// .help(
|
.help(
|
||||||
// "Path to a custom ww2ogg executable. If not set, \
|
"Path to a custom ww2ogg executable. If not set, \
|
||||||
// `ww2ogg` will be called from PATH.\nSee the documentation for how \
|
`ww2ogg` will be called from PATH.\nSee the documentation for how \
|
||||||
// to set up the script for this.",
|
to set up the script for this.",
|
||||||
// )
|
)
|
||||||
// .default_value("ww2ogg"),
|
.default_value("ww2ogg"),
|
||||||
// )
|
)
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn parse_command_line_template(tmpl: &String) -> Result<CmdLine> {
|
|
||||||
if tmpl.trim().is_empty() {
|
|
||||||
eyre::bail!("Command line template must not be empty");
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut cmd = if matches!(fs::try_exists(tmpl).await, Ok(true)) {
|
|
||||||
let path = PathBuf::from(tmpl);
|
|
||||||
if path.file_name() == Some(OsStr::new("main.py")) {
|
|
||||||
let mut cmd = CmdLine::new("python");
|
|
||||||
cmd.arg(path);
|
|
||||||
cmd
|
|
||||||
} else {
|
|
||||||
CmdLine::new(path)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let mut parsed = ShellParser::new(tmpl.as_bytes());
|
|
||||||
// Safety: The initial `tmpl` was a `&String` (i.e. valid UTF-8), and `shlex` does not
|
|
||||||
// insert or remove characters, nor does it split UTF-8 characters.
|
|
||||||
// So the resulting byte stream is still valid UTF-8.
|
|
||||||
let mut cmd = CmdLine::new(unsafe {
|
|
||||||
let bytes = parsed.next().expect("Template is not empty");
|
|
||||||
String::from_utf8_unchecked(bytes.to_vec())
|
|
||||||
});
|
|
||||||
|
|
||||||
while let Some(arg) = parsed.next() {
|
|
||||||
// Safety: See above.
|
|
||||||
cmd.arg(unsafe { String::from_utf8_unchecked(arg.to_vec()) });
|
|
||||||
}
|
|
||||||
|
|
||||||
if parsed.errored {
|
|
||||||
bail!("Invalid command line template");
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd
|
|
||||||
};
|
|
||||||
|
|
||||||
// Add ljd flags
|
|
||||||
cmd.arg("-c");
|
|
||||||
|
|
||||||
tracing::debug!("Parsed command line template: {:?}", cmd);
|
|
||||||
|
|
||||||
Ok(cmd)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
{
|
{
|
||||||
let ljd_bin = matches
|
let ljd_bin = matches
|
||||||
.get_one::<String>("ljd")
|
.get_one::<String>("ljd")
|
||||||
.expect("no default value for 'ljd' parameter");
|
.expect("no default value for 'ljd' parameter");
|
||||||
// let revorb_bin = matches
|
let revorb_bin = matches
|
||||||
// .get_one::<String>("revorb")
|
.get_one::<String>("revorb")
|
||||||
// .expect("no default value for 'revorb' parameter");
|
.expect("no default value for 'revorb' parameter");
|
||||||
// let ww2ogg_bin = matches
|
let ww2ogg_bin = matches
|
||||||
// .get_one::<String>("ww2ogg")
|
.get_one::<String>("ww2ogg")
|
||||||
// .expect("no default value for 'ww2ogg' parameter");
|
.expect("no default value for 'ww2ogg' parameter");
|
||||||
|
|
||||||
ctx.ljd = parse_command_line_template(ljd_bin)
|
let mut ctx = ctx.write().await;
|
||||||
.await
|
ctx.ljd = Some(ljd_bin.clone());
|
||||||
.map(Option::Some)
|
ctx.revorb = Some(revorb_bin.clone());
|
||||||
.wrap_err("Failed to parse command line template for flag 'ljd'")?;
|
ctx.ww2ogg = Some(ww2ogg_bin.clone());
|
||||||
// ctx.revorb = Some(revorb_bin.clone());
|
|
||||||
// ctx.ww2ogg = Some(ww2ogg_bin.clone());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let includes = match matches.get_many::<Pattern>("include") {
|
let includes = match matches.get_many::<Pattern>("include") {
|
||||||
|
@ -233,66 +176,58 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let includes = Arc::new(includes);
|
let mut paths = Box::pin(resolve_bundle_paths(bundles));
|
||||||
let excludes = Arc::new(excludes);
|
|
||||||
let ctx = Arc::new(ctx);
|
|
||||||
|
|
||||||
resolve_bundle_paths(bundles)
|
// TODO: Find a way to do this with `for_each_concurrent`. The first attempt
|
||||||
.for_each_concurrent(10, |p| async {
|
// just kept head-butting into a "use of moved value" wall.
|
||||||
let includes = includes.clone();
|
while let Some(path) = paths.next().await {
|
||||||
let excludes = excludes.clone();
|
let res = Bundle::open(ctx.clone(), &path)
|
||||||
let ctx = ctx.clone();
|
.and_then(|bundle| {
|
||||||
|
extract_bundle(
|
||||||
let options = ExtractOptions {
|
ctx.clone(),
|
||||||
includes,
|
bundle,
|
||||||
excludes,
|
&dest,
|
||||||
decompile: should_decompile,
|
ExtractOptions {
|
||||||
flatten: should_flatten,
|
includes: &includes,
|
||||||
dry_run: is_dry_run,
|
excludes: &excludes,
|
||||||
};
|
decompile: should_decompile,
|
||||||
|
flatten: should_flatten,
|
||||||
async move {
|
dry_run: is_dry_run,
|
||||||
match extract_bundle(ctx, &p, &dest, options).await {
|
},
|
||||||
Ok(_) => {}
|
)
|
||||||
Err(err) => tracing::error!("{err:?}"),
|
})
|
||||||
}
|
|
||||||
}
|
|
||||||
.await
|
.await
|
||||||
})
|
.wrap_err_with(|| format!("failed to extract from bundle '{}'", path.display()));
|
||||||
.await;
|
|
||||||
|
if let Err(err) = res {
|
||||||
|
tracing::error!("{:#}", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
struct ExtractOptions<'a> {
|
struct ExtractOptions<'a> {
|
||||||
decompile: bool,
|
decompile: bool,
|
||||||
flatten: bool,
|
flatten: bool,
|
||||||
dry_run: bool,
|
dry_run: bool,
|
||||||
includes: Arc<Vec<&'a Pattern>>,
|
includes: &'a dyn AsRef<[&'a Pattern]>,
|
||||||
excludes: Arc<Vec<&'a Pattern>>,
|
excludes: &'a dyn AsRef<[&'a Pattern]>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(
|
#[tracing::instrument(
|
||||||
skip(ctx, options),
|
skip(ctx, bundle, options),
|
||||||
fields(decompile = options.decompile, flatten = options.flatten, dry_run = options.dry_run)
|
fields(decompile = options.decompile, flatten = options.flatten, dry_run = options.dry_run)
|
||||||
)]
|
)]
|
||||||
async fn extract_bundle<P1, P2>(
|
async fn extract_bundle<P>(
|
||||||
ctx: Arc<sdk::Context>,
|
ctx: Arc<RwLock<sdk::Context>>,
|
||||||
path: P1,
|
bundle: Bundle,
|
||||||
dest: P2,
|
dest: P,
|
||||||
options: ExtractOptions<'_>,
|
options: ExtractOptions<'_>,
|
||||||
) -> Result<()>
|
) -> Result<()>
|
||||||
where
|
where
|
||||||
P1: AsRef<Path> + std::fmt::Debug,
|
P: AsRef<Path> + std::fmt::Debug,
|
||||||
P2: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
{
|
||||||
let bundle = {
|
|
||||||
let data = fs::read(path.as_ref()).await?;
|
|
||||||
let name = Bundle::get_name_from_path(&ctx, path.as_ref());
|
|
||||||
Bundle::from_binary(&ctx, name, data)?
|
|
||||||
};
|
|
||||||
|
|
||||||
let includes = options.includes.as_ref();
|
let includes = options.includes.as_ref();
|
||||||
let excludes = options.excludes.as_ref();
|
let excludes = options.excludes.as_ref();
|
||||||
let dest = dest.as_ref();
|
let dest = dest.as_ref();
|
||||||
|
@ -342,7 +277,7 @@ where
|
||||||
for file in files {
|
for file in files {
|
||||||
let name = file.name(options.decompile, None);
|
let name = file.name(options.decompile, None);
|
||||||
let data = if options.decompile {
|
let data = if options.decompile {
|
||||||
file.decompiled(&ctx).await
|
file.decompiled(ctx.clone()).await
|
||||||
} else {
|
} else {
|
||||||
file.raw()
|
file.raw()
|
||||||
};
|
};
|
||||||
|
@ -369,25 +304,14 @@ where
|
||||||
path.push(name);
|
path.push(name);
|
||||||
|
|
||||||
if options.dry_run {
|
if options.dry_run {
|
||||||
tracing::info!("Dry Run: Writing file '{}'", path.display());
|
tracing::info!(path = %path.display(), "Writing file");
|
||||||
} else {
|
} else {
|
||||||
tracing::info!("Writing file '{}'", path.display());
|
tracing::debug!(path = %path.display(), "Writing file");
|
||||||
tasks.push(tokio::spawn(async move {
|
tasks.push(tokio::spawn(async move {
|
||||||
if let Some(parent) = path.parent() {
|
fs::write(&path, file.data())
|
||||||
fs::create_dir_all(&parent).await.wrap_err_with(|| {
|
.await
|
||||||
format!(
|
.wrap_err("failed to write extracted file to disc")
|
||||||
"failed to create parent directories '{}'",
|
.with_section(|| path.display().to_string().header("Path"))
|
||||||
parent.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
}
|
|
||||||
|
|
||||||
fs::write(&path, file.data()).await.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"failed to write extracted file to disc: '{}'",
|
|
||||||
path.display()
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -411,9 +335,9 @@ where
|
||||||
path.push(name);
|
path.push(name);
|
||||||
|
|
||||||
if options.dry_run {
|
if options.dry_run {
|
||||||
tracing::info!("Dry Run: Writing file '{}'", path.display());
|
tracing::info!(path = %path.display(), "Writing file");
|
||||||
} else {
|
} else {
|
||||||
tracing::info!("Writing file '{}'", path.display());
|
tracing::debug!(path = %path.display(), "Writing file");
|
||||||
tasks.push(tokio::spawn(async move {
|
tasks.push(tokio::spawn(async move {
|
||||||
let parent = match path.parent() {
|
let parent = match path.parent() {
|
||||||
Some(parent) => parent,
|
Some(parent) => parent,
|
||||||
|
@ -425,19 +349,17 @@ where
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
fs::create_dir_all(parent).await.wrap_err_with(|| {
|
fs::create_dir_all(parent)
|
||||||
format!(
|
.await
|
||||||
"failed to create parent directory: '{}'",
|
.wrap_err("failed to create parent directory")
|
||||||
parent.display()
|
.with_section(|| {
|
||||||
)
|
parent.display().to_string().header("Path")
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
fs::write(&path, file.data()).await.wrap_err_with(|| {
|
fs::write(&path, file.data())
|
||||||
format!(
|
.await
|
||||||
"failed to write extracted file to disc: '{}'",
|
.wrap_err("failed to write extracted file to disc")
|
||||||
path.display()
|
.with_section(|| path.display().to_string().header("Path"))
|
||||||
)
|
|
||||||
})
|
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -445,7 +367,10 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let err = err.wrap_err(format!("Failed to decompile file {}", name));
|
let err = err
|
||||||
|
.wrap_err("Failed to decompile")
|
||||||
|
.with_section(|| name.header("File"));
|
||||||
|
|
||||||
tracing::error!("{:?}", err);
|
tracing::error!("{:?}", err);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
|
@ -1,11 +1,12 @@
|
||||||
use std::path::PathBuf;
|
use std::{path::PathBuf, sync::Arc};
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||||
use color_eyre::eyre::{self, Context, Result};
|
use color_eyre::{
|
||||||
use color_eyre::Help;
|
eyre::{self, Context, Result},
|
||||||
|
Help,
|
||||||
|
};
|
||||||
use sdk::Bundle;
|
use sdk::Bundle;
|
||||||
use tokio::fs::{self, File};
|
use tokio::{fs::File, io::AsyncReadExt, sync::RwLock};
|
||||||
use tokio::io::AsyncReadExt;
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
pub(crate) fn command_definition() -> Command {
|
||||||
Command::new("inject")
|
Command::new("inject")
|
||||||
|
@ -41,7 +42,7 @@ pub(crate) fn command_definition() -> Command {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
let bundle_path = matches
|
let bundle_path = matches
|
||||||
.get_one::<PathBuf>("bundle")
|
.get_one::<PathBuf>("bundle")
|
||||||
.expect("required parameter not found");
|
.expect("required parameter not found");
|
||||||
|
@ -52,30 +53,29 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||||
|
|
||||||
tracing::trace!(bundle_path = %bundle_path.display(), file_path = %file_path.display());
|
tracing::trace!(bundle_path = %bundle_path.display(), file_path = %file_path.display());
|
||||||
|
|
||||||
let mut bundle = {
|
let mut bundle = Bundle::open(ctx.clone(), bundle_path)
|
||||||
let binary = fs::read(bundle_path).await?;
|
.await
|
||||||
let name = Bundle::get_name_from_path(&ctx, bundle_path);
|
.wrap_err("Failed to open bundle file")?;
|
||||||
Bundle::from_binary(&ctx, name, binary).wrap_err("Failed to open bundle file")?
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(name) = matches.get_one::<String>("replace") {
|
if let Some(_name) = matches.get_one::<String>("replace") {
|
||||||
let mut file = File::open(&file_path)
|
let mut file = File::open(&file_path)
|
||||||
.await
|
.await
|
||||||
.wrap_err_with(|| format!("Failed to open '{}'", file_path.display()))?;
|
.wrap_err_with(|| format!("failed to open '{}'", file_path.display()))?;
|
||||||
|
|
||||||
if let Some(variant) = bundle
|
if let Some(variant) = bundle
|
||||||
.files_mut()
|
.files_mut()
|
||||||
.filter(|file| file.matches_name(name.clone()))
|
.filter(|file| file.matches_name(_name))
|
||||||
// TODO: Handle file variants
|
// TODO: Handle file variants
|
||||||
.find_map(|file| file.variants_mut().next())
|
.filter_map(|file| file.variants_mut().next())
|
||||||
|
.next()
|
||||||
{
|
{
|
||||||
let mut data = Vec::new();
|
let mut data = Vec::new();
|
||||||
file.read_to_end(&mut data)
|
file.read_to_end(&mut data)
|
||||||
.await
|
.await
|
||||||
.wrap_err("Failed to read input file")?;
|
.wrap_err("failed to read input file")?;
|
||||||
variant.set_data(data);
|
variant.set_data(data);
|
||||||
} else {
|
} else {
|
||||||
let err = eyre::eyre!("No file '{}' in this bundle.", name)
|
let err = eyre::eyre!("No file '{}' in this bundle.", _name)
|
||||||
.with_suggestion(|| {
|
.with_suggestion(|| {
|
||||||
format!(
|
format!(
|
||||||
"Run '{} bundle list {}' to list the files in this bundle.",
|
"Run '{} bundle list {}' to list the files in this bundle.",
|
||||||
|
@ -87,7 +87,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||||
format!(
|
format!(
|
||||||
"Use '{} bundle inject --add {} {} {}' to add it as a new file",
|
"Use '{} bundle inject --add {} {} {}' to add it as a new file",
|
||||||
clap::crate_name!(),
|
clap::crate_name!(),
|
||||||
name,
|
_name,
|
||||||
bundle_path.display(),
|
bundle_path.display(),
|
||||||
file_path.display()
|
file_path.display()
|
||||||
)
|
)
|
||||||
|
@ -97,13 +97,13 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
let out_path = matches.get_one::<PathBuf>("output").unwrap_or(bundle_path);
|
let out_path = matches.get_one::<PathBuf>("output").unwrap_or(bundle_path);
|
||||||
let data = bundle
|
let mut out_file = File::create(out_path)
|
||||||
.to_binary()
|
|
||||||
.wrap_err("Failed to write changed bundle to output")?;
|
|
||||||
|
|
||||||
fs::write(out_path, &data)
|
|
||||||
.await
|
.await
|
||||||
.wrap_err("Failed to write data to output file")?;
|
.wrap_err_with(|| format!("failed to open output file {}", out_path.display()))?;
|
||||||
|
bundle
|
||||||
|
.write(ctx.clone(), &mut out_file)
|
||||||
|
.await
|
||||||
|
.wrap_err("failed to write changed bundle to output")?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -1,12 +1,13 @@
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
||||||
use color_eyre::eyre::{self, Context, Result};
|
use color_eyre::eyre::{self, Result};
|
||||||
use color_eyre::{Help, SectionExt};
|
use color_eyre::{Help, SectionExt};
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
use sdk::Bundle;
|
use sdk::Bundle;
|
||||||
use tokio::fs;
|
use tokio::sync::RwLock;
|
||||||
|
use tracing::Instrument;
|
||||||
|
|
||||||
use crate::cmd::util::resolve_bundle_paths;
|
use crate::cmd::util::resolve_bundle_paths;
|
||||||
|
|
||||||
|
@ -31,69 +32,39 @@ pub(crate) fn command_definition() -> Command {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug)]
|
#[derive(Copy, Clone)]
|
||||||
enum OutputFormat {
|
enum OutputFormat {
|
||||||
Text,
|
Text,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn format_byte_size(size: usize) -> String {
|
fn print_bundle_list(bundle: Bundle, fmt: OutputFormat) {
|
||||||
if size < 1024 {
|
|
||||||
format!("{} Bytes", size)
|
|
||||||
} else if size < 1024 * 1024 {
|
|
||||||
format!("{} kB", size / 1024)
|
|
||||||
} else if size < 1024 * 1024 * 1024 {
|
|
||||||
format!("{} MB", size / (1024 * 1024))
|
|
||||||
} else {
|
|
||||||
format!("{} GB", size / (1024 * 1024 * 1024))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(ctx))]
|
|
||||||
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
|
|
||||||
where
|
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
|
||||||
{
|
|
||||||
let p = path.as_ref();
|
|
||||||
let bundle = {
|
|
||||||
let binary = fs::read(p).await?;
|
|
||||||
let name = Bundle::get_name_from_path(ctx, p);
|
|
||||||
Bundle::from_binary(ctx, name, binary)?
|
|
||||||
};
|
|
||||||
|
|
||||||
match fmt {
|
match fmt {
|
||||||
OutputFormat::Text => {
|
OutputFormat::Text => {
|
||||||
println!(
|
println!("Bundle: {}", bundle.name());
|
||||||
"Bundle: {} ({:016x})",
|
|
||||||
bundle.name().display(),
|
|
||||||
bundle.name()
|
|
||||||
);
|
|
||||||
|
|
||||||
for f in bundle.files().iter() {
|
for f in bundle.files().iter() {
|
||||||
if f.variants().len() != 1 {
|
if f.variants().len() != 1 {
|
||||||
let err = eyre::eyre!("Expected exactly one version for this file.")
|
let err = eyre::eyre!("Expected exactly one version for this file.")
|
||||||
.with_section(|| f.variants().len().to_string().header("Bundle:"))
|
.with_section(|| f.variants().len().to_string().header("Bundle:"))
|
||||||
.with_section(|| bundle.name().display().header("Bundle:"));
|
.with_section(|| bundle.name().clone().header("Bundle:"));
|
||||||
|
|
||||||
tracing::error!("{:#}", err);
|
tracing::error!("{:#}", err);
|
||||||
}
|
}
|
||||||
|
|
||||||
let v = &f.variants()[0];
|
let v = &f.variants()[0];
|
||||||
println!(
|
println!(
|
||||||
"\t{}.{}: {} ({})",
|
"\t{}.{}: {} bytes",
|
||||||
f.base_name().display(),
|
f.base_name(),
|
||||||
f.file_type().ext_name(),
|
f.file_type().ext_name(),
|
||||||
format_byte_size(v.size()),
|
|
||||||
v.size()
|
v.size()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
let bundles = matches
|
let bundles = matches
|
||||||
.get_many::<PathBuf>("bundle")
|
.get_many::<PathBuf>("bundle")
|
||||||
.unwrap_or_default()
|
.unwrap_or_default()
|
||||||
|
@ -107,20 +78,22 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||||
OutputFormat::Text
|
OutputFormat::Text
|
||||||
};
|
};
|
||||||
|
|
||||||
let ctx = Arc::new(ctx);
|
|
||||||
|
|
||||||
paths
|
paths
|
||||||
.for_each_concurrent(10, |p| async {
|
.for_each_concurrent(10, |p| async {
|
||||||
|
let span = tracing::info_span!("list bundle");
|
||||||
let ctx = ctx.clone();
|
let ctx = ctx.clone();
|
||||||
async move {
|
async move {
|
||||||
if let Err(err) = print_bundle_contents(&ctx, &p, fmt)
|
let span = tracing::info_span!("open bundle");
|
||||||
|
if let Err(err) = Bundle::open(ctx, &p)
|
||||||
|
.instrument(span)
|
||||||
.await
|
.await
|
||||||
.wrap_err_with(|| format!("Failed to list contents of bundle {}", p.display()))
|
.map(|bundle| print_bundle_list(bundle, fmt))
|
||||||
{
|
{
|
||||||
tracing::error!("{err:?}");
|
tracing::error!("Failed to open bundle '{}': {:?}", p.display(), err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.await;
|
.instrument(span)
|
||||||
|
.await
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
|
|
|
@ -1,31 +1,56 @@
|
||||||
use clap::{ArgMatches, Command};
|
use std::sync::Arc;
|
||||||
use color_eyre::eyre::Result;
|
|
||||||
|
use clap::{Arg, ArgMatches, Command};
|
||||||
|
use color_eyre::eyre::Result;
|
||||||
|
use tokio::sync::RwLock;
|
||||||
|
|
||||||
|
use sdk::Oodle;
|
||||||
|
|
||||||
mod db;
|
|
||||||
mod decompress;
|
mod decompress;
|
||||||
mod extract;
|
mod extract;
|
||||||
mod inject;
|
mod inject;
|
||||||
mod list;
|
mod list;
|
||||||
|
|
||||||
|
#[cfg(target_os = "windows")]
|
||||||
|
const OODLE_LIB_NAME: &str = "oo2core_8_win64";
|
||||||
|
|
||||||
|
#[cfg(target_os = "linux")]
|
||||||
|
const OODLE_LIB_NAME: &str = "liboo2corelinux64.so";
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
pub(crate) fn command_definition() -> Command {
|
||||||
Command::new("bundle")
|
Command::new("bundle")
|
||||||
.subcommand_required(true)
|
.subcommand_required(true)
|
||||||
.about("Manipulate the game's bundle files")
|
.about("Manipulate the game's bundle files")
|
||||||
|
.arg(
|
||||||
|
Arg::new("oodle")
|
||||||
|
.long("oodle")
|
||||||
|
.default_value(OODLE_LIB_NAME)
|
||||||
|
.help(
|
||||||
|
"The oodle library to load. This may either be:\n\
|
||||||
|
- A library name that will be searched for in the system's default paths.\n\
|
||||||
|
- A file path relative to the current working directory.\n\
|
||||||
|
- An absolute file path.",
|
||||||
|
),
|
||||||
|
)
|
||||||
.subcommand(decompress::command_definition())
|
.subcommand(decompress::command_definition())
|
||||||
.subcommand(extract::command_definition())
|
.subcommand(extract::command_definition())
|
||||||
.subcommand(inject::command_definition())
|
.subcommand(inject::command_definition())
|
||||||
.subcommand(list::command_definition())
|
.subcommand(list::command_definition())
|
||||||
.subcommand(db::command_definition())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
|
if let Some(name) = matches.get_one::<String>("oodle") {
|
||||||
|
let oodle = Oodle::new(name)?;
|
||||||
|
let mut ctx = ctx.write().await;
|
||||||
|
ctx.oodle = Some(oodle);
|
||||||
|
}
|
||||||
|
|
||||||
match matches.subcommand() {
|
match matches.subcommand() {
|
||||||
Some(("decompress", sub_matches)) => decompress::run(ctx, sub_matches).await,
|
Some(("decompress", sub_matches)) => decompress::run(ctx, sub_matches).await,
|
||||||
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,
|
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,
|
||||||
Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await,
|
Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await,
|
||||||
Some(("list", sub_matches)) => list::run(ctx, sub_matches).await,
|
Some(("list", sub_matches)) => list::run(ctx, sub_matches).await,
|
||||||
Some(("db", sub_matches)) => db::run(ctx, sub_matches).await,
|
|
||||||
_ => unreachable!(
|
_ => unreachable!(
|
||||||
"clap is configured to require a subcommand, and they're all handled above"
|
"clap is configured to require a subcommand, and they're all handled above"
|
||||||
),
|
),
|
||||||
|
|
|
@ -1,12 +1,12 @@
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
|
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
|
||||||
use cli_table::{print_stdout, WithTitle};
|
|
||||||
use color_eyre::eyre::{Context, Result};
|
use color_eyre::eyre::{Context, Result};
|
||||||
use color_eyre::{Help, SectionExt};
|
use color_eyre::{Help, SectionExt};
|
||||||
use sdk::murmur::{IdString64, Murmur32, Murmur64};
|
|
||||||
use tokio::fs::File;
|
use tokio::fs::File;
|
||||||
use tokio::io::{AsyncBufReadExt, BufReader};
|
use tokio::io::{AsyncBufReadExt, BufReader};
|
||||||
|
use tokio::sync::RwLock;
|
||||||
use tokio_stream::wrappers::LinesStream;
|
use tokio_stream::wrappers::LinesStream;
|
||||||
use tokio_stream::StreamExt;
|
use tokio_stream::StreamExt;
|
||||||
|
|
||||||
|
@ -29,40 +29,6 @@ impl From<HashGroup> for sdk::murmur::HashGroup {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Display for HashGroup {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
HashGroup::Filename => write!(f, "filename"),
|
|
||||||
HashGroup::Filetype => write!(f, "filetype"),
|
|
||||||
HashGroup::Strings => write!(f, "strings"),
|
|
||||||
HashGroup::Other => write!(f, "other"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(cli_table::Table)]
|
|
||||||
struct TableRow {
|
|
||||||
#[table(title = "Value")]
|
|
||||||
value: String,
|
|
||||||
#[table(title = "Murmur64")]
|
|
||||||
long: Murmur64,
|
|
||||||
#[table(title = "Murmur32")]
|
|
||||||
short: Murmur32,
|
|
||||||
#[table(title = "Group")]
|
|
||||||
group: sdk::murmur::HashGroup,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&sdk::murmur::Entry> for TableRow {
|
|
||||||
fn from(entry: &sdk::murmur::Entry) -> Self {
|
|
||||||
Self {
|
|
||||||
value: entry.value().clone(),
|
|
||||||
long: entry.long(),
|
|
||||||
short: entry.short(),
|
|
||||||
group: entry.group(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
pub(crate) fn command_definition() -> Command {
|
||||||
Command::new("dictionary")
|
Command::new("dictionary")
|
||||||
.about("Manipulate a hash dictionary file.")
|
.about("Manipulate a hash dictionary file.")
|
||||||
|
@ -79,8 +45,7 @@ pub(crate) fn command_definition() -> Command {
|
||||||
.short('g')
|
.short('g')
|
||||||
.long("group")
|
.long("group")
|
||||||
.action(ArgAction::Append)
|
.action(ArgAction::Append)
|
||||||
.value_parser(value_parser!(HashGroup))
|
.value_parser(value_parser!(HashGroup)),
|
||||||
.default_values(["other", "filename", "filetype", "strings"]),
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.subcommand(
|
.subcommand(
|
||||||
|
@ -104,7 +69,6 @@ pub(crate) fn command_definition() -> Command {
|
||||||
.value_parser(value_parser!(PathBuf)),
|
.value_parser(value_parser!(PathBuf)),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.subcommand(Command::new("show").about("Show the contents of the dictionary"))
|
|
||||||
.subcommand(Command::new("save").about(
|
.subcommand(Command::new("save").about(
|
||||||
"Save back the currently loaded dictionary, with hashes pre-computed. \
|
"Save back the currently loaded dictionary, with hashes pre-computed. \
|
||||||
Pre-computing hashes speeds up loading large dictionaries, as they would \
|
Pre-computing hashes speeds up loading large dictionaries, as they would \
|
||||||
|
@ -113,26 +77,21 @@ pub(crate) fn command_definition() -> Command {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
match matches.subcommand() {
|
match matches.subcommand() {
|
||||||
Some(("lookup", sub_matches)) => {
|
Some(("lookup", sub_matches)) => {
|
||||||
let hash = {
|
let hash = sub_matches
|
||||||
let s = sub_matches
|
.get_one::<u64>("hash")
|
||||||
.get_one::<String>("hash")
|
.expect("required argument not found");
|
||||||
.expect("required argument not found");
|
|
||||||
|
|
||||||
u64::from_str_radix(s, 16)
|
|
||||||
.wrap_err("Failed to parse argument as hexadecimal string")?
|
|
||||||
};
|
|
||||||
|
|
||||||
let groups = sub_matches
|
let groups = sub_matches
|
||||||
.get_many::<HashGroup>("group")
|
.get_many::<HashGroup>("group")
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
let ctx = ctx.read().await;
|
||||||
for group in groups {
|
for group in groups {
|
||||||
if let IdString64::String(value) = ctx.lookup_hash(hash, (*group).into()) {
|
let value = ctx.lookup_hash(*hash, (*group).into());
|
||||||
println!("{group}: {value}");
|
println!("{value}");
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -145,10 +104,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
|
||||||
.get_one::<HashGroup>("group")
|
.get_one::<HashGroup>("group")
|
||||||
.expect("required argument not found");
|
.expect("required argument not found");
|
||||||
|
|
||||||
let r: BufReader<Box<dyn tokio::io::AsyncRead + std::marker::Unpin>> = if let Some(name) =
|
let r: BufReader<Box<dyn tokio::io::AsyncRead + std::marker::Unpin>> = if let Some(name) = path.file_name() && name == "-" {
|
||||||
path.file_name()
|
|
||||||
&& name == "-"
|
|
||||||
{
|
|
||||||
let f = tokio::io::stdin();
|
let f = tokio::io::stdin();
|
||||||
BufReader::new(Box::new(f))
|
BufReader::new(Box::new(f))
|
||||||
} else {
|
} else {
|
||||||
|
@ -156,25 +112,13 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
|
||||||
BufReader::new(Box::new(f))
|
BufReader::new(Box::new(f))
|
||||||
};
|
};
|
||||||
|
|
||||||
let group = sdk::murmur::HashGroup::from(*group);
|
|
||||||
|
|
||||||
let mut added = 0;
|
|
||||||
let mut skipped = 0;
|
|
||||||
|
|
||||||
let lines: Vec<_> = LinesStream::new(r.lines()).collect().await;
|
let lines: Vec<_> = LinesStream::new(r.lines()).collect().await;
|
||||||
let total = {
|
{
|
||||||
|
let mut ctx = ctx.write().await;
|
||||||
for line in lines.into_iter() {
|
for line in lines.into_iter() {
|
||||||
let value = line?;
|
ctx.lookup.add(line?, (*group).into());
|
||||||
if ctx.lookup.find(&value, group).is_some() {
|
|
||||||
skipped += 1;
|
|
||||||
} else {
|
|
||||||
ctx.lookup.add(value, group);
|
|
||||||
added += 1;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
ctx.lookup.len()
|
|
||||||
};
|
|
||||||
|
|
||||||
let out_path = matches
|
let out_path = matches
|
||||||
.get_one::<PathBuf>("dictionary")
|
.get_one::<PathBuf>("dictionary")
|
||||||
|
@ -190,18 +134,12 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
|
||||||
})
|
})
|
||||||
.with_section(|| out_path.display().to_string().header("Path:"))?;
|
.with_section(|| out_path.display().to_string().header("Path:"))?;
|
||||||
|
|
||||||
ctx.lookup
|
ctx.read()
|
||||||
|
.await
|
||||||
|
.lookup
|
||||||
.to_csv(f)
|
.to_csv(f)
|
||||||
.await
|
.await
|
||||||
.wrap_err("Failed to write dictionary to disk")?;
|
.wrap_err("Failed to write dictionary to disk")
|
||||||
|
|
||||||
tracing::info!(
|
|
||||||
"Added {} entries, skipped {} duplicates. Total now {}.",
|
|
||||||
added,
|
|
||||||
skipped,
|
|
||||||
total
|
|
||||||
);
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
Some(("save", _)) => {
|
Some(("save", _)) => {
|
||||||
let out_path = matches
|
let out_path = matches
|
||||||
|
@ -218,19 +156,13 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
|
||||||
})
|
})
|
||||||
.with_section(|| out_path.display().to_string().header("Path:"))?;
|
.with_section(|| out_path.display().to_string().header("Path:"))?;
|
||||||
|
|
||||||
ctx.lookup
|
ctx.read()
|
||||||
|
.await
|
||||||
|
.lookup
|
||||||
.to_csv(f)
|
.to_csv(f)
|
||||||
.await
|
.await
|
||||||
.wrap_err("Failed to write dictionary to disk")
|
.wrap_err("Failed to write dictionary to disk")
|
||||||
}
|
}
|
||||||
Some(("show", _)) => {
|
|
||||||
let lookup = &ctx.lookup;
|
|
||||||
let rows: Vec<_> = lookup.entries().iter().map(TableRow::from).collect();
|
|
||||||
|
|
||||||
print_stdout(rows.with_title())?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
_ => unreachable!(
|
_ => unreachable!(
|
||||||
"clap is configured to require a subcommand, and they're all handled above"
|
"clap is configured to require a subcommand, and they're all handled above"
|
||||||
),
|
),
|
||||||
|
|
|
@ -1,407 +0,0 @@
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::ffi::{CStr, CString};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
|
||||||
use color_eyre::eyre::{self, Context};
|
|
||||||
use color_eyre::{Help, Report, Result};
|
|
||||||
use dtmt_shared::{ModConfig, ModConfigResources, ModDependency};
|
|
||||||
use futures::FutureExt;
|
|
||||||
use luajit2_sys as lua;
|
|
||||||
use tokio::fs;
|
|
||||||
use tokio_stream::wrappers::ReadDirStream;
|
|
||||||
use tokio_stream::StreamExt;
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
|
||||||
Command::new("migrate")
|
|
||||||
.about("Migrate a mod project from the loose file structure to DTMT.")
|
|
||||||
.arg(
|
|
||||||
Arg::new("mod-file")
|
|
||||||
.required(true)
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help("The path to the mod's '<id>.mod' file."),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("directory")
|
|
||||||
.required(true)
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"The directory to create the mod in. Within this directory, \
|
|
||||||
DTMT will create a new folder named after the mod ID and migrate files \
|
|
||||||
into that folder.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
struct ModFile {
|
|
||||||
id: String,
|
|
||||||
init: PathBuf,
|
|
||||||
data: Option<PathBuf>,
|
|
||||||
localization: Option<PathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
// This piece of Lua code stubs DMF functions and runs a mod's `.mod` file to extract
|
|
||||||
// the contained information.
|
|
||||||
static MOD_FILE_RUNNER: &str = r#"
|
|
||||||
_DATA = {}
|
|
||||||
|
|
||||||
function fassert() end
|
|
||||||
|
|
||||||
function new_mod(id, options)
|
|
||||||
_DATA.id = id
|
|
||||||
_DATA.init = options.mod_script
|
|
||||||
_DATA.data = options.mod_data
|
|
||||||
_DATA.localization = options.mod_localization
|
|
||||||
end
|
|
||||||
|
|
||||||
dmf = {
|
|
||||||
dofile = function(self, file)
|
|
||||||
_DATA.init = file
|
|
||||||
end
|
|
||||||
}
|
|
||||||
|
|
||||||
_MOD().run()
|
|
||||||
"#;
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn evaluate_mod_file(path: impl AsRef<Path> + std::fmt::Debug) -> Result<ModFile> {
|
|
||||||
let path = path.as_ref();
|
|
||||||
let code = fs::read(path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
|
||||||
|
|
||||||
tokio::task::spawn_blocking(move || unsafe {
|
|
||||||
let state = lua::luaL_newstate();
|
|
||||||
lua::luaL_openlibs(state);
|
|
||||||
|
|
||||||
let code = CString::new(code).expect("Cannot build CString");
|
|
||||||
let name = CString::new("_MOD").expect("Cannot build CString");
|
|
||||||
|
|
||||||
match lua::luaL_loadstring(state, code.as_ptr()) as u32 {
|
|
||||||
lua::LUA_OK => {}
|
|
||||||
lua::LUA_ERRSYNTAX => {
|
|
||||||
let err = lua::lua_tostring(state, -1);
|
|
||||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
|
||||||
|
|
||||||
lua::lua_close(state);
|
|
||||||
|
|
||||||
eyre::bail!("Invalid syntax: {}", err);
|
|
||||||
}
|
|
||||||
lua::LUA_ERRMEM => {
|
|
||||||
lua::lua_close(state);
|
|
||||||
eyre::bail!("Failed to allocate sufficient memory")
|
|
||||||
}
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
|
|
||||||
tracing::trace!("Loaded '.mod' code");
|
|
||||||
|
|
||||||
lua::lua_setglobal(state, name.as_ptr());
|
|
||||||
|
|
||||||
let code = CString::new(MOD_FILE_RUNNER).expect("Cannot build CString");
|
|
||||||
match lua::luaL_loadstring(state, code.as_ptr()) as u32 {
|
|
||||||
lua::LUA_OK => {}
|
|
||||||
lua::LUA_ERRSYNTAX => {
|
|
||||||
let err = lua::lua_tostring(state, -1);
|
|
||||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
|
||||||
|
|
||||||
lua::lua_close(state);
|
|
||||||
|
|
||||||
eyre::bail!("Invalid syntax: {}", err);
|
|
||||||
}
|
|
||||||
lua::LUA_ERRMEM => {
|
|
||||||
lua::lua_close(state);
|
|
||||||
eyre::bail!("Failed to allocate sufficient memory")
|
|
||||||
}
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
|
|
||||||
match lua::lua_pcall(state, 0, 1, 0) as u32 {
|
|
||||||
lua::LUA_OK => {}
|
|
||||||
lua::LUA_ERRRUN => {
|
|
||||||
let err = lua::lua_tostring(state, -1);
|
|
||||||
let err = CStr::from_ptr(err).to_string_lossy().to_string();
|
|
||||||
|
|
||||||
lua::lua_close(state);
|
|
||||||
|
|
||||||
eyre::bail!("Failed to evaluate '.mod' file: {}", err);
|
|
||||||
}
|
|
||||||
lua::LUA_ERRMEM => {
|
|
||||||
lua::lua_close(state);
|
|
||||||
eyre::bail!("Failed to allocate sufficient memory")
|
|
||||||
}
|
|
||||||
// We don't use an error handler function, so this should be unreachable
|
|
||||||
lua::LUA_ERRERR => unreachable!(),
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
|
|
||||||
tracing::trace!("Loaded file runner code");
|
|
||||||
|
|
||||||
let name = CString::new("_DATA").expect("Cannot build CString");
|
|
||||||
lua::lua_getglobal(state, name.as_ptr());
|
|
||||||
|
|
||||||
let id = {
|
|
||||||
let name = CString::new("id").expect("Cannot build CString");
|
|
||||||
lua::lua_getfield(state, -1, name.as_ptr());
|
|
||||||
let val = {
|
|
||||||
let ptr = lua::lua_tostring(state, -1);
|
|
||||||
let str = CStr::from_ptr(ptr);
|
|
||||||
str.to_str()
|
|
||||||
.expect("ID value is not a valid string")
|
|
||||||
.to_string()
|
|
||||||
};
|
|
||||||
lua::lua_pop(state, 1);
|
|
||||||
val
|
|
||||||
};
|
|
||||||
|
|
||||||
let path_prefix = format!("{id}/");
|
|
||||||
|
|
||||||
let init = {
|
|
||||||
let name = CString::new("init").expect("Cannot build CString");
|
|
||||||
lua::lua_getfield(state, -1, name.as_ptr());
|
|
||||||
let val = {
|
|
||||||
let ptr = lua::lua_tostring(state, -1);
|
|
||||||
let str = CStr::from_ptr(ptr);
|
|
||||||
str.to_str().expect("ID value is not a valid string")
|
|
||||||
};
|
|
||||||
lua::lua_pop(state, 1);
|
|
||||||
PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val))
|
|
||||||
};
|
|
||||||
|
|
||||||
let data = {
|
|
||||||
let name = CString::new("data").expect("Cannot build CString");
|
|
||||||
lua::lua_getfield(state, -1, name.as_ptr());
|
|
||||||
|
|
||||||
if lua::lua_isnil(state, -1) > 0 {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
let val = {
|
|
||||||
let ptr = lua::lua_tostring(state, -1);
|
|
||||||
let str = CStr::from_ptr(ptr);
|
|
||||||
str.to_str().expect("ID value is not a valid string")
|
|
||||||
};
|
|
||||||
lua::lua_pop(state, 1);
|
|
||||||
Some(PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val)))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let localization = {
|
|
||||||
let name = CString::new("localization").expect("Cannot build CString");
|
|
||||||
lua::lua_getfield(state, -1, name.as_ptr());
|
|
||||||
|
|
||||||
if lua::lua_isnil(state, -1) > 0 {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
let val = {
|
|
||||||
let ptr = lua::lua_tostring(state, -1);
|
|
||||||
let str = CStr::from_ptr(ptr);
|
|
||||||
str.to_str().expect("ID value is not a valid string")
|
|
||||||
};
|
|
||||||
lua::lua_pop(state, 1);
|
|
||||||
Some(PathBuf::from(val.strip_prefix(&path_prefix).unwrap_or(val)))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
lua::lua_close(state);
|
|
||||||
|
|
||||||
let mod_file = ModFile {
|
|
||||||
id,
|
|
||||||
init,
|
|
||||||
data,
|
|
||||||
localization,
|
|
||||||
};
|
|
||||||
|
|
||||||
tracing::trace!(?mod_file);
|
|
||||||
|
|
||||||
Ok(mod_file)
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.map_err(Report::new)
|
|
||||||
.flatten()
|
|
||||||
.wrap_err("Failed to run mod file handler")
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_recursion::async_recursion]
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn process_directory<P1, P2>(path: P1, prefix: P2) -> Result<()>
|
|
||||||
where
|
|
||||||
P1: AsRef<Path> + std::fmt::Debug + std::marker::Send,
|
|
||||||
P2: AsRef<Path> + std::fmt::Debug + std::marker::Send,
|
|
||||||
{
|
|
||||||
let path = path.as_ref();
|
|
||||||
let prefix = prefix.as_ref();
|
|
||||||
|
|
||||||
let read_dir = fs::read_dir(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read directory '{}'", path.display()))?;
|
|
||||||
|
|
||||||
let stream = ReadDirStream::new(read_dir).map(|res| res.wrap_err("Failed to read dir entry"));
|
|
||||||
tokio::pin!(stream);
|
|
||||||
|
|
||||||
while let Some(res) = stream.next().await {
|
|
||||||
let entry = res?;
|
|
||||||
let in_path = entry.path();
|
|
||||||
let out_path = prefix.join(entry.file_name());
|
|
||||||
|
|
||||||
let t = entry.file_type().await?;
|
|
||||||
|
|
||||||
if t.is_dir() {
|
|
||||||
process_directory(in_path, out_path).await?;
|
|
||||||
} else {
|
|
||||||
tracing::trace!(
|
|
||||||
"Copying file '{}' -> '{}'",
|
|
||||||
in_path.display(),
|
|
||||||
out_path.display()
|
|
||||||
);
|
|
||||||
let res = fs::create_dir_all(prefix)
|
|
||||||
.then(|_| fs::copy(&in_path, &out_path))
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to copy '{}' -> '{}'",
|
|
||||||
in_path.display(),
|
|
||||||
out_path.display()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
if let Err(err) = res {
|
|
||||||
tracing::error!("{:?}", err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|
||||||
let (mod_file, in_dir) = {
|
|
||||||
let path = matches
|
|
||||||
.get_one::<PathBuf>("mod-file")
|
|
||||||
.expect("Parameter is required");
|
|
||||||
|
|
||||||
let mod_file = evaluate_mod_file(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to evaluate '.mod' file")?;
|
|
||||||
|
|
||||||
(
|
|
||||||
mod_file,
|
|
||||||
path.parent().expect("A file path always has a parent"),
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
let out_dir = matches
|
|
||||||
.get_one::<PathBuf>("directory")
|
|
||||||
.expect("Parameter is required");
|
|
||||||
|
|
||||||
{
|
|
||||||
let is_dir = fs::metadata(out_dir)
|
|
||||||
.await
|
|
||||||
.map(|meta| meta.is_dir())
|
|
||||||
.unwrap_or(false);
|
|
||||||
|
|
||||||
if !is_dir {
|
|
||||||
let err = eyre::eyre!("Invalid output directory '{}'", out_dir.display());
|
|
||||||
return Err(err)
|
|
||||||
.with_suggestion(|| "Make sure the directory exists and is writable.".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let out_dir = out_dir.join(&mod_file.id);
|
|
||||||
|
|
||||||
fs::create_dir(&out_dir)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to create mod directory '{}'", out_dir.display()))?;
|
|
||||||
|
|
||||||
tracing::info!("Created mod directory '{}'", out_dir.display());
|
|
||||||
|
|
||||||
println!(
|
|
||||||
"Enter additional information about your mod '{}'!",
|
|
||||||
&mod_file.id
|
|
||||||
);
|
|
||||||
|
|
||||||
let name = promptly::prompt_default("Display name", mod_file.id.clone())
|
|
||||||
.map(|s: String| s.trim().to_string())?;
|
|
||||||
let summary = promptly::prompt("Short summary").map(|s: String| s.trim().to_string())?;
|
|
||||||
let author =
|
|
||||||
promptly::prompt_opt("Author").map(|opt| opt.map(|s: String| s.trim().to_string()))?;
|
|
||||||
let version = promptly::prompt_default("Version", String::from("0.1.0"))
|
|
||||||
.map(|s: String| s.trim().to_string())?;
|
|
||||||
let categories = promptly::prompt("Categories (comma separated list)")
|
|
||||||
.map(|s: String| s.trim().to_string())
|
|
||||||
.map(|s: String| s.split(',').map(|s| s.trim().to_string()).collect())?;
|
|
||||||
|
|
||||||
let packages = vec![PathBuf::from("packages/mods").join(&mod_file.id)];
|
|
||||||
|
|
||||||
let dtmt_cfg = ModConfig {
|
|
||||||
dir: out_dir,
|
|
||||||
id: mod_file.id,
|
|
||||||
name,
|
|
||||||
summary,
|
|
||||||
author,
|
|
||||||
version,
|
|
||||||
description: None,
|
|
||||||
image: None,
|
|
||||||
categories,
|
|
||||||
packages,
|
|
||||||
resources: ModConfigResources {
|
|
||||||
init: mod_file.init,
|
|
||||||
data: mod_file.data,
|
|
||||||
localization: mod_file.localization,
|
|
||||||
},
|
|
||||||
depends: vec![ModDependency::ID(String::from("DMF"))],
|
|
||||||
bundled: true,
|
|
||||||
name_overrides: HashMap::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
tracing::debug!(?dtmt_cfg);
|
|
||||||
|
|
||||||
{
|
|
||||||
let path = dtmt_cfg.dir.join("dtmt.cfg");
|
|
||||||
let data = serde_sjson::to_string(&dtmt_cfg).wrap_err("Failed to serialize dtmt.cfg")?;
|
|
||||||
fs::write(&path, &data)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to write '{}'", path.display()))?;
|
|
||||||
|
|
||||||
tracing::info!("Created mod configuration at '{}'", path.display());
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let path = dtmt_cfg
|
|
||||||
.dir
|
|
||||||
.join(&dtmt_cfg.packages[0])
|
|
||||||
.with_extension("package");
|
|
||||||
|
|
||||||
let data = {
|
|
||||||
let mut map = HashMap::new();
|
|
||||||
map.insert("lua", vec![format!("scripts/mods/{}/*", dtmt_cfg.id)]);
|
|
||||||
map
|
|
||||||
};
|
|
||||||
let data = serde_sjson::to_string(&data).wrap_err("Failed to serialize package file")?;
|
|
||||||
|
|
||||||
fs::create_dir_all(path.parent().unwrap())
|
|
||||||
.then(|_| fs::write(&path, &data))
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to write '{}'", path.display()))?;
|
|
||||||
|
|
||||||
tracing::info!("Created package file at '{}'", path.display());
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let path = in_dir.join("scripts");
|
|
||||||
let scripts_dir = dtmt_cfg.dir.join("scripts");
|
|
||||||
process_directory(&path, &scripts_dir)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to copy files from '{}' to '{}'",
|
|
||||||
path.display(),
|
|
||||||
scripts_dir.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
tracing::info!("Copied script files to '{}'", scripts_dir.display());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
|
@ -1,6 +1,9 @@
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{Arg, ArgAction, ArgMatches, Command};
|
use clap::{Arg, ArgAction, ArgMatches, Command};
|
||||||
use color_eyre::eyre::Result;
|
use color_eyre::eyre::Result;
|
||||||
use sdk::murmur::{Murmur32, Murmur64};
|
use sdk::murmur::{Murmur32, Murmur64};
|
||||||
|
use tokio::sync::RwLock;
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
pub(crate) fn command_definition() -> Command {
|
||||||
Command::new("murmur")
|
Command::new("murmur")
|
||||||
|
@ -25,7 +28,7 @@ pub(crate) fn command_definition() -> Command {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(_ctx: Arc<RwLock<sdk::Context>>, matches: &ArgMatches) -> Result<()> {
|
||||||
match matches.subcommand() {
|
match matches.subcommand() {
|
||||||
Some(("hash", sub_matches)) => {
|
Some(("hash", sub_matches)) => {
|
||||||
let s = sub_matches
|
let s = sub_matches
|
||||||
|
|
|
@ -1,214 +1,22 @@
|
||||||
use std::path::PathBuf;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use clap::{Arg, ArgMatches, Command};
|
use clap::{Arg, ArgMatches, Command};
|
||||||
use color_eyre::eyre::{self, Context, Result};
|
use color_eyre::eyre::Result;
|
||||||
use color_eyre::Help;
|
use tokio::sync::RwLock;
|
||||||
use futures::{StreamExt, TryStreamExt};
|
|
||||||
use minijinja::Environment;
|
|
||||||
use tokio::fs::{self, DirBuilder};
|
|
||||||
|
|
||||||
const TEMPLATES: [(&str, &str); 5] = [
|
pub(crate) fn _command_definition() -> Command {
|
||||||
(
|
|
||||||
"dtmt.cfg",
|
|
||||||
r#"//
|
|
||||||
// This is your mod's main configuration file. It tells DTMT how to build the mod,
|
|
||||||
// and DTMM what to display to your users.
|
|
||||||
// Certain files have been pre-filled by the template, the ones commented out (`//`)
|
|
||||||
// are optional.
|
|
||||||
//
|
|
||||||
// A unique identifier (preferably lower case, alphanumeric)
|
|
||||||
id = "{{id}}"
|
|
||||||
// The display name that your users will see.
|
|
||||||
// This doesn't have to be unique, but you still want to avoid being confused with other
|
|
||||||
// mods.
|
|
||||||
name = "{{name}}"
|
|
||||||
// It's good practice to increase this number whenever you publish changes.
|
|
||||||
// It's up to you if you use SemVer or something simpler like `1970-12-24`. It should sort and
|
|
||||||
// compare well, though.
|
|
||||||
version = "0.1.0"
|
|
||||||
// author = ""
|
|
||||||
|
|
||||||
// A one- or two-line short description.
|
|
||||||
summary = "This is my new mod '{{name}}'!"
|
|
||||||
// description = ""
|
|
||||||
// image = "assets/logo.png"
|
|
||||||
|
|
||||||
// Can contain arbitrary strings. But to keep things consistent and useful,
|
|
||||||
// capitalize names and check existing mods for matching categories.
|
|
||||||
categories = [
|
|
||||||
Misc
|
|
||||||
// UI
|
|
||||||
// QoL
|
|
||||||
// Tools
|
|
||||||
]
|
|
||||||
|
|
||||||
// A list of mod IDs that this mod depends on. You can find
|
|
||||||
// those IDs by downloading the mod and extracting their `dtmt.cfg`.
|
|
||||||
// To make your fellow modders' lives easier, publish your own mods' IDs
|
|
||||||
// somewhere visible, such as the Nexusmods page.
|
|
||||||
depends = [
|
|
||||||
DMF
|
|
||||||
]
|
|
||||||
|
|
||||||
// The primary resources that serve as the entry point to your
|
|
||||||
// mod's code. Unless for very specific use cases, the generated
|
|
||||||
// values shouldn't be changed.
|
|
||||||
resources = {
|
|
||||||
init = "scripts/mods/{{id}}/init"
|
|
||||||
data = "scripts/mods/{{id}}/data"
|
|
||||||
localization = "scripts/mods/{{id}}/localization"
|
|
||||||
}
|
|
||||||
|
|
||||||
// The list of packages, or bundles, to build.
|
|
||||||
// Each one corresponds to a package definition in the named folder.
|
|
||||||
// For mods that contain only code and/or a few small assets, a single
|
|
||||||
// package will suffice.
|
|
||||||
packages = [
|
|
||||||
"packages/mods/{{id}}"
|
|
||||||
]
|
|
||||||
"#,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"packages/mods/{{id}}.package",
|
|
||||||
r#"lua = [
|
|
||||||
"scripts/mods/{{id}}/*"
|
|
||||||
]
|
|
||||||
"#,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"scripts/mods/{{id}}/init.lua",
|
|
||||||
r#"local mod = get_mod("{{id}}")
|
|
||||||
|
|
||||||
-- Your mod code goes here.
|
|
||||||
"#,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"scripts/mods/{{id}}/data.lua",
|
|
||||||
r#"local mod = get_mod("{{id}}")
|
|
||||||
|
|
||||||
return {
|
|
||||||
name = "{{name}}",
|
|
||||||
description = mod:localize("mod_description"),
|
|
||||||
is_togglable = true,
|
|
||||||
}"#,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"scripts/mods/{{id}}/localization.lua",
|
|
||||||
r#"return {
|
|
||||||
mod_description = {
|
|
||||||
en = "This is my new mod '{{name}}'!",
|
|
||||||
},
|
|
||||||
}"#,
|
|
||||||
),
|
|
||||||
];
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
|
||||||
Command::new("new")
|
Command::new("new")
|
||||||
.about("Create a new project")
|
.about("Create a new project")
|
||||||
.arg(
|
.arg(Arg::new("name").help(
|
||||||
Arg::new("name")
|
"The name of the new project. Will default to the name of the project's directory",
|
||||||
.long("name")
|
))
|
||||||
.help("The display name of the new mod."),
|
.arg(Arg::new("directory").required(true).help(
|
||||||
)
|
"The directory where to initialize the new project. This directory must be empty\
|
||||||
.arg(Arg::new("root").help(
|
or must not exist. If `.` is given, the current directory will be used.",
|
||||||
"The directory where to initialize the new project. This directory must be empty \
|
|
||||||
or must not exist. If omitted or `.` is given, the current directory \
|
|
||||||
will be used.",
|
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(_ctx: Arc<RwLock<sdk::Context>>, _matches: &ArgMatches) -> Result<()> {
|
||||||
let root = if let Some(dir) = matches.get_one::<String>("root") {
|
unimplemented!()
|
||||||
if dir == "." {
|
|
||||||
std::env::current_dir()
|
|
||||||
.wrap_err("The current working dir is invalid")
|
|
||||||
.with_suggestion(|| "Change to a different directory.")?
|
|
||||||
} else {
|
|
||||||
PathBuf::from(dir)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let prompt = "The mod directory";
|
|
||||||
match std::env::current_dir() {
|
|
||||||
Ok(default) => promptly::prompt_default(prompt, default)?,
|
|
||||||
Err(_) => promptly::prompt(prompt)?,
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let name = if let Some(name) = matches.get_one::<String>("name") {
|
|
||||||
name.clone()
|
|
||||||
} else {
|
|
||||||
promptly::prompt("The display name")?
|
|
||||||
};
|
|
||||||
|
|
||||||
let id = {
|
|
||||||
let default = name
|
|
||||||
.chars()
|
|
||||||
.map(|c| {
|
|
||||||
if c.is_ascii_alphanumeric() {
|
|
||||||
c.to_ascii_lowercase()
|
|
||||||
} else {
|
|
||||||
'_'
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect::<String>();
|
|
||||||
promptly::prompt_default("The unique mod ID", default)?
|
|
||||||
};
|
|
||||||
|
|
||||||
tracing::debug!(root = %root.display(), name, id);
|
|
||||||
|
|
||||||
let render_ctx = minijinja::context!(name => name.as_str(), id => id.as_str());
|
|
||||||
let env = Environment::new();
|
|
||||||
|
|
||||||
let templates = TEMPLATES
|
|
||||||
.iter()
|
|
||||||
.map(|(path_tmpl, content_tmpl)| {
|
|
||||||
env.render_str(path_tmpl, &render_ctx)
|
|
||||||
.wrap_err_with(|| format!("Failed to render template: {}", path_tmpl))
|
|
||||||
.and_then(|path| {
|
|
||||||
env.render_named_str(&path, content_tmpl, &render_ctx)
|
|
||||||
.wrap_err_with(|| format!("Failed to render template '{}'", &path))
|
|
||||||
.map(|content| (root.join(path), content))
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.map(|res| async move {
|
|
||||||
match res {
|
|
||||||
Ok((path, content)) => {
|
|
||||||
let dir = path
|
|
||||||
.parent()
|
|
||||||
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
|
|
||||||
|
|
||||||
DirBuilder::new()
|
|
||||||
.recursive(true)
|
|
||||||
.create(&dir)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!("Failed to create directory {}", dir.display())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
tracing::trace!("Writing file {}", path.display());
|
|
||||||
|
|
||||||
fs::write(&path, content.as_bytes())
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!("Failed to write content to path {}", path.display())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
Err(e) => Err(e),
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
futures::stream::iter(templates)
|
|
||||||
.buffer_unordered(10)
|
|
||||||
.try_fold((), |_, _| async { Ok(()) })
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
tracing::info!(
|
|
||||||
"Created {} files for mod '{}' in '{}'.",
|
|
||||||
TEMPLATES.len(),
|
|
||||||
name,
|
|
||||||
root.display()
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,147 +0,0 @@
|
||||||
use std::io::{Cursor, Write};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
|
||||||
use color_eyre::eyre::{Context, Result};
|
|
||||||
use color_eyre::Help;
|
|
||||||
use dtmt_shared::ModConfig;
|
|
||||||
use path_slash::{PathBufExt, PathExt};
|
|
||||||
use tokio::fs;
|
|
||||||
use tokio_stream::wrappers::ReadDirStream;
|
|
||||||
use tokio_stream::StreamExt;
|
|
||||||
use zip::write::SimpleFileOptions;
|
|
||||||
use zip::ZipWriter;
|
|
||||||
|
|
||||||
use crate::cmd::build::read_project_config;
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
|
||||||
Command::new("package")
|
|
||||||
.about("Package compiled bundles for distribution")
|
|
||||||
.arg(
|
|
||||||
Arg::new("project")
|
|
||||||
.required(false)
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"The path to the project to build. \
|
|
||||||
If omitted, dtmt will search from the current working directory upward.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("directory")
|
|
||||||
.long("directory")
|
|
||||||
.short('d')
|
|
||||||
.default_value("out")
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"The path to the directory were the compiled bundles were written to. \
|
|
||||||
This is the same directory as `dtmt build -o`",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("out")
|
|
||||||
.long("out")
|
|
||||||
.short('o')
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"The path to write the packaged file to. Will default to a file in the \
|
|
||||||
current working directory",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_recursion::async_recursion]
|
|
||||||
async fn process_directory<P1, P2, W>(zip: &mut ZipWriter<W>, path: P1, prefix: P2) -> Result<()>
|
|
||||||
where
|
|
||||||
P1: AsRef<Path> + std::marker::Send,
|
|
||||||
P2: AsRef<Path> + std::marker::Send,
|
|
||||||
W: std::io::Write + std::io::Seek + std::marker::Send,
|
|
||||||
{
|
|
||||||
let path = path.as_ref();
|
|
||||||
let prefix = prefix.as_ref();
|
|
||||||
|
|
||||||
zip.add_directory(prefix.to_slash_lossy(), SimpleFileOptions::default())?;
|
|
||||||
|
|
||||||
let read_dir = fs::read_dir(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read directory '{}'", path.display()))?;
|
|
||||||
|
|
||||||
let stream = ReadDirStream::new(read_dir).map(|res| res.wrap_err("Failed to read dir entry"));
|
|
||||||
tokio::pin!(stream);
|
|
||||||
|
|
||||||
while let Some(res) = stream.next().await {
|
|
||||||
let entry = res?;
|
|
||||||
let in_path = entry.path();
|
|
||||||
let out_path = prefix.join(entry.file_name());
|
|
||||||
|
|
||||||
let t = entry.file_type().await?;
|
|
||||||
|
|
||||||
if t.is_file() || t.is_symlink() {
|
|
||||||
let data = fs::read(&in_path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read '{}'", in_path.display()))?;
|
|
||||||
{
|
|
||||||
zip.start_file(out_path.to_slash_lossy(), SimpleFileOptions::default())?;
|
|
||||||
zip.write_all(&data)?;
|
|
||||||
}
|
|
||||||
} else if t.is_dir() {
|
|
||||||
process_directory(zip, in_path, out_path).await?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) async fn package<P1, P2>(cfg: &ModConfig, path: P1, dest: P2) -> Result<()>
|
|
||||||
where
|
|
||||||
P1: AsRef<Path>,
|
|
||||||
P2: AsRef<Path>,
|
|
||||||
{
|
|
||||||
let path = path.as_ref();
|
|
||||||
let dest = dest.as_ref();
|
|
||||||
|
|
||||||
let mut zip = ZipWriter::new(Cursor::new(Vec::with_capacity(1024)));
|
|
||||||
|
|
||||||
process_directory(&mut zip, path, PathBuf::from(&cfg.id))
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to add directory to archive")?;
|
|
||||||
|
|
||||||
{
|
|
||||||
let name = PathBuf::from(&cfg.id).join("dtmt.cfg");
|
|
||||||
let path = cfg.dir.join("dtmt.cfg");
|
|
||||||
|
|
||||||
let data = fs::read(&path)
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to read mod config at {}", path.display()))?;
|
|
||||||
|
|
||||||
zip.start_file(name.to_slash_lossy(), SimpleFileOptions::default())?;
|
|
||||||
zip.write_all(&data)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let data = zip.finish()?;
|
|
||||||
|
|
||||||
fs::write(dest, data.into_inner())
|
|
||||||
.await
|
|
||||||
.wrap_err_with(|| format!("Failed to write mod archive to '{}'", dest.display()))
|
|
||||||
.with_suggestion(|| "Make sure that parent directories exist.".to_string())?;
|
|
||||||
|
|
||||||
tracing::info!("Mod archive written to {}", dest.display());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
|
||||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|
||||||
let cfg = read_project_config(matches.get_one::<PathBuf>("project").cloned()).await?;
|
|
||||||
|
|
||||||
let dest = matches
|
|
||||||
.get_one::<PathBuf>("out")
|
|
||||||
.map(path_clean::clean)
|
|
||||||
.unwrap_or_else(|| PathBuf::from(format!("{}.zip", cfg.id)));
|
|
||||||
|
|
||||||
let path = cfg.dir.join(
|
|
||||||
matches
|
|
||||||
.get_one::<PathBuf>("directory")
|
|
||||||
.expect("parameter has default value"),
|
|
||||||
);
|
|
||||||
|
|
||||||
package(&cfg, path, dest).await
|
|
||||||
}
|
|
|
@ -8,7 +8,7 @@ use tokio::fs;
|
||||||
use tokio_stream::wrappers::ReadDirStream;
|
use tokio_stream::wrappers::ReadDirStream;
|
||||||
|
|
||||||
#[tracing::instrument]
|
#[tracing::instrument]
|
||||||
pub async fn process_path<P>(path: P) -> Vec<PathBuf>
|
pub async fn foo<P>(path: P) -> Vec<PathBuf>
|
||||||
where
|
where
|
||||||
P: AsRef<Path> + std::fmt::Debug,
|
P: AsRef<Path> + std::fmt::Debug,
|
||||||
{
|
{
|
||||||
|
@ -98,10 +98,7 @@ where
|
||||||
I: Iterator<Item = PathBuf> + std::fmt::Debug,
|
I: Iterator<Item = PathBuf> + std::fmt::Debug,
|
||||||
{
|
{
|
||||||
let tasks = paths.map(|p| async move {
|
let tasks = paths.map(|p| async move {
|
||||||
// Clippy doesn't understand that the block here is required to `move` in the reference.
|
match tokio::spawn(async move { foo(&p).await }).await {
|
||||||
// The task is spawned to make sure tokio can distribute these over threads.
|
|
||||||
#[allow(clippy::redundant_async_block)]
|
|
||||||
match tokio::spawn(async move { process_path(&p).await }).await {
|
|
||||||
Ok(paths) => paths,
|
Ok(paths) => paths,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
tracing::error!(%err, "failed to spawn task to resolve bundle paths");
|
tracing::error!(%err, "failed to spawn task to resolve bundle paths");
|
||||||
|
@ -114,9 +111,6 @@ where
|
||||||
results.into_iter().flatten().collect()
|
results.into_iter().flatten().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
// `tracing::instrument` generates code that triggers this warning.
|
|
||||||
// Not much we can do to prevent that.
|
|
||||||
#[allow(clippy::let_with_type_underscore)]
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub fn resolve_bundle_paths<I>(paths: I) -> impl Stream<Item = PathBuf>
|
pub fn resolve_bundle_paths<I>(paths: I) -> impl Stream<Item = PathBuf>
|
||||||
where
|
where
|
||||||
|
@ -135,12 +129,12 @@ mod tests {
|
||||||
use tempfile::tempdir;
|
use tempfile::tempdir;
|
||||||
use tokio::process::Command;
|
use tokio::process::Command;
|
||||||
|
|
||||||
use super::process_path;
|
use super::foo;
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn resolve_single_file() {
|
async fn resolve_single_file() {
|
||||||
let path = PathBuf::from("foo");
|
let path = PathBuf::from("foo");
|
||||||
let paths = process_path(&path).await;
|
let paths = foo(&path).await;
|
||||||
assert_eq!(paths.len(), 1);
|
assert_eq!(paths.len(), 1);
|
||||||
assert_eq!(paths[0], path);
|
assert_eq!(paths[0], path);
|
||||||
}
|
}
|
||||||
|
@ -148,7 +142,7 @@ mod tests {
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn resolve_empty_directory() {
|
async fn resolve_empty_directory() {
|
||||||
let dir = tempdir().expect("failed to create temporary directory");
|
let dir = tempdir().expect("failed to create temporary directory");
|
||||||
let paths = process_path(dir).await;
|
let paths = foo(dir).await;
|
||||||
assert!(paths.is_empty());
|
assert!(paths.is_empty());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -176,7 +170,7 @@ mod tests {
|
||||||
.await
|
.await
|
||||||
.expect("failed to create temporary files");
|
.expect("failed to create temporary files");
|
||||||
|
|
||||||
let paths = process_path(dir).await;
|
let paths = foo(dir).await;
|
||||||
|
|
||||||
assert_eq!(bundle_names.len(), paths.len());
|
assert_eq!(bundle_names.len(), paths.len());
|
||||||
|
|
||||||
|
|
|
@ -1,231 +1,26 @@
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||||
use color_eyre::eyre::{Context, Result};
|
use color_eyre::eyre::Result;
|
||||||
use dtmt_shared::ModConfig;
|
use tokio::sync::RwLock;
|
||||||
use notify::{Event, Watcher};
|
|
||||||
|
|
||||||
use crate::cmd::build::{build, read_project_config};
|
pub(crate) fn _command_definition() -> Command {
|
||||||
|
|
||||||
use super::package::package;
|
|
||||||
|
|
||||||
pub(crate) fn command_definition() -> Command {
|
|
||||||
Command::new("watch")
|
Command::new("watch")
|
||||||
.about("Watch for file system changes and re-build the mod archive.")
|
.about("Re-build the given directory on file changes.")
|
||||||
.arg(
|
|
||||||
Arg::new("debounce")
|
|
||||||
.long("debounce")
|
|
||||||
.short('b')
|
|
||||||
.default_value("150")
|
|
||||||
.value_parser(value_parser!(u64))
|
|
||||||
.help(
|
|
||||||
"The delay to debounce events by. This avoids continously \
|
|
||||||
rebuilding on rapid file changes, such as version control checkouts.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.arg(
|
.arg(
|
||||||
Arg::new("directory")
|
Arg::new("directory")
|
||||||
.required(false)
|
.required(false)
|
||||||
|
.default_value(".")
|
||||||
.value_parser(value_parser!(PathBuf))
|
.value_parser(value_parser!(PathBuf))
|
||||||
.help(
|
.help(
|
||||||
"The path to the project to build. \
|
"The path to the project to build. \
|
||||||
If omitted, the current working directory is used.",
|
If omitted, the current working directory is used.",
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.arg(
|
|
||||||
Arg::new("out")
|
|
||||||
.long("out")
|
|
||||||
.short('o')
|
|
||||||
.default_value("out")
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help("The directory to write output files to."),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("deploy")
|
|
||||||
.long("deploy")
|
|
||||||
.short('d')
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"If the path to the game (without the trailing '/bundle') is specified, \
|
|
||||||
deploy the newly built bundles. \
|
|
||||||
This will not adjust the bundle database or package files, so if files are \
|
|
||||||
added or removed, you will have to import into DTMM and re-deploy there.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("archive")
|
|
||||||
.long("archive")
|
|
||||||
.short('a')
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.help(
|
|
||||||
"The path to write the packaged file to. Will default to a file in the \
|
|
||||||
current working directory",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::new("ignore")
|
|
||||||
.long("ignore")
|
|
||||||
.short('i')
|
|
||||||
.value_parser(value_parser!(PathBuf))
|
|
||||||
.action(ArgAction::Append)
|
|
||||||
.help(
|
|
||||||
"A directory or file path to ignore. May be specified multiple times. \
|
|
||||||
The values of 'out' and 'archive' are ignored automatically.",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument]
|
|
||||||
async fn compile(
|
|
||||||
cfg: &ModConfig,
|
|
||||||
out_path: impl AsRef<Path> + std::fmt::Debug,
|
|
||||||
archive_path: impl AsRef<Path> + std::fmt::Debug,
|
|
||||||
game_dir: Arc<Option<impl AsRef<Path> + std::fmt::Debug>>,
|
|
||||||
) -> Result<()> {
|
|
||||||
let out_path = out_path.as_ref();
|
|
||||||
build(cfg, out_path, game_dir)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to build bundles")?;
|
|
||||||
package(cfg, out_path, archive_path)
|
|
||||||
.await
|
|
||||||
.wrap_err("Failed to package bundles")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all)]
|
#[tracing::instrument(skip_all)]
|
||||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
pub(crate) async fn run(_ctx: Arc<RwLock<sdk::Context>>, _matches: &ArgMatches) -> Result<()> {
|
||||||
let cfg = read_project_config(matches.get_one::<PathBuf>("directory").cloned())
|
unimplemented!()
|
||||||
.await
|
|
||||||
.wrap_err("failed to load project config")?;
|
|
||||||
tracing::debug!(?cfg);
|
|
||||||
let cfg = Arc::new(cfg);
|
|
||||||
|
|
||||||
let game_dir = matches
|
|
||||||
.get_one::<PathBuf>("deploy")
|
|
||||||
.map(path_clean::clean)
|
|
||||||
.map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) })
|
|
||||||
.map(|p| p.join("bundle"));
|
|
||||||
|
|
||||||
let out_path = matches
|
|
||||||
.get_one::<PathBuf>("out")
|
|
||||||
.map(path_clean::clean)
|
|
||||||
.map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) })
|
|
||||||
.expect("parameter should have default value");
|
|
||||||
|
|
||||||
let archive_path = matches
|
|
||||||
.get_one::<PathBuf>("archive")
|
|
||||||
.map(path_clean::clean)
|
|
||||||
.map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) })
|
|
||||||
.unwrap_or_else(|| cfg.dir.join(format!("{}.zip", cfg.id)));
|
|
||||||
|
|
||||||
let ignored = {
|
|
||||||
let mut ignored: Vec<_> = matches
|
|
||||||
.get_many::<PathBuf>("ignore")
|
|
||||||
.unwrap_or_default()
|
|
||||||
.map(path_clean::clean)
|
|
||||||
.map(|p| if p.is_absolute() { p } else { cfg.dir.join(p) })
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
ignored.push(out_path.clone());
|
|
||||||
ignored.push(archive_path.clone());
|
|
||||||
|
|
||||||
ignored
|
|
||||||
};
|
|
||||||
|
|
||||||
if tracing::enabled!(tracing::Level::INFO) {
|
|
||||||
let list = ignored.iter().fold(String::new(), |mut s, p| {
|
|
||||||
s.push_str("\n - ");
|
|
||||||
s.push_str(&p.display().to_string());
|
|
||||||
s
|
|
||||||
});
|
|
||||||
|
|
||||||
tracing::info!("Ignoring:{}", list);
|
|
||||||
}
|
|
||||||
|
|
||||||
let game_dir = Arc::new(game_dir);
|
|
||||||
|
|
||||||
let duration =
|
|
||||||
Duration::from_millis(matches.get_one::<u64>("debounce").copied().unwrap_or(150));
|
|
||||||
let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
|
|
||||||
|
|
||||||
let mut watcher = notify::recommended_watcher(move |res: Result<Event, _>| {
|
|
||||||
let ignored = match &res {
|
|
||||||
Ok(evt) => evt.paths.iter().any(|p1| {
|
|
||||||
let p1 = path_clean::clean(p1);
|
|
||||||
ignored.iter().any(|p2| p1.starts_with(p2))
|
|
||||||
}),
|
|
||||||
Err(_) => false,
|
|
||||||
};
|
|
||||||
|
|
||||||
tracing::trace!(?res, ignored, "Received file system event");
|
|
||||||
|
|
||||||
if !ignored {
|
|
||||||
if let Err(err) = tx.send(res) {
|
|
||||||
tracing::error!("Failed to send file system event: {:?}", err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.wrap_err("failed to create file system watcher")?;
|
|
||||||
|
|
||||||
tracing::info!("Starting file watcher on '{}'", cfg.dir.display());
|
|
||||||
|
|
||||||
let path = cfg.dir.clone();
|
|
||||||
watcher
|
|
||||||
.watch(&path, notify::RecursiveMode::Recursive)
|
|
||||||
.wrap_err_with(|| {
|
|
||||||
format!(
|
|
||||||
"failed to watch directory for file changes: {}",
|
|
||||||
path.display()
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
tracing::trace!("Starting debounce loop");
|
|
||||||
|
|
||||||
let mut dirty = false;
|
|
||||||
loop {
|
|
||||||
// While we could just always await on the timeout, splitting things like this
|
|
||||||
// optimizes the case when no events happen for a while. Rather than being woken every
|
|
||||||
// `duration` just to do nothing, this way we always wait for a new event first until
|
|
||||||
// we start the debounce timeouts.
|
|
||||||
if dirty {
|
|
||||||
match tokio::time::timeout(duration, rx.recv()).await {
|
|
||||||
// The error is the wanted case, as it signals that we haven't received an
|
|
||||||
// event within `duration`, which es what the debounce is supposed to wait for.
|
|
||||||
Err(_) => {
|
|
||||||
tracing::trace!("Received debounce timeout, running build");
|
|
||||||
if let Err(err) =
|
|
||||||
compile(&cfg, &out_path, &archive_path, game_dir.clone()).await
|
|
||||||
{
|
|
||||||
tracing::error!("Failed to build mod archive: {:?}", err);
|
|
||||||
}
|
|
||||||
dirty = false;
|
|
||||||
}
|
|
||||||
Ok(None) => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
// We received a value before the timeout, so we reset it
|
|
||||||
Ok(_) => {
|
|
||||||
tracing::trace!("Received value before timeout, resetting");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
match rx.recv().await {
|
|
||||||
Some(_) => {
|
|
||||||
tracing::trace!("Received event, starting debounce");
|
|
||||||
dirty = true;
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tracing::trace!("Event channel closed");
|
|
||||||
if let Err(err) = compile(&cfg, &out_path, &archive_path, game_dir.clone()).await {
|
|
||||||
tracing::error!("Failed to build mod archive: {:?}", err);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,8 +1,5 @@
|
||||||
#![feature(io_error_more)]
|
#![feature(io_error_more)]
|
||||||
#![feature(let_chains)]
|
#![feature(let_chains)]
|
||||||
#![feature(result_flattening)]
|
|
||||||
#![feature(test)]
|
|
||||||
#![windows_subsystem = "console"]
|
|
||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
@ -10,25 +7,24 @@ use std::sync::Arc;
|
||||||
use clap::parser::ValueSource;
|
use clap::parser::ValueSource;
|
||||||
use clap::value_parser;
|
use clap::value_parser;
|
||||||
use clap::{command, Arg};
|
use clap::{command, Arg};
|
||||||
use color_eyre::eyre;
|
|
||||||
use color_eyre::eyre::{Context, Result};
|
use color_eyre::eyre::{Context, Result};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tokio::fs::File;
|
use tokio::fs::File;
|
||||||
use tokio::io::BufReader;
|
use tokio::io::BufReader;
|
||||||
use tokio::sync::RwLock;
|
use tokio::sync::RwLock;
|
||||||
|
use tracing_error::ErrorLayer;
|
||||||
|
use tracing_subscriber::prelude::*;
|
||||||
|
use tracing_subscriber::EnvFilter;
|
||||||
|
|
||||||
mod cmd {
|
mod cmd {
|
||||||
pub mod build;
|
pub mod build;
|
||||||
pub mod bundle;
|
pub mod bundle;
|
||||||
pub mod dictionary;
|
pub mod dictionary;
|
||||||
pub mod migrate;
|
|
||||||
pub mod murmur;
|
pub mod murmur;
|
||||||
pub mod new;
|
pub mod new;
|
||||||
pub mod package;
|
|
||||||
mod util;
|
mod util;
|
||||||
pub mod watch;
|
pub mod watch;
|
||||||
}
|
}
|
||||||
mod shell_parse;
|
|
||||||
|
|
||||||
#[derive(Default, Deserialize, Serialize)]
|
#[derive(Default, Deserialize, Serialize)]
|
||||||
struct GlobalConfig {
|
struct GlobalConfig {
|
||||||
|
@ -53,19 +49,28 @@ async fn main() -> Result<()> {
|
||||||
.global(true)
|
.global(true)
|
||||||
.value_parser(value_parser!(PathBuf)),
|
.value_parser(value_parser!(PathBuf)),
|
||||||
)
|
)
|
||||||
.subcommand(cmd::build::command_definition())
|
// .subcommand(cmd::build::command_definition())
|
||||||
.subcommand(cmd::bundle::command_definition())
|
.subcommand(cmd::bundle::command_definition())
|
||||||
.subcommand(cmd::dictionary::command_definition())
|
.subcommand(cmd::dictionary::command_definition())
|
||||||
.subcommand(cmd::migrate::command_definition())
|
|
||||||
.subcommand(cmd::murmur::command_definition())
|
.subcommand(cmd::murmur::command_definition())
|
||||||
.subcommand(cmd::new::command_definition())
|
// .subcommand(cmd::new::command_definition())
|
||||||
.subcommand(cmd::package::command_definition())
|
// .subcommand(cmd::watch::command_definition())
|
||||||
.subcommand(cmd::watch::command_definition())
|
|
||||||
.get_matches();
|
.get_matches();
|
||||||
|
|
||||||
dtmt_shared::create_tracing_subscriber();
|
{
|
||||||
|
let fmt_layer = tracing_subscriber::fmt::layer().pretty();
|
||||||
|
let filter_layer =
|
||||||
|
EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?;
|
||||||
|
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(filter_layer)
|
||||||
|
.with(fmt_layer)
|
||||||
|
.with(ErrorLayer::new(
|
||||||
|
tracing_subscriber::fmt::format::Pretty::default(),
|
||||||
|
))
|
||||||
|
.init();
|
||||||
|
}
|
||||||
|
|
||||||
// TODO: Move this into a `Context::init` method?
|
|
||||||
let ctx = sdk::Context::new();
|
let ctx = sdk::Context::new();
|
||||||
let ctx = Arc::new(RwLock::new(ctx));
|
let ctx = Arc::new(RwLock::new(ctx));
|
||||||
|
|
||||||
|
@ -78,9 +83,10 @@ async fn main() -> Result<()> {
|
||||||
let ctx = ctx.clone();
|
let ctx = ctx.clone();
|
||||||
|
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
|
let mut ctx = ctx.write().await;
|
||||||
let res = File::open(&path)
|
let res = File::open(&path)
|
||||||
.await
|
.await
|
||||||
.wrap_err_with(|| format!("Failed to open dictionary file: {}", path.display()));
|
.wrap_err_with(|| format!("failed to open dictionary file: {}", path.display()));
|
||||||
|
|
||||||
let f = match res {
|
let f = match res {
|
||||||
Ok(f) => f,
|
Ok(f) => f,
|
||||||
|
@ -95,7 +101,6 @@ async fn main() -> Result<()> {
|
||||||
};
|
};
|
||||||
|
|
||||||
let r = BufReader::new(f);
|
let r = BufReader::new(f);
|
||||||
let mut ctx = ctx.write().await;
|
|
||||||
if let Err(err) = ctx.lookup.from_csv(r).await {
|
if let Err(err) = ctx.lookup.from_csv(r).await {
|
||||||
tracing::error!("{:#}", err);
|
tracing::error!("{:#}", err);
|
||||||
}
|
}
|
||||||
|
@ -107,7 +112,7 @@ async fn main() -> Result<()> {
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
let conf = tokio::task::spawn_blocking(|| {
|
let conf = tokio::task::spawn_blocking(|| {
|
||||||
confy::load::<GlobalConfig>(clap::crate_name!(), None)
|
confy::load::<GlobalConfig>(clap::crate_name!(), None)
|
||||||
.wrap_err("Failed to load global configuration")
|
.wrap_err("failed to load global configuration")
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
|
@ -124,20 +129,13 @@ async fn main() -> Result<()> {
|
||||||
|
|
||||||
tokio::try_join!(dicitonary_task, global_config_task)?;
|
tokio::try_join!(dicitonary_task, global_config_task)?;
|
||||||
|
|
||||||
let ctx = match Arc::try_unwrap(ctx).map(|ctx| ctx.into_inner()) {
|
|
||||||
Ok(ctx) => ctx,
|
|
||||||
Err(_) => eyre::bail!("failed to unwrap context"),
|
|
||||||
};
|
|
||||||
|
|
||||||
match matches.subcommand() {
|
match matches.subcommand() {
|
||||||
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
|
|
||||||
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
|
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
|
||||||
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
|
|
||||||
Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
|
|
||||||
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
|
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
|
||||||
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,
|
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,
|
||||||
Some(("package", sub_matches)) => cmd::package::run(ctx, sub_matches).await?,
|
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
|
||||||
Some(("watch", sub_matches)) => cmd::watch::run(ctx, sub_matches).await?,
|
Some(("watch", sub_matches)) => cmd::watch::run(ctx, sub_matches).await?,
|
||||||
|
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
|
||||||
_ => unreachable!(
|
_ => unreachable!(
|
||||||
"clap is configured to require a subcommand, and they're all handled above"
|
"clap is configured to require a subcommand, and they're all handled above"
|
||||||
),
|
),
|
||||||
|
|
|
@ -1,189 +0,0 @@
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
|
||||||
enum ParserState {
|
|
||||||
Start,
|
|
||||||
Word,
|
|
||||||
SingleQuote,
|
|
||||||
DoubleQuote,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct ShellParser<'a> {
|
|
||||||
bytes: &'a [u8],
|
|
||||||
offset: usize,
|
|
||||||
pub errored: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> ShellParser<'a> {
|
|
||||||
pub fn new(bytes: &'a [u8]) -> Self {
|
|
||||||
Self {
|
|
||||||
bytes,
|
|
||||||
offset: 0,
|
|
||||||
errored: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_word(&mut self) -> Option<&'a [u8]> {
|
|
||||||
// The start of the current word. Certain leading characters should be ignored,
|
|
||||||
// so this might change.
|
|
||||||
let mut start = self.offset;
|
|
||||||
let mut state = ParserState::Start;
|
|
||||||
|
|
||||||
while self.offset < self.bytes.len() {
|
|
||||||
let c = self.bytes[self.offset];
|
|
||||||
self.offset += 1;
|
|
||||||
|
|
||||||
match state {
|
|
||||||
ParserState::Start => match c {
|
|
||||||
// Ignore leading whitespace
|
|
||||||
b' ' | b'\t' | b'\n' => start += 1,
|
|
||||||
b'\'' => {
|
|
||||||
state = ParserState::SingleQuote;
|
|
||||||
start += 1;
|
|
||||||
}
|
|
||||||
b'"' => {
|
|
||||||
state = ParserState::DoubleQuote;
|
|
||||||
start += 1;
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
state = ParserState::Word;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
ParserState::Word => match c {
|
|
||||||
// Unquoted whitespace ends the current word
|
|
||||||
b' ' | b'\t' | b'\n' => {
|
|
||||||
return Some(&self.bytes[start..self.offset - 1]);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
},
|
|
||||||
ParserState::SingleQuote => match c {
|
|
||||||
b'\'' => {
|
|
||||||
return Some(&self.bytes[start..(self.offset - 1)]);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
},
|
|
||||||
ParserState::DoubleQuote => match c {
|
|
||||||
b'"' => {
|
|
||||||
return Some(&self.bytes[start..(self.offset - 1)]);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match state {
|
|
||||||
ParserState::Start => None,
|
|
||||||
ParserState::Word => Some(&self.bytes[start..self.offset]),
|
|
||||||
ParserState::SingleQuote | ParserState::DoubleQuote => {
|
|
||||||
self.errored = true;
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Iterator for ShellParser<'a> {
|
|
||||||
type Item = &'a [u8];
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
self.parse_word()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_one_word() {
|
|
||||||
let mut it = ShellParser::new(b"hello");
|
|
||||||
assert_eq!(it.next(), Some("hello".as_bytes()));
|
|
||||||
assert_eq!(it.next(), None);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_one_single() {
|
|
||||||
let mut it = ShellParser::new(b"'hello'");
|
|
||||||
assert_eq!(it.next(), Some("hello".as_bytes()));
|
|
||||||
assert_eq!(it.next(), None);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_open_quote() {
|
|
||||||
let mut it = ShellParser::new(b"'hello");
|
|
||||||
assert_eq!(it.next(), None);
|
|
||||||
assert!(it.errored)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_ww2ogg() {
|
|
||||||
let mut it = ShellParser::new(
|
|
||||||
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
|
|
||||||
);
|
|
||||||
assert_eq!(it.next(), Some("ww2ogg.exe".as_bytes()));
|
|
||||||
assert_eq!(it.next(), Some("--pcb".as_bytes()));
|
|
||||||
assert_eq!(
|
|
||||||
it.next(),
|
|
||||||
Some("/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin".as_bytes())
|
|
||||||
);
|
|
||||||
assert_eq!(it.next(), None);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod bench {
|
|
||||||
extern crate test;
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
#[cfg(feature = "shlex-bench")]
|
|
||||||
use shlex::bytes::Shlex;
|
|
||||||
use test::Bencher;
|
|
||||||
|
|
||||||
mod ww2ogg {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[bench]
|
|
||||||
fn custom(b: &mut Bencher) {
|
|
||||||
let val = test::black_box(
|
|
||||||
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
|
|
||||||
);
|
|
||||||
b.iter(|| {
|
|
||||||
let it = ShellParser::new(val);
|
|
||||||
let _: Vec<_> = test::black_box(it.collect());
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "shlex-bench")]
|
|
||||||
#[bench]
|
|
||||||
fn shlex(b: &mut Bencher) {
|
|
||||||
let val = test::black_box(
|
|
||||||
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
|
|
||||||
);
|
|
||||||
b.iter(|| {
|
|
||||||
let it = Shlex::new(val);
|
|
||||||
let _: Vec<_> = test::black_box(it.collect());
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod one_single {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[bench]
|
|
||||||
fn custom(b: &mut Bencher) {
|
|
||||||
let val = test::black_box(b"'hello'");
|
|
||||||
b.iter(|| {
|
|
||||||
let it = ShellParser::new(val);
|
|
||||||
let _: Vec<_> = test::black_box(it.collect());
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "shlex-bench")]
|
|
||||||
#[bench]
|
|
||||||
fn shlex(b: &mut Bencher) {
|
|
||||||
let val = test::black_box(b"'hello'");
|
|
||||||
b.iter(|| {
|
|
||||||
let it = Shlex::new(val);
|
|
||||||
let _: Vec<_> = test::black_box(it.collect());
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
BIN
docs/screenshots/dtmm.png
(Stored with Git LFS)
BIN
docs/screenshots/dtmm.png
(Stored with Git LFS)
Binary file not shown.
|
@ -1 +0,0 @@
|
||||||
Subproject commit 27beb4bc1ffd2865a432e13f0588b5351ff419bf
|
|
|
@ -1 +0,0 @@
|
||||||
Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751
|
|
|
@ -1,16 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "dtmt-shared"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
ansi_term = { workspace = true }
|
|
||||||
color-eyre = { workspace = true }
|
|
||||||
serde = { workspace = true }
|
|
||||||
steamlocate = { workspace = true }
|
|
||||||
time = { workspace = true }
|
|
||||||
tracing = { workspace = true }
|
|
||||||
tracing-error = { workspace = true }
|
|
||||||
tracing-subscriber = { workspace = true }
|
|
|
@ -1,13 +0,0 @@
|
||||||
= dtmt-shared
|
|
||||||
:idprefix:
|
|
||||||
:idseparator:
|
|
||||||
:toc: macro
|
|
||||||
:toclevels: 1
|
|
||||||
:!toc-title:
|
|
||||||
:caution-caption: :fire:
|
|
||||||
:important-caption: :exclamtion:
|
|
||||||
:note-caption: :paperclip:
|
|
||||||
:tip-caption: :bulb:
|
|
||||||
:warning-caption: :warning:
|
|
||||||
|
|
||||||
A set of types and functions shared between multiple crates within _Darktide Mod Tools_ that don't fit into the engine SDK.
|
|
|
@ -1,102 +0,0 @@
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
use color_eyre::eyre::{OptionExt as _, WrapErr as _};
|
|
||||||
use color_eyre::Result;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use steamlocate::SteamDir;
|
|
||||||
use time::OffsetDateTime;
|
|
||||||
|
|
||||||
pub use log::*;
|
|
||||||
|
|
||||||
mod log;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
|
|
||||||
pub struct ModConfigResources {
|
|
||||||
pub init: PathBuf,
|
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
|
||||||
pub data: Option<PathBuf>,
|
|
||||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
|
||||||
pub localization: Option<PathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
|
|
||||||
#[serde(rename_all = "snake_case")]
|
|
||||||
pub enum ModOrder {
|
|
||||||
Before,
|
|
||||||
After,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
|
|
||||||
#[serde(untagged)]
|
|
||||||
pub enum ModDependency {
|
|
||||||
ID(String),
|
|
||||||
Config { id: String, order: ModOrder },
|
|
||||||
}
|
|
||||||
|
|
||||||
// A bit dumb, but serde doesn't support literal values with the
|
|
||||||
// `default` attribute, only paths.
|
|
||||||
fn default_true() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Similarly dumb, as the `skip_serializing_if` attribute needs a function
|
|
||||||
fn is_true(val: &bool) -> bool {
|
|
||||||
*val
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
|
|
||||||
pub struct ModConfig {
|
|
||||||
#[serde(skip)]
|
|
||||||
pub dir: PathBuf,
|
|
||||||
pub id: String,
|
|
||||||
pub name: String,
|
|
||||||
pub summary: String,
|
|
||||||
pub version: String,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub description: Option<String>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub author: Option<String>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub image: Option<PathBuf>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub categories: Vec<String>,
|
|
||||||
#[serde(default)]
|
|
||||||
pub packages: Vec<PathBuf>,
|
|
||||||
pub resources: ModConfigResources,
|
|
||||||
#[serde(default)]
|
|
||||||
pub depends: Vec<ModDependency>,
|
|
||||||
#[serde(default = "default_true", skip_serializing_if = "is_true")]
|
|
||||||
pub bundled: bool,
|
|
||||||
#[serde(default)]
|
|
||||||
pub name_overrides: HashMap<String, String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const STEAMAPP_ID: u32 = 1361210;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct GameInfo {
|
|
||||||
pub path: PathBuf,
|
|
||||||
pub last_updated: OffsetDateTime,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn collect_game_info() -> Result<Option<GameInfo>> {
|
|
||||||
let dir = SteamDir::locate().wrap_err("Failed to locate Steam installation")?;
|
|
||||||
|
|
||||||
let found = dir
|
|
||||||
.find_app(STEAMAPP_ID)
|
|
||||||
.wrap_err("Failed to look up game by Steam app ID")?;
|
|
||||||
|
|
||||||
let Some((app, _)) = found else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
let last_updated = app
|
|
||||||
.last_updated
|
|
||||||
.ok_or_eyre("Missing field 'last_updated'")?;
|
|
||||||
|
|
||||||
Ok(Some(GameInfo {
|
|
||||||
path: app.install_dir.into(),
|
|
||||||
last_updated: last_updated.into(),
|
|
||||||
}))
|
|
||||||
}
|
|
|
@ -1,110 +0,0 @@
|
||||||
use std::fmt::Result;
|
|
||||||
|
|
||||||
use ansi_term::Color;
|
|
||||||
use time::format_description::FormatItem;
|
|
||||||
use time::macros::format_description;
|
|
||||||
use time::OffsetDateTime;
|
|
||||||
use tracing::field::Field;
|
|
||||||
use tracing::{Event, Level, Metadata, Subscriber};
|
|
||||||
use tracing_error::ErrorLayer;
|
|
||||||
use tracing_subscriber::filter::FilterFn;
|
|
||||||
use tracing_subscriber::fmt::format::{debug_fn, Writer};
|
|
||||||
use tracing_subscriber::fmt::{self, FmtContext, FormatEvent, FormatFields};
|
|
||||||
use tracing_subscriber::layer::SubscriberExt;
|
|
||||||
use tracing_subscriber::prelude::*;
|
|
||||||
use tracing_subscriber::registry::LookupSpan;
|
|
||||||
use tracing_subscriber::EnvFilter;
|
|
||||||
|
|
||||||
/// Time-only format (`HH:MM:SS`) used for log-line timestamps.
pub const TIME_FORMAT: &[FormatItem] = format_description!("[hour]:[minute]:[second]");
|
|
||||||
|
|
||||||
/// Field formatter that writes only the `message` field and silently drops
/// every other field.
pub fn format_fields(w: &mut Writer<'_>, field: &Field, val: &dyn std::fmt::Debug) -> Result {
    match field.name() {
        "message" => write!(w, "{:?}", val),
        _ => Ok(()),
    }
}
|
|
||||||
|
|
||||||
pub fn filter_fields(metadata: &Metadata<'_>) -> bool {
|
|
||||||
metadata
|
|
||||||
.fields()
|
|
||||||
.iter()
|
|
||||||
.any(|field| field.name() == "message")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Formatter;
|
|
||||||
|
|
||||||
impl<S, N> FormatEvent<S, N> for Formatter
|
|
||||||
where
|
|
||||||
S: Subscriber + for<'a> LookupSpan<'a>,
|
|
||||||
N: for<'a> FormatFields<'a> + 'static,
|
|
||||||
{
|
|
||||||
fn format_event(
|
|
||||||
&self,
|
|
||||||
ctx: &FmtContext<'_, S, N>,
|
|
||||||
mut writer: Writer<'_>,
|
|
||||||
event: &Event<'_>,
|
|
||||||
) -> Result {
|
|
||||||
let meta = event.metadata();
|
|
||||||
|
|
||||||
let time = OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc());
|
|
||||||
let time = time.format(TIME_FORMAT).map_err(|_| std::fmt::Error)?;
|
|
||||||
|
|
||||||
let level = meta.level();
|
|
||||||
// Sadly, tracing's `Level` is a struct, not an enum, so we can't properly `match` it.
|
|
||||||
let color = if *level == Level::TRACE {
|
|
||||||
Color::Purple
|
|
||||||
} else if *level == Level::DEBUG {
|
|
||||||
Color::Blue
|
|
||||||
} else if *level == Level::INFO {
|
|
||||||
Color::Green
|
|
||||||
} else if *level == Level::WARN {
|
|
||||||
Color::Yellow
|
|
||||||
} else if *level == Level::ERROR {
|
|
||||||
Color::Red
|
|
||||||
} else {
|
|
||||||
unreachable!()
|
|
||||||
};
|
|
||||||
|
|
||||||
write!(
|
|
||||||
writer,
|
|
||||||
"[{}] [{:>5}] ",
|
|
||||||
time,
|
|
||||||
color.bold().paint(format!("{}", level))
|
|
||||||
)?;
|
|
||||||
|
|
||||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
|
||||||
|
|
||||||
writeln!(writer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Installs the global `tracing` subscriber.
///
/// Level filtering comes from the default env var (`RUST_LOG`), falling back
/// to `info`. In debug builds, events are pretty-printed to STDERR; in
/// release builds, the compact custom `Formatter` is used instead and events
/// without a `message` field are filtered out entirely.
pub fn create_tracing_subscriber() {
    let env_layer =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());

    // Exactly one of the two stdout layers is active, selected at compile
    // time; `Option<Layer>` keeps the registry type uniform for both builds.
    let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
        let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
        (Some(fmt_layer), None, None)
    } else {
        // Creates a layer that
        // - only prints events that contain a message
        // - does not print fields
        // - does not print spans/targets
        // - only prints time, not date
        let fmt_layer = fmt::layer()
            .with_writer(std::io::stderr)
            .event_format(Formatter)
            .fmt_fields(debug_fn(format_fields));

        (None, Some(fmt_layer), Some(FilterFn::new(filter_fields)))
    };

    tracing_subscriber::registry()
        .with(filter_layer)
        .with(env_layer)
        .with(dev_stdout_layer)
        .with(prod_stdout_layer)
        // Captures span traces for error reports (used by color-eyre).
        .with(ErrorLayer::new(fmt::format::Pretty::default()))
        .init();
}
|
|
|
@ -1 +0,0 @@
|
||||||
Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241
|
|
|
@ -1,21 +0,0 @@
|
||||||
# Client library for the Nexus Mods HTTP API.
[package]
name = "nexusmods"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
futures = "0.3.26"
lazy_static = "1.4.0"
regex = "1.7.1"
reqwest = { version = "0.12.4" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.94"
thiserror = "2.0.0"
time = { version = "0.3.20", features = ["serde"] }
tracing = "0.1.37"
url = { version = "2.3.1", features = ["serde"] }

[dev-dependencies]
# Async runtime for the `#[tokio::test]` integration tests.
tokio = { version = "1.26.0", features = ["rt", "macros"] }
|
|
|
@ -1,339 +0,0 @@
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::convert::Infallible;
|
|
||||||
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use regex::Regex;
|
|
||||||
use reqwest::header::{HeaderMap, HeaderValue, InvalidHeaderValue};
|
|
||||||
use reqwest::{Client, IntoUrl, RequestBuilder, Url};
|
|
||||||
use serde::Deserialize;
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
mod types;
|
|
||||||
use time::OffsetDateTime;
|
|
||||||
pub use types::*;
|
|
||||||
|
|
||||||
// TODO: Add OS information
|
|
||||||
const USER_AGENT: &str = concat!("DTMM/", env!("CARGO_PKG_VERSION"));
|
|
||||||
const GAME_ID: &str = "warhammer40kdarktide";
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref BASE_URL: Url = Url::parse("https://api.nexusmods.com/v1/").unwrap();
|
|
||||||
static ref BASE_URL_GAME: Url =
|
|
||||||
Url::parse("https://api.nexusmods.com/v1/games/warhammer40kdarktide/").unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Errors returned by the Nexus Mods API client.
#[derive(Error, Debug)]
pub enum Error {
    /// Transport-level or HTTP status error from `reqwest`.
    #[error("HTTP error: {0:?}")]
    HTTP(#[from] reqwest::Error),
    /// A URL could not be parsed or joined.
    #[error("invalid URL: {0:?}")]
    URLParseError(#[from] url::ParseError),
    /// A response body could not be deserialized. Keeps the raw JSON
    /// around for diagnostics.
    #[error("failed to deserialize due to {error}: {json}")]
    Deserialize {
        json: String,
        error: serde_json::Error,
    },
    /// The API key contains bytes that are invalid in an HTTP header.
    #[error(transparent)]
    InvalidHeaderValue(#[from] InvalidHeaderValue),
    /// Exists to satisfy `From` conversions; can never actually occur.
    #[error("this error cannot happen")]
    Infallible(#[from] Infallible),
    /// An `nxm://` URL failed validation; the first field explains why.
    #[error("invalid NXM URL '{url}': {0}", url = .1.as_str())]
    InvalidNXM(&'static str, Url),
    /// Free-form error message.
    #[error("{0}")]
    Custom(String),
}

/// Convenience alias using this crate's `Error` type.
pub type Result<T> = std::result::Result<T, Error>;
|
|
||||||
|
|
||||||
/// The components of a parsed `nxm://` download URL.
pub struct Nxm {
    pub mod_id: u64,
    pub file_id: u64,
    // User the link was generated for; checked against the authenticated user.
    pub user_id: u64,
    // Download key carried in the URL's query string.
    pub key: String,
    // Expiry time of the download key.
    pub expires: OffsetDateTime,
}
|
|
||||||
|
|
||||||
/// Client for the Nexus Mods HTTP API.
///
/// Holds a `reqwest::Client` preconfigured with the user agent and the
/// `apikey` authentication header.
pub struct Api {
    client: Client,
}
|
|
||||||
|
|
||||||
impl Api {
|
|
||||||
pub fn new(key: String) -> Result<Self> {
|
|
||||||
let mut headers = HeaderMap::new();
|
|
||||||
headers.insert("accept", HeaderValue::from_static("application/json"));
|
|
||||||
headers.insert("apikey", HeaderValue::from_str(&key)?);
|
|
||||||
|
|
||||||
let client = Client::builder()
|
|
||||||
.user_agent(USER_AGENT)
|
|
||||||
.default_headers(headers)
|
|
||||||
.build()?;
|
|
||||||
|
|
||||||
Ok(Self { client })
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
async fn send<T>(&self, req: RequestBuilder) -> Result<T>
|
|
||||||
where
|
|
||||||
T: for<'a> Deserialize<'a>,
|
|
||||||
{
|
|
||||||
let res = req.send().await?.error_for_status()?;
|
|
||||||
tracing::trace!(?res);
|
|
||||||
|
|
||||||
let json = res.text().await?;
|
|
||||||
serde_json::from_str(&json).map_err(|error| Error::Deserialize { json, error })
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn user_validate(&self) -> Result<User> {
|
|
||||||
let url = BASE_URL.join("users/validate.json")?;
|
|
||||||
let req = self.client.get(url);
|
|
||||||
self.send(req).await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn mods_updated(&self, period: UpdatePeriod) -> Result<Vec<UpdateInfo>> {
|
|
||||||
let url = BASE_URL_GAME.join("mods/updated.json")?;
|
|
||||||
let req = self.client.get(url).query(&[period]);
|
|
||||||
self.send(req).await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn mods_id(&self, id: u64) -> Result<Mod> {
|
|
||||||
let url = BASE_URL_GAME.join(&format!("mods/{}.json", id))?;
|
|
||||||
let req = self.client.get(url);
|
|
||||||
self.send(req).await
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn file_version<T>(&self, id: u64, timestamp: T) -> Result<String>
|
|
||||||
where
|
|
||||||
T: std::fmt::Debug,
|
|
||||||
OffsetDateTime: PartialEq<T>,
|
|
||||||
{
|
|
||||||
let url = BASE_URL_GAME.join(&format!("mods/{id}/files.json"))?;
|
|
||||||
let req = self.client.get(url);
|
|
||||||
let files: FileList = self.send(req).await?;
|
|
||||||
|
|
||||||
let Some(file) = files
|
|
||||||
.files
|
|
||||||
.into_iter()
|
|
||||||
.find(|file| file.uploaded_timestamp == timestamp)
|
|
||||||
else {
|
|
||||||
let err = Error::Custom("Timestamp does not match any file".into());
|
|
||||||
return Err(err);
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(file.version)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn picture(&self, url: impl IntoUrl + std::fmt::Debug) -> Result<Vec<u8>> {
|
|
||||||
let res = self.client.get(url).send().await?.error_for_status()?;
|
|
||||||
|
|
||||||
res.bytes()
|
|
||||||
.await
|
|
||||||
.map(|bytes| bytes.to_vec())
|
|
||||||
.map_err(From::from)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn get_file_by_id(&self, mod_id: u64, file_id: u64) -> Result<File> {
|
|
||||||
let url = BASE_URL_GAME.join(&format!("mods/{mod_id}/files/{file_id}.json"))?;
|
|
||||||
let req = self.client.get(url);
|
|
||||||
self.send(req).await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse_file_name<S: AsRef<str>>(
|
|
||||||
name: S,
|
|
||||||
) -> Option<(String, u64, String, OffsetDateTime)> {
|
|
||||||
lazy_static! {
|
|
||||||
static ref RE: Regex = Regex::new(r#"^(?P<name>.+?)-(?P<mod_id>[1-9]\d*)-(?P<version>.+?)-(?P<updated>[1-9]\d*)(?:\.\w+)?$"#).unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
RE.captures(name.as_ref()).and_then(|cap| {
|
|
||||||
let name = cap.name("name").map(|s| s.as_str().to_string())?;
|
|
||||||
let mod_id = cap.name("mod_id").and_then(|s| s.as_str().parse().ok())?;
|
|
||||||
let version = cap.name("version").map(|s| s.as_str().replace('-', "."))?;
|
|
||||||
let updated = cap
|
|
||||||
.name("updated")
|
|
||||||
.and_then(|s| s.as_str().parse().ok())
|
|
||||||
.and_then(|s| OffsetDateTime::from_unix_timestamp(s).ok())?;
|
|
||||||
|
|
||||||
Some((name, mod_id, version, updated))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip(self))]
|
|
||||||
pub async fn mods_download_link(
|
|
||||||
&self,
|
|
||||||
mod_id: u64,
|
|
||||||
file_id: u64,
|
|
||||||
key: String,
|
|
||||||
expires: OffsetDateTime,
|
|
||||||
) -> Result<Vec<DownloadLink>> {
|
|
||||||
let url =
|
|
||||||
BASE_URL_GAME.join(&format!("mods/{mod_id}/files/{file_id}/download_link.json"))?;
|
|
||||||
let req = self
|
|
||||||
.client
|
|
||||||
.get(url)
|
|
||||||
.query(&[("key", key)])
|
|
||||||
.query(&[("expires", expires.unix_timestamp())]);
|
|
||||||
self.send(req).await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_nxm(&self, url: Url) -> Result<(Mod, File, Vec<u8>)> {
|
|
||||||
let nxm = Self::parse_nxm(url.clone())?;
|
|
||||||
|
|
||||||
let user = self.user_validate().await?;
|
|
||||||
|
|
||||||
if nxm.user_id != user.user_id {
|
|
||||||
return Err(Error::InvalidNXM("user_id mismtach", url));
|
|
||||||
}
|
|
||||||
|
|
||||||
let (mod_data, file_info, download_info) = futures::try_join!(
|
|
||||||
self.mods_id(nxm.mod_id),
|
|
||||||
self.get_file_by_id(nxm.mod_id, nxm.file_id),
|
|
||||||
self.mods_download_link(nxm.mod_id, nxm.file_id, nxm.key, nxm.expires)
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let Some(download_url) = download_info.first().map(|i| i.uri.clone()) else {
|
|
||||||
return Err(Error::InvalidNXM("no download link", url));
|
|
||||||
};
|
|
||||||
|
|
||||||
let req = self.client.get(download_url);
|
|
||||||
let data = req.send().await?.bytes().await?;
|
|
||||||
|
|
||||||
Ok((mod_data, file_info, data.to_vec()))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse_nxm(nxm: Url) -> Result<Nxm> {
|
|
||||||
if nxm.scheme() != "nxm" {
|
|
||||||
return Err(Error::InvalidNXM("Invalid scheme", nxm));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now it makes sense, why Nexus calls this field `game_domain_name`, when it's just
|
|
||||||
// another path segment in the regular API calls.
|
|
||||||
if nxm.host_str() != Some(GAME_ID) {
|
|
||||||
return Err(Error::InvalidNXM("Invalid game domain name", nxm));
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(mut segments) = nxm.path_segments() else {
|
|
||||||
return Err(Error::InvalidNXM("Missing path segments", nxm));
|
|
||||||
};
|
|
||||||
|
|
||||||
if segments.next() != Some("mods") {
|
|
||||||
return Err(Error::InvalidNXM(
|
|
||||||
"Unexpected path segment, expected 'mods'",
|
|
||||||
nxm,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(mod_id) = segments.next().and_then(|id| id.parse().ok()) else {
|
|
||||||
return Err(Error::InvalidNXM("Invalid mod ID", nxm));
|
|
||||||
};
|
|
||||||
|
|
||||||
if segments.next() != Some("files") {
|
|
||||||
return Err(Error::InvalidNXM(
|
|
||||||
"Unexpected path segment, expected 'files'",
|
|
||||||
nxm,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(file_id) = segments.next().and_then(|id| id.parse().ok()) else {
|
|
||||||
return Err(Error::InvalidNXM("Invalid file ID", nxm));
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut query = HashMap::new();
|
|
||||||
let pairs = nxm.query_pairs();
|
|
||||||
|
|
||||||
for (key, val) in pairs {
|
|
||||||
query.insert(key, val);
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(key) = query.get("key") else {
|
|
||||||
return Err(Error::InvalidNXM("Missing query field 'key'", nxm));
|
|
||||||
};
|
|
||||||
|
|
||||||
let expires = query
|
|
||||||
.get("expires")
|
|
||||||
.and_then(|expires| expires.parse().ok())
|
|
||||||
.and_then(|expires| OffsetDateTime::from_unix_timestamp(expires).ok());
|
|
||||||
let Some(expires) = expires else {
|
|
||||||
return Err(Error::InvalidNXM("Missing query field 'expires'", nxm));
|
|
||||||
};
|
|
||||||
|
|
||||||
let user_id = query.get("user_id").and_then(|id| id.parse().ok());
|
|
||||||
let Some(user_id) = user_id else {
|
|
||||||
return Err(Error::InvalidNXM("Missing query field 'user_id'", nxm));
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Nxm {
|
|
||||||
mod_id,
|
|
||||||
file_id,
|
|
||||||
key: key.to_string(),
|
|
||||||
expires,
|
|
||||||
user_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE(review): apart from the pure parsing tests, these are integration
// tests against the live Nexus Mods API — they require a valid
// `NEXUSMODS_API_KEY` env var and network access.
#[cfg(test)]
mod test {
    use reqwest::Url;
    use time::OffsetDateTime;

    use crate::Api;

    // Builds a client from the `NEXUSMODS_API_KEY` env var.
    fn make_api() -> Api {
        let key = std::env::var("NEXUSMODS_API_KEY").expect("'NEXUSMODS_API_KEY' env var missing");
        Api::new(key).expect("failed to build API client")
    }

    #[tokio::test]
    async fn mods_updated() {
        let client = make_api();
        client
            .mods_updated(Default::default())
            .await
            .expect("failed to query 'mods_updated'");
    }

    #[tokio::test]
    async fn user_validate() {
        let client = make_api();
        client
            .user_validate()
            .await
            .expect("failed to query 'user_validate'");
    }

    #[tokio::test]
    async fn mods_id() {
        let client = make_api();
        // Darktide Mod Framework's mod ID.
        let dmf_id = 8;
        client
            .mods_id(dmf_id)
            .await
            .expect("failed to query 'mods_id'");
    }

    #[test]
    fn parse_file_name() {
        let file = "Darktide Mod Framework-8-23-3-04-1677966575.zip";
        let (name, mod_id, version, updated) = Api::parse_file_name(file).unwrap();

        assert_eq!(name, String::from("Darktide Mod Framework"));
        assert_eq!(mod_id, 8);
        assert_eq!(version, String::from("23-3-04"));
        assert_eq!(
            updated,
            OffsetDateTime::from_unix_timestamp(1677966575).unwrap()
        );
    }

    #[test]
    fn parse_nxm() {
        let nxm = Url::parse("nxm://warhammer40kdarktide/mods/8/files/1000172397?key=VZ86Guj_LosPvtkD90-ZQg&expires=1678359882&user_id=1234567").expect("invalid NXM example");
        Api::parse_nxm(nxm).expect("failed to parse nxm link");
    }
}
|
|
|
@ -1,140 +0,0 @@
|
||||||
use reqwest::Url;
|
|
||||||
use serde::ser::SerializeTuple;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use time::OffsetDateTime;
|
|
||||||
|
|
||||||
/// The authenticated user, as returned by `users/validate.json`.
#[derive(Debug, Deserialize)]
pub struct User {
    pub user_id: u64,
    pub name: String,
    pub profile_url: Url,
    // Fields present in the API response but currently unused:
    // pub is_premium: bool,
    // pub is_supporter: bool,
    // pub email: String,
}

/// Publication status of a mod.
#[derive(Copy, Clone, Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ModStatus {
    Published,
}

/// Whether the user has endorsed a mod.
#[derive(Copy, Clone, Debug, Deserialize)]
pub enum EndorseStatus {
    Endorsed,
    Undecided,
}

/// The authenticated user's endorsement state for a mod.
#[derive(Debug, Deserialize)]
pub struct ModEndorsement {
    pub endorse_status: EndorseStatus,
    // Unix timestamp of the endorsement, if any.
    #[serde(with = "time::serde::timestamp::option")]
    pub timestamp: Option<OffsetDateTime>,
    pub version: Option<String>,
}
|
|
||||||
|
|
||||||
/// Metadata of a mod, as returned by `mods/<id>.json`.
#[derive(Debug, Deserialize)]
pub struct Mod {
    pub name: String,
    pub description: String,
    pub summary: String,
    pub picture_url: Url,
    pub uid: u64,
    pub mod_id: u64,
    pub category_id: u64,
    pub version: String,
    // Timestamps are delivered as Unix timestamps.
    #[serde(with = "time::serde::timestamp")]
    pub created_timestamp: OffsetDateTime,
    // created_time: OffsetDateTime,
    #[serde(with = "time::serde::timestamp")]
    pub updated_timestamp: OffsetDateTime,
    // updated_time: OffsetDateTime,
    pub author: String,
    pub uploaded_by: String,
    pub uploaded_users_profile_url: Url,
    pub status: ModStatus,
    pub available: bool,
    pub endorsement: ModEndorsement,
    // Fields present in the API response but currently unused:
    // pub mod_downloads: u64,
    // pub mod_unique_downloads: u64,
    // pub game_id: u64,
    // pub allow_rating: bool,
    // pub domain_name: String,
    // pub endorsement_count: u64,
    // pub contains_adult_content: bool,
}
|
|
||||||
|
|
||||||
/// Metadata of a single file belonging to a mod.
#[derive(Debug, Deserialize)]
pub struct File {
    pub id: Vec<u64>,
    pub uid: u64,
    pub file_id: u64,
    pub name: String,
    pub version: String,
    pub category_id: u64,
    pub category_name: String,
    pub is_primary: bool,
    pub size: u64,
    pub file_name: String,
    // Delivered as a Unix timestamp.
    #[serde(with = "time::serde::timestamp")]
    pub uploaded_timestamp: OffsetDateTime,
    pub mod_version: String,
    pub external_virus_scan_url: String,
    pub description: String,
    pub size_kb: u64,
    pub size_in_bytes: u64,
    pub changelog_html: Option<String>,
    pub content_preview_link: String,
}

/// Response wrapper for `mods/<id>/files.json`.
#[derive(Debug, Deserialize)]
pub struct FileList {
    pub files: Vec<File>,
    // pub file_updates: Vec<serde_json::Value>,
}

/// A single download mirror entry from `download_link.json`.
#[derive(Debug, Deserialize)]
pub struct DownloadLink {
    pub name: String,
    pub short_name: String,
    // The API spells this field "URI".
    #[serde(alias = "URI")]
    pub uri: Url,
}

/// An entry from `mods/updated.json`.
#[derive(Debug, Deserialize)]
pub struct UpdateInfo {
    pub mod_id: u64,
    // Timestamps are delivered as Unix timestamps.
    #[serde(with = "time::serde::timestamp")]
    pub latest_file_update: OffsetDateTime,
    #[serde(with = "time::serde::timestamp")]
    pub latest_mod_activity: OffsetDateTime,
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug)]
|
|
||||||
pub enum UpdatePeriod {
|
|
||||||
Day,
|
|
||||||
Week,
|
|
||||||
Month,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for UpdatePeriod {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::Week
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Serialize for UpdatePeriod {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: serde::Serializer,
|
|
||||||
{
|
|
||||||
let mut tup = serializer.serialize_tuple(2)?;
|
|
||||||
tup.serialize_element("period")?;
|
|
||||||
tup.serialize_element(match self {
|
|
||||||
Self::Day => "1d",
|
|
||||||
Self::Week => "1w",
|
|
||||||
Self::Month => "1m",
|
|
||||||
})?;
|
|
||||||
tup.end()
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
# FFI wrapper crate around the Oodle compression library.
[package]
name = "oodle"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
# Versions are managed by the workspace root.
color-eyre = { workspace = true }
tracing = { workspace = true }

[build-dependencies]
# Generates Rust FFI bindings at build time — presumably from the Oodle SDK
# headers; verify against build.rs.
bindgen = "0.71.0"
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue