Compare commits


84 commits

Author SHA1 Message Date
8cb2c6b2cd
Merge pull request 'Fix using branch for version number' (#209) from issue/205 into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #209
2025-03-12 12:25:49 +00:00
6ba13ac1ec
Fix using branch for version number 2025-03-12 13:24:03 +01:00
4d0762c0ba
Merge pull request 'Fix branch name in package version' (#207) from issue/205 into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #207
2025-03-12 12:15:30 +00:00
752291fe2d
Merge pull request 'Explicitly define base branches' (#208) from feat/renovate into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #208
2025-03-12 12:15:09 +00:00
71f945a96c
Explicitly define base branches
Currently, the dependency dashboard lists a bunch of pending updates
under a section called "Other branches". I'm not sure, but this suggests
that one of the configs I extend from enables base branches other than
just the default. To test that, and to make it explicit, set only the
branches I really want checked.

I'm adding the `release/.*` regex for now, even though I don't have
any release process yet.
2025-03-12 13:11:01 +01:00
d15f533e19
Fix branch name in package version
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2025-03-12 11:52:53 +01:00
1a3c564ecf
Merge pull request 'Improve CI artifact version names' (#206) from issue/205 into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #206
2025-03-12 10:46:26 +00:00
beba47f340
Push a package with a fixed version for master
All checks were successful
build/msvc Build for the target platform: msvc
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
To provide something that can easily be linked to, also push packages
built from `master` to a version that doesn't contain the SHA.
2025-03-12 11:33:48 +01:00
5612e271fb
Improve version name for CI artifacts built off master
The name from `git describe --tags` is rather confusing to people who
aren't familiar with it, especially in the current situation, where
there are no proper versioned releases.

A name like `master-123456` should be much clearer.

Closes #205.
2025-03-12 11:26:24 +01:00
69300e87e6
Merge pull request 'fix(deps): update rust crate thiserror to v2' (#200) from renovate/thiserror-2.x into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #200
2025-02-19 10:08:51 +00:00
a3583b4485
fix(deps): update rust crate thiserror to v2
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2025-02-19 11:02:02 +01:00
5982a66033
Merge pull request 'chore(deps): update rust crate notify to v8' (#202) from renovate/notify-8.x into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #202
2025-02-19 09:53:28 +00:00
adf9610ecc
chore(deps): update rust crate notify to v8
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2025-01-10 14:48:31 +00:00
91cd54fff7
Merge pull request 'chore(deps): update rust crate bindgen to 0.71.0' (#201) from renovate/bindgen-0.x into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #201
2024-12-10 10:05:20 +00:00
b219e20f3a
chore(deps): update rust crate bindgen to 0.71.0
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-12-06 20:32:37 +00:00
f9ccdf746e
Merge pull request 'chore(deps): update rust crate notify to v7' (#199) from renovate/notify-7.x into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #199
2024-10-28 09:24:47 +00:00
72ce06b0e5
chore(deps): update rust crate notify to v7
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-10-25 17:32:26 +00:00
fc151f1449
Merge pull request 'fix(deps): update rust crate serde to v1.0.209' (#194) from renovate/serde-monorepo into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #194
2024-08-27 07:07:26 +00:00
659b63bfe9
fix(deps): update rust crate serde to v1.0.209
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-08-27 06:30:39 +00:00
9f90b45275
Merge pull request 'chore(deps): update rust crate minijinja to v2.2.0' (#195) from renovate/minijinja-2.x-lockfile into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #195
2024-08-27 06:20:36 +00:00
67c64bb357
chore(deps): update rust crate minijinja to v2.2.0
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-08-26 20:45:40 +00:00
6017ec058b
Merge pull request 'chore(deps): update rust crate fastrand to v2.1.1' (#193) from renovate/fastrand-2.x-lockfile into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: https://git.sclu1034.dev///bitsquid_dt/dtmt/pulls/193
2024-08-24 14:09:14 +00:00
ffd4927d27
chore(deps): update rust crate fastrand to v2.1.1
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
2024-08-24 10:02:43 +00:00
49a9eb4312
Merge pull request 'fix(deps): update rust crate serde_json to v1.0.127' (#192) from renovate/serde_json-1.x-lockfile into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: https://git.sclu1034.dev///bitsquid_dt/dtmt/pulls/192
2024-08-24 09:48:42 +00:00
4d665200fa
fix(deps): update rust crate serde_json to v1.0.127
All checks were successful
build/msvc Build for the target platform: msvc
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
2024-08-23 21:30:32 +00:00
b3463ffb46
Merge pull request 'chore: Configure Renovate' (#187) from renovate/configure into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: https://git.sclu1034.dev///bitsquid_dt/dtmt/pulls/187
2024-08-21 13:31:35 +00:00
7cb44532b2
Add .renovaterc
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-08-21 15:28:24 +02:00
15aa9bcf5e
Merge pull request 'Update dependencies' (#188) from feat/dependencies into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: https://git.sclu1034.dev///bitsquid_dt/dtmt/pulls/188
2024-08-21 13:28:11 +00:00
a2bbab1398
Update dependencies
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-08-21 14:33:39 +02:00
88becb72a9
Merge pull request 'Consolidate template libraries' (#186) from issue/124 into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: https://git.sclu1034.dev///bitsquid_dt/dtmt/pulls/186
2024-08-21 09:49:18 +00:00
df2992a476
Improve mod template comments
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
2024-08-20 16:28:56 +02:00
e336240094
Consolidate template libraries
Remove last uses of `string_template` in favor of `minijinja`.

Closes #124.
2024-08-20 16:28:08 +02:00
d7fa80f471
Merge pull request 'Add tests for hash inversion' (#185) from feat/murmur-tests into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: https://git.sclu1034.dev///bitsquid_dt/dtmt/pulls/185
2024-08-14 12:07:31 +00:00
831592edf6
Merge pull request 'Implement bundle database resource hashes' (#184) from feat/bundle-database into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #184
2024-08-14 13:58:38 +02:00
2a1d8d815f
Add tests for hash inversion
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
Just a quick round trip test, and an additional assert to demonstrate
that byte order does matter.
2024-08-14 09:22:24 +02:00
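A minimal sketch of the kind of round trip being tested, using a plain `u64` and hex formatting as stand-ins for the sdk's `Murmur64` type (the actual test code and API may differ):

```rust
// Sketch only: plain u64/hex stand in for the sdk's Murmur64 type.
fn main() {
    let hash: u64 = 0x0123456789abcdef;

    // Round trip: format as hex, parse back, expect the same value.
    let text = format!("{hash:016x}");
    let parsed = u64::from_str_radix(&text, 16).unwrap();
    assert_eq!(parsed, hash);

    // Byte order matters: the same bytes read as little- vs. big-endian
    // give different values.
    let bytes = hash.to_le_bytes();
    assert_ne!(u64::from_le_bytes(bytes), u64::from_be_bytes(bytes));
}
```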
d931e6b9ca
dtmt: Add command to search for files
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
Not really of much use at the moment, but inspired by the HD2 community.
2024-07-28 22:04:14 +02:00
7fa08c2efd
dtmt: Implement listing bundle database contents 2024-07-28 22:03:43 +02:00
dbf060032b
sdk: Implement bundle database resource hashes
Algorithm reverse engineered by WhiteGoat.
2024-07-28 14:46:10 +02:00
4b39d290b6 Merge pull request 'Add IdString32' (#183) from feat/various into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #183
2024-07-19 11:41:43 +02:00
3a6e954f9a
sdk: Refactor murmur modules and add IdString32
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
2024-07-19 11:30:09 +02:00
5a880b2953 Merge pull request 'Various minor changes extracted from unfinished projects' (#182) from feat/various into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #182
2024-07-19 11:13:47 +02:00
f1f9a818cc
sdk: Allow any byte stream for hashing dictionary entries
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
2024-07-19 09:48:25 +02:00
c997489e18
Add some doc comments 2024-07-19 09:48:23 +02:00
08219f05ba
sdk: Fix reading strings
Fatshark has a few weird string fields, where they provide a length
field, but then sometimes write a shorter, NUL-terminated string into
that same field and add padding up to the "advertised" length.
To properly read those strings, we can't rely on just the length field
anymore, but need to check for a NUL, too.
2024-07-19 09:48:21 +02:00
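A rough sketch of the reading logic described above, assuming the field is read into a buffer of the advertised length; the function name and error handling are illustrative, not the sdk's actual API:

```rust
use std::io::{self, Read};

// Sketch: read the full "advertised" length, then cut the string at the
// first NUL byte, since a shorter string plus padding may have been written.
fn read_padded_string<R: Read>(r: &mut R, advertised_len: usize) -> io::Result<String> {
    let mut buf = vec![0u8; advertised_len];
    r.read_exact(&mut buf)?;

    let end = buf.iter().position(|&b| b == 0).unwrap_or(advertised_len);
    buf.truncate(end);

    String::from_utf8(buf).map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
}
```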
edad0d4493
Improve file listing output
Adds pretty printing for file size and always shows the bundle hash name
2024-07-19 09:48:20 +02:00
74a7aaa6e5
dtmt-shared: Write log lines to stderr
Ideally, I would prefer the usual split by logging level, but that
seems to be somewhat complex with `tracing_subscriber`. So this simply
switches everything over to stderr, leaving stdout free for some of the
experiment commands to write their results to.
2024-07-19 09:48:15 +02:00
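A minimal sketch of what that looks like with `tracing_subscriber`'s fmt subscriber (the actual setup in `dtmt-shared` may differ):

```rust
use tracing_subscriber::EnvFilter;

// Route all formatted log output to stderr so stdout stays free for results.
fn init_logging() {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .with_writer(std::io::stderr)
        .init();
}
```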
437e724d07 Merge pull request 'Implement name overrides' (#181) from feat/name-overrides into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #181
2024-07-19 09:35:16 +02:00
95fc6c160b
dtmt: Implement name overrides
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
For most of the game files, we don't know the actual name, only the hash
of that name. To still allow building bundles that contain files with
that name (e.g. to override a game file with a custom one), there needs
to be a way to tell DTMT to name a file such that its hash is the same
as the one in the game.
The initial idea was to just expect the file name on disk to be the
hash, but that wouldn't allow for arbitrary folder structures anymore.

So instead, there is now a new, optional setting in `dtmt.cfg`, where
the modder can map a file path to an override name.
2024-07-18 09:50:48 +02:00
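A hypothetical sketch of the lookup this enables: when hashing a file's name for the bundle, prefer the configured override over the on-disk path. The function signature and hash callback are assumptions, not DTMT's actual API; only the `name_overrides` field name is taken from the changes below.

```rust
use std::collections::HashMap;

// If the modder mapped this path to an override name, hash that name instead
// of the on-disk path, so the resulting hash matches the game's.
fn bundle_name_hash(
    disk_path: &str,
    name_overrides: &HashMap<String, String>,
    hash_name: impl Fn(&str) -> u64,
) -> u64 {
    let name = name_overrides
        .get(disk_path)
        .map(String::as_str)
        .unwrap_or(disk_path);
    hash_name(name)
}
```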
b7e26eee57
refactor(sdk): Split BundleFileType into its own file 2024-07-17 11:14:22 +02:00
e56176e341 Merge pull request 'Fix printing hashes with leading zeroes' (#180) from issue/list-with-leading-zeroes into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #180
2024-07-13 10:35:42 +02:00
a47167b735
Fix printing hashes with leading zeroes
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Closes #179
2024-07-12 15:58:39 +02:00
bf87e0c571 Merge pull request 'Extend mod loader logging' (#178) from feat/extend-mod-load-logging into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #178
2024-07-12 15:06:08 +02:00
a4e78f1c6b
Log deployment data
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Closes #168
2024-07-12 14:35:43 +02:00
1020efe53d
Add version field to mod loader logging
Ref: #168
2024-07-12 14:34:55 +02:00
ba31d51098
Align Crashify property with Fatshark
They recently submitted a PR to DML with their preferred property names,
so we should match that.

Ref: #168
2024-07-12 13:41:29 +02:00
0539b6b456
Strip block whitespace in templates
Ref: #168
2024-07-12 13:41:19 +02:00
9ad9d21402 Merge pull request 'Add application icon' (#174) from feat/icon into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #174
2024-07-12 11:48:13 +02:00
84606814fd
Add application icon
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-07-10 23:27:43 +02:00
aaccedb85f Merge pull request 'Pull ModLoader in' (#156) from feat/builtin-dml into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #156
2024-07-10 23:23:37 +02:00
e6f1e7c117
Fix load order verification
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
With `.enumerate()` after `.filter()`, the resulting indices didn't
properly map back to the overall mod list anymore, but the checks
afterwards relied on that mapping.
Moving the `.enumerate()` before the `.filter()` makes sure that the
indices are correct.
2024-07-10 21:54:51 +02:00
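A small, self-contained illustration of the difference (not the actual mod list code):

```rust
// With `.enumerate()` after `.filter()`, indices count only the kept items;
// with it before, they index into the original list, which is what the
// later checks expect.
fn main() {
    let enabled = [true, false, true];

    let after_filter: Vec<usize> = enabled
        .iter()
        .filter(|e| **e)
        .enumerate()
        .map(|(i, _)| i)
        .collect();
    assert_eq!(after_filter, vec![0, 1]); // positions within the filtered list

    let before_filter: Vec<usize> = enabled
        .iter()
        .enumerate()
        .filter(|(_, e)| **e)
        .map(|(i, _)| i)
        .collect();
    assert_eq!(before_filter, vec![0, 2]); // positions within the original list
}
```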
91651a8467
Apply linter fixes in mod_main.lua
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2024-07-10 20:02:27 +02:00
05df72635a
dtmm: Pull ModLoader in
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
Closes #155.
2024-07-10 19:49:00 +02:00
644848c8c6 Merge pull request 'Perform various optimizations' (#173) from feat/optimization into master
All checks were successful
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
Reviewed-on: #173
2024-07-10 19:45:52 +02:00
0f14834e2d
Remove string_template
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
Use minijinja for all templates
2024-07-10 18:41:38 +02:00
94b64b4619
Update zip 2024-07-10 18:40:52 +02:00
b403751228
Update ansi-parser
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
2024-07-09 16:16:50 +02:00
96a7eeb1e0
Implement faster hex string parsing 2024-07-09 16:16:48 +02:00
ef4c2a1d94
Update interprocess 2024-07-09 16:16:46 +02:00
9577d70423
Add missing build tools to CI image 2024-07-09 16:16:44 +02:00
189c3199a0
Update bitflags 2024-07-09 16:16:42 +02:00
b8ac80562a
Update zip 2024-07-09 16:16:37 +02:00
3546bc8faa
Update bindgen 2024-05-15 21:57:45 +02:00
4bc5777a4b
Update notify 2024-05-15 20:04:47 +02:00
0c4c078b10
Update dependencies 2024-05-15 19:24:57 +02:00
ae30499a49
Remove unused dependency 2024-05-15 19:19:54 +02:00
647cb1b8bd
Update fastrand 2024-05-15 19:16:34 +02:00
ecd235be05
Update ansi-parser
Patched to update heapless while waiting for the merge request.
2024-05-15 19:14:07 +02:00
bac75e1c9a
Update confy 2024-05-15 18:58:08 +02:00
21df6cfc5c
Update reqwest 2024-05-15 18:52:58 +02:00
6030917ade
Update steamlocate
The actual update already happened, but `cargo outdated` cannot handle
the suffix, so we must update the `Cargo.toml` as well.
2024-05-15 16:37:28 +02:00
dfd51513da
Update strip-ansi-escapes
Removes a duplicate dependency on arrayvec.
2024-05-15 16:32:45 +02:00
535a30a7ca
Add simpler shell parser
This obsoletes `shlex`. The quoting turned out unnecessary, and the
splitting supported a lot more than we need. It also forced
unnecessary allocations: The splitting doesn't add any characters and
keeps UTF-8 intact, so returning slices from the input is perfectly
possible.
Though this particular implementation will only come into use in the
future, as `CmdLine` still requires that the slices are cloned.

Still, the custom implementation performs about 3x faster.
2024-05-15 16:32:44 +02:00
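A hedged sketch of the shape of such a parser: split the input into whitespace-separated arguments and return slices borrowing from it, with no per-argument allocations (the real parser and its splitting rules may differ):

```rust
// Returning `&str` slices keeps UTF-8 intact and avoids per-argument
// allocations; only the collecting Vec allocates here.
fn split_args(input: &str) -> impl Iterator<Item = &str> {
    input.split_whitespace()
}

fn main() {
    let line = "dtmt bundle list ./some/bundle";
    let args: Vec<&str> = split_args(line).collect();
    assert_eq!(args, ["dtmt", "bundle", "list", "./some/bundle"]);
}
```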
7a1727ff3b Merge pull request 'Update CI image' (#175) from feat/updates into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #175
2024-05-15 16:25:46 +02:00
e1277783a3 Merge pull request 'Update dependencies' (#172) from feat/updates into master
All checks were successful
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
Reviewed-on: #172
2024-05-15 15:30:12 +02:00
68 changed files with 3437 additions and 1914 deletions


@ -37,6 +37,7 @@ RUN set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
build-essential \
cmake \
curl \
git \
gpg \


@ -125,8 +125,6 @@ jobs:
vars:
pr: ""
target: msvc
gitea_url: http://forgejo:3000
gitea_api_key: ((gitea_api_key))
- load_var: version_number
reveal: true
@ -142,10 +140,21 @@ jobs:
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/*.exe
- artifact/*.sha256
- artifact/*.exe.sha256
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: master
fail_fast: true
override: true
globs:
- artifact/*.exe
- artifact/*.exe.sha256
- name: build-linux
on_success:
@ -202,5 +211,20 @@ jobs:
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/*.exe
- artifact/*.sha256
- artifact/dtmm.sha256
- artifact/dtmt.sha256
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: master
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/dtmm.sha256
- artifact/dtmt.sha256


@ -24,8 +24,10 @@ PR=${PR:-}
if [ -n "$PR" ]; then
title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$(cat .git/ref || echo "HEAD")" 2>/dev/null || echo 'manual')"
elif [ -f ".git/branch"]; then
ref=$(cat .git/branch)-$(git rev-parse --short $ref)
else
ref=$(git describe --tags)
ref=$(git rev-parse --short "$(cat .git/ref || echo "HEAD")")
fi
title "Version: '$ref'"


@ -22,7 +22,6 @@ caches:
params:
CI: "true"
TARGET: ((target))
GITEA_API_KEY: ((gitea_api_key))
PR: ((pr))
OUTPUT: artifact

6
.gitattributes vendored Normal file

@ -0,0 +1,6 @@
* text=auto
*.xcf filter=lfs diff=lfs merge=lfs -text
*.ico filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text

15
.renovaterc Normal file

@ -0,0 +1,15 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended",
":combinePatchMinorReleases",
":enableVulnerabilityAlerts",
":rebaseStalePrs"
],
"prConcurrentLimit": 10,
"branchPrefix": "renovate/",
"baseBranches": [
"$default",
"/^release\\/.*/"
]
}


@ -20,6 +20,8 @@
- dtmm: fetch file version for Nexus mods
- dtmm: handle `nxm://` URIs via IPC and import the corresponding mod
- dtmm: Add button to open mod on nexusmods.com
- dtmt: Implement commands to list bundles and contents
- dtmt: Implement command to search for files
=== Fixed

1484
Cargo.lock generated

File diff suppressed because it is too large.


@ -8,11 +8,56 @@ members = [
"lib/sdk",
"lib/serde_sjson",
"lib/luajit2-sys",
"lib/color-eyre",
]
exclude = ["lib/color-eyre"]
[patch.crates-io]
[workspace.dependencies]
ansi-parser = "0.9.1"
ansi_term = "0.12.1"
async-recursion = "1.0.5"
bincode = "1.3.3"
bitflags = "2.5.0"
byteorder = "1.4.3"
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
color-eyre = { path = "lib/color-eyre" }
colors-transform = "0.2.11"
confy = "0.6.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
druid-widget-nursery = "0.1"
dtmt-shared = { path = "lib/dtmt-shared" }
fastrand = "2.1.0"
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
interprocess = "2.1.0"
lazy_static = "1.4.0"
luajit2-sys = { path = "lib/luajit2-sys" }
minijinja = { version = "2.0.1", default-features = false }
nanorand = "0.7.0"
nexusmods = { path = "lib/nexusmods" }
notify = "8.0.0"
oodle = { path = "lib/oodle" }
open = "5.0.1"
path-clean = "1.0.1"
path-slash = "0.2.1"
pin-project-lite = "0.2.9"
promptly = "0.3.1"
sdk = { path = "lib/sdk" }
serde = { version = "1.0.152", features = ["derive", "rc"] }
serde_sjson = { path = "lib/serde_sjson" }
steamlocate = "2.0.0-beta.2"
strip-ansi-escapes = "0.2.0"
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
usvg = "0.25.0"
zip = { version = "2.1.3", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
[profile.dev.package.backtrace]
opt-level = 3
@ -26,3 +71,9 @@ strip = "debuginfo"
[profile.release-lto]
inherits = "release"
lto = true
[profile.perf]
inherits = "release"
strip = false
lto = true
debug = "line-tables-only"


@ -1,5 +1,13 @@
set positional-arguments
fly_target := "main"
build-perf-dtmt:
cargo build --profile perf --bin dtmt
perf-dtmt *args='': build-perf-dtmt
perf record --call-graph dwarf ./target/perf/dtmt "$@"
ci-build: ci-build-msvc ci-build-linux
ci-build-msvc:


@ -2,40 +2,48 @@
name = "dtmm"
version = "0.1.0"
edition = "2021"
authors = ["Lucas Schwiderski <lucas@lschwiderski.de>"]
description = "DTMM is a GUI application to install and manage mods for the game."
documentation = "https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki"
repository = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
homepage = "https://git.sclu1034.dev/bitsquid_dt/dtmt"
license-file = "LICENSE"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ansi-parser = "0.9.0"
async-recursion = "1.0.5"
bincode = "1.3.3"
bitflags = "1.3.2"
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
color-eyre = "0.6.2"
colors-transform = "0.2.11"
confy = "0.5.1"
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
druid-widget-nursery = "0.1"
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
futures = "0.3.25"
interprocess = { version = "1.2.1", default-features = false }
lazy_static = "1.4.0"
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
minijinja = "1.0.10"
nexusmods = { path = "../../lib/nexusmods", version = "*" }
oodle = { path = "../../lib/oodle", version = "*" }
open = "5.0.1"
path-slash = "0.2.1"
sdk = { path = "../../lib/sdk", version = "*" }
serde = { version = "1.0.152", features = ["derive", "rc"] }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
string_template = "0.2.1"
strip-ansi-escapes = "0.1.1"
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset"] }
tokio = { version = "1.23.0", features = ["rt", "fs", "tracing", "sync"] }
tokio-stream = { version = "0.1.12", features = ["fs"] }
tracing = "0.1.37"
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
usvg = "0.25.0"
zip = "0.6.4"
ansi-parser = { workspace = true }
async-recursion = { workspace = true }
bincode = { workspace = true }
bitflags = { workspace = true }
clap = { workspace = true }
color-eyre = { workspace = true }
colors-transform = { workspace = true }
confy = { workspace = true }
druid = { workspace = true }
druid-widget-nursery = { workspace = true }
dtmt-shared = { workspace = true }
futures = { workspace = true }
interprocess = { workspace = true }
lazy_static = { workspace = true }
luajit2-sys = { workspace = true }
minijinja = { workspace = true }
nexusmods = { workspace = true }
oodle = { workspace = true }
open = { workspace = true }
path-slash = { workspace = true }
sdk = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
strip-ansi-escapes = { workspace = true }
time = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true }
usvg = { workspace = true }
zip = { workspace = true }
[build-dependencies]
winres = "0.1.12"

BIN
crates/dtmm/assets/DTMM_logo.xcf (Stored with Git LFS) Normal file

Binary file not shown.

BIN
crates/dtmm/assets/DTMM_logo_256.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
crates/dtmm/assets/DTMM_logo_48.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
crates/dtmm/assets/DTMM_logo_64.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
crates/dtmm/assets/DTMM_logo_border.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
crates/dtmm/assets/DTMM_logo_faint_glow.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
crates/dtmm/assets/DTMM_logo_small.png (Stored with Git LFS) Normal file

Binary file not shown.


@ -8,3 +8,4 @@ Keywords=Mod;
StartupNotify=true
Categories=Utility;
MimeType=x-scheme-handler/nxm;
Icon=dtmm

BIN
crates/dtmm/assets/dtmm.ico (Stored with Git LFS) Normal file

Binary file not shown.


@ -0,0 +1,70 @@
local StateGame = require("scripts/game_states/state_game")
local StateSplash = require("scripts/game_states/game/state_splash")
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
local function hook(obj, fn_name, cb)
local orig = obj[fn_name]
obj[fn_name] = function(...)
return cb(orig, ...)
end
end
function init(mod_data, boot_gui)
local ModLoader = require("scripts/mods/mod_loader")
local mod_loader = ModLoader:new(mod_data, boot_gui)
-- The mod loader needs to remain active during game play, to
-- enable reloads
hook(StateGame, "update", function(func, dt, ...)
mod_loader:update(dt)
return func(dt, ...)
end)
-- Skip splash view
hook(StateSplash, "on_enter", function(func, self, ...)
local result = func(self, ...)
self._should_skip = true
self._continue = true
return result
end)
-- Trigger state change events
hook(GameStateMachine, "_change_state", function(func, self, ...)
local old_state = self._state
local old_state_name = old_state and self:current_state_name()
if old_state_name then
mod_loader:on_game_state_changed("exit", old_state_name, old_state)
end
local result = func(self, ...)
local new_state = self._state
local new_state_name = new_state and self:current_state_name()
if new_state_name then
mod_loader:on_game_state_changed("enter", new_state_name, new_state)
end
return result
end)
-- Trigger ending state change event
hook(GameStateMachine, "destroy", function(func, self, ...)
local old_state = self._state
local old_state_name = old_state and self:current_state_name()
if old_state_name then
mod_loader:on_game_state_changed("exit", old_state_name)
end
return func(self, ...)
end)
return mod_loader
end
return init


@ -4,6 +4,7 @@ return {
id = "{{ mod.id }}",
name = "{{ mod.name }}",
bundled = {{ mod.bundled }},
version = {{ mod.version }},
packages = {
{% for pkg in mod.packages %}
"{{ pkg }}",


@ -0,0 +1,412 @@
-- Copyright on this file is owned by Fatshark.
-- It is extracted, used and modified with permission only for
-- the purpose of loading mods within Warhammer 40,000: Darktide.
local ModLoader = class("ModLoader")
local table_unpack = table.unpack or unpack
local table_pack = table.pack or pack
local ScriptGui = require("scripts/foundation/utilities/script_gui")
local FONT_MATERIAL = "content/ui/fonts/arial"
local LOG_LEVELS = {
spew = 4,
info = 3,
warning = 2,
error = 1
}
local DEFAULT_SETTINGS = {
log_level = LOG_LEVELS.error,
developer_mode = false
}
local Keyboard = Keyboard
local BUTTON_INDEX_R = Keyboard.button_index("r")
local BUTTON_INDEX_LEFT_SHIFT = Keyboard.button_index("left shift")
local BUTTON_INDEX_LEFT_CTRL = Keyboard.button_index("left ctrl")
ModLoader.init = function(self, mod_data, boot_gui)
table.dump(mod_data, nil, 5, function(...) Log.info("ModLoader", ...) end)
self._mod_data = mod_data
self._gui = boot_gui
self._settings = Application.user_setting("mod_settings") or DEFAULT_SETTINGS
self._mods = {}
self._num_mods = nil
self._chat_print_buffer = {}
self._reload_data = {}
self._ui_time = 0
self._state = "scanning"
end
ModLoader.developer_mode_enabled = function(self)
return self._settings.developer_mode
end
ModLoader.set_developer_mode = function(self, enabled)
self._settings.developer_mode = enabled
end
ModLoader._draw_state_to_gui = function(self, gui, dt)
local state = self._state
local t = self._ui_time + dt
self._ui_time = t
local status_str = "Loading mods"
if state == "scanning" then
status_str = "Scanning for mods"
elseif state == "loading" or state == "initializing" then
local mod = self._mods[self._mod_load_index]
status_str = string.format("Loading mod %q", mod.name)
end
local msg = status_str .. string.rep(".", (2 * t) % 4)
ScriptGui.text(gui, msg, FONT_MATERIAL, 25, Vector3(20, 30, 1), Color.white())
end
ModLoader.remove_gui = function(self)
self._gui = nil
end
ModLoader.mod_data = function(self, id)
-- Since this primarily exists for DMF,
-- we can optimize the search for its use case of looking for the
-- mod currently being loaded
local mod_data = self._mods[self._mod_load_index]
if mod_data.id ~= id then
mod_data = nil
for _, v in ipairs(self._mods) do
if v.id == id then
mod_data = v
end
end
end
return mod_data
end
ModLoader._check_reload = function()
return Keyboard.pressed(BUTTON_INDEX_R) and
Keyboard.button(BUTTON_INDEX_LEFT_SHIFT) +
Keyboard.button(BUTTON_INDEX_LEFT_CTRL) == 2
end
ModLoader.update = function(self, dt)
local chat_print_buffer = self._chat_print_buffer
local num_delayed_prints = #chat_print_buffer
if num_delayed_prints > 0 and Managers.chat then
for i = 1, num_delayed_prints, 1 do
-- TODO: Use new chat system
-- Managers.chat:add_local_system_message(1, chat_print_buffer[i], true)
chat_print_buffer[i] = nil
end
end
local old_state = self._state
if self._settings.developer_mode and self:_check_reload() then
self._reload_requested = true
end
if self._reload_requested and old_state == "done" then
self:_reload_mods()
end
if old_state == "done" then
self:_run_callbacks("update", dt)
elseif old_state == "scanning" then
Log.info("ModLoader", "Scanning for mods")
self:_build_mod_table()
self._state = self:_load_mod(1)
self._ui_time = 0
elseif old_state == "loading" then
local handle = self._loading_resource_handle
if ResourcePackage.has_loaded(handle) then
ResourcePackage.flush(handle)
local mod = self._mods[self._mod_load_index]
local next_index = mod.package_index + 1
local mod_data = mod.data
if next_index <= #mod_data.packages then
self:_load_package(mod, next_index)
else
self._state = "initializing"
end
end
elseif old_state == "initializing" then
local mod = self._mods[self._mod_load_index]
local mod_data = mod.data
Log.info("ModLoader", "Initializing mod %q", mod.name)
mod.state = "running"
local ok, object = xpcall(mod_data.run, function(err)
if type(err) == "string" then
return err .. "\n" .. Script.callstack()
else
return err
end
end)
if not ok then
if object.error then
object = string.format(
"%s\n<<Lua Stack>>\n%s\n<</Lua Stack>>\n<<Lua Locals>>\n%s\n<</Lua Locals>>\n<<Lua Self>>\n%s\n<</Lua Self>>",
object.error, object.traceback, object.locals, object.self)
end
Log.error("ModLoader", "Failed 'run' for %q: %s", mod.name, object)
end
mod.object = object or {}
self:_run_callback(mod, "init", self._reload_data[mod.id])
Log.info("ModLoader", "Finished loading %q", mod.name)
self._state = self:_load_mod(self._mod_load_index + 1)
end
local gui = self._gui
if gui then
self:_draw_state_to_gui(gui, dt)
end
if old_state ~= self._state then
Log.info("ModLoader", "%s -> %s", old_state, self._state)
end
end
ModLoader.all_mods_loaded = function(self)
return self._state == "done"
end
ModLoader.destroy = function(self)
self:_run_callbacks("on_destroy")
self:unload_all_mods()
end
ModLoader._run_callbacks = function(self, callback_name, ...)
for i = 1, self._num_mods, 1 do
local mod = self._mods[i]
if mod and not mod.callbacks_disabled then
self:_run_callback(mod, callback_name, ...)
end
end
end
ModLoader._run_callback = function(self, mod, callback_name, ...)
local object = mod.object
local cb = object[callback_name]
if not cb then
return
end
local args = table_pack(...)
local success, val = xpcall(
function() return cb(object, table_unpack(args)) end,
function(err)
if type(err) == "string" then
return err .. "\n" .. Script.callstack()
else
return err
end
end
)
if success then
return val
else
Log.error("ModLoader", "Failed to run callback %q for mod %q with id %q. Disabling callbacks until reload.",
callback_name, mod.name, mod.id)
if val.error then
Log.error("ModLoader",
"Error: %s\n<<Lua Stack>>\n%s<</Lua Stack>>\n<<Lua Locals>>\n%s<</Lua Locals>>\n<<Lua Self>>\n%s<</Lua Self>>",
val.error, val.traceback, val.locals, val.self)
else
Log.error("ModLoader", "Error: %s", val or "[unknown error]")
end
mod.callbacks_disabled = true
end
end
ModLoader._start_scan = function(self)
Log.info("ModLoader", "Starting mod scan")
self._state = "scanning"
end
ModLoader._build_mod_table = function(self)
fassert(table.is_empty(self._mods), "Trying to add mods to non-empty mod table")
for i, mod_data in ipairs(self._mod_data) do
Log.info(
"ModLoader",
"mods[%d] = id=%q | name=%q | version=%q | bundled=%s",
i,
mod_data.id,
mod_data.name,
mod_data.version,
tostring(mod_data.bundled)
)
self._mods[i] = {
id = mod_data.id,
state = "not_loaded",
callbacks_disabled = false,
name = mod_data.name,
loaded_packages = {},
packages = mod_data.packages,
data = mod_data,
bundled = mod_data.bundled or false,
}
end
self._num_mods = #self._mods
Log.info("ModLoader", "Found %i mods", self._num_mods)
end
ModLoader._load_mod = function(self, index)
self._ui_time = 0
local mods = self._mods
local mod = mods[index]
if not mod then
table.clear(self._reload_data)
return "done"
end
Log.info("ModLoader", "Loading mod %q", mod.id)
mod.state = "loading"
Crashify.print_property(string.format("Mod:%s", mod.name), true)
self._mod_load_index = index
if mod.bundled and mod.packages[1] then
self:_load_package(mod, 1)
return "loading"
else
return "initializing"
end
end
ModLoader._load_package = function(self, mod, index)
mod.package_index = index
local package_name = mod.packages[index]
if not package_name then
return
end
Log.info("ModLoader", "Loading package %q", package_name)
local resource_handle = Application.resource_package(package_name)
self._loading_resource_handle = resource_handle
ResourcePackage.load(resource_handle)
table.insert(mod.loaded_packages, resource_handle)
end
ModLoader.unload_all_mods = function(self)
if self._state ~= "done" then
Log.error("ModLoader", "Mods can't be unloaded, mod state is not \"done\". current: %q", self._state)
return
end
Log.info("ModLoader", "Unload all mod packages")
for i = self._num_mods, 1, -1 do
local mod = self._mods[i]
if mod then
self:unload_mod(i)
end
self._mods[i] = nil
end
self._num_mods = nil
self._state = "unloaded"
end
ModLoader.unload_mod = function(self, index)
local mod = self._mods[index]
if mod then
Log.info("ModLoader", "Unloading %q.", mod.name)
for _, handle in ipairs(mod.loaded_packages) do
ResourcePackage.unload(handle)
Application.release_resource_package(handle)
end
mod.state = "not_loaded"
else
Log.error("ModLoader", "Mod index %i can't be unloaded, has not been loaded", index)
end
end
ModLoader._reload_mods = function(self)
Log.info("ModLoader", "reloading mods")
for i = 1, self._num_mods, 1 do
local mod = self._mods[i]
if mod and mod.state == "running" then
Log.info("ModLoader", "reloading %s", mod.name)
self._reload_data[mod.id] = self:_run_callback(mod, "on_reload")
else
Log.info("ModLoader", "not reloading mod, state: %s", mod.state)
end
end
self:unload_all_mods()
self:_start_scan()
self._reload_requested = false
end
ModLoader.on_game_state_changed = function(self, status, state_name, state_object)
if self._state == "done" then
self:_run_callbacks("on_game_state_changed", status, state_name, state_object)
else
Log.warning("ModLoader", "Ignored on_game_state_changed call due to being in state %q", self._state)
end
end
ModLoader.print = function(self, level, str, ...)
local f = Log[level]
if f then
f("ModLoader", str, ...)
else
local message = string.format("[ModLoader][" .. level .. "] " .. str, ...)
local log_level = LOG_LEVELS[level] or 99
if log_level <= 2 then
print(message)
end
end
end
return ModLoader


@ -12,12 +12,13 @@ local log = function(category, format, ...)
end
log("mod_main", "Initializing mods...")
log("mod_main", "[DTMM] Deployment data:\n{{ deployment_info }}")
local require_store = {}
-- This token is treated as a string template and filled by DTMM during deployment.
-- This allows hiding unsafe I/O functions behind a setting.
-- It's also a valid table definition, thereby degrading gracefully when not replaced.
-- When not replaced, it's also a valid table definition, thereby degrading gracefully.
local is_io_enabled = {{ is_io_enabled }} -- luacheck: ignore 113
local lua_libs = {
debug = debug,
@ -105,10 +106,16 @@ end
require("scripts/main")
log("mod_main", "'scripts/main' loaded")
-- Inject our state into the game. The state needs to run after `StateGame._init_managers`,
-- since some parts of DMF, and presumably other mods, depend on some of those managers to exist.
-- We need to inject two states into two different state machines:
-- First, we inject one into the `"Main"` state machine at a specific location, so that we're
-- still early in the process, but right after `StateRequireScripts` where most game files
-- are already available to `require` and hook.
-- This is where the `ModLoader` is created initially.
-- Then, we inject into the very first position of the `"Game"` state machine. This runs right
-- after `StateGame._init_managers`, at which point all the parts needed for DMF and other mods
-- have been initialized.
-- This is where `ModLoader` will finally start loading mods.
local function patch_mod_loading_state()
local StateBootSubStateBase = require("scripts/game_states/boot/state_boot_sub_state_base")
local StateBootLoadDML = class("StateBootLoadDML", "StateBootSubStateBase")
local StateGameLoadMods = class("StateGameLoadMods")
@ -121,19 +128,21 @@ local function patch_mod_loading_state()
self._package_manager = package_manager
self._package_handles = {
["packages/mods"] = package_manager:load("packages/mods", "StateBootDML", nil),
["packages/dml"] = package_manager:load("packages/dml", "StateBootDML", nil),
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadDML", nil),
}
end
StateBootLoadDML._state_update = function(self, dt)
StateBootLoadDML._state_update = function(self, _)
local package_manager = self._package_manager
if package_manager:update() then
local DML = require("scripts/mods/dml/init")
local mod_data = require("scripts/mods/mod_data")
local mod_loader = DML.create_loader(mod_data)
local create_mod_loader = require("scripts/mods/init")
local mod_loader = create_mod_loader(mod_data)
Managers.mod = mod_loader
log("StateBootLoadDML", "DML loaded, exiting")
return true, false
end
@ -148,9 +157,7 @@ local function patch_mod_loading_state()
self._next_state_params = params
end
function StateGameLoadMods:update(main_dt)
local state = self._loading_state
function StateGameLoadMods:update(_)
-- We're relying on the fact that DML internally makes sure
-- that `Managers.mod:update()` is being called appropriately.
-- The implementation as of this writing is to hook `StateGame.update`.
@ -188,8 +195,6 @@ local function patch_mod_loading_state()
GameStateMachine_init(self, parent, StateGameLoadMods, params, creation_context, state_change_callbacks, name)
-- And since we're done now, we can revert the function to its original
GameStateMachine.init = GameStateMachine_init
return
else
-- In all other cases, simply call the original
GameStateMachine_init(self, parent, start_state, params, creation_context, state_change_callbacks, name)
@ -207,3 +212,5 @@ function init()
Main:init()
end
-- vim: ft=lua

7
crates/dtmm/build.rs Normal file

@ -0,0 +1,7 @@
fn main() {
if cfg!(target_os = "windows") {
let mut res = winres::WindowsResource::new();
res.set_icon("assets/dtmm.ico");
res.compile().unwrap();
}
}


@ -161,27 +161,21 @@ where
}
pub(crate) fn check_mod_order(state: &ActionState) -> Result<()> {
{
let first = state.mods.get(0);
if first.is_none() || !(first.unwrap().id == "dml" && first.unwrap().enabled) {
// TODO: Add a suggestion where to get it, once that's published
eyre::bail!("'Darktide Mod Loader' needs to be installed, enabled and at the top of the load order");
}
}
if tracing::enabled!(tracing::Level::DEBUG) {
let order = state.mods.iter().filter(|i| i.enabled).enumerate().fold(
String::new(),
|mut s, (i, info)| {
let order = state
.mods
.iter()
.enumerate()
.filter(|(_, i)| i.enabled)
.fold(String::new(), |mut s, (i, info)| {
s.push_str(&format!("{}: {} - {}\n", i, info.id, info.name));
s
},
);
});
tracing::debug!("Mod order:\n{}", order);
}
for (i, mod_info) in state.mods.iter().filter(|i| i.enabled).enumerate() {
for (i, mod_info) in state.mods.iter().enumerate().filter(|(_, i)| i.enabled) {
for dep in &mod_info.depends {
let dep_info = state.mods.iter().enumerate().find(|(_, m)| m.id == dep.id);


@ -1,4 +1,3 @@
use std::collections::HashMap;
use std::io::{Cursor, ErrorKind};
use std::path::{Path, PathBuf};
use std::str::FromStr;
@ -16,7 +15,6 @@ use sdk::{
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
};
use serde::{Deserialize, Serialize};
use string_template::Template;
use time::OffsetDateTime;
use tokio::fs::{self, DirEntry};
use tokio::io::AsyncWriteExt;
@ -28,7 +26,6 @@ use crate::state::{ActionState, PackageInfo};
pub const MOD_BUNDLE_NAME: &str = "packages/mods";
pub const BOOT_BUNDLE_NAME: &str = "packages/boot";
pub const DML_BUNDLE_NAME: &str = "packages/dml";
pub const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
pub const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
pub const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
@ -227,11 +224,7 @@ async fn copy_mod_folders(state: Arc<ActionState>) -> Result<Vec<String>> {
let mut tasks = Vec::new();
for mod_info in state
.mods
.iter()
.filter(|m| m.id != "dml" && m.enabled && !m.bundled)
{
for mod_info in state.mods.iter().filter(|m| m.enabled && !m.bundled) {
let span = tracing::trace_span!("copying legacy mod", name = mod_info.name);
let _enter = span.enter();
@ -268,6 +261,7 @@ fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
id: String,
name: String,
bundled: bool,
version: String,
init: String,
data: Option<String>,
localization: Option<String>,
@ -275,6 +269,8 @@ fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
}
let mut env = Environment::new();
env.set_trim_blocks(true);
env.set_lstrip_blocks(true);
env.add_template("mod_data.lua", include_str!("../../assets/mod_data.lua.j2"))
.wrap_err("Failed to compile template for `mod_data.lua`")?;
let tmpl = env
@ -285,7 +281,7 @@ fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
.mods
.iter()
.filter_map(|m| {
if m.id == "dml" || !m.enabled {
if !m.enabled {
return None;
}
@ -293,6 +289,7 @@ fn build_mod_data_lua(state: Arc<ActionState>) -> Result<String> {
id: m.id.clone(),
name: m.name.clone(),
bundled: m.bundled,
version: m.version.clone(),
init: m.resources.init.to_string_lossy().to_string(),
data: m
.resources
@ -327,31 +324,29 @@ async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
let mut bundles = Vec::new();
{
tracing::trace!("Building mod data script");
let span = tracing::debug_span!("Building mod data script");
let mut add_lua_asset = |name: &str, data: &str| {
let span = tracing::info_span!("Compiling Lua", name, data_len = data.len());
let _enter = span.enter();
let lua = build_mod_data_lua(state.clone()).wrap_err("Failed to build Lua mod data")?;
tracing::trace!("Compiling mod data script");
let file =
lua::compile(MOD_DATA_SCRIPT, lua).wrap_err("Failed to compile mod data Lua file")?;
tracing::trace!("Compile mod data script");
let file = lua::compile(name.to_string(), data).wrap_err("Failed to compile Lua")?;
mod_bundle.add_file(file);
}
Ok::<_, Report>(())
};
build_mod_data_lua(state.clone())
.wrap_err("Failed to build 'mod_data.lua'")
.and_then(|data| add_lua_asset(MOD_DATA_SCRIPT, &data))?;
add_lua_asset("scripts/mods/init", include_str!("../../assets/init.lua"))?;
add_lua_asset(
"scripts/mods/mod_loader",
include_str!("../../assets/mod_loader.lua"),
)?;
tracing::trace!("Preparing tasks to deploy bundle files");
for mod_info in state
.mods
.iter()
.filter(|m| m.id != "dml" && m.enabled && m.bundled)
{
for mod_info in state.mods.iter().filter(|m| m.enabled && m.bundled) {
let span = tracing::trace_span!("building mod packages", name = mod_info.name);
let _enter = span.enter();
@ -458,7 +453,10 @@ async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
}
#[tracing::instrument(skip_all)]
async fn patch_boot_bundle(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
async fn patch_boot_bundle(
state: Arc<ActionState>,
deployment_info: &String,
) -> Result<Vec<Bundle>> {
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));
@ -499,88 +497,28 @@ async fn patch_boot_bundle(state: Arc<ActionState>) -> Result<Vec<Bundle>> {
boot_bundle.add_file(f);
}
{
tracing::trace!("Handling DML packages and bundle");
let span = tracing::trace_span!("handle DML");
let _enter = span.enter();
let mut variant = BundleFileVariant::new();
let mod_info = state
.mods
.iter()
.find(|m| m.id == "dml")
.ok_or_else(|| eyre::eyre!("DML not found in mod list"))?;
let pkg_info = mod_info
.packages
.get(0)
.ok_or_else(|| eyre::eyre!("invalid mod package for DML"))
.with_suggestion(|| "Re-download and import the newest version.".to_string())?;
let bundle_name = format!("{:016x}", Murmur64::hash(&pkg_info.name));
let src = state.mod_dir.join(&mod_info.id).join(&bundle_name);
{
let bin = fs::read(&src)
.await
.wrap_err_with(|| format!("Failed to read bundle file '{}'", src.display()))?;
let name = Bundle::get_name_from_path(&state.ctx, &src);
let dml_bundle = Bundle::from_binary(&state.ctx, name, bin)
.wrap_err_with(|| format!("Failed to parse bundle '{}'", src.display()))?;
bundles.push(dml_bundle);
};
{
let dest = bundle_dir.join(&bundle_name);
let pkg_name = pkg_info.name.clone();
let mod_name = mod_info.name.clone();
tracing::debug!(
"Copying bundle {} for mod {}: {} -> {}",
pkg_name,
mod_name,
src.display(),
dest.display()
);
// We attempt to remove any previous file, so that the hard link can be created.
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
// may be possible despite an error here, or the error will be reported by it anyways.
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
// hashes, it's low enough for now, and the setup required to detect
// "game bundle vs mod bundle" is non-trivial.
let _ = fs::remove_file(&dest).await;
fs::copy(&src, &dest).await.wrap_err_with(|| {
format!(
"Failed to copy bundle {pkg_name} for mod {mod_name}. Src: {}, dest: {}",
src.display(),
dest.display()
)
})?;
}
let pkg = make_package(pkg_info).wrap_err("Failed to create package file for dml")?;
variant.set_data(pkg.to_binary()?);
let mut f = BundleFile::new(DML_BUNDLE_NAME.to_string(), BundleFileType::Package);
f.add_variant(variant);
boot_bundle.add_file(f);
}
{
let span = tracing::debug_span!("Importing mod main script");
let _enter = span.enter();
let is_io_enabled = format!("{}", state.is_io_enabled);
let mut data = HashMap::new();
data.insert("is_io_enabled", is_io_enabled.as_str());
let mut env = Environment::new();
env.set_trim_blocks(true);
env.set_lstrip_blocks(true);
env.add_template("mod_main.lua", include_str!("../../assets/mod_main.lua.j2"))
.wrap_err("Failed to compile template for `mod_main.lua`")?;
let tmpl = env
.get_template("mod_main.lua")
.wrap_err("Failed to get template `mod_main.lua`")?;
let is_io_enabled = if state.is_io_enabled { "true" } else { "false" };
let deployment_info = deployment_info.replace("\"", "\\\"").replace("\n", "\\n");
let lua = tmpl
.render(minijinja::context!(is_io_enabled => is_io_enabled, deployment_info => deployment_info))
.wrap_err("Failed to render template `mod_main.lua`")?;
let tmpl = include_str!("../../assets/mod_main.lua");
let lua = Template::new(tmpl).render(&data);
tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua);
let file =
lua::compile(MOD_BOOT_SCRIPT, lua).wrap_err("Failed to compile mod main Lua file")?;
let file = lua::compile(MOD_BOOT_SCRIPT.to_string(), lua)
.wrap_err("Failed to compile mod main Lua file")?;
boot_bundle.add_file(file);
}
@ -640,14 +578,10 @@ where
}
#[tracing::instrument(skip_all, fields(bundles = bundles.as_ref().len()))]
async fn write_deployment_data<B>(
state: Arc<ActionState>,
bundles: B,
mod_folders: Vec<String>,
) -> Result<()>
where
B: AsRef<[Bundle]>,
{
fn build_deployment_data(
bundles: impl AsRef<[Bundle]>,
mod_folders: impl AsRef<[String]>,
) -> Result<String> {
let info = DeploymentData {
timestamp: OffsetDateTime::now_utc(),
bundles: bundles
@ -656,16 +590,13 @@ where
.map(|bundle| format!("{:x}", bundle.name().to_murmur64()))
.collect(),
// TODO:
mod_folders,
mod_folders: mod_folders
.as_ref()
.iter()
.map(|folder| folder.clone())
.collect(),
};
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
let data = serde_sjson::to_string(&info).wrap_err("Failed to serizalie deployment data")?;
fs::write(&path, &data)
.await
.wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
Ok(())
serde_sjson::to_string(&info).wrap_err("Failed to serizalize deployment data")
}
#[tracing::instrument(skip_all, fields(
@ -707,7 +638,7 @@ pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
},
async {
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
match read_sjson_file::<_, DeploymentData>(path).await {
match read_sjson_file::<_, DeploymentData>(&path).await {
Ok(data) => Ok(Some(data)),
Err(err) => {
if let Some(err) = err.downcast_ref::<std::io::Error>()
@ -715,7 +646,10 @@ pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
{
Ok(None)
} else {
Err(err).wrap_err("Failed to read deployment data")
Err(err).wrap_err(format!(
"Failed to read deployment data from: {}",
path.display()
))
}
}
}
@ -791,8 +725,11 @@ pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
.await
.wrap_err("Failed to build mod bundles")?;
let new_deployment_info = build_deployment_data(&bundles, &mod_folders)
.wrap_err("Failed to build new deployment data")?;
tracing::info!("Patch boot bundle");
let mut boot_bundles = patch_boot_bundle(state.clone())
let mut boot_bundles = patch_boot_bundle(state.clone(), &new_deployment_info)
.await
.wrap_err("Failed to patch boot bundle")?;
bundles.append(&mut boot_bundles);
@ -867,9 +804,12 @@ pub(crate) async fn deploy_mods(state: ActionState) -> Result<()> {
.wrap_err("Failed to patch bundle database")?;
tracing::info!("Writing deployment data");
write_deployment_data(state.clone(), &bundles, mod_folders)
{
let path = state.game_dir.join(DEPLOYMENT_DATA_PATH);
fs::write(&path, &new_deployment_info)
.await
.wrap_err("Failed to write deployment data")?;
.wrap_err_with(|| format!("Failed to write deployment data to '{}'", path.display()))?;
}
tracing::info!("Finished deploying mods");
Ok(())


@ -297,6 +297,7 @@ fn extract_mod_config<R: Read + Seek>(archive: &mut ZipArchive<R>) -> Result<(Mo
packages: Vec::new(),
resources,
depends: Vec::new(),
name_overrides: Default::default(),
};
Ok((cfg, root))


@ -11,7 +11,7 @@ use clap::{command, value_parser, Arg};
use color_eyre::eyre::{self, Context};
use color_eyre::{Report, Result, Section};
use druid::AppLauncher;
use interprocess::local_socket::{LocalSocketListener, LocalSocketStream};
use interprocess::local_socket::{prelude::*, GenericNamespaced, ListenerOptions};
use tokio::sync::RwLock;
use crate::controller::worker::work_thread;
@ -29,9 +29,9 @@ mod util {
}
mod ui;
// As explained in https://docs.rs/interprocess/latest/interprocess/local_socket/enum.NameTypeSupport.html
// As explained in https://docs.rs/interprocess/2.1.0/interprocess/local_socket/struct.Name.html
// namespaces are supported on both platforms we care about: Windows and Linux.
const IPC_ADDRESS: &str = "@dtmm.sock";
const IPC_ADDRESS: &str = "dtmm.sock";
#[tracing::instrument]
fn notify_nxm_download(
@ -42,7 +42,11 @@ fn notify_nxm_download(
tracing::debug!("Received Uri '{}', sending to main process.", uri.as_ref());
let mut stream = LocalSocketStream::connect(IPC_ADDRESS)
let mut stream = LocalSocketStream::connect(
IPC_ADDRESS
.to_ns_name::<GenericNamespaced>()
.expect("Invalid socket name"),
)
.wrap_err_with(|| format!("Failed to connect to '{}'", IPC_ADDRESS))
.suggestion("Make sure the main window is open.")?;
@ -130,8 +134,14 @@ fn main() -> Result<()> {
let _guard = span.enter();
let event_sink = event_sink.clone();
let server =
LocalSocketListener::bind(IPC_ADDRESS).wrap_err("Failed to create IPC listener")?;
let server = ListenerOptions::new()
.name(
IPC_ADDRESS
.to_ns_name::<GenericNamespaced>()
.expect("Invalid socket name"),
)
.create_sync()
.wrap_err("Failed to create IPC listener")?;
tracing::debug!("IPC server listening on '{}'", IPC_ADDRESS);

View file

@ -4,36 +4,40 @@ version = "0.3.0"
edition = "2021"
[dependencies]
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] }
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
color-eyre = "0.6.2"
confy = "0.5.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
libloading = "0.7.4"
nanorand = "0.7.0"
oodle = { path = "../../lib/oodle", version = "*" }
pin-project-lite = "0.2.9"
promptly = "0.3.1"
sdk = { path = "../../lib/sdk", version = "*" }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
serde = { version = "1.0.147", features = ["derive"] }
string_template = "0.2.1"
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
tracing = { version = "0.1.37", features = ["async-await"] }
zip = "0.6.3"
path-clean = "1.0.1"
path-slash = "0.2.1"
async-recursion = "1.0.2"
notify = "5.1.0"
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
shlex = "1.2.0"
async-recursion = { workspace = true }
clap = { workspace = true }
cli-table = { workspace = true }
color-eyre = { workspace = true }
confy = { workspace = true }
csv-async = { workspace = true }
dtmt-shared = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
glob = { workspace = true }
luajit2-sys = { workspace = true }
minijinja = { workspace = true }
nanorand = { workspace = true }
notify = { workspace = true }
oodle = { workspace = true }
path-clean = { workspace = true }
path-slash = { workspace = true }
pin-project-lite = { workspace = true }
promptly = { workspace = true }
sdk = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true }
zip = { workspace = true }
# Cannot be a workspace dependency when it's optional
shlex = { version = "1.2.0", optional = true }
[dev-dependencies]
tempfile = "3.3.0"
[features]
shlex-bench = ["dep:shlex"]

View file

@ -55,6 +55,7 @@ pub(crate) fn command_definition() -> Command {
)
}
/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
#[tracing::instrument]
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
let (path, mut file) = if let Some(path) = dir {
@ -102,39 +103,44 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
Ok(cfg)
}
/// Iterate over the paths in the given `Package` and
/// compile each file by its file type.
#[tracing::instrument(skip_all)]
async fn compile_package_files<P>(pkg: &Package, root: P) -> Result<Vec<BundleFile>>
where
P: AsRef<Path> + std::fmt::Debug,
{
let root = Arc::new(root.as_ref());
async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
let root = Arc::new(&cfg.dir);
let name_overrides = &cfg.name_overrides;
let tasks = pkg
.iter()
.flat_map(|(file_type, paths)| {
paths.iter().map(|path| {
.flat_map(|(file_type, names)| {
names.iter().map(|name| {
(
*file_type,
path,
name,
// Cloning the `Arc` here solves the issue that in the next `.map`, I need to
// `move` the closure parameters, but can't `move` `root` before it has been cloned.
root.clone(),
)
})
})
.map(|(file_type, path, root)| async move {
let sjson = fs::read_to_string(&path).await?;
let mut path = path.clone();
path.set_extension("");
BundleFile::from_sjson(
path.to_slash_lossy().to_string(),
file_type,
sjson,
root.as_ref(),
)
.map(|(file_type, name, root)| async move {
let path = PathBuf::from(name);
let sjson = fs::read_to_string(&path)
.await
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
let name = path.with_extension("").to_slash_lossy().to_string();
let name = if let Some(new_name) = name_overrides.get(&name) {
let new_name = match u64::from_str_radix(new_name, 16) {
Ok(hash) => IdString64::from(hash),
Err(_) => IdString64::from(new_name.clone()),
};
tracing::info!("Overriding '{}' -> '{}'", name, new_name.display());
new_name
} else {
IdString64::from(name.clone())
};
BundleFile::from_sjson(name, file_type, sjson, root.as_ref()).await
});
let results = futures::stream::iter(tasks)
@ -145,13 +151,14 @@ where
results.into_iter().collect()
}
/// Read a `.package` file, collect the referenced files
/// and compile all of them into a bundle.
#[tracing::instrument]
async fn build_package<P1, P2>(package: P1, root: P2) -> Result<Bundle>
where
P1: AsRef<Path> + std::fmt::Debug,
P2: AsRef<Path> + std::fmt::Debug,
{
let root = root.as_ref();
async fn build_package(
cfg: &ModConfig,
package: impl AsRef<Path> + std::fmt::Debug,
) -> Result<Bundle> {
let root = &cfg.dir;
let package = package.as_ref();
let mut path = root.join(package);
@ -165,7 +172,7 @@ where
.await
.wrap_err_with(|| format!("Invalid package file {}", &pkg_name))?;
let files = compile_package_files(&pkg, root).await?;
let files = compile_package_files(&pkg, cfg).await?;
let mut bundle = Bundle::new(pkg_name);
for file in files {
bundle.add_file(file);
@ -174,6 +181,8 @@ where
Ok(bundle)
}
/// Cleans the path of internal parent (`../`) or self (`./`) components,
/// and ensures that it is relative.
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = path.as_ref();
@ -254,14 +263,14 @@ pub(crate) async fn read_project_config(dir: Option<PathBuf>) -> Result<ModConfi
Ok(cfg)
}
pub(crate) async fn build<P1, P2>(
#[tracing::instrument]
pub(crate) async fn build<P>(
cfg: &ModConfig,
out_path: P1,
game_dir: Arc<Option<P2>>,
out_path: impl AsRef<Path> + std::fmt::Debug,
game_dir: Arc<Option<P>>,
) -> Result<()>
where
P1: AsRef<Path>,
P2: AsRef<Path>,
P: AsRef<Path> + std::fmt::Debug,
{
let out_path = out_path.as_ref();
@ -286,7 +295,7 @@ where
);
}
let bundle = build_package(path, &cfg.dir).await.wrap_err_with(|| {
let bundle = build_package(&cfg, path).await.wrap_err_with(|| {
format!(
"Failed to build package '{}' at '{}'",
path.display(),

View file

@ -0,0 +1,174 @@
use std::{io::Cursor, path::PathBuf};
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::{eyre::Context as _, Result};
use sdk::murmur::{HashGroup, IdString64, Murmur64};
use sdk::{BundleDatabase, FromBinary as _};
use tokio::fs;
pub(crate) fn command_definition() -> Command {
Command::new("db")
.about("Various operations regarding `bundle_database.data`.")
.subcommand_required(true)
.subcommand(
Command::new("list-files")
.about("List bundle contents")
.arg(
Arg::new("database")
.required(true)
.help("Path to the bundle database")
.value_parser(value_parser!(PathBuf)),
)
.arg(
Arg::new("bundle")
.help("The bundle name. If omitted, all bundles will be listed.")
.required(false),
),
)
.subcommand(
Command::new("list-bundles").about("List bundles").arg(
Arg::new("database")
.required(true)
.help("Path to the bundle database")
.value_parser(value_parser!(PathBuf)),
),
)
.subcommand(
Command::new("find-file")
.about("Find the bundle a file belongs to")
.arg(
Arg::new("database")
.required(true)
.help("Path to the bundle database")
.value_parser(value_parser!(PathBuf)),
)
.arg(
Arg::new("file-name")
.required(true)
.help("Name of the file. May be a hash in hex representation or a string"),
),
)
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
let Some((op, sub_matches)) = matches.subcommand() else {
unreachable!("clap is configured to require a subcommand");
};
let database = {
let path = sub_matches
.get_one::<PathBuf>("database")
.expect("argument is required");
let binary = fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
let mut r = Cursor::new(binary);
BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?
};
match op {
"list-files" => {
let index = database.files();
if let Some(bundle) = sub_matches.get_one::<String>("bundle") {
let hash = u64::from_str_radix(bundle, 16)
.map(Murmur64::from)
.wrap_err("Invalid hex sequence")?;
if let Some(files) = index.get(&hash) {
for file in files {
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
let extension = file.extension.ext_name();
println!("{}.{}", name.display(), extension);
}
} else {
tracing::info!("Bundle {} not found in the database", bundle);
}
} else {
for (bundle_hash, files) in index.iter() {
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
match bundle_name {
IdString64::String(name) => {
println!("{:016x} {}", bundle_hash, name);
}
IdString64::Hash(hash) => {
println!("{:016x}", hash);
}
}
for file in files {
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
let extension = file.extension.ext_name();
match name {
IdString64::String(name) => {
println!("\t{:016x}.{:<12} {}", file.name, extension, name);
}
IdString64::Hash(hash) => {
println!("\t{:016x}.{}", hash, extension);
}
}
}
println!();
}
}
Ok(())
}
"list-bundles" => {
for bundle_hash in database.bundles().keys() {
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
match bundle_name {
IdString64::String(name) => {
println!("{:016x} {}", bundle_hash, name);
}
IdString64::Hash(hash) => {
println!("{:016x}", hash);
}
}
}
Ok(())
}
"find-file" => {
let name = sub_matches
.get_one::<String>("file-name")
.expect("required argument");
let name = match u64::from_str_radix(name, 16).map(Murmur64::from) {
Ok(hash) => hash,
Err(_) => Murmur64::hash(name),
};
let bundles = database.files().iter().filter_map(|(bundle_hash, files)| {
if files.iter().any(|file| file.name == name) {
Some(bundle_hash)
} else {
None
}
});
let mut found = false;
for bundle in bundles {
found = true;
println!("{:016x}", bundle);
}
if !found {
std::process::exit(1);
}
Ok(())
}
_ => unreachable!(
"clap is configured to require a subcommand, and they're all handled above"
),
}
}
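For orientation, assuming the parent binary wires this module up under its existing bundle command group (the parent command itself is not part of this diff), invocations would look roughly like:

    dtmt bundle db list-bundles ./bundle_database.data
    dtmt bundle db list-files ./bundle_database.data 0123456789abcdef
    dtmt bundle db find-file ./bundle_database.data scripts/mods/my_mod/init

The binary and group names here are assumptions; only the `db` subcommands and their arguments are defined above.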

View file

@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::eyre::{self, bail, Context, Result};
use color_eyre::{Help, Report};
use futures::future::try_join_all;
use futures::StreamExt;
@ -12,7 +12,9 @@ use sdk::{Bundle, BundleFile, CmdLine};
use tokio::fs;
use crate::cmd::util::resolve_bundle_paths;
use crate::shell_parse::ShellParser;
#[inline]
fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
match Pattern::new(s) {
Ok(p) => Ok(p),
@ -20,6 +22,7 @@ fn parse_glob_pattern(s: &str) -> Result<Pattern, String> {
}
}
#[inline]
fn flatten_name(s: &str) -> String {
s.replace('/', "_")
}
@ -131,26 +134,29 @@ async fn parse_command_line_template(tmpl: &String) -> Result<CmdLine> {
let mut cmd = if matches!(fs::try_exists(tmpl).await, Ok(true)) {
let path = PathBuf::from(tmpl);
if path.file_name() == Some(OsStr::new("main.py")) {
let arg = path.display().to_string();
let mut cmd = CmdLine::new("python");
cmd.arg(shlex::quote(&arg).to_string());
cmd.arg(path);
cmd
} else {
CmdLine::new(path)
}
} else {
let Some(args) = shlex::split(tmpl) else {
eyre::bail!("Invalid shell syntax");
};
let mut parsed = ShellParser::new(tmpl.as_bytes());
// Safety: The initial `tmpl` was a `&String` (i.e. valid UTF-8), and `ShellParser` does not
// insert or remove characters, nor does it split multi-byte UTF-8 characters (it only splits
// on single-byte ASCII delimiters). So the resulting byte stream is still valid UTF-8.
let mut cmd = CmdLine::new(unsafe {
let bytes = parsed.next().expect("Template is not empty");
String::from_utf8_unchecked(bytes.to_vec())
});
// We already checked that the template is not empty
let mut cmd = CmdLine::new(args[0].clone());
let mut it = args.iter();
// Skip the first one, that's the command name
it.next();
while let Some(arg) = parsed.next() {
// Safety: See above.
cmd.arg(unsafe { String::from_utf8_unchecked(arg.to_vec()) });
}
for arg in it {
cmd.arg(arg);
if parsed.errored {
bail!("Invalid command line template");
}
cmd

View file

@ -36,6 +36,18 @@ enum OutputFormat {
Text,
}
fn format_byte_size(size: usize) -> String {
if size < 1024 {
format!("{} Bytes", size)
} else if size < 1024 * 1024 {
format!("{} kB", size / 1024)
} else if size < 1024 * 1024 * 1024 {
format!("{} MB", size / (1024 * 1024))
} else {
format!("{} GB", size / (1024 * 1024 * 1024))
}
}
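A few hypothetical spot checks for the new helper; the divisors are 1024-based and the integer division truncates:

    assert_eq!(format_byte_size(512), "512 Bytes");
    assert_eq!(format_byte_size(1536), "1 kB");
    assert_eq!(format_byte_size(3 * 1024 * 1024), "3 MB");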
#[tracing::instrument(skip(ctx))]
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
where
@ -50,7 +62,11 @@ where
match fmt {
OutputFormat::Text => {
println!("Bundle: {}", bundle.name().display());
println!(
"Bundle: {} ({:016x})",
bundle.name().display(),
bundle.name()
);
for f in bundle.files().iter() {
if f.variants().len() != 1 {
@ -63,9 +79,10 @@ where
let v = &f.variants()[0];
println!(
"\t{}.{}: {} bytes",
"\t{}.{}: {} ({})",
f.base_name().display(),
f.file_type().ext_name(),
format_byte_size(v.size()),
v.size()
);
}

View file

@ -1,6 +1,7 @@
use clap::{ArgMatches, Command};
use color_eyre::eyre::Result;
mod db;
mod decompress;
mod extract;
mod inject;
@ -14,6 +15,7 @@ pub(crate) fn command_definition() -> Command {
.subcommand(extract::command_definition())
.subcommand(inject::command_definition())
.subcommand(list::command_definition())
.subcommand(db::command_definition())
}
#[tracing::instrument(skip_all)]
@ -23,6 +25,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,
Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await,
Some(("list", sub_matches)) => list::run(ctx, sub_matches).await,
Some(("db", sub_matches)) => db::run(ctx, sub_matches).await,
_ => unreachable!(
"clap is configured to require a subcommand, and they're all handled above"
),

View file

@ -351,6 +351,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
},
depends: vec![ModDependency::ID(String::from("DMF"))],
bundled: true,
name_overrides: HashMap::new(),
};
tracing::debug!(?dtmt_cfg);

View file

@ -1,18 +1,30 @@
use std::collections::HashMap;
use std::path::PathBuf;
use clap::{Arg, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::Help;
use futures::{StreamExt, TryStreamExt};
use string_template::Template;
use minijinja::Environment;
use tokio::fs::{self, DirBuilder};
const TEMPLATES: [(&str, &str); 5] = [
(
"dtmt.cfg",
r#"id = "{{id}}"
r#"//
// This is your mod's main configuration file. It tells DTMT how to build the mod,
// and DTMM what to display to your users.
// Certain fields have been pre-filled by the template; the ones commented out (`//`)
// are optional.
//
// A unique identifier (preferably lower case, alphanumeric)
id = "{{id}}"
// The display name that your users will see.
// This doesn't have to be unique, but you still want to avoid being confused with other
// mods.
name = "{{name}}"
// It's good practice to increase this number whenever you publish changes.
// It's up to you if you use SemVer or something simpler like `1970-12-24`. It should sort and
// compare well, though.
version = "0.1.0"
// author = ""
@ -32,16 +44,25 @@ categories = [
// A list of mod IDs that this mod depends on. You can find
// those IDs by downloading the mod and extracting their `dtmt.cfg`.
// To make your fellow modders' lives easier, publish your own mods' IDs
// somewhere visible, such as the Nexusmods page.
depends = [
DMF
]
// The primary resources that serve as the entry point to your
// mod's code. Except for very specific use cases, the generated
// values shouldn't be changed.
resources = {
init = "scripts/mods/{{id}}/init"
data = "scripts/mods/{{id}}/data"
localization = "scripts/mods/{{id}}/localization"
}
// The list of packages, or bundles, to build.
// Each one corresponds to a package definition in the named folder.
// For mods that contain only code and/or a few small assets, a single
// package will suffice.
packages = [
"packages/mods/{{id}}"
]
@ -59,7 +80,6 @@ packages = [
r#"local mod = get_mod("{{id}}")
-- Your mod code goes here.
-- https://vmf-docs.verminti.de
"#,
),
(
@ -137,19 +157,23 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
tracing::debug!(root = %root.display(), name, id);
let mut data = HashMap::new();
data.insert("name", name.as_str());
data.insert("id", id.as_str());
let render_ctx = minijinja::context!(name => name.as_str(), id => id.as_str());
let env = Environment::new();
let templates = TEMPLATES
.iter()
.map(|(path_tmpl, content_tmpl)| {
let path = Template::new(path_tmpl).render(&data);
let content = Template::new(content_tmpl).render(&data);
(root.join(path), content)
env.render_str(path_tmpl, &render_ctx)
.wrap_err_with(|| format!("Failed to render template: {}", path_tmpl))
.and_then(|path| {
env.render_named_str(&path, content_tmpl, &render_ctx)
.wrap_err_with(|| format!("Failed to render template '{}'", &path))
.map(|content| (root.join(path), content))
})
.map(|(path, content)| async move {
})
.map(|res| async move {
match res {
Ok((path, content)) => {
let dir = path
.parent()
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
@ -158,13 +182,20 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
.recursive(true)
.create(&dir)
.await
.wrap_err_with(|| format!("Failed to create directory {}", dir.display()))?;
.wrap_err_with(|| {
format!("Failed to create directory {}", dir.display())
})?;
tracing::trace!("Writing file {}", path.display());
fs::write(&path, content.as_bytes())
.await
.wrap_err_with(|| format!("Failed to write content to path {}", path.display()))
.wrap_err_with(|| {
format!("Failed to write content to path {}", path.display())
})
}
Err(e) => Err(e),
}
});
futures::stream::iter(templates)
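A minimal, standalone sketch of the minijinja calls adopted above (the template string and values are illustrative only, not taken from this code base):

use minijinja::Environment;

fn render_example() -> Result<String, minijinja::Error> {
    let env = Environment::new();
    let ctx = minijinja::context!(id => "my_mod", name => "My Mod");
    // `render_str` parses and renders a one-off template without registering it first.
    env.render_str("scripts/mods/{{id}}/init", &ctx)
}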

View file

@ -1,6 +1,5 @@
use std::io::{Cursor, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::{Context, Result};
@ -8,9 +7,9 @@ use color_eyre::Help;
use dtmt_shared::ModConfig;
use path_slash::{PathBufExt, PathExt};
use tokio::fs;
use tokio::sync::Mutex;
use tokio_stream::wrappers::ReadDirStream;
use tokio_stream::StreamExt;
use zip::write::SimpleFileOptions;
use zip::ZipWriter;
use crate::cmd::build::read_project_config;
@ -51,11 +50,7 @@ pub(crate) fn command_definition() -> Command {
}
#[async_recursion::async_recursion]
async fn process_directory<P1, P2, W>(
zip: Arc<Mutex<ZipWriter<W>>>,
path: P1,
prefix: P2,
) -> Result<()>
async fn process_directory<P1, P2, W>(zip: &mut ZipWriter<W>, path: P1, prefix: P2) -> Result<()>
where
P1: AsRef<Path> + std::marker::Send,
P2: AsRef<Path> + std::marker::Send,
@ -64,9 +59,7 @@ where
let path = path.as_ref();
let prefix = prefix.as_ref();
zip.lock()
.await
.add_directory(prefix.to_slash_lossy(), Default::default())?;
zip.add_directory(prefix.to_slash_lossy(), SimpleFileOptions::default())?;
let read_dir = fs::read_dir(&path)
.await
@ -87,12 +80,11 @@ where
.await
.wrap_err_with(|| format!("Failed to read '{}'", in_path.display()))?;
{
let mut zip = zip.lock().await;
zip.start_file(out_path.to_slash_lossy(), Default::default())?;
zip.start_file(out_path.to_slash_lossy(), SimpleFileOptions::default())?;
zip.write_all(&data)?;
}
} else if t.is_dir() {
process_directory(zip.clone(), in_path, out_path).await?;
process_directory(zip, in_path, out_path).await?;
}
}
@ -107,16 +99,12 @@ where
let path = path.as_ref();
let dest = dest.as_ref();
let data = Cursor::new(Vec::new());
let zip = ZipWriter::new(data);
let zip = Arc::new(Mutex::new(zip));
let mut zip = ZipWriter::new(Cursor::new(Vec::with_capacity(1024)));
process_directory(zip.clone(), path, PathBuf::from(&cfg.id))
process_directory(&mut zip, path, PathBuf::from(&cfg.id))
.await
.wrap_err("Failed to add directory to archive")?;
let mut zip = zip.lock().await;
{
let name = PathBuf::from(&cfg.id).join("dtmt.cfg");
let path = cfg.dir.join("dtmt.cfg");
@ -125,7 +113,7 @@ where
.await
.wrap_err_with(|| format!("Failed to read mod config at {}", path.display()))?;
zip.start_file(name.to_slash_lossy(), Default::default())?;
zip.start_file(name.to_slash_lossy(), SimpleFileOptions::default())?;
zip.write_all(&data)?;
}

View file

@ -77,17 +77,14 @@ pub(crate) fn command_definition() -> Command {
)
}
async fn compile<P1, P2, P3>(
#[tracing::instrument]
async fn compile(
cfg: &ModConfig,
out_path: P1,
archive_path: P2,
game_dir: Arc<Option<P3>>,
) -> Result<()>
where
P1: AsRef<Path> + std::marker::Copy,
P2: AsRef<Path>,
P3: AsRef<Path>,
{
out_path: impl AsRef<Path> + std::fmt::Debug,
archive_path: impl AsRef<Path> + std::fmt::Debug,
game_dir: Arc<Option<impl AsRef<Path> + std::fmt::Debug>>,
) -> Result<()> {
let out_path = out_path.as_ref();
build(cfg, out_path, game_dir)
.await
.wrap_err("Failed to build bundles")?;

View file

@ -1,6 +1,7 @@
#![feature(io_error_more)]
#![feature(let_chains)]
#![feature(result_flattening)]
#![feature(test)]
#![windows_subsystem = "console"]
use std::path::PathBuf;
@ -27,6 +28,7 @@ mod cmd {
mod util;
pub mod watch;
}
mod shell_parse;
#[derive(Default, Deserialize, Serialize)]
struct GlobalConfig {

View file

@ -0,0 +1,189 @@
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ParserState {
Start,
Word,
SingleQuote,
DoubleQuote,
}
pub struct ShellParser<'a> {
bytes: &'a [u8],
offset: usize,
pub errored: bool,
}
impl<'a> ShellParser<'a> {
pub fn new(bytes: &'a [u8]) -> Self {
Self {
bytes,
offset: 0,
errored: false,
}
}
fn parse_word(&mut self) -> Option<&'a [u8]> {
// The start of the current word. Certain leading characters should be ignored,
// so this might change.
let mut start = self.offset;
let mut state = ParserState::Start;
while self.offset < self.bytes.len() {
let c = self.bytes[self.offset];
self.offset += 1;
match state {
ParserState::Start => match c {
// Ignore leading whitespace
b' ' | b'\t' | b'\n' => start += 1,
b'\'' => {
state = ParserState::SingleQuote;
start += 1;
}
b'"' => {
state = ParserState::DoubleQuote;
start += 1;
}
_ => {
state = ParserState::Word;
}
},
ParserState::Word => match c {
// Unquoted whitespace ends the current word
b' ' | b'\t' | b'\n' => {
return Some(&self.bytes[start..self.offset - 1]);
}
_ => {}
},
ParserState::SingleQuote => match c {
b'\'' => {
return Some(&self.bytes[start..(self.offset - 1)]);
}
_ => {}
},
ParserState::DoubleQuote => match c {
b'"' => {
return Some(&self.bytes[start..(self.offset - 1)]);
}
_ => {}
},
}
}
match state {
ParserState::Start => None,
ParserState::Word => Some(&self.bytes[start..self.offset]),
ParserState::SingleQuote | ParserState::DoubleQuote => {
self.errored = true;
None
}
}
}
}
impl<'a> Iterator for ShellParser<'a> {
type Item = &'a [u8];
fn next(&mut self) -> Option<Self::Item> {
self.parse_word()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_one_word() {
let mut it = ShellParser::new(b"hello");
assert_eq!(it.next(), Some("hello".as_bytes()));
assert_eq!(it.next(), None);
}
#[test]
fn test_one_single() {
let mut it = ShellParser::new(b"'hello'");
assert_eq!(it.next(), Some("hello".as_bytes()));
assert_eq!(it.next(), None);
}
#[test]
fn test_open_quote() {
let mut it = ShellParser::new(b"'hello");
assert_eq!(it.next(), None);
assert!(it.errored)
}
#[test]
fn test_ww2ogg() {
let mut it = ShellParser::new(
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
);
assert_eq!(it.next(), Some("ww2ogg.exe".as_bytes()));
assert_eq!(it.next(), Some("--pcb".as_bytes()));
assert_eq!(
it.next(),
Some("/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin".as_bytes())
);
assert_eq!(it.next(), None);
}
}
#[cfg(test)]
mod bench {
extern crate test;
use super::*;
#[cfg(feature = "shlex-bench")]
use shlex::bytes::Shlex;
use test::Bencher;
mod ww2ogg {
use super::*;
#[bench]
fn custom(b: &mut Bencher) {
let val = test::black_box(
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
);
b.iter(|| {
let it = ShellParser::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
#[cfg(feature = "shlex-bench")]
#[bench]
fn shlex(b: &mut Bencher) {
let val = test::black_box(
b"ww2ogg.exe --pcb \"/usr/share/ww2ogg/packed_cookbook_aoTuV_603.bin\"",
);
b.iter(|| {
let it = Shlex::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
}
mod one_single {
use super::*;
#[bench]
fn custom(b: &mut Bencher) {
let val = test::black_box(b"'hello'");
b.iter(|| {
let it = ShellParser::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
#[cfg(feature = "shlex-bench")]
#[bench]
fn shlex(b: &mut Bencher) {
let val = test::black_box(b"'hello'");
b.iter(|| {
let it = Shlex::new(val);
let _: Vec<_> = test::black_box(it.collect());
})
}
}
}

Binary file not shown (before: 58 KiB, after: 130 B)

@ -1 +1 @@
Subproject commit b40962a61c748756d7da293d9fff26aca019603e
Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751

View file

@ -6,11 +6,11 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ansi_term = "0.12.1"
color-eyre = "0.6.2"
serde = "1.0.152"
steamlocate = "2.0.0-alpha.0"
time = { version = "0.3.19", features = ["formatting", "local-offset", "macros"] }
tracing = "0.1.37"
tracing-error = "0.2.0"
tracing-subscriber = "0.3.16"
ansi_term = { workspace = true }
color-eyre = { workspace = true }
serde = { workspace = true }
steamlocate = { workspace = true }
time = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true }

View file

@ -1,3 +1,4 @@
use std::collections::HashMap;
use std::path::PathBuf;
use color_eyre::eyre::{OptionExt as _, WrapErr as _};
@ -67,6 +68,8 @@ pub struct ModConfig {
pub depends: Vec<ModDependency>,
#[serde(default = "default_true", skip_serializing_if = "is_true")]
pub bundled: bool,
#[serde(default)]
pub name_overrides: HashMap<String, String>,
}
pub const STEAMAPP_ID: u32 = 1361210;
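Since `name_overrides` is new in this struct, a hypothetical `dtmt.cfg` entry (written in the same SJSON style as the project template earlier in this diff) might look like this; per `compile_package_files`, the key is the extension-less, slash-separated source path, and the value is either a 64-bit hash in hex or a replacement name string:

    name_overrides = {
        "scripts/mods/my_mod/init" = "0123456789abcdef"
    }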

View file

@ -84,7 +84,7 @@ pub fn create_tracing_subscriber() {
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
let fmt_layer = fmt::layer().pretty();
let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
(Some(fmt_layer), None, None)
} else {
// Creates a layer that
@ -93,6 +93,7 @@ pub fn create_tracing_subscriber() {
// - does not print spans/targets
// - only prints time, not date
let fmt_layer = fmt::layer()
.with_writer(std::io::stderr)
.event_format(Formatter)
.fmt_fields(debug_fn(format_fields));

@ -1 +1 @@
Subproject commit 19120166f9fc7838b98c71fc348791abc820e323
Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241

View file

@ -9,10 +9,10 @@ edition = "2021"
futures = "0.3.26"
lazy_static = "1.4.0"
regex = "1.7.1"
reqwest = { version = "0.11.14" }
reqwest = { version = "0.12.4" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.94"
thiserror = "1.0.39"
thiserror = "2.0.0"
time = { version = "0.3.20", features = ["serde"] }
tracing = "0.1.37"
url = { version = "2.3.1", features = ["serde"] }

View file

@ -28,7 +28,7 @@ pub enum Error {
HTTP(#[from] reqwest::Error),
#[error("invalid URL: {0:?}")]
URLParseError(#[from] url::ParseError),
#[error("failed to deserialize '{error}': {json}")]
#[error("failed to deserialize due to {error}: {json}")]
Deserialize {
json: String,
error: serde_json::Error,
@ -37,7 +37,7 @@ pub enum Error {
InvalidHeaderValue(#[from] InvalidHeaderValue),
#[error("this error cannot happen")]
Infallible(#[from] Infallible),
#[error("invalid NXM URL '{}': {0}", .1.as_str())]
#[error("invalid NXM URL '{url}': {0}", url = .1.as_str())]
InvalidNXM(&'static str, Url),
#[error("{0}")]
Custom(String),

View file

@ -6,8 +6,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
color-eyre = "0.6.2"
tracing = "0.1.37"
color-eyre = { workspace = true }
tracing = { workspace = true }
[build-dependencies]
bindgen = "0.64.0"
bindgen = "0.71.0"

View file

@ -1,5 +1,3 @@
extern crate bindgen;
use std::env;
use std::path::PathBuf;
@ -33,7 +31,7 @@ fn main() {
.blocklist_file("stdlib.h")
// Tell cargo to invalidate the built crate whenever any of the
// included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.

View file

@ -7,6 +7,7 @@ use std::ptr;
use color_eyre::{eyre, Result};
#[allow(dead_code)]
#[allow(clippy::identity_op)]
mod bindings {
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}

View file

@ -4,24 +4,23 @@ version = "0.3.0"
edition = "2021"
[dependencies]
bitflags = "1.3.2"
byteorder = "1.4.3"
color-eyre = "0.6.2"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
fastrand = "1.8.0"
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
libloading = "0.7.4"
nanorand = "0.7.0"
pin-project-lite = "0.2.9"
serde = { version = "1.0.147", features = ["derive"] }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
oodle = { path = "../../lib/oodle", version = "*" }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
async-recursion = "1.0.2"
path-slash = "0.2.1"
async-recursion = { workspace = true }
bitflags = { workspace = true }
byteorder = { workspace = true }
color-eyre = { workspace = true }
csv-async = { workspace = true }
fastrand = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
glob = { workspace = true }
luajit2-sys = { workspace = true }
nanorand = { workspace = true }
oodle = { workspace = true }
path-slash = { workspace = true }
pin-project-lite = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
tracing-error = { workspace = true }

View file

@ -43,6 +43,7 @@ impl<T: FromBinary> FromBinary for Vec<T> {
}
pub mod sync {
use std::ffi::CStr;
use std::io::{self, Read, Seek, SeekFrom};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@ -165,25 +166,13 @@ pub mod sync {
}
fn read_string_len(&mut self, len: usize) -> Result<String> {
let mut buf = vec![0; len];
let res = self
.read_exact(&mut buf)
.map_err(Report::new)
.and_then(|_| {
String::from_utf8(buf).map_err(|err| {
let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
let bytes = format!("{:?}", err.as_bytes());
Report::new(err)
.with_section(move || bytes.header("Bytes:"))
.with_section(move || ascii.header("ASCII:"))
})
});
let pos = self.stream_position();
let res = read_string_len(self, len);
if res.is_ok() {
return res;
}
let pos = self.stream_position();
if pos.is_ok() {
res.with_section(|| {
format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
@ -243,4 +232,22 @@ pub mod sync {
Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
}
fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
let mut buf = vec![0; len];
r.read_exact(&mut buf)
.wrap_err_with(|| format!("Failed to read {} bytes", len))?;
let res = match CStr::from_bytes_until_nul(&buf) {
Ok(s) => {
let s = s.to_str()?;
Ok(s.to_string())
}
Err(_) => String::from_utf8(buf.clone()).map_err(Report::new),
};
res.wrap_err("Invalid binary for UTF8 string")
.with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCI:"))
.with_section(|| format!("{:x?}", buf).header("Bytes:"))
}
}

View file

@ -13,21 +13,21 @@ use crate::binary::ToBinary;
use crate::murmur::Murmur64;
use crate::Bundle;
use super::file::BundleFileType;
use super::filetype::BundleFileType;
const DATABASE_VERSION: u32 = 0x6;
const FILE_VERSION: u32 = 0x4;
pub struct BundleFile {
name: String,
stream: String,
platform_specific: bool,
file_time: u64,
pub name: String,
pub stream: String,
pub platform_specific: bool,
pub file_time: u64,
}
pub struct FileName {
extension: BundleFileType,
name: Murmur64,
pub extension: BundleFileType,
pub name: Murmur64,
}
pub struct BundleDatabase {
@ -36,7 +36,34 @@ pub struct BundleDatabase {
bundle_contents: HashMap<Murmur64, Vec<FileName>>,
}
// Implements the partial Murmur that's used by the engine to compute bundle resource hashes,
// but in a way that the loop can be done outside the function.
#[inline(always)]
fn add_to_resource_hash(mut k: u64, name: impl Into<u64>) -> u64 {
const M: u64 = 0xc6a4a7935bd1e995;
const R: u64 = 47;
let mut h: u64 = name.into();
k = k.wrapping_mul(M);
k ^= k >> R;
k = k.wrapping_mul(M);
h ^= k;
k = M.wrapping_mul(h);
k
}
impl BundleDatabase {
pub fn bundles(&self) -> &HashMap<Murmur64, Vec<BundleFile>> {
&self.stored_files
}
pub fn files(&self) -> &HashMap<Murmur64, Vec<FileName>> {
&self.bundle_contents
}
pub fn add_bundle(&mut self, bundle: &Bundle) {
let hash = bundle.name().to_murmur64();
let name = hash.to_string();
@ -69,20 +96,26 @@ impl BundleDatabase {
}
}
let mut resource_hash = 0;
for f in bundle.files() {
let name = f.base_name().to_murmur64();
let file_name = FileName {
extension: f.file_type(),
name: f.base_name().to_murmur64(),
name,
};
// TODO: Compute actual resource hash
self.resource_hashes.insert(hash, 0);
resource_hash = add_to_resource_hash(resource_hash, name);
// TODO: Make sure each file name only exists once. Probably best to turn
// the `Vec` into a sorted `HashSet`.
self.bundle_contents
.entry(hash)
.or_default()
.push(file_name);
}
self.resource_hashes.insert(hash, resource_hash);
}
}
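Taken together, the helper and the loop above amount to a fold over the per-file name hashes; a hypothetical standalone form (not part of this diff) would be:

    fn resource_hash_of(names: &[u64]) -> u64 {
        names.iter().fold(0, |hash, &name| add_to_resource_hash(hash, name))
    }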
@ -103,7 +136,7 @@ impl FromBinary for BundleDatabase {
let mut stored_files = HashMap::with_capacity(num_entries);
for _ in 0..num_entries {
let hash = Murmur64::from(r.read_u64()?);
let hash = r.read_u64().map(Murmur64::from)?;
let num_files = r.read_u32()? as usize;
let mut files = Vec::with_capacity(num_files);
@ -161,7 +194,7 @@ impl FromBinary for BundleDatabase {
let mut resource_hashes = HashMap::with_capacity(num_hashes);
for _ in 0..num_hashes {
let name = Murmur64::from(r.read_u64()?);
let name = r.read_u64().map(Murmur64::from)?;
let hash = r.read_u64()?;
resource_hashes.insert(name, hash);
@ -171,14 +204,14 @@ impl FromBinary for BundleDatabase {
let mut bundle_contents = HashMap::with_capacity(num_contents);
for _ in 0..num_contents {
let hash = Murmur64::from(r.read_u64()?);
let hash = r.read_u64().map(Murmur64::from)?;
let num_files = r.read_u32()? as usize;
let mut files = Vec::with_capacity(num_files);
for _ in 0..num_files {
let extension = BundleFileType::from(r.read_u64()?);
let name = Murmur64::from(r.read_u64()?);
let extension = r.read_u64().map(BundleFileType::from)?;
let name = r.read_u64().map(Murmur64::from)?;
files.push(FileName { extension, name });
}

View file

@ -5,407 +5,12 @@ use bitflags::bitflags;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Result};
use futures::future::join_all;
use serde::Serialize;
use crate::binary::sync::*;
use crate::filetype::*;
use crate::murmur::{HashGroup, IdString64, Murmur64};
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
Animation,
AnimationCurves,
Apb,
BakedLighting,
Bik,
BlendSet,
Bones,
Chroma,
CommonPackage,
Config,
Crypto,
Data,
Entity,
Flow,
Font,
Ies,
Ini,
Input,
Ivf,
Keys,
Level,
Lua,
Material,
Mod,
MouseCursor,
NavData,
NetworkConfig,
OddleNet,
Package,
Particles,
PhysicsProperties,
RenderConfig,
RtPipeline,
Scene,
Shader,
ShaderLibrary,
ShaderLibraryGroup,
ShadingEnvionmentMapping,
ShadingEnvironment,
Slug,
SlugAlbum,
SoundEnvironment,
SpuJob,
StateMachine,
StaticPVS,
Strings,
SurfaceProperties,
Texture,
TimpaniBank,
TimpaniMaster,
Tome,
Ugg,
Unit,
Upb,
VectorField,
Wav,
WwiseBank,
WwiseDep,
WwiseEvent,
WwiseMetadata,
WwiseStream,
Xml,
Unknown(Murmur64),
}
impl BundleFileType {
pub fn ext_name(&self) -> String {
match self {
BundleFileType::AnimationCurves => String::from("animation_curves"),
BundleFileType::Animation => String::from("animation"),
BundleFileType::Apb => String::from("apb"),
BundleFileType::BakedLighting => String::from("baked_lighting"),
BundleFileType::Bik => String::from("bik"),
BundleFileType::BlendSet => String::from("blend_set"),
BundleFileType::Bones => String::from("bones"),
BundleFileType::Chroma => String::from("chroma"),
BundleFileType::CommonPackage => String::from("common_package"),
BundleFileType::Config => String::from("config"),
BundleFileType::Crypto => String::from("crypto"),
BundleFileType::Data => String::from("data"),
BundleFileType::Entity => String::from("entity"),
BundleFileType::Flow => String::from("flow"),
BundleFileType::Font => String::from("font"),
BundleFileType::Ies => String::from("ies"),
BundleFileType::Ini => String::from("ini"),
BundleFileType::Input => String::from("input"),
BundleFileType::Ivf => String::from("ivf"),
BundleFileType::Keys => String::from("keys"),
BundleFileType::Level => String::from("level"),
BundleFileType::Lua => String::from("lua"),
BundleFileType::Material => String::from("material"),
BundleFileType::Mod => String::from("mod"),
BundleFileType::MouseCursor => String::from("mouse_cursor"),
BundleFileType::NavData => String::from("nav_data"),
BundleFileType::NetworkConfig => String::from("network_config"),
BundleFileType::OddleNet => String::from("oodle_net"),
BundleFileType::Package => String::from("package"),
BundleFileType::Particles => String::from("particles"),
BundleFileType::PhysicsProperties => String::from("physics_properties"),
BundleFileType::RenderConfig => String::from("render_config"),
BundleFileType::RtPipeline => String::from("rt_pipeline"),
BundleFileType::Scene => String::from("scene"),
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
BundleFileType::ShaderLibrary => String::from("shader_library"),
BundleFileType::Shader => String::from("shader"),
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
BundleFileType::SlugAlbum => String::from("slug_album"),
BundleFileType::Slug => String::from("slug"),
BundleFileType::SoundEnvironment => String::from("sound_environment"),
BundleFileType::SpuJob => String::from("spu_job"),
BundleFileType::StateMachine => String::from("state_machine"),
BundleFileType::StaticPVS => String::from("static_pvs"),
BundleFileType::Strings => String::from("strings"),
BundleFileType::SurfaceProperties => String::from("surface_properties"),
BundleFileType::Texture => String::from("texture"),
BundleFileType::TimpaniBank => String::from("timpani_bank"),
BundleFileType::TimpaniMaster => String::from("timpani_master"),
BundleFileType::Tome => String::from("tome"),
BundleFileType::Ugg => String::from("ugg"),
BundleFileType::Unit => String::from("unit"),
BundleFileType::Upb => String::from("upb"),
BundleFileType::VectorField => String::from("vector_field"),
BundleFileType::Wav => String::from("wav"),
BundleFileType::WwiseBank => String::from("wwise_bank"),
BundleFileType::WwiseDep => String::from("wwise_dep"),
BundleFileType::WwiseEvent => String::from("wwise_event"),
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
BundleFileType::WwiseStream => String::from("wwise_stream"),
BundleFileType::Xml => String::from("xml"),
BundleFileType::Unknown(s) => format!("{s:016X}"),
}
}
pub fn decompiled_ext_name(&self) -> String {
match self {
BundleFileType::Texture => String::from("dds"),
BundleFileType::WwiseBank => String::from("bnk"),
BundleFileType::WwiseStream => String::from("ogg"),
_ => self.ext_name(),
}
}
pub fn hash(&self) -> Murmur64 {
Murmur64::from(*self)
}
}
impl std::str::FromStr for BundleFileType {
type Err = color_eyre::Report;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let val = match s {
"animation_curves" => BundleFileType::AnimationCurves,
"animation" => BundleFileType::Animation,
"apb" => BundleFileType::Apb,
"baked_lighting" => BundleFileType::BakedLighting,
"bik" => BundleFileType::Bik,
"blend_set" => BundleFileType::BlendSet,
"bones" => BundleFileType::Bones,
"chroma" => BundleFileType::Chroma,
"common_package" => BundleFileType::CommonPackage,
"config" => BundleFileType::Config,
"crypto" => BundleFileType::Crypto,
"data" => BundleFileType::Data,
"entity" => BundleFileType::Entity,
"flow" => BundleFileType::Flow,
"font" => BundleFileType::Font,
"ies" => BundleFileType::Ies,
"ini" => BundleFileType::Ini,
"input" => BundleFileType::Input,
"ivf" => BundleFileType::Ivf,
"keys" => BundleFileType::Keys,
"level" => BundleFileType::Level,
"lua" => BundleFileType::Lua,
"material" => BundleFileType::Material,
"mod" => BundleFileType::Mod,
"mouse_cursor" => BundleFileType::MouseCursor,
"nav_data" => BundleFileType::NavData,
"network_config" => BundleFileType::NetworkConfig,
"oodle_net" => BundleFileType::OddleNet,
"package" => BundleFileType::Package,
"particles" => BundleFileType::Particles,
"physics_properties" => BundleFileType::PhysicsProperties,
"render_config" => BundleFileType::RenderConfig,
"rt_pipeline" => BundleFileType::RtPipeline,
"scene" => BundleFileType::Scene,
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
"shader_library" => BundleFileType::ShaderLibrary,
"shader" => BundleFileType::Shader,
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
"shading_environment" => BundleFileType::ShadingEnvironment,
"slug_album" => BundleFileType::SlugAlbum,
"slug" => BundleFileType::Slug,
"sound_environment" => BundleFileType::SoundEnvironment,
"spu_job" => BundleFileType::SpuJob,
"state_machine" => BundleFileType::StateMachine,
"static_pvs" => BundleFileType::StaticPVS,
"strings" => BundleFileType::Strings,
"surface_properties" => BundleFileType::SurfaceProperties,
"texture" => BundleFileType::Texture,
"timpani_bank" => BundleFileType::TimpaniBank,
"timpani_master" => BundleFileType::TimpaniMaster,
"tome" => BundleFileType::Tome,
"ugg" => BundleFileType::Ugg,
"unit" => BundleFileType::Unit,
"upb" => BundleFileType::Upb,
"vector_field" => BundleFileType::VectorField,
"wav" => BundleFileType::Wav,
"wwise_bank" => BundleFileType::WwiseBank,
"wwise_dep" => BundleFileType::WwiseDep,
"wwise_event" => BundleFileType::WwiseEvent,
"wwise_metadata" => BundleFileType::WwiseMetadata,
"wwise_stream" => BundleFileType::WwiseStream,
"xml" => BundleFileType::Xml,
s => eyre::bail!("Unknown type string '{}'", s),
};
Ok(val)
}
}
impl Serialize for BundleFileType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let value = self.ext_name();
value.serialize(serializer)
}
}
impl From<Murmur64> for BundleFileType {
fn from(value: Murmur64) -> Self {
Self::from(Into::<u64>::into(value))
}
}
impl From<u64> for BundleFileType {
fn from(hash: u64) -> BundleFileType {
match hash {
0x931e336d7646cc26 => BundleFileType::Animation,
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
0x3eed05ba83af5090 => BundleFileType::Apb,
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
0xaa5965f03029fa18 => BundleFileType::Bik,
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
0x18dead01056b72e9 => BundleFileType::Bones,
0xb7893adf7567506a => BundleFileType::Chroma,
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
0x82645835e6b73232 => BundleFileType::Config,
0x69108ded1e3e634b => BundleFileType::Crypto,
0x8fd0d44d20650b68 => BundleFileType::Data,
0x9831ca893b0d087d => BundleFileType::Entity,
0x92d3ee038eeb610d => BundleFileType::Flow,
0x9efe0a916aae7880 => BundleFileType::Font,
0x8f7d5a2c0f967655 => BundleFileType::Ies,
0xd526a27da14f1dc5 => BundleFileType::Ini,
0x2bbcabe5074ade9e => BundleFileType::Input,
0xfa4a8e091a91201e => BundleFileType::Ivf,
0xa62f9297dc969e85 => BundleFileType::Keys,
0x2a690fd348fe9ac5 => BundleFileType::Level,
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
0xeac0b497876adedf => BundleFileType::Material,
0x3fcdd69156a46417 => BundleFileType::Mod,
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
0x169de9566953d264 => BundleFileType::NavData,
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
0xad9c6d9ed1e5e77a => BundleFileType::Package,
0xa8193123526fad64 => BundleFileType::Particles,
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
0x27862fe24795319c => BundleFileType::RenderConfig,
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
0x9d0a795bfe818d19 => BundleFileType::Scene,
0xcce8d5b5f5ae333f => BundleFileType::Shader,
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
0xf97af9983c05b950 => BundleFileType::SpuJob,
0xa486d4045106165c => BundleFileType::StateMachine,
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
0x0d972bab10b40fd3 => BundleFileType::Strings,
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
0xcd4238c6a0c69e32 => BundleFileType::Texture,
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
0x19c792357c99f49b => BundleFileType::Tome,
0x712d6e3dd1024c9c => BundleFileType::Ugg,
0xe0a48d0be9a7453f => BundleFileType::Unit,
0xa99510c6e86dd3c2 => BundleFileType::Upb,
0xf7505933166d6755 => BundleFileType::VectorField,
0x786f65c00a816b19 => BundleFileType::Wav,
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
0x504b55235d21440e => BundleFileType::WwiseStream,
0x76015845a6003765 => BundleFileType::Xml,
_ => BundleFileType::Unknown(Murmur64::from(hash)),
}
}
}
impl From<BundleFileType> for u64 {
fn from(t: BundleFileType) -> u64 {
match t {
BundleFileType::Animation => 0x931e336d7646cc26,
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
BundleFileType::Apb => 0x3eed05ba83af5090,
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
BundleFileType::Bik => 0xaa5965f03029fa18,
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
BundleFileType::Bones => 0x18dead01056b72e9,
BundleFileType::Chroma => 0xb7893adf7567506a,
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
BundleFileType::Config => 0x82645835e6b73232,
BundleFileType::Crypto => 0x69108ded1e3e634b,
BundleFileType::Data => 0x8fd0d44d20650b68,
BundleFileType::Entity => 0x9831ca893b0d087d,
BundleFileType::Flow => 0x92d3ee038eeb610d,
BundleFileType::Font => 0x9efe0a916aae7880,
BundleFileType::Ies => 0x8f7d5a2c0f967655,
BundleFileType::Ini => 0xd526a27da14f1dc5,
BundleFileType::Input => 0x2bbcabe5074ade9e,
BundleFileType::Ivf => 0xfa4a8e091a91201e,
BundleFileType::Keys => 0xa62f9297dc969e85,
BundleFileType::Level => 0x2a690fd348fe9ac5,
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
BundleFileType::Material => 0xeac0b497876adedf,
BundleFileType::Mod => 0x3fcdd69156a46417,
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
BundleFileType::NavData => 0x169de9566953d264,
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
BundleFileType::Particles => 0xa8193123526fad64,
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
BundleFileType::RenderConfig => 0x27862fe24795319c,
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
BundleFileType::Scene => 0x9d0a795bfe818d19,
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
BundleFileType::SpuJob => 0xf97af9983c05b950,
BundleFileType::StateMachine => 0xa486d4045106165c,
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
BundleFileType::Strings => 0x0d972bab10b40fd3,
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
BundleFileType::Texture => 0xcd4238c6a0c69e32,
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
BundleFileType::Tome => 0x19c792357c99f49b,
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
BundleFileType::Unit => 0xe0a48d0be9a7453f,
BundleFileType::Upb => 0xa99510c6e86dd3c2,
BundleFileType::VectorField => 0xf7505933166d6755,
BundleFileType::Wav => 0x786f65c00a816b19,
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
BundleFileType::WwiseStream => 0x504b55235d21440e,
BundleFileType::Xml => 0x76015845a6003765,
BundleFileType::Unknown(hash) => hash.into(),
}
}
}
impl From<BundleFileType> for Murmur64 {
fn from(t: BundleFileType) -> Murmur64 {
let hash: u64 = t.into();
Murmur64::from(hash)
}
}
impl std::fmt::Display for BundleFileType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.ext_name())
}
}
use super::filetype::BundleFileType;
#[derive(Debug)]
struct BundleFileHeader {
@ -501,7 +106,7 @@ impl BundleFileVariant {
}
bitflags! {
#[derive(Default)]
#[derive(Default, Clone, Copy, Debug)]
pub struct Properties: u32 {
const DATA = 0b100;
}
@ -515,7 +120,7 @@ pub struct BundleFile {
}
impl BundleFile {
pub fn new(name: String, file_type: BundleFileType) -> Self {
pub fn new(name: impl Into<IdString64>, file_type: BundleFileType) -> Self {
Self {
file_type,
name: name.into(),
@ -647,20 +252,15 @@ impl BundleFile {
Ok(w.into_inner())
}
#[tracing::instrument(name = "File::from_sjson", skip(sjson))]
pub async fn from_sjson<P, S>(
name: String,
#[tracing::instrument("File::from_sjson", skip(sjson, name), fields(name = %name.display()))]
pub async fn from_sjson(
name: IdString64,
file_type: BundleFileType,
sjson: S,
root: P,
) -> Result<Self>
where
P: AsRef<Path> + std::fmt::Debug,
S: AsRef<str>,
{
sjson: impl AsRef<str>,
root: impl AsRef<Path> + std::fmt::Debug,
) -> Result<Self> {
match file_type {
BundleFileType::Lua => lua::compile(name.clone(), sjson)
.wrap_err_with(|| format!("Failed to compile Lua file '{}'", name)),
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
BundleFileType::Unknown(_) => {
eyre::bail!("Unknown file type. Cannot compile from SJSON");
}
@ -699,10 +299,7 @@ impl BundleFile {
s
}
pub fn matches_name<S>(&self, name: S) -> bool
where
S: Into<IdString64>,
{
pub fn matches_name(&self, name: impl Into<IdString64>) -> bool {
let name = name.into();
if self.name == name {
return true;

View file

@ -0,0 +1,400 @@
use color_eyre::{eyre, Result};
use serde::Serialize;
use crate::murmur::Murmur64;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
Animation,
AnimationCurves,
Apb,
BakedLighting,
Bik,
BlendSet,
Bones,
Chroma,
CommonPackage,
Config,
Crypto,
Data,
Entity,
Flow,
Font,
Ies,
Ini,
Input,
Ivf,
Keys,
Level,
Lua,
Material,
Mod,
MouseCursor,
NavData,
NetworkConfig,
OddleNet,
Package,
Particles,
PhysicsProperties,
RenderConfig,
RtPipeline,
Scene,
Shader,
ShaderLibrary,
ShaderLibraryGroup,
ShadingEnvionmentMapping,
ShadingEnvironment,
Slug,
SlugAlbum,
SoundEnvironment,
SpuJob,
StateMachine,
StaticPVS,
Strings,
SurfaceProperties,
Texture,
TimpaniBank,
TimpaniMaster,
Tome,
Ugg,
Unit,
Upb,
VectorField,
Wav,
WwiseBank,
WwiseDep,
WwiseEvent,
WwiseMetadata,
WwiseStream,
Xml,
Unknown(Murmur64),
}
impl BundleFileType {
pub fn ext_name(&self) -> String {
match self {
BundleFileType::AnimationCurves => String::from("animation_curves"),
BundleFileType::Animation => String::from("animation"),
BundleFileType::Apb => String::from("apb"),
BundleFileType::BakedLighting => String::from("baked_lighting"),
BundleFileType::Bik => String::from("bik"),
BundleFileType::BlendSet => String::from("blend_set"),
BundleFileType::Bones => String::from("bones"),
BundleFileType::Chroma => String::from("chroma"),
BundleFileType::CommonPackage => String::from("common_package"),
BundleFileType::Config => String::from("config"),
BundleFileType::Crypto => String::from("crypto"),
BundleFileType::Data => String::from("data"),
BundleFileType::Entity => String::from("entity"),
BundleFileType::Flow => String::from("flow"),
BundleFileType::Font => String::from("font"),
BundleFileType::Ies => String::from("ies"),
BundleFileType::Ini => String::from("ini"),
BundleFileType::Input => String::from("input"),
BundleFileType::Ivf => String::from("ivf"),
BundleFileType::Keys => String::from("keys"),
BundleFileType::Level => String::from("level"),
BundleFileType::Lua => String::from("lua"),
BundleFileType::Material => String::from("material"),
BundleFileType::Mod => String::from("mod"),
BundleFileType::MouseCursor => String::from("mouse_cursor"),
BundleFileType::NavData => String::from("nav_data"),
BundleFileType::NetworkConfig => String::from("network_config"),
BundleFileType::OddleNet => String::from("oodle_net"),
BundleFileType::Package => String::from("package"),
BundleFileType::Particles => String::from("particles"),
BundleFileType::PhysicsProperties => String::from("physics_properties"),
BundleFileType::RenderConfig => String::from("render_config"),
BundleFileType::RtPipeline => String::from("rt_pipeline"),
BundleFileType::Scene => String::from("scene"),
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
BundleFileType::ShaderLibrary => String::from("shader_library"),
BundleFileType::Shader => String::from("shader"),
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
BundleFileType::SlugAlbum => String::from("slug_album"),
BundleFileType::Slug => String::from("slug"),
BundleFileType::SoundEnvironment => String::from("sound_environment"),
BundleFileType::SpuJob => String::from("spu_job"),
BundleFileType::StateMachine => String::from("state_machine"),
BundleFileType::StaticPVS => String::from("static_pvs"),
BundleFileType::Strings => String::from("strings"),
BundleFileType::SurfaceProperties => String::from("surface_properties"),
BundleFileType::Texture => String::from("texture"),
BundleFileType::TimpaniBank => String::from("timpani_bank"),
BundleFileType::TimpaniMaster => String::from("timpani_master"),
BundleFileType::Tome => String::from("tome"),
BundleFileType::Ugg => String::from("ugg"),
BundleFileType::Unit => String::from("unit"),
BundleFileType::Upb => String::from("upb"),
BundleFileType::VectorField => String::from("vector_field"),
BundleFileType::Wav => String::from("wav"),
BundleFileType::WwiseBank => String::from("wwise_bank"),
BundleFileType::WwiseDep => String::from("wwise_dep"),
BundleFileType::WwiseEvent => String::from("wwise_event"),
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
BundleFileType::WwiseStream => String::from("wwise_stream"),
BundleFileType::Xml => String::from("xml"),
BundleFileType::Unknown(s) => format!("{s:016X}"),
}
}
pub fn decompiled_ext_name(&self) -> String {
match self {
BundleFileType::Texture => String::from("dds"),
BundleFileType::WwiseBank => String::from("bnk"),
BundleFileType::WwiseStream => String::from("ogg"),
_ => self.ext_name(),
}
}
pub fn hash(&self) -> Murmur64 {
Murmur64::from(*self)
}
}
impl std::str::FromStr for BundleFileType {
type Err = color_eyre::Report;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let val = match s {
"animation_curves" => BundleFileType::AnimationCurves,
"animation" => BundleFileType::Animation,
"apb" => BundleFileType::Apb,
"baked_lighting" => BundleFileType::BakedLighting,
"bik" => BundleFileType::Bik,
"blend_set" => BundleFileType::BlendSet,
"bones" => BundleFileType::Bones,
"chroma" => BundleFileType::Chroma,
"common_package" => BundleFileType::CommonPackage,
"config" => BundleFileType::Config,
"crypto" => BundleFileType::Crypto,
"data" => BundleFileType::Data,
"entity" => BundleFileType::Entity,
"flow" => BundleFileType::Flow,
"font" => BundleFileType::Font,
"ies" => BundleFileType::Ies,
"ini" => BundleFileType::Ini,
"input" => BundleFileType::Input,
"ivf" => BundleFileType::Ivf,
"keys" => BundleFileType::Keys,
"level" => BundleFileType::Level,
"lua" => BundleFileType::Lua,
"material" => BundleFileType::Material,
"mod" => BundleFileType::Mod,
"mouse_cursor" => BundleFileType::MouseCursor,
"nav_data" => BundleFileType::NavData,
"network_config" => BundleFileType::NetworkConfig,
"oodle_net" => BundleFileType::OddleNet,
"package" => BundleFileType::Package,
"particles" => BundleFileType::Particles,
"physics_properties" => BundleFileType::PhysicsProperties,
"render_config" => BundleFileType::RenderConfig,
"rt_pipeline" => BundleFileType::RtPipeline,
"scene" => BundleFileType::Scene,
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
"shader_library" => BundleFileType::ShaderLibrary,
"shader" => BundleFileType::Shader,
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
"shading_environment" => BundleFileType::ShadingEnvironment,
"slug_album" => BundleFileType::SlugAlbum,
"slug" => BundleFileType::Slug,
"sound_environment" => BundleFileType::SoundEnvironment,
"spu_job" => BundleFileType::SpuJob,
"state_machine" => BundleFileType::StateMachine,
"static_pvs" => BundleFileType::StaticPVS,
"strings" => BundleFileType::Strings,
"surface_properties" => BundleFileType::SurfaceProperties,
"texture" => BundleFileType::Texture,
"timpani_bank" => BundleFileType::TimpaniBank,
"timpani_master" => BundleFileType::TimpaniMaster,
"tome" => BundleFileType::Tome,
"ugg" => BundleFileType::Ugg,
"unit" => BundleFileType::Unit,
"upb" => BundleFileType::Upb,
"vector_field" => BundleFileType::VectorField,
"wav" => BundleFileType::Wav,
"wwise_bank" => BundleFileType::WwiseBank,
"wwise_dep" => BundleFileType::WwiseDep,
"wwise_event" => BundleFileType::WwiseEvent,
"wwise_metadata" => BundleFileType::WwiseMetadata,
"wwise_stream" => BundleFileType::WwiseStream,
"xml" => BundleFileType::Xml,
s => eyre::bail!("Unknown type string '{}'", s),
};
Ok(val)
}
}
impl Serialize for BundleFileType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let value = self.ext_name();
value.serialize(serializer)
}
}
impl From<Murmur64> for BundleFileType {
fn from(value: Murmur64) -> Self {
Self::from(Into::<u64>::into(value))
}
}
impl From<u64> for BundleFileType {
fn from(hash: u64) -> BundleFileType {
match hash {
0x931e336d7646cc26 => BundleFileType::Animation,
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
0x3eed05ba83af5090 => BundleFileType::Apb,
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
0xaa5965f03029fa18 => BundleFileType::Bik,
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
0x18dead01056b72e9 => BundleFileType::Bones,
0xb7893adf7567506a => BundleFileType::Chroma,
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
0x82645835e6b73232 => BundleFileType::Config,
0x69108ded1e3e634b => BundleFileType::Crypto,
0x8fd0d44d20650b68 => BundleFileType::Data,
0x9831ca893b0d087d => BundleFileType::Entity,
0x92d3ee038eeb610d => BundleFileType::Flow,
0x9efe0a916aae7880 => BundleFileType::Font,
0x8f7d5a2c0f967655 => BundleFileType::Ies,
0xd526a27da14f1dc5 => BundleFileType::Ini,
0x2bbcabe5074ade9e => BundleFileType::Input,
0xfa4a8e091a91201e => BundleFileType::Ivf,
0xa62f9297dc969e85 => BundleFileType::Keys,
0x2a690fd348fe9ac5 => BundleFileType::Level,
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
0xeac0b497876adedf => BundleFileType::Material,
0x3fcdd69156a46417 => BundleFileType::Mod,
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
0x169de9566953d264 => BundleFileType::NavData,
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
0xad9c6d9ed1e5e77a => BundleFileType::Package,
0xa8193123526fad64 => BundleFileType::Particles,
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
0x27862fe24795319c => BundleFileType::RenderConfig,
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
0x9d0a795bfe818d19 => BundleFileType::Scene,
0xcce8d5b5f5ae333f => BundleFileType::Shader,
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
0xf97af9983c05b950 => BundleFileType::SpuJob,
0xa486d4045106165c => BundleFileType::StateMachine,
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
0x0d972bab10b40fd3 => BundleFileType::Strings,
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
0xcd4238c6a0c69e32 => BundleFileType::Texture,
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
0x19c792357c99f49b => BundleFileType::Tome,
0x712d6e3dd1024c9c => BundleFileType::Ugg,
0xe0a48d0be9a7453f => BundleFileType::Unit,
0xa99510c6e86dd3c2 => BundleFileType::Upb,
0xf7505933166d6755 => BundleFileType::VectorField,
0x786f65c00a816b19 => BundleFileType::Wav,
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
0x504b55235d21440e => BundleFileType::WwiseStream,
0x76015845a6003765 => BundleFileType::Xml,
_ => BundleFileType::Unknown(Murmur64::from(hash)),
}
}
}
impl From<BundleFileType> for u64 {
fn from(t: BundleFileType) -> u64 {
match t {
BundleFileType::Animation => 0x931e336d7646cc26,
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
BundleFileType::Apb => 0x3eed05ba83af5090,
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
BundleFileType::Bik => 0xaa5965f03029fa18,
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
BundleFileType::Bones => 0x18dead01056b72e9,
BundleFileType::Chroma => 0xb7893adf7567506a,
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
BundleFileType::Config => 0x82645835e6b73232,
BundleFileType::Crypto => 0x69108ded1e3e634b,
BundleFileType::Data => 0x8fd0d44d20650b68,
BundleFileType::Entity => 0x9831ca893b0d087d,
BundleFileType::Flow => 0x92d3ee038eeb610d,
BundleFileType::Font => 0x9efe0a916aae7880,
BundleFileType::Ies => 0x8f7d5a2c0f967655,
BundleFileType::Ini => 0xd526a27da14f1dc5,
BundleFileType::Input => 0x2bbcabe5074ade9e,
BundleFileType::Ivf => 0xfa4a8e091a91201e,
BundleFileType::Keys => 0xa62f9297dc969e85,
BundleFileType::Level => 0x2a690fd348fe9ac5,
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
BundleFileType::Material => 0xeac0b497876adedf,
BundleFileType::Mod => 0x3fcdd69156a46417,
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
BundleFileType::NavData => 0x169de9566953d264,
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
BundleFileType::Particles => 0xa8193123526fad64,
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
BundleFileType::RenderConfig => 0x27862fe24795319c,
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
BundleFileType::Scene => 0x9d0a795bfe818d19,
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
BundleFileType::SpuJob => 0xf97af9983c05b950,
BundleFileType::StateMachine => 0xa486d4045106165c,
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
BundleFileType::Strings => 0x0d972bab10b40fd3,
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
BundleFileType::Texture => 0xcd4238c6a0c69e32,
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
BundleFileType::Tome => 0x19c792357c99f49b,
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
BundleFileType::Unit => 0xe0a48d0be9a7453f,
BundleFileType::Upb => 0xa99510c6e86dd3c2,
BundleFileType::VectorField => 0xf7505933166d6755,
BundleFileType::Wav => 0x786f65c00a816b19,
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
BundleFileType::WwiseStream => 0x504b55235d21440e,
BundleFileType::Xml => 0x76015845a6003765,
BundleFileType::Unknown(hash) => hash.into(),
}
}
}
impl From<BundleFileType> for Murmur64 {
fn from(t: BundleFileType) -> Murmur64 {
let hash: u64 = t.into();
Murmur64::from(hash)
}
}
impl std::fmt::Display for BundleFileType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.ext_name())
}
}
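For orientation, a minimal sketch of how this mapping is used, assuming the crate is consumed as `sdk` with the `bundle::filetype` path introduced later in this diff (both names are assumptions); the hash constant is taken from the match arms above:

use sdk::bundle::filetype::BundleFileType;

fn demo() -> color_eyre::Result<()> {
    // Extension names and Murmur64 hashes map onto each other in both directions.
    let t: BundleFileType = "lua".parse()?;
    assert_eq!(BundleFileType::from(0xa14e8dfa2cd117e2_u64).ext_name(), "lua");
    // Types without a dedicated decompiled format fall back to the extension name.
    assert_eq!(t.decompiled_ext_name(), "lua");
    assert_eq!(u64::from(t), 0xa14e8dfa2cd117e2);
    Ok(())
}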


@ -12,8 +12,10 @@ use crate::murmur::{HashGroup, IdString64, Murmur64};
pub(crate) mod database;
pub(crate) mod file;
pub(crate) mod filetype;
pub use file::{BundleFile, BundleFileType, BundleFileVariant};
pub use file::{BundleFile, BundleFileVariant};
pub use filetype::BundleFileType;
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
enum BundleFormat {


@ -15,6 +15,7 @@ use tokio::fs;
use crate::binary::sync::ReadExt;
use crate::binary::sync::WriteExt;
use crate::bundle::file::{BundleFileVariant, UserFile};
use crate::murmur::IdString64;
use crate::{BundleFile, BundleFileType};
const BITSQUID_LUAJIT_HEADER: u32 = 0x8253461B;
@ -117,17 +118,13 @@ where
}
#[tracing::instrument(skip_all)]
pub fn compile<S, C>(name: S, code: C) -> Result<BundleFile>
where
S: Into<String>,
C: AsRef<str>,
{
pub fn compile(name: impl Into<IdString64>, code: impl AsRef<str>) -> Result<BundleFile> {
let name = name.into();
let code = code.as_ref();
tracing::trace!(
"Compiling '{}', {} bytes of code",
name,
name.display(),
code.as_bytes().len()
);
@ -135,8 +132,8 @@ where
let state = lua::luaL_newstate();
lua::luaL_openlibs(state);
let name = CString::new(format!("@{name}").into_bytes())
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name))?;
let name = CString::new(format!("@{}", name.display()).into_bytes())
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name.display()))?;
match lua::luaL_loadbuffer(
state,
code.as_ptr() as _,


@ -7,13 +7,22 @@ use std::str::FromStr;
use async_recursion::async_recursion;
use color_eyre::eyre::{self, Context};
use color_eyre::Result;
use path_slash::PathBufExt;
use tokio::fs;
use crate::binary::sync::{ReadExt, WriteExt};
use crate::bundle::file::{BundleFileType, UserFile};
use crate::murmur::{HashGroup, Murmur64};
use crate::bundle::file::UserFile;
use crate::bundle::filetype::BundleFileType;
use crate::murmur::{HashGroup, IdString64, Murmur64};
/// Resolves a relative path that might contain wildcards into a list of
/// paths that exist on disk and match that wildcard.
/// This is similar to globbing in Unix shells, but with far fewer features.
///
/// The only wildcard character allowed is `*`, and only at the end of the string,
/// where it matches all files recursively in that directory.
///
/// `t` is an optional extension name that may be used to restrict a wildcard
/// path to files of that type.
#[tracing::instrument]
#[async_recursion]
async fn resolve_wildcard<P1, P2>(
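The signature of `resolve_wildcard` is cut off in this hunk, so the following is only a hedged illustration of the semantics described in the doc comment; the parameter order and return type are assumptions:

// Hypothetical call, assuming a signature roughly like
// `resolve_wildcard(wildcard, root, t) -> Result<Vec<PathBuf>>`:
//
//     let paths = resolve_wildcard("scripts/*", root, Some(BundleFileType::Lua)).await?;
//
// The trailing `*` matches every file under `<root>/scripts` recursively,
// and the optional `t` narrows the result to files of type `lua`.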
@ -90,12 +99,12 @@ where
Ok(paths)
}
type PackageType = HashMap<BundleFileType, HashSet<PathBuf>>;
type PackageType = HashMap<BundleFileType, HashSet<String>>;
type PackageDefinition = HashMap<String, HashSet<String>>;
#[derive(Default)]
pub struct Package {
_name: String,
_name: IdString64,
_root: PathBuf,
inner: PackageType,
flags: u8,
@ -116,9 +125,9 @@ impl DerefMut for Package {
}
impl Package {
pub fn new(name: String, root: PathBuf) -> Self {
pub fn new(name: impl Into<IdString64>, root: PathBuf) -> Self {
Self {
_name: name,
_name: name.into(),
_root: root,
inner: Default::default(),
flags: 1,
@ -129,17 +138,22 @@ impl Package {
self.values().fold(0, |total, files| total + files.len())
}
pub fn add_file<P: Into<PathBuf>>(&mut self, file_type: BundleFileType, name: P) {
pub fn add_file(&mut self, file_type: BundleFileType, name: impl Into<String>) {
self.inner.entry(file_type).or_default().insert(name.into());
}
#[tracing::instrument("Package::from_sjson", skip(sjson), fields(sjson_len = sjson.as_ref().len()))]
pub async fn from_sjson<P, S>(sjson: S, name: String, root: P) -> Result<Self>
pub async fn from_sjson<P, S>(
sjson: S,
name: impl Into<IdString64> + std::fmt::Debug,
root: P,
) -> Result<Self>
where
P: AsRef<Path> + std::fmt::Debug,
S: AsRef<str>,
{
let root = root.as_ref();
let name = name.into();
let definition: PackageDefinition = serde_sjson::from_str(sjson.as_ref())?;
let mut inner: PackageType = Default::default();
@ -173,7 +187,11 @@ impl Package {
continue;
};
inner.entry(t).or_default().insert(path);
tracing::debug!("Adding file {}", path.display());
inner
.entry(t)
.or_default()
.insert(path.display().to_string());
}
}
}
@ -192,11 +210,9 @@ impl Package {
pub fn to_sjson(&self) -> Result<String> {
let mut map: PackageDefinition = Default::default();
for (t, paths) in self.iter() {
for path in paths.iter() {
map.entry(t.ext_name())
.or_default()
.insert(path.display().to_string());
for (t, names) in self.iter() {
for name in names.iter() {
map.entry(t.ext_name()).or_default().insert(name.clone());
}
}
@ -222,11 +238,11 @@ impl Package {
for _ in 0..file_count {
let t = BundleFileType::from(r.read_u64()?);
let hash = Murmur64::from(r.read_u64()?);
let path = ctx.lookup_hash(hash, HashGroup::Filename);
let name = ctx.lookup_hash(hash, HashGroup::Filename);
inner
.entry(t)
.or_default()
.insert(PathBuf::from(path.display().to_string()));
.insert(name.display().to_string());
}
let flags = r.read_u8()?;
@ -239,7 +255,7 @@ impl Package {
let pkg = Self {
inner,
_name: name,
_name: name.into(),
_root: PathBuf::new(),
flags,
};
@ -255,12 +271,10 @@ impl Package {
w.write_u32(0x2b)?;
w.write_u32(self.values().flatten().count() as u32)?;
for (t, paths) in self.iter() {
for path in paths.iter() {
for (t, names) in self.iter() {
for name in names.iter() {
w.write_u64(t.hash().into())?;
let hash = Murmur64::hash(path.to_slash_lossy().as_bytes());
w.write_u64(hash.into())?;
w.write_u64(Murmur64::hash(name.as_bytes()).into())?;
}
}
@ -280,17 +294,11 @@ where
Ok(vec![UserFile::new(s.into_bytes())])
}
// #[tracing::instrument(skip_all)]
// pub fn compile(_ctx: &crate::Context, data: String) -> Result<Vec<u8>> {
// let pkg = Package::from_sjson(data)?;
// pkg.to_binary()
// }
#[cfg(test)]
mod test {
use std::path::PathBuf;
use crate::BundleFileType;
use crate::bundle::filetype::BundleFileType;
use super::resolve_wildcard;
use super::Package;


@ -1,3 +1,5 @@
#![feature(test)]
mod binary;
mod bundle;
mod context;


@ -147,14 +147,14 @@ impl Dictionary {
Ok(())
}
pub fn add(&mut self, value: String, group: HashGroup) {
let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64));
let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED));
pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));
let entry = Entry {
long,
short,
value,
value: String::from_utf8_lossy(value.as_ref()).to_string(),
group,
};


@ -0,0 +1,162 @@
use std::fmt;
use serde::{Deserializer, Serializer};
use super::Murmur32;
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when wanting to write back the read bundle, as the hashes need to
// stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringified hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString32 {
Hash(Murmur32),
String(String),
}
impl IdString32 {
pub fn to_murmur32(&self) -> Murmur32 {
match self {
Self::Hash(hash) => *hash,
Self::String(s) => Murmur32::hash(s.as_bytes()),
}
}
pub fn display(&self) -> IdString32Display {
let s = match self {
IdString32::Hash(hash) => hash.to_string(),
IdString32::String(s) => s.clone(),
};
IdString32Display(s)
}
pub fn is_string(&self) -> bool {
match self {
IdString32::Hash(_) => false,
IdString32::String(_) => true,
}
}
pub fn is_hash(&self) -> bool {
match self {
IdString32::Hash(_) => true,
IdString32::String(_) => false,
}
}
}
impl From<String> for IdString32 {
fn from(value: String) -> Self {
Self::String(value)
}
}
impl From<u32> for IdString32 {
fn from(value: u32) -> Self {
Self::Hash(value.into())
}
}
impl From<IdString32> for u32 {
fn from(value: IdString32) -> Self {
value.to_murmur32().into()
}
}
impl From<Murmur32> for IdString32 {
fn from(value: Murmur32) -> Self {
Self::Hash(value)
}
}
impl From<IdString32> for Murmur32 {
fn from(value: IdString32) -> Self {
value.to_murmur32()
}
}
impl PartialEq for IdString32 {
fn eq(&self, other: &Self) -> bool {
self.to_murmur32() == other.to_murmur32()
}
}
impl std::hash::Hash for IdString32 {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u32(self.to_murmur32().into());
}
}
impl serde::Serialize for IdString32 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u32(self.to_murmur32().into())
}
}
struct IdString32Visitor;
impl<'de> serde::de::Visitor<'de> for IdString32Visitor {
type Value = IdString32;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an u32 or a string")
}
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString32::Hash(value.into()))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString32::String(v.to_string()))
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString32::String(v))
}
}
impl<'de> serde::Deserialize<'de> for IdString32 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u32(IdString32Visitor)
}
}
pub struct IdString32Display(String);
impl std::fmt::Display for IdString32Display {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::fmt::UpperHex for IdString32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::UpperHex::fmt(&self.to_murmur32(), f)
}
}
impl std::fmt::LowerHex for IdString32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::LowerHex::fmt(&self.to_murmur32(), f)
}
}
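A minimal sketch of the behaviour the file comment describes, assuming the crate is consumed as `sdk::murmur` (the module path is an assumption, not part of the diff):

use sdk::murmur::{IdString32, Murmur32};

fn demo() {
    // A name found in the dictionary keeps its string form; an unknown entry
    // only keeps the raw hash.
    let named = IdString32::String(String::from("lua"));
    let raw = IdString32::Hash(Murmur32::hash(b"lua"));
    // Equality and hashing both go through `to_murmur32()`, so the two forms are
    // interchangeable as keys, while `display()` still prefers the readable string.
    assert_eq!(named, raw);
    assert!(named.is_string() && raw.is_hash());
    println!("{}", named.display());
}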


@ -0,0 +1,175 @@
use std::{fmt, path::Path};
use path_slash::PathExt as _;
use serde::{Deserializer, Serializer};
use super::Murmur64;
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when wanting to write back the read bundle, as the hashes need to
// stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringified hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString64 {
Hash(Murmur64),
String(String),
}
impl IdString64 {
pub fn to_murmur64(&self) -> Murmur64 {
match self {
Self::Hash(hash) => *hash,
Self::String(s) => Murmur64::hash(s.as_bytes()),
}
}
pub fn display(&self) -> IdString64Display {
let s = match self {
IdString64::Hash(hash) => hash.to_string(),
IdString64::String(s) => s.clone(),
};
IdString64Display(s)
}
pub fn is_string(&self) -> bool {
match self {
IdString64::Hash(_) => false,
IdString64::String(_) => true,
}
}
pub fn is_hash(&self) -> bool {
match self {
IdString64::Hash(_) => true,
IdString64::String(_) => false,
}
}
// Would love to have this as a proper `impl From`, but
// rustc will complain that it overlaps with the `impl From<Into<String>>`.
pub fn from_path(p: impl AsRef<Path>) -> Self {
Self::String(p.as_ref().to_slash_lossy().to_string())
}
}
impl From<String> for IdString64 {
fn from(value: String) -> Self {
Self::String(value)
}
}
impl From<u64> for IdString64 {
fn from(value: u64) -> Self {
Self::Hash(value.into())
}
}
impl From<Murmur64> for IdString64 {
fn from(value: Murmur64) -> Self {
Self::Hash(value)
}
}
impl From<IdString64> for Murmur64 {
fn from(value: IdString64) -> Self {
value.to_murmur64()
}
}
impl From<IdString64> for u64 {
fn from(value: IdString64) -> Self {
value.to_murmur64().into()
}
}
impl Default for IdString64 {
fn default() -> Self {
Self::Hash(0.into())
}
}
impl PartialEq for IdString64 {
fn eq(&self, other: &Self) -> bool {
self.to_murmur64() == other.to_murmur64()
}
}
impl std::hash::Hash for IdString64 {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u64(self.to_murmur64().into());
}
}
impl serde::Serialize for IdString64 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u64(self.to_murmur64().into())
}
}
struct IdString64Visitor;
impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
type Value = IdString64;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an u64 or a string")
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::Hash(value.into()))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::String(v.to_string()))
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::String(v))
}
}
impl<'de> serde::Deserialize<'de> for IdString64 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u64(IdString64Visitor)
}
}
pub struct IdString64Display(String);
impl std::fmt::Display for IdString64Display {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::fmt::UpperHex for IdString64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
}
}
impl std::fmt::LowerHex for IdString64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
}
}
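`IdString64` adds `from_path` on top of the same pattern: bundle names are hashed from slash-separated strings, so OS-specific separators are normalized before hashing. A hedged sketch, with the same assumed module path as above:

use std::path::Path;
use sdk::murmur::IdString64;

fn demo() {
    // `from_path` goes through `to_slash_lossy`, so on Windows a path written
    // with `\` separators becomes a `/`-separated string before it is hashed.
    let from_path = IdString64::from_path(Path::new("content/fonts/arial"));
    let from_string = IdString64::from(String::from("content/fonts/arial"));
    // Both forms hash to the same Murmur64 value and therefore compare equal.
    assert_eq!(from_path, from_string);
}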


@ -1,389 +1,26 @@
use std::fmt;
use color_eyre::eyre::Context;
use color_eyre::Report;
use color_eyre::{Report, Result};
use serde::de::Visitor;
use serde::{Deserialize, Serialize};
use serde::{Deserializer, Serializer};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
mod dictionary;
// Currently unused
// mod murmurhash32;
mod idstring32;
mod idstring64;
mod murmurhash64;
mod types;
mod util;
pub const SEED: u32 = 0;
pub use dictionary::{Dictionary, Entry, HashGroup};
pub use idstring32::*;
pub use idstring64::*;
pub use murmurhash64::hash;
pub use murmurhash64::hash32;
pub use murmurhash64::hash_inverse as inverse;
fn _swap_bytes_u32(value: u32) -> u32 {
u32::from_le_bytes(value.to_be_bytes())
}
fn _swap_bytes_u64(value: u64) -> u64 {
u64::from_le_bytes(value.to_be_bytes())
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct Murmur64(u64);
impl Murmur64 {
pub fn hash<B>(s: B) -> Self
where
B: AsRef<[u8]>,
{
hash(s.as_ref(), SEED as u64).into()
}
}
impl From<u64> for Murmur64 {
fn from(value: u64) -> Self {
Self(value)
}
}
impl From<Murmur64> for u64 {
fn from(value: Murmur64) -> Self {
value.0
}
}
impl TryFrom<&str> for Murmur64 {
type Error = Report;
fn try_from(value: &str) -> Result<Self, Self::Error> {
u64::from_str_radix(value, 16)
.map(Self)
.wrap_err_with(|| format!("Failed to convert value to Murmur64: {value}"))
}
}
impl fmt::UpperHex for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
}
}
impl fmt::LowerHex for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(&self.0, f)
}
}
impl fmt::Display for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
}
}
impl<'de> Visitor<'de> for Murmur64 {
type Value = Self;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(
"an usigned 64 bit integer \
or a string in hexadecimal format encoding such an integer",
)
}
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let bytes = value.to_le_bytes();
Ok(Self::from(u64::from_le_bytes(bytes)))
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Self::from(value))
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
match Murmur64::try_from(value) {
Ok(hash) => Ok(hash),
Err(err) => Err(E::custom(format!(
"failed to convert '{value}' to Murmur64: {err}"
))),
}
}
}
impl<'de> Deserialize<'de> for Murmur64 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_any(Self(0))
}
}
impl Serialize for Murmur64 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&format!("{self:016X}"))
}
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct Murmur32(u32);
impl Murmur32 {
pub fn hash<B>(s: B) -> Self
where
B: AsRef<[u8]>,
{
hash32(s.as_ref(), SEED).into()
}
}
impl From<u32> for Murmur32 {
fn from(value: u32) -> Self {
Self(value)
}
}
impl From<Murmur32> for u32 {
fn from(value: Murmur32) -> Self {
value.0
}
}
impl TryFrom<&str> for Murmur32 {
type Error = Report;
fn try_from(value: &str) -> Result<Self, Self::Error> {
u32::from_str_radix(value, 16)
.map(Self)
.wrap_err_with(|| format!("Failed to convert value to Murmur32: {value}"))
}
}
impl fmt::UpperHex for Murmur32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
}
}
impl fmt::Display for Murmur32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
}
}
impl Serialize for Murmur32 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&format!("{self:08X}"))
}
}
impl<'de> Visitor<'de> for Murmur32 {
type Value = Self;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(
"an usigned 32 bit integer \
or a string in hexadecimal format encoding such an integer",
)
}
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let bytes = value.to_le_bytes();
self.visit_u32(u64::from_le_bytes(bytes) as u32)
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
self.visit_u32(value as u32)
}
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Self::from(value))
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
match Murmur32::try_from(value) {
Ok(hash) => Ok(hash),
Err(err) => Err(E::custom(format!(
"failed to convert '{value}' to Murmur32: {err}"
))),
}
}
}
impl<'de> Deserialize<'de> for Murmur32 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_any(Self(0))
}
}
// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when wanting to write back the read bundle, as the hashes need to
// stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringified hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString64 {
Hash(Murmur64),
String(String),
}
impl IdString64 {
pub fn to_murmur64(&self) -> Murmur64 {
match self {
Self::Hash(hash) => *hash,
Self::String(s) => Murmur64::hash(s.as_bytes()),
}
}
pub fn display(&self) -> IdString64Display {
let s = match self {
IdString64::Hash(hash) => hash.to_string(),
IdString64::String(s) => s.clone(),
};
IdString64Display(s)
}
pub fn is_string(&self) -> bool {
match self {
IdString64::Hash(_) => false,
IdString64::String(_) => true,
}
}
pub fn is_hash(&self) -> bool {
match self {
IdString64::Hash(_) => true,
IdString64::String(_) => false,
}
}
}
impl<S: Into<String>> From<S> for IdString64 {
fn from(value: S) -> Self {
Self::String(value.into())
}
}
impl From<Murmur64> for IdString64 {
fn from(value: Murmur64) -> Self {
Self::Hash(value)
}
}
impl From<IdString64> for Murmur64 {
fn from(value: IdString64) -> Self {
value.to_murmur64()
}
}
impl PartialEq for IdString64 {
fn eq(&self, other: &Self) -> bool {
self.to_murmur64() == other.to_murmur64()
}
}
impl std::hash::Hash for IdString64 {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u64(self.to_murmur64().into());
}
}
impl serde::Serialize for IdString64 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u64(self.to_murmur64().into())
}
}
struct IdString64Visitor;
impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
type Value = IdString64;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an u64 or a string")
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::Hash(value.into()))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::String(v.to_string()))
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::String(v))
}
}
impl<'de> serde::Deserialize<'de> for IdString64 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u64(IdString64Visitor)
}
}
pub struct IdString64Display(String);
impl std::fmt::Display for IdString64Display {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::fmt::UpperHex for IdString64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
}
}
impl std::fmt::LowerHex for IdString64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
}
}
pub use types::*;


@ -119,4 +119,9 @@ fn test_hash() {
}
#[test]
fn test_inverse() {}
fn test_inverse() {
let h = hash("lua".as_bytes(), crate::murmur::SEED as u64);
let inv = hash_inverse(h, crate::murmur::SEED as u64);
assert_eq!(h, hash(&inv.to_le_bytes(), crate::murmur::SEED as u64));
assert_ne!(h, hash(&inv.to_be_bytes(), crate::murmur::SEED as u64));
}

lib/sdk/src/murmur/types.rs (new file, 226 lines)

@ -0,0 +1,226 @@
use self::util::{parse_hex32, parse_hex64};
use super::*;
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct Murmur64(u64);
impl Murmur64 {
pub fn hash<B>(s: B) -> Self
where
B: AsRef<[u8]>,
{
hash(s.as_ref(), SEED as u64).into()
}
}
impl From<u64> for Murmur64 {
fn from(value: u64) -> Self {
Self(value)
}
}
impl From<Murmur64> for u64 {
fn from(value: Murmur64) -> Self {
value.0
}
}
impl TryFrom<&str> for Murmur64 {
type Error = Report;
fn try_from(value: &str) -> Result<Self, Self::Error> {
parse_hex64(value)
.map(Self)
.wrap_err_with(|| format!("Failed to convert value to Murmur64: {value}"))
}
}
impl fmt::UpperHex for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
}
}
impl fmt::LowerHex for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(&self.0, f)
}
}
impl fmt::Display for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:016X}", self)
}
}
impl<'de> Visitor<'de> for Murmur64 {
type Value = Self;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(
"an usigned 64 bit integer \
or a string in hexadecimal format encoding such an integer",
)
}
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let bytes = value.to_le_bytes();
Ok(Self::from(u64::from_le_bytes(bytes)))
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Self::from(value))
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
match Murmur64::try_from(value) {
Ok(hash) => Ok(hash),
Err(err) => Err(E::custom(format!(
"failed to convert '{value}' to Murmur64: {err}"
))),
}
}
}
impl<'de> Deserialize<'de> for Murmur64 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_any(Self(0))
}
}
impl Serialize for Murmur64 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&format!("{self:016X}"))
}
}
#[derive(Clone, Copy, Debug, Hash, Eq, PartialEq)]
pub struct Murmur32(u32);
impl Murmur32 {
pub fn hash<B>(s: B) -> Self
where
B: AsRef<[u8]>,
{
hash32(s.as_ref(), SEED).into()
}
}
impl From<u32> for Murmur32 {
fn from(value: u32) -> Self {
Self(value)
}
}
impl From<Murmur32> for u32 {
fn from(value: Murmur32) -> Self {
value.0
}
}
impl TryFrom<&str> for Murmur32 {
type Error = Report;
fn try_from(value: &str) -> Result<Self, Self::Error> {
parse_hex32(value)
.map(Self)
.wrap_err_with(|| format!("Failed to convert value to Murmur32: {value}"))
}
}
impl fmt::UpperHex for Murmur32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
}
}
impl fmt::LowerHex for Murmur32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(&self.0, f)
}
}
impl fmt::Display for Murmur32 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:08X}", self)
}
}
impl Serialize for Murmur32 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&format!("{self:08X}"))
}
}
impl<'de> Visitor<'de> for Murmur32 {
type Value = Self;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(
"an usigned 32 bit integer \
or a string in hexadecimal format encoding such an integer",
)
}
fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let bytes = value.to_le_bytes();
self.visit_u32(u64::from_le_bytes(bytes) as u32)
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
self.visit_u32(value as u32)
}
fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Self::from(value))
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
match Murmur32::try_from(value) {
Ok(hash) => Ok(hash),
Err(err) => Err(E::custom(format!(
"failed to convert '{value}' to Murmur32: {err}"
))),
}
}
}
impl<'de> Deserialize<'de> for Murmur32 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_any(Self(0))
}
}

lib/sdk/src/murmur/util.rs (new file, 132 lines)

@ -0,0 +1,132 @@
use color_eyre::eyre::bail;
use color_eyre::Result;
// Generates tables similar to these:
// https://github.com/zbjornson/fast-hex/blob/a3487bca95127634a61bfeae8f8bfc8f0e5baa3f/src/hex.cc#L20-L89
// `upper` determines whether the table encodes the upper or the lower nibble
// (the first character of each hex pair is the upper one).
const fn generate_byte_map(upper: bool) -> [u8; 256] {
let mut out = [0u8; 256];
let factor = if upper { 16 } else { 1 };
let mut i = 0;
while i < 256 {
match i {
0x30..=0x39 => out[i] = factor * (i as u8 - 0x30),
0x41..=0x46 => out[i] = factor * (9 + i as u8 - 0x40),
0x61..=0x66 => out[i] = factor * (9 + i as u8 - 0x60),
_ => out[i] = u8::MAX,
}
i += 1;
}
out
}
const BYTE_MAP_UPPER: [u8; 256] = generate_byte_map(true);
const BYTE_MAP_LOWER: [u8; 256] = generate_byte_map(false);
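To make the table construction concrete: each entry holds the hex digit's value pre-multiplied for its position within the byte, so a single addition per character pair rebuilds that byte. The snippet below is an editor's illustration, not part of the file:

fn byte_map_example() {
    // 'A' (0x41) maps to 10 * 16 = 160 in the upper table and '1' (0x31) maps
    // to 1 in the lower table; their sum is the byte 0xA1.
    assert_eq!(BYTE_MAP_UPPER[b'A' as usize] + BYTE_MAP_LOWER[b'1' as usize], 0xA1);
    // Anything that is not an ASCII hex digit maps to the `u8::MAX` sentinel.
    assert_eq!(BYTE_MAP_UPPER[b'g' as usize], u8::MAX);
}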
macro_rules! make_parse_hex {
($name:ident, $ty:ty, $len:expr) => {
#[inline]
pub fn $name(s: impl AsRef<str>) -> Result<$ty> {
// For the string to consist of valid hex characters it must be ASCII,
// so we can simply treat it as a byte stream.
let s = s.as_ref().as_bytes();
if s.len() != $len {
bail!(
"String length doesn't match. Expected {}, got {}",
$len,
s.len()
);
}
let n = $len / 2;
let mut out: $ty = 0;
let mut i = 0;
while i < n {
let j = i * 2;
let c1 = BYTE_MAP_UPPER[s[j] as usize];
if c1 == u8::MAX {
// Report the offending input character, not the `u8::MAX` sentinel.
bail!("Invalid character '{:?}' ({})", char::from(s[j]), s[j]);
}
let c2 = BYTE_MAP_LOWER[s[j + 1] as usize];
if c2 == u8::MAX {
bail!("Invalid character '{:?}' ({})", char::from(s[j + 1]), s[j + 1]);
}
out |= ((c1 + c2) as $ty) << ((n - i - 1) * 8);
i += 1;
}
Ok(out)
}
};
}
make_parse_hex!(parse_hex64, u64, 16);
make_parse_hex!(parse_hex32, u32, 8);
#[cfg(test)]
mod test {
use super::*;
#[test]
fn parse_32() {
let hash = "A14E8DFA";
assert_eq!(parse_hex32(hash).unwrap(), 0xA14E8DFA);
}
#[test]
fn parse_64() {
let hash = "A14E8DFA2CD117E2";
assert_eq!(parse_hex64(hash).unwrap(), 0xA14E8DFA2CD117E2);
}
#[test]
fn std_from_radix_32() {
let hash = "A14E8DFA";
assert_eq!(u32::from_str_radix(hash, 16).unwrap(), 0xA14E8DFA);
}
#[test]
fn std_from_radix_64() {
let hash = "A14E8DFA2CD117E2";
assert_eq!(u64::from_str_radix(hash, 16).unwrap(), 0xA14E8DFA2CD117E2);
}
}
#[cfg(test)]
mod bench {
use super::{parse_hex32, parse_hex64};
extern crate test;
const HASH32: &str = "A14E8DFA";
const HASH64: &str = "A14E8DFA2CD117E2";
#[bench]
fn custom_32(b: &mut test::Bencher) {
b.iter(|| test::black_box(parse_hex32(test::black_box(HASH32))))
}
#[bench]
fn std_32(b: &mut test::Bencher) {
b.iter(|| test::black_box(u32::from_str_radix(test::black_box(HASH32), 16)))
}
#[bench]
fn custom_64(b: &mut test::Bencher) {
b.iter(|| test::black_box(parse_hex64(test::black_box(HASH64))))
}
#[bench]
fn std_64(b: &mut test::Bencher) {
b.iter(|| test::black_box(u64::from_str_radix(test::black_box(HASH64), 16)))
}
}