Compare commits: experiment...master (49 commits)

Commits (SHA1):

8cb2c6b2cd
6ba13ac1ec
4d0762c0ba
752291fe2d
71f945a96c
d15f533e19
1a3c564ecf
beba47f340
5612e271fb
69300e87e6
a3583b4485
5982a66033
adf9610ecc
91cd54fff7
b219e20f3a
f9ccdf746e
72ce06b0e5
fc151f1449
659b63bfe9
9f90b45275
67c64bb357
6017ec058b
ffd4927d27
49a9eb4312
4d665200fa
b3463ffb46
7cb44532b2
15aa9bcf5e
a2bbab1398
88becb72a9
df2992a476
e336240094
d7fa80f471
831592edf6
2a1d8d815f
d931e6b9ca
7fa08c2efd
dbf060032b
4b39d290b6
3a6e954f9a
5a880b2953
f1f9a818cc
c997489e18
08219f05ba
edad0d4493
74a7aaa6e5
437e724d07
95fc6c160b
b7e26eee57
45 changed files with 1956 additions and 2319 deletions

@@ -125,8 +125,6 @@ jobs:
        vars:
          pr: ""
          target: msvc
          gitea_url: http://forgejo:3000
          gitea_api_key: ((gitea_api_key))

      - load_var: version_number
        reveal: true
@@ -142,10 +140,21 @@ jobs:
            fail_fast: true
            override: true
            globs:
              - artifact/dtmt
              - artifact/dtmm
              - artifact/*.exe
              - artifact/*.sha256
              - artifact/*.exe.sha256

      - put: package
        resource: gitea-package
        no_get: true
        inputs:
          - artifact
        params:
          version: master
          fail_fast: true
          override: true
          globs:
            - artifact/*.exe
            - artifact/*.exe.sha256

  - name: build-linux
    on_success:
@@ -202,5 +211,20 @@ jobs:
            globs:
              - artifact/dtmt
              - artifact/dtmm
              - artifact/*.exe
              - artifact/*.sha256
              - artifact/dtmm.sha256
              - artifact/dtmt.sha256

      - put: package
        resource: gitea-package
        no_get: true
        inputs:
          - artifact
        params:
          version: master
          fail_fast: true
          override: true
          globs:
            - artifact/dtmt
            - artifact/dtmm
            - artifact/dtmm.sha256
            - artifact/dtmt.sha256

@@ -24,8 +24,10 @@ PR=${PR:-}
if [ -n "$PR" ]; then
    title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
    ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$(cat .git/ref || echo "HEAD")" 2>/dev/null || echo 'manual')"
elif [ -f ".git/branch" ]; then
    ref=$(cat .git/branch)-$(git rev-parse --short $ref)
else
    ref=$(git describe --tags)
    ref=$(git rev-parse --short "$(cat .git/ref || echo "HEAD")")
fi

title "Version: '$ref'"

@@ -22,7 +22,6 @@ caches:
params:
  CI: "true"
  TARGET: ((target))
  GITEA_API_KEY: ((gitea_api_key))
  PR: ((pr))
  OUTPUT: artifact

.renovaterc (new file, 15 lines)

@@ -0,0 +1,15 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
    "config:recommended",
    ":combinePatchMinorReleases",
    ":enableVulnerabilityAlerts",
    ":rebaseStalePrs"
  ],
  "prConcurrentLimit": 10,
  "branchPrefix": "renovate/",
  "baseBranches": [
    "$default",
    "/^release\\/.*/"
  ]
}

@@ -20,6 +20,8 @@
- dtmm: fetch file version for Nexus mods
- dtmm: handle `nxm://` URIs via IPC and import the corresponding mod
- dtmm: Add button to open mod on nexusmods.com
- dtmt: Implement commands to list bundles and contents
- dtmt: Implement command to search for files

=== Fixed

Cargo.lock (generated, 1063 lines changed): file diff suppressed because it is too large.
Cargo.toml (48 lines changed)
@@ -13,11 +13,51 @@ members = [
exclude = ["lib/color-eyre"]

[workspace.dependencies]
zip = { version = "2.1.3", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }

[patch.crates-io]
ansi-parser = "0.9.1"
ansi_term = "0.12.1"
async-recursion = "1.0.5"
bincode = "1.3.3"
bitflags = "2.5.0"
byteorder = "1.4.3"
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
color-eyre = { path = "lib/color-eyre" }
ansi-parser = { git = "https://gitlab.com/lschwiderski/ansi-parser.git", branch = "issue/outdated-heapless", version = "0.9.1" }
colors-transform = "0.2.11"
confy = "0.6.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
druid-widget-nursery = "0.1"
dtmt-shared = { path = "lib/dtmt-shared" }
fastrand = "2.1.0"
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
interprocess = "2.1.0"
lazy_static = "1.4.0"
luajit2-sys = { path = "lib/luajit2-sys" }
minijinja = { version = "2.0.1", default-features = false }
nanorand = "0.7.0"
nexusmods = { path = "lib/nexusmods" }
notify = "8.0.0"
oodle = { path = "lib/oodle" }
open = "5.0.1"
path-clean = "1.0.1"
path-slash = "0.2.1"
pin-project-lite = "0.2.9"
promptly = "0.3.1"
sdk = { path = "lib/sdk" }
serde = { version = "1.0.152", features = ["derive", "rc"] }
serde_sjson = { path = "lib/serde_sjson" }
steamlocate = "2.0.0-beta.2"
strip-ansi-escapes = "0.2.0"
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
usvg = "0.25.0"
zip = { version = "2.1.3", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }

[profile.dev.package.backtrace]
opt-level = 3

@ -12,37 +12,37 @@ license-file = "LICENSE"
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ansi-parser = "0.9.0"
|
||||
async-recursion = "1.0.5"
|
||||
bincode = "1.3.3"
|
||||
bitflags = "2.5.0"
|
||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
|
||||
color-eyre = "0.6.2"
|
||||
colors-transform = "0.2.11"
|
||||
confy = "0.6.1"
|
||||
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
|
||||
druid-widget-nursery = "0.1"
|
||||
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
|
||||
futures = "0.3.25"
|
||||
interprocess = "2.1.0"
|
||||
lazy_static = "1.4.0"
|
||||
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
|
||||
minijinja = { version = "2.0.1", default-features = false }
|
||||
nexusmods = { path = "../../lib/nexusmods", version = "*" }
|
||||
oodle = { path = "../../lib/oodle", version = "*" }
|
||||
open = "5.0.1"
|
||||
path-slash = "0.2.1"
|
||||
sdk = { path = "../../lib/sdk", version = "*" }
|
||||
serde = { version = "1.0.152", features = ["derive", "rc"] }
|
||||
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||
strip-ansi-escapes = "0.2.0"
|
||||
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset"] }
|
||||
tokio = { version = "1.23.0", features = ["rt", "fs", "tracing", "sync"] }
|
||||
tokio-stream = { version = "0.1.12", features = ["fs"] }
|
||||
tracing = "0.1.37"
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||
usvg = "0.25.0"
|
||||
ansi-parser = { workspace = true }
|
||||
async-recursion = { workspace = true }
|
||||
bincode = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
colors-transform = { workspace = true }
|
||||
confy = { workspace = true }
|
||||
druid = { workspace = true }
|
||||
druid-widget-nursery = { workspace = true }
|
||||
dtmt-shared = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
interprocess = { workspace = true }
|
||||
lazy_static = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
minijinja = { workspace = true }
|
||||
nexusmods = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
open = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
sdk = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
strip-ansi-escapes = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
usvg = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
|
|

@@ -324,11 +324,11 @@ async fn build_bundles(state: Arc<ActionState>) -> Result<Vec<Bundle>> {

    let mut bundles = Vec::new();

    let mut add_lua_asset = |name, data: &str| {
    let mut add_lua_asset = |name: &str, data: &str| {
        let span = tracing::info_span!("Compiling Lua", name, data_len = data.len());
        let _enter = span.enter();

        let file = lua::compile(name, data).wrap_err("Failed to compile Lua")?;
        let file = lua::compile(name.to_string(), data).wrap_err("Failed to compile Lua")?;

        mod_bundle.add_file(file);

@@ -517,8 +517,8 @@ async fn patch_boot_bundle(
        .wrap_err("Failed to render template `mod_main.lua`")?;

        tracing::trace!("Main script rendered:\n===========\n{}\n=============", lua);
        let file =
            lua::compile(MOD_BOOT_SCRIPT, lua).wrap_err("Failed to compile mod main Lua file")?;
        let file = lua::compile(MOD_BOOT_SCRIPT.to_string(), lua)
            .wrap_err("Failed to compile mod main Lua file")?;

        boot_bundle.add_file(file);
    }
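
The two hunks above both adjust call sites of `lua::compile`, which (as the diff implies) now takes an owned name rather than a borrowed one. Below is a minimal, self-contained sketch of that pattern; the `compile` stub is an assumption for illustration, not the repository's real function:

```rust
// Hypothetical stand-in for the project's `lua::compile`; only the ownership of
// `name` matters for this illustration.
fn compile(name: String, data: &str) -> Result<String, String> {
    Ok(format!("compiled {} ({} bytes of source)", name, data.len()))
}

fn main() {
    // Mirroring the `add_lua_asset` closure above: the parameter is annotated as `&str`,
    // so an owned `String` must be produced explicitly at the call site.
    let add_lua_asset = |name: &str, data: &str| compile(name.to_string(), data);

    let compiled = add_lua_asset("scripts/mods/example/init", "return {}").unwrap();
    println!("{compiled}");
}
```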

@@ -297,6 +297,7 @@ fn extract_mod_config<R: Read + Seek>(archive: &mut ZipArchive<R>) -> Result<(Mo
        packages: Vec::new(),
        resources,
        depends: Vec::new(),
        name_overrides: Default::default(),
    };

    Ok((cfg, root))
|
@ -4,38 +4,37 @@ version = "0.3.0"
|
|||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] }
|
||||
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
|
||||
color-eyre = "0.6.2"
|
||||
confy = "0.6.1"
|
||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
|
||||
futures = "0.3.25"
|
||||
futures-util = "0.3.24"
|
||||
glob = "0.3.0"
|
||||
nanorand = "0.7.0"
|
||||
oodle = { path = "../../lib/oodle", version = "*" }
|
||||
pin-project-lite = "0.2.9"
|
||||
promptly = "0.3.1"
|
||||
sdk = { path = "../../lib/sdk", version = "*" }
|
||||
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
string_template = "0.2.1"
|
||||
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
|
||||
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||
tracing = { version = "0.1.37", features = ["async-await"] }
|
||||
async-recursion = { workspace = true }
|
||||
clap = { workspace = true }
|
||||
cli-table = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
confy = { workspace = true }
|
||||
csv-async = { workspace = true }
|
||||
dtmt-shared = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
futures-util = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
minijinja = { workspace = true }
|
||||
nanorand = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
path-clean = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
pin-project-lite = { workspace = true }
|
||||
promptly = { workspace = true }
|
||||
sdk = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
zip = { workspace = true }
|
||||
path-clean = "1.0.1"
|
||||
path-slash = "0.2.1"
|
||||
async-recursion = "1.0.2"
|
||||
notify = "6.1.1"
|
||||
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
|
||||
|
||||
# Cannot be a workspace dependencies when it's optional
|
||||
shlex = { version = "1.2.0", optional = true }
|
||||
atty = "0.2.14"
|
||||
itertools = "0.11.0"
|
||||
crossbeam = { version = "0.8.2", features = ["crossbeam-deque"] }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3.0"
|
||||
|
|
|
@ -55,6 +55,7 @@ pub(crate) fn command_definition() -> Command {
|
|||
)
|
||||
}
|
||||
|
||||
/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
|
||||
#[tracing::instrument]
|
||||
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
|
||||
let (path, mut file) = if let Some(path) = dir {
|
||||
|
@ -102,39 +103,44 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
|
|||
Ok(cfg)
|
||||
}
|
||||
|
||||
/// Iterate over the paths in the given `Package` and
|
||||
/// compile each file by its file type.
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn compile_package_files<P>(pkg: &Package, root: P) -> Result<Vec<BundleFile>>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
{
|
||||
let root = Arc::new(root.as_ref());
|
||||
async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
|
||||
let root = Arc::new(&cfg.dir);
|
||||
let name_overrides = &cfg.name_overrides;
|
||||
|
||||
let tasks = pkg
|
||||
.iter()
|
||||
.flat_map(|(file_type, paths)| {
|
||||
paths.iter().map(|path| {
|
||||
.flat_map(|(file_type, names)| {
|
||||
names.iter().map(|name| {
|
||||
(
|
||||
*file_type,
|
||||
path,
|
||||
name,
|
||||
// Cloning the `Arc` here solves the issue that in the next `.map`, I need to
|
||||
// `move` the closure parameters, but can't `move` `root` before it was cloned.
|
||||
root.clone(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.map(|(file_type, path, root)| async move {
|
||||
let sjson = fs::read_to_string(&path).await?;
|
||||
.map(|(file_type, name, root)| async move {
|
||||
let path = PathBuf::from(name);
|
||||
let sjson = fs::read_to_string(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
|
||||
let mut path = path.clone();
|
||||
path.set_extension("");
|
||||
|
||||
BundleFile::from_sjson(
|
||||
path.to_slash_lossy().to_string(),
|
||||
file_type,
|
||||
sjson,
|
||||
root.as_ref(),
|
||||
)
|
||||
.await
|
||||
let name = path.with_extension("").to_slash_lossy().to_string();
|
||||
let name = if let Some(new_name) = name_overrides.get(&name) {
|
||||
let new_name = match u64::from_str_radix(new_name, 16) {
|
||||
Ok(hash) => IdString64::from(hash),
|
||||
Err(_) => IdString64::from(new_name.clone()),
|
||||
};
|
||||
tracing::info!("Overriding '{}' -> '{}'", name, new_name.display());
|
||||
new_name
|
||||
} else {
|
||||
IdString64::from(name.clone())
|
||||
};
|
||||
BundleFile::from_sjson(name, file_type, sjson, root.as_ref()).await
|
||||
});
|
||||
|
||||
let results = futures::stream::iter(tasks)
|
||||
|
@ -145,13 +151,14 @@ where
|
|||
results.into_iter().collect()
|
||||
}
|
||||
|
||||
/// Read a `.package` file, collect the referenced files
|
||||
/// and compile all of them into a bundle.
|
||||
#[tracing::instrument]
|
||||
async fn build_package<P1, P2>(package: P1, root: P2) -> Result<Bundle>
|
||||
where
|
||||
P1: AsRef<Path> + std::fmt::Debug,
|
||||
P2: AsRef<Path> + std::fmt::Debug,
|
||||
{
|
||||
let root = root.as_ref();
|
||||
async fn build_package(
|
||||
cfg: &ModConfig,
|
||||
package: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<Bundle> {
|
||||
let root = &cfg.dir;
|
||||
let package = package.as_ref();
|
||||
|
||||
let mut path = root.join(package);
|
||||
|
@ -165,7 +172,7 @@ where
|
|||
.await
|
||||
.wrap_err_with(|| format!("Invalid package file {}", &pkg_name))?;
|
||||
|
||||
let files = compile_package_files(&pkg, root).await?;
|
||||
let files = compile_package_files(&pkg, cfg).await?;
|
||||
let mut bundle = Bundle::new(pkg_name);
|
||||
for file in files {
|
||||
bundle.add_file(file);
|
||||
|
@ -174,6 +181,8 @@ where
|
|||
Ok(bundle)
|
||||
}
|
||||
|
||||
/// Cleans the path of internal parent (`../`) or self (`./`) components,
|
||||
/// and ensures that it is relative.
|
||||
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
|
||||
let path = path.as_ref();
|
||||
|
||||
|
@ -254,14 +263,14 @@ pub(crate) async fn read_project_config(dir: Option<PathBuf>) -> Result<ModConfi
|
|||
Ok(cfg)
|
||||
}
|
||||
|
||||
pub(crate) async fn build<P1, P2>(
|
||||
#[tracing::instrument]
|
||||
pub(crate) async fn build<P>(
|
||||
cfg: &ModConfig,
|
||||
out_path: P1,
|
||||
game_dir: Arc<Option<P2>>,
|
||||
out_path: impl AsRef<Path> + std::fmt::Debug,
|
||||
game_dir: Arc<Option<P>>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P1: AsRef<Path>,
|
||||
P2: AsRef<Path>,
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
{
|
||||
let out_path = out_path.as_ref();
|
||||
|
||||
|
@ -286,7 +295,7 @@ where
|
|||
);
|
||||
}
|
||||
|
||||
let bundle = build_package(path, &cfg.dir).await.wrap_err_with(|| {
|
||||
let bundle = build_package(&cfg, path).await.wrap_err_with(|| {
|
||||
format!(
|
||||
"Failed to build package '{}' at '{}'",
|
||||
path.display(),
|
||||
|
|
crates/dtmt/src/cmd/bundle/db.rs (new file, 174 lines)

@@ -0,0 +1,174 @@
|
|||
use std::{io::Cursor, path::PathBuf};
|
||||
|
||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||
use color_eyre::{eyre::Context as _, Result};
|
||||
use sdk::murmur::{HashGroup, IdString64, Murmur64};
|
||||
use sdk::{BundleDatabase, FromBinary as _};
|
||||
use tokio::fs;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("db")
|
||||
.about("Various operations regarding `bundle_database.data`.")
|
||||
.subcommand_required(true)
|
||||
.subcommand(
|
||||
Command::new("list-files")
|
||||
.about("List bundle contents")
|
||||
.arg(
|
||||
Arg::new("database")
|
||||
.required(true)
|
||||
.help("Path to the bundle database")
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bundle")
|
||||
.help("The bundle name. If omitted, all bundles will be listed.")
|
||||
.required(false),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("list-bundles").about("List bundles").arg(
|
||||
Arg::new("database")
|
||||
.required(true)
|
||||
.help("Path to the bundle database")
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("find-file")
|
||||
.about("Find the bundle a file belongs to")
|
||||
.arg(
|
||||
Arg::new("database")
|
||||
.required(true)
|
||||
.help("Path to the bundle database")
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("file-name")
|
||||
.required(true)
|
||||
.help("Name of the file. May be a hash in hex representation or a string"),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
let Some((op, sub_matches)) = matches.subcommand() else {
|
||||
unreachable!("clap is configured to require a subcommand");
|
||||
};
|
||||
|
||||
let database = {
|
||||
let path = sub_matches
|
||||
.get_one::<PathBuf>("database")
|
||||
.expect("argument is required");
|
||||
|
||||
let binary = fs::read(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
|
||||
let mut r = Cursor::new(binary);
|
||||
|
||||
BundleDatabase::from_binary(&mut r).wrap_err("Failed to parse bundle database")?
|
||||
};
|
||||
|
||||
match op {
|
||||
"list-files" => {
|
||||
let index = database.files();
|
||||
|
||||
if let Some(bundle) = sub_matches.get_one::<String>("bundle") {
|
||||
let hash = u64::from_str_radix(bundle, 16)
|
||||
.map(Murmur64::from)
|
||||
.wrap_err("Invalid hex sequence")?;
|
||||
|
||||
if let Some(files) = index.get(&hash) {
|
||||
for file in files {
|
||||
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
|
||||
let extension = file.extension.ext_name();
|
||||
println!("{}.{}", name.display(), extension);
|
||||
}
|
||||
} else {
|
||||
tracing::info!("Bundle {} not found in the database", bundle);
|
||||
}
|
||||
} else {
|
||||
for (bundle_hash, files) in index.iter() {
|
||||
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
|
||||
|
||||
match bundle_name {
|
||||
IdString64::String(name) => {
|
||||
println!("{:016x} {}", bundle_hash, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("{:016x}", hash);
|
||||
}
|
||||
}
|
||||
|
||||
for file in files {
|
||||
let name = ctx.lookup_hash(file.name, HashGroup::Filename);
|
||||
let extension = file.extension.ext_name();
|
||||
|
||||
match name {
|
||||
IdString64::String(name) => {
|
||||
println!("\t{:016x}.{:<12} {}", file.name, extension, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("\t{:016x}.{}", hash, extension);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"list-bundles" => {
|
||||
for bundle_hash in database.bundles().keys() {
|
||||
let bundle_name = ctx.lookup_hash(*bundle_hash, HashGroup::Filename);
|
||||
|
||||
match bundle_name {
|
||||
IdString64::String(name) => {
|
||||
println!("{:016x} {}", bundle_hash, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("{:016x}", hash);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"find-file" => {
|
||||
let name = sub_matches
|
||||
.get_one::<String>("file-name")
|
||||
.expect("required argument");
|
||||
let name = match u64::from_str_radix(name, 16).map(Murmur64::from) {
|
||||
Ok(hash) => hash,
|
||||
Err(_) => Murmur64::hash(name),
|
||||
};
|
||||
|
||||
let bundles = database.files().iter().filter_map(|(bundle_hash, files)| {
|
||||
if files.iter().any(|file| file.name == name) {
|
||||
Some(bundle_hash)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
let mut found = false;
|
||||
|
||||
for bundle in bundles {
|
||||
found = true;
|
||||
println!("{:016x}", bundle);
|
||||
}
|
||||
|
||||
if !found {
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
_ => unreachable!(
|
||||
"clap is configured to require a subcommand, and they're all handled above"
|
||||
),
|
||||
}
|
||||
}
|
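
The `find-file` subcommand above accepts either a pre-computed 64-bit hash in hex or a plain file name. A standalone sketch of that parsing rule follows; `fnv1a` is a stand-in hash function, since the real code uses `Murmur64` from the repository's `sdk` crate:

```rust
// Stand-in hash (FNV-1a); the real implementation hashes with sdk::murmur::Murmur64.
fn fnv1a(s: &str) -> u64 {
    s.bytes()
        .fold(0xcbf29ce484222325, |h, b| (h ^ b as u64).wrapping_mul(0x100000001b3))
}

/// Interpret `input` as a hex hash if it parses as one, otherwise hash the string itself.
/// Note that short, purely hexadecimal names (e.g. "face") are treated as hashes.
fn resolve_name(input: &str) -> u64 {
    match u64::from_str_radix(input, 16) {
        Ok(hash) => hash,
        Err(_) => fnv1a(input),
    }
}

fn main() {
    assert_eq!(resolve_name("deadbeefdeadbeef"), 0xdead_beef_dead_beef);
    println!("{:016x}", resolve_name("packages/boot"));
}
```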

@@ -36,6 +36,18 @@ enum OutputFormat {
    Text,
}

fn format_byte_size(size: usize) -> String {
    if size < 1024 {
        format!("{} Bytes", size)
    } else if size < 1024 * 1024 {
        format!("{} kB", size / 1024)
    } else if size < 1024 * 1024 * 1024 {
        format!("{} MB", size / (1024 * 1024))
    } else {
        format!("{} GB", size / (1024 * 1024 * 1024))
    }
}

#[tracing::instrument(skip(ctx))]
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
where
@@ -50,7 +62,11 @@ where

    match fmt {
        OutputFormat::Text => {
            println!("Bundle: {}", bundle.name().display());
            println!(
                "Bundle: {} ({:016x})",
                bundle.name().display(),
                bundle.name()
            );

            for f in bundle.files().iter() {
                if f.variants().len() != 1 {

@@ -63,9 +79,10 @@ where

                let v = &f.variants()[0];
                println!(
                    "\t{}.{}: {} bytes",
                    "\t{}.{}: {} ({})",
                    f.base_name().display(),
                    f.file_type().ext_name(),
                    format_byte_size(v.size()),
                    v.size()
                );
            }
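
As a quick sanity check of the new `format_byte_size` helper used in the listing output above (a sketch, not part of the diff), note that the integer division truncates rather than rounds:

```rust
// Copied from the hunk above so the sketch is self-contained.
fn format_byte_size(size: usize) -> String {
    if size < 1024 {
        format!("{} Bytes", size)
    } else if size < 1024 * 1024 {
        format!("{} kB", size / 1024)
    } else if size < 1024 * 1024 * 1024 {
        format!("{} MB", size / (1024 * 1024))
    } else {
        format!("{} GB", size / (1024 * 1024 * 1024))
    }
}

fn main() {
    assert_eq!(format_byte_size(512), "512 Bytes");
    assert_eq!(format_byte_size(2048), "2 kB");
    // Truncating division: 3.5 MiB is reported as "3 MB".
    assert_eq!(format_byte_size(3 * 1024 * 1024 + 512 * 1024), "3 MB");
    println!("ok");
}
```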
|
@ -1,6 +1,7 @@
|
|||
use clap::{ArgMatches, Command};
|
||||
use color_eyre::eyre::Result;
|
||||
|
||||
mod db;
|
||||
mod decompress;
|
||||
mod extract;
|
||||
mod inject;
|
||||
|
@ -14,6 +15,7 @@ pub(crate) fn command_definition() -> Command {
|
|||
.subcommand(extract::command_definition())
|
||||
.subcommand(inject::command_definition())
|
||||
.subcommand(list::command_definition())
|
||||
.subcommand(db::command_definition())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
|
@ -23,6 +25,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,
|
||||
Some(("inject", sub_matches)) => inject::run(ctx, sub_matches).await,
|
||||
Some(("list", sub_matches)) => list::run(ctx, sub_matches).await,
|
||||
Some(("db", sub_matches)) => db::run(ctx, sub_matches).await,
|
||||
_ => unreachable!(
|
||||
"clap is configured to require a subcommand, and they're all handled above"
|
||||
),
|
||||
|
|
|
@ -1,520 +0,0 @@
|
|||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::io::{BufWriter, Write};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::thread::JoinHandle;
|
||||
|
||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
||||
use color_eyre::eyre::{self, Context};
|
||||
use color_eyre::Result;
|
||||
use crossbeam::channel::{bounded, unbounded, Receiver, Sender};
|
||||
use itertools::Itertools;
|
||||
use sdk::murmur::Murmur64;
|
||||
use tokio::time::Instant;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("brute-force-words")
|
||||
.about(
|
||||
"Given a list of words and a set of delimiters, iteratevily creates permutations \
|
||||
of growing length.\n\
|
||||
Delimiters are placed between every word in the result.\n\n\
|
||||
Example: \
|
||||
Given the words ['packages', 'boot'], the delimiters ['/', '_'] and a length of 2, the resulting \
|
||||
words will be\n\
|
||||
- packages\n\
|
||||
- boot\n\
|
||||
- packages/packages\n\
|
||||
- packages_packages\n\
|
||||
- packages/boot\n\
|
||||
- packages_boot\n\
|
||||
- boot/packages\n\
|
||||
- boot_packages\n\
|
||||
- boot/boot\n\
|
||||
- boot_boot",
|
||||
)
|
||||
.arg(
|
||||
Arg::new("delimiter")
|
||||
.help(
|
||||
"The delimiters to put between the words. \
|
||||
All permutations of this list will be tried for every string of words.\n\
|
||||
Specify multiple times to set multiple values.\n\
|
||||
Defaults to ['/', '_'].",
|
||||
)
|
||||
.short('d')
|
||||
.long("delimiter")
|
||||
.action(ArgAction::Append),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("max-length")
|
||||
.help("The maximum number of words up to which to build strings.")
|
||||
.long("max")
|
||||
.long("max-length")
|
||||
.short('m')
|
||||
.default_value("5")
|
||||
.value_parser(value_parser!(usize)),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("continue")
|
||||
.help("Can be used to continue a previous operation where it stopped. Word list and delimiters must match.")
|
||||
.short('c')
|
||||
.long("continue")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("threads")
|
||||
.help("The number of workers to run in parallel.")
|
||||
.long("threads")
|
||||
.short('n')
|
||||
.default_value("6")
|
||||
.value_parser(value_parser!(usize))
|
||||
)
|
||||
.arg(
|
||||
Arg::new("words")
|
||||
.help("Path to a file containing words line by line.")
|
||||
.required(true)
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("hashes")
|
||||
.help(
|
||||
"Path to a file containing the hashes to attempt to brute force. \
|
||||
Hashes are expected in hexadecimal notation. \
|
||||
Only 64-bit hashes are supported."
|
||||
)
|
||||
.required(true)
|
||||
.value_parser(value_parser!(PathBuf)),
|
||||
)
|
||||
}
|
||||
|
||||
const LINE_FEED: u8 = 0x0A;
|
||||
const UNDERSCORE: u8 = 0x5F;
|
||||
const ZERO: u8 = 0x30;
|
||||
|
||||
const PREFIXES: [&str; 36] = [
|
||||
"",
|
||||
"content/characters/",
|
||||
"content/debug/",
|
||||
"content/decals/",
|
||||
"content/environment/",
|
||||
"content/fx/",
|
||||
"content/fx/particles/",
|
||||
"content/gizmos/",
|
||||
"content/items/",
|
||||
"content/levels/",
|
||||
"content/liquid_area/",
|
||||
"content/localization/",
|
||||
"content/materials/",
|
||||
"content/minion_impact_assets/",
|
||||
"content/pickups/",
|
||||
"content/shading_environments/",
|
||||
"content/textures/",
|
||||
"content/ui/",
|
||||
"content/videos/",
|
||||
"content/vo/",
|
||||
"content/volume_types/",
|
||||
"content/weapons/",
|
||||
"content/",
|
||||
"core/",
|
||||
"core/units/",
|
||||
"packages/boot_assets/",
|
||||
"packages/content/",
|
||||
"packages/game_scripts/",
|
||||
"packages/strings/",
|
||||
"packages/ui/",
|
||||
"packages/",
|
||||
"wwise/events/",
|
||||
"wwise/packages/",
|
||||
"wwise/world_sound_fx/",
|
||||
"wwise/events/weapons/",
|
||||
"wwise/events/minions/",
|
||||
];
|
||||
|
||||
fn make_info_printer(rx: Receiver<(usize, usize, String)>, hash_count: usize) -> JoinHandle<()> {
|
||||
std::thread::spawn(move || {
|
||||
let mut writer = std::io::stderr();
|
||||
let mut total_count = 0;
|
||||
let mut total_found = 0;
|
||||
|
||||
let mut start = Instant::now();
|
||||
|
||||
while let Ok((count, found, last)) = rx.recv() {
|
||||
total_count += count;
|
||||
total_found += found;
|
||||
|
||||
let now = Instant::now();
|
||||
if (now - start).as_millis() > 250 {
|
||||
let s = &last[0..std::cmp::min(last.len(), 60)];
|
||||
let s = format!(
|
||||
"\r{:12} per second | {total_found:6}/{hash_count} found | {s:<60}",
|
||||
total_count * 4
|
||||
);
|
||||
|
||||
writer.write_all(s.as_bytes()).unwrap();
|
||||
|
||||
total_count = 0;
|
||||
start = now;
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn make_stdout_printer(rx: Receiver<Vec<u8>>) -> JoinHandle<()> {
|
||||
std::thread::spawn(move || {
|
||||
let mut writer = std::io::stdout();
|
||||
|
||||
while let Ok(buf) = rx.recv() {
|
||||
writer.write_all(&buf).unwrap();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
struct State {
|
||||
delimiter_lists: Arc<Vec<Vec<String>>>,
|
||||
hashes: Arc<HashSet<Murmur64>>,
|
||||
words: Arc<Vec<String>>,
|
||||
delimiters_len: usize,
|
||||
stdout_tx: Sender<Vec<u8>>,
|
||||
info_tx: Sender<(usize, usize, String)>,
|
||||
}
|
||||
|
||||
fn make_worker(rx: Receiver<Vec<usize>>, state: State) -> JoinHandle<()> {
|
||||
std::thread::spawn(move || {
|
||||
let delimiter_lists = &state.delimiter_lists;
|
||||
let hashes = &state.hashes;
|
||||
let words = &state.words;
|
||||
let delimiters_len = state.delimiters_len;
|
||||
|
||||
let mut count = 0;
|
||||
let mut found = 0;
|
||||
let mut buf = Vec::with_capacity(1024);
|
||||
|
||||
while let Ok(indices) = rx.recv() {
|
||||
let sequence = indices.iter().map(|i| words[*i].as_str());
|
||||
|
||||
// We only want delimiters between words, so we keep that iterator shorter by
|
||||
// one.
|
||||
let delimiter_count = sequence.len() as u32 - 1;
|
||||
|
||||
for prefix in PREFIXES.iter().map(|p| p.as_bytes()) {
|
||||
buf.clear();
|
||||
|
||||
// We can keep the prefix at the front of the buffer and only
|
||||
// replace the parts after that.
|
||||
let prefix_len = prefix.len();
|
||||
buf.extend_from_slice(prefix);
|
||||
|
||||
for delims in delimiter_lists
|
||||
.iter()
|
||||
.take(delimiters_len.pow(delimiter_count))
|
||||
{
|
||||
buf.truncate(prefix_len);
|
||||
|
||||
let delims = delims
|
||||
.iter()
|
||||
.map(|s| s.as_str())
|
||||
.take(delimiter_count as usize);
|
||||
sequence
|
||||
.clone()
|
||||
.interleave(delims.clone())
|
||||
.for_each(|word| buf.extend_from_slice(word.as_bytes()));
|
||||
|
||||
count += 1;
|
||||
|
||||
let hash = Murmur64::hash(&buf);
|
||||
if hashes.contains(&hash) {
|
||||
found += 1;
|
||||
|
||||
buf.push(LINE_FEED);
|
||||
if state.stdout_tx.send(buf.clone()).is_err() {
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
let word_len = buf.len();
|
||||
|
||||
// If the regular word itself didn't match, we check
|
||||
// for numbered suffixes.
|
||||
// For now, we only check up to `09` to avoid more complex logic
|
||||
// writing into the buffer.
|
||||
// Packages that contain files with higher numbers than this
|
||||
// should hopefully become easier to spot once a good number of
|
||||
// hashes is found.
|
||||
for i in 1..=9 {
|
||||
buf.truncate(word_len);
|
||||
buf.push(UNDERSCORE);
|
||||
buf.push(ZERO);
|
||||
buf.push(ZERO + i);
|
||||
|
||||
count += 1;
|
||||
|
||||
let hash = Murmur64::hash(&buf);
|
||||
if hashes.contains(&hash) {
|
||||
found += 1;
|
||||
|
||||
buf.push(LINE_FEED);
|
||||
if state.stdout_tx.send(buf.clone()).is_err() {
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if count >= 2 * 1024 * 1024 {
|
||||
// The last prefix in the set is the one that will stay in the buffer
|
||||
// when we're about to print here.
|
||||
// So we strip that, to show just the generated part.
|
||||
// We also restrict the length to stay on a single line.
|
||||
let prefix_len = PREFIXES[35].len();
|
||||
// No need to wait for this
|
||||
let _ = state.info_tx.try_send((
|
||||
count,
|
||||
found,
|
||||
String::from_utf8_lossy(&buf[prefix_len..]).to_string(),
|
||||
));
|
||||
|
||||
count = 0;
|
||||
found = 0;
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn build_delimiter_lists(delimiters: impl AsRef<[String]>, max_length: usize) -> Vec<Vec<String>> {
|
||||
let delimiters = delimiters.as_ref();
|
||||
let mut indices = vec![0; max_length];
|
||||
let mut list = Vec::new();
|
||||
|
||||
for _ in 0..delimiters.len().pow(max_length as u32) {
|
||||
list.push(
|
||||
indices
|
||||
.iter()
|
||||
.map(|i| delimiters[*i].clone())
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
for v in indices.iter_mut() {
|
||||
if *v >= delimiters.len() - 1 {
|
||||
*v = 0;
|
||||
break;
|
||||
} else {
|
||||
*v += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
list
|
||||
}
|
||||
|
||||
fn build_initial_indices(
|
||||
cont: Option<&String>,
|
||||
delimiters: impl AsRef<[String]>,
|
||||
words: impl AsRef<[String]>,
|
||||
) -> Result<Vec<usize>> {
|
||||
if let Some(cont) = cont {
|
||||
let mut splits = vec![cont.clone()];
|
||||
|
||||
for delim in delimiters.as_ref().iter() {
|
||||
splits = splits
|
||||
.iter()
|
||||
.flat_map(|s| s.split(delim))
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
}
|
||||
|
||||
let indices = splits
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
words
|
||||
.as_ref()
|
||||
.iter()
|
||||
.enumerate()
|
||||
.find(|(_, v)| s == **v)
|
||||
.map(|(i, _)| i)
|
||||
.ok_or_else(|| eyre::eyre!("'{}' is not in the word list", s))
|
||||
})
|
||||
.collect::<Result<_>>()?;
|
||||
|
||||
tracing::info!("Continuing from '{}' -> '{:?}'", cont, &indices);
|
||||
|
||||
Ok(indices)
|
||||
} else {
|
||||
Ok(vec![0])
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
#[allow(clippy::mut_range_bound)]
|
||||
pub(crate) fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
let max_length: usize = matches
|
||||
.get_one::<usize>("max-length")
|
||||
.copied()
|
||||
.expect("parameter has default");
|
||||
|
||||
let num_threads: usize = matches
|
||||
.get_one::<usize>("threads")
|
||||
.copied()
|
||||
.expect("parameter has default");
|
||||
|
||||
let words = {
|
||||
let path = matches
|
||||
.get_one::<PathBuf>("words")
|
||||
.expect("missing required parameter");
|
||||
|
||||
let file = fs::read_to_string(path)
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
|
||||
let words: Vec<_> = file.lines().map(str::to_string).collect();
|
||||
|
||||
if words.is_empty() {
|
||||
eyre::bail!("Word list must not be empty");
|
||||
}
|
||||
|
||||
Arc::new(words)
|
||||
};
|
||||
|
||||
let hashes = {
|
||||
let path = matches
|
||||
.get_one::<PathBuf>("hashes")
|
||||
.expect("missing required argument");
|
||||
let content = fs::read_to_string(path)
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
|
||||
let hashes: Result<HashSet<_>, _> = content
|
||||
.lines()
|
||||
.map(|s| u64::from_str_radix(s, 16).map(Murmur64::from))
|
||||
.collect();
|
||||
|
||||
let hashes = hashes?;
|
||||
|
||||
tracing::trace!("{:?}", hashes);
|
||||
|
||||
Arc::new(hashes)
|
||||
};
|
||||
|
||||
let mut delimiters: Vec<String> = matches
|
||||
.get_many::<String>("delimiter")
|
||||
.unwrap_or_default()
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
if delimiters.is_empty() {
|
||||
delimiters.push(String::from("/"));
|
||||
delimiters.push(String::from("_"));
|
||||
}
|
||||
|
||||
let delimiters_len = delimiters.len();
|
||||
|
||||
let word_count = words.len();
|
||||
tracing::info!("{} words to try", word_count);
|
||||
|
||||
// To be able to easily combine the permutations of words and delimiters,
|
||||
// we turn the latter into a pre-defined list of all permutations of delimiters
|
||||
// that are possible at the given amount of words.
|
||||
// Combining `Iterator::cycle` with `Itertools::permutations` works, but
|
||||
// with a high `max_length`, it runs OOM.
|
||||
// So we basically have to implement a smaller version of the iterative algorithm we use later on
|
||||
// to build permutations of the actual words.
|
||||
let delimiter_lists = {
|
||||
let lists = build_delimiter_lists(&delimiters, max_length - 1);
|
||||
Arc::new(lists)
|
||||
};
|
||||
tracing::debug!("{:?}", delimiter_lists);
|
||||
|
||||
let (info_tx, info_rx) = bounded(100);
|
||||
let (stdout_tx, stdout_rx) = unbounded::<Vec<u8>>();
|
||||
let (task_tx, task_rx) = bounded::<Vec<usize>>(num_threads * 4);
|
||||
let mut handles = Vec::new();
|
||||
|
||||
for _ in 0..num_threads {
|
||||
let handle = make_worker(
|
||||
task_rx.clone(),
|
||||
State {
|
||||
delimiter_lists: Arc::clone(&delimiter_lists),
|
||||
hashes: Arc::clone(&hashes),
|
||||
words: Arc::clone(&words),
|
||||
delimiters_len,
|
||||
stdout_tx: stdout_tx.clone(),
|
||||
info_tx: info_tx.clone(),
|
||||
},
|
||||
);
|
||||
handles.push(handle);
|
||||
}
|
||||
// These are only used inside the worker threads, but due to the loops above, we had to
|
||||
// clone them one too many times.
|
||||
// So we drop that extra reference immediately, to ensure that the channels can
|
||||
// disconnect properly when the threads finish.
|
||||
drop(stdout_tx);
|
||||
drop(info_tx);
|
||||
|
||||
handles.push(make_info_printer(info_rx, hashes.len()));
|
||||
handles.push(make_stdout_printer(stdout_rx));
|
||||
|
||||
let mut indices =
|
||||
build_initial_indices(matches.get_one::<String>("continue"), &delimiters, &*words)
|
||||
.wrap_err("Failed to build initial indices")?;
|
||||
let mut indices_len = indices.len();
|
||||
let mut sequence = indices
|
||||
.iter()
|
||||
.map(|index| words[*index].as_str())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Prevent re-allocation by reserving as much as we need upfront
|
||||
indices.reserve(max_length);
|
||||
sequence.reserve(max_length);
|
||||
|
||||
'outer: loop {
|
||||
task_tx.send(indices.clone())?;
|
||||
|
||||
for i in 0..indices_len {
|
||||
let index = indices.get_mut(i).unwrap();
|
||||
let word = sequence.get_mut(i).unwrap();
|
||||
|
||||
if *index >= word_count - 1 {
|
||||
*index = 0;
|
||||
*word = words[*index].as_str();
|
||||
|
||||
if indices.get(i + 1).is_none() {
|
||||
indices_len += 1;
|
||||
|
||||
if indices_len > max_length {
|
||||
break 'outer;
|
||||
}
|
||||
|
||||
indices.push(0);
|
||||
sequence.push(words[0].as_str());
|
||||
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
*index += 1;
|
||||
*word = words[*index].as_str();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Dropping the senders will disconnect the channel,
|
||||
// so that the threads holding the other end will eventually
|
||||
// complete as well.
|
||||
drop(task_tx);
|
||||
|
||||
for handle in handles {
|
||||
match handle.join() {
|
||||
Ok(_) => {}
|
||||
Err(value) => {
|
||||
if let Some(err) = value.downcast_ref::<String>() {
|
||||
eyre::bail!("Thread failed: {}", err);
|
||||
} else {
|
||||
eyre::bail!("Thread failed with unknown error: {:?}", value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let _ = std::io::stdout().write_all("\r".as_bytes());
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,463 +0,0 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::{value_parser, Arg, ArgMatches, Command, ValueEnum};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::Result;
|
||||
use tokio::fs;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("extract-words")
|
||||
.about(
|
||||
"Extract unique alphanumeric sequences that match common identifier rules from the given file. \
|
||||
Only ASCII is supported.",
|
||||
)
|
||||
.arg(
|
||||
Arg::new("file")
|
||||
.required(true)
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help("Path to the file to extract words from."),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("min-length")
|
||||
.help("Minimum length to consider a word.")
|
||||
.long("min-length")
|
||||
.short('m')
|
||||
.default_value("3")
|
||||
.value_parser(value_parser!(usize))
|
||||
)
|
||||
.arg(
|
||||
Arg::new("algorithm")
|
||||
.help("The algorithm to determine matching words")
|
||||
.long("algorithm")
|
||||
.short('a')
|
||||
.default_value("identifier")
|
||||
.value_parser(value_parser!(Algorithm))
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)]
|
||||
#[value(rename_all = "snake_case")]
|
||||
enum Algorithm {
|
||||
Alphabetic,
|
||||
Alphanumeric,
|
||||
Identifier,
|
||||
Number,
|
||||
Hash32,
|
||||
Hash64,
|
||||
Paths,
|
||||
}
|
||||
|
||||
impl Algorithm {
|
||||
fn is_start(&self, c: char) -> bool {
|
||||
match self {
|
||||
Self::Alphabetic => c.is_ascii_alphabetic(),
|
||||
Self::Alphanumeric => c.is_ascii_alphanumeric(),
|
||||
Self::Identifier => c.is_ascii_alphabetic(),
|
||||
Self::Number => c.is_numeric(),
|
||||
Self::Hash32 | Self::Hash64 => matches!(c, '0'..='9' | 'a'..='f' | 'A'..='F'),
|
||||
// Supposed to be handled separately
|
||||
Self::Paths => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_body(&self, c: char) -> bool {
|
||||
match self {
|
||||
Self::Alphabetic => c.is_ascii_alphabetic(),
|
||||
Self::Alphanumeric => c.is_ascii_alphanumeric(),
|
||||
Self::Identifier => c.is_ascii_alphanumeric(),
|
||||
Self::Number => c.is_numeric(),
|
||||
Self::Hash32 | Self::Hash64 => matches!(c, '0'..='9' | 'a'..='f' | 'A'..='F'),
|
||||
// Supposed to be handled separately
|
||||
Self::Paths => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_length(&self, len: usize) -> bool {
|
||||
match self {
|
||||
Self::Alphabetic => true,
|
||||
Self::Alphanumeric => true,
|
||||
Self::Identifier => true,
|
||||
Self::Number => true,
|
||||
Self::Hash32 => len == 8,
|
||||
Self::Hash64 => len == 16,
|
||||
// Supposed to be handled separately
|
||||
Self::Paths => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Algorithm {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match self {
|
||||
Algorithm::Alphabetic => "alphabetic",
|
||||
Algorithm::Alphanumeric => "alphanumeric",
|
||||
Algorithm::Identifier => "identifier",
|
||||
Algorithm::Number => "number",
|
||||
Algorithm::Hash32 => "hash32",
|
||||
Algorithm::Hash64 => "hash64",
|
||||
Algorithm::Paths => "paths",
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum PathState {
|
||||
Begin,
|
||||
PathComponent,
|
||||
PathSeparator,
|
||||
Boundary,
|
||||
NonWord,
|
||||
End,
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(chars))]
|
||||
fn extract_paths(chars: impl Iterator<Item = char>) -> Vec<Vec<String>> {
|
||||
let mut chars = chars.peekable();
|
||||
|
||||
let mut state = PathState::Begin;
|
||||
let mut list = Vec::new();
|
||||
let mut path = Vec::new();
|
||||
let mut word = String::new();
|
||||
|
||||
let is_boundary = |c: char| c == '\n' || c == ' ' || c == ',' || c == '\t' || c == '|';
|
||||
|
||||
'machine: loop {
|
||||
state = match state {
|
||||
PathState::Begin => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if c.is_ascii_alphabetic() => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some(c) if is_boundary(c) => PathState::Boundary,
|
||||
Some('/') => PathState::PathSeparator,
|
||||
Some(_) => PathState::NonWord,
|
||||
},
|
||||
PathState::PathComponent => match chars.next() {
|
||||
None => {
|
||||
path.push(word.clone());
|
||||
list.push(path.clone());
|
||||
|
||||
PathState::End
|
||||
}
|
||||
Some(c) if c.is_ascii_alphanumeric() || c == '_' => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some('/') => {
|
||||
path.push(word.clone());
|
||||
word.clear();
|
||||
|
||||
PathState::PathSeparator
|
||||
}
|
||||
Some(c) if is_boundary(c) => {
|
||||
path.push(word.clone());
|
||||
list.push(path.clone());
|
||||
|
||||
path.clear();
|
||||
word.clear();
|
||||
|
||||
PathState::Boundary
|
||||
}
|
||||
Some(_) => {
|
||||
list.push(path.clone());
|
||||
|
||||
path.clear();
|
||||
word.clear();
|
||||
|
||||
PathState::NonWord
|
||||
}
|
||||
},
|
||||
PathState::PathSeparator => match chars.next() {
|
||||
None => {
|
||||
list.push(path.clone());
|
||||
PathState::End
|
||||
}
|
||||
Some('/') => PathState::PathSeparator,
|
||||
Some(c) if c.is_ascii_alphabetic() || c == '_' => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some(c) if is_boundary(c) => {
|
||||
list.push(path.clone());
|
||||
path.clear();
|
||||
PathState::Boundary
|
||||
}
|
||||
Some(_) => {
|
||||
list.push(path.clone());
|
||||
path.clear();
|
||||
PathState::NonWord
|
||||
}
|
||||
},
|
||||
PathState::Boundary => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if c.is_ascii_alphabetic() => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some(c) if is_boundary(c) => PathState::Boundary,
|
||||
Some(_) => PathState::NonWord,
|
||||
},
|
||||
PathState::NonWord => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if is_boundary(c) => PathState::Boundary,
|
||||
Some(_) => PathState::NonWord,
|
||||
},
|
||||
PathState::End => {
|
||||
break 'machine;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
list
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(chars))]
|
||||
fn algorithm_path_components(chars: impl Iterator<Item = char>, min_length: usize) {
|
||||
let mut chars = chars.peekable();
|
||||
|
||||
let mut state = PathState::Begin;
|
||||
let mut word = String::new();
|
||||
let mut lists = vec![HashMap::<String, usize>::new()];
|
||||
let mut index = 0;
|
||||
|
||||
let is_boundary = |c: char| c == '\n' || c == ' ' || c == ',' || c == '\t';
|
||||
|
||||
'machine: loop {
|
||||
state = match state {
|
||||
PathState::Begin => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if c.is_ascii_alphabetic() => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some(c) if is_boundary(c) => PathState::Boundary,
|
||||
// Ignore leading path separators to not trigger the logic of advancing
|
||||
// the component count
|
||||
Some('/') => PathState::Boundary,
|
||||
Some(_) => PathState::NonWord,
|
||||
},
|
||||
PathState::PathComponent => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if c.is_ascii_alphanumeric() || c == '_' => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some('/') => PathState::PathSeparator,
|
||||
Some(c) => {
|
||||
if index > 0 && word.len() >= min_length {
|
||||
let list = &mut lists[index];
|
||||
list.entry(word.clone())
|
||||
.and_modify(|count| *count += 1)
|
||||
.or_insert(1);
|
||||
}
|
||||
word.clear();
|
||||
|
||||
index = 0;
|
||||
|
||||
if is_boundary(c) {
|
||||
PathState::Boundary
|
||||
} else {
|
||||
PathState::NonWord
|
||||
}
|
||||
}
|
||||
},
|
||||
PathState::PathSeparator => {
|
||||
if word.len() >= min_length {
|
||||
let list = &mut lists[index];
|
||||
list.entry(word.clone())
|
||||
.and_modify(|count| *count += 1)
|
||||
.or_insert(1);
|
||||
}
|
||||
word.clear();
|
||||
|
||||
index += 1;
|
||||
if lists.get(index).is_none() {
|
||||
lists.push(HashMap::new());
|
||||
}
|
||||
|
||||
// Ignore multiple separators
|
||||
while chars.next_if(|c| *c == '/').is_some() {}
|
||||
|
||||
match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if c.is_ascii_alphabetic() || c == '_' => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some(c) if is_boundary(c) => {
|
||||
index = 0;
|
||||
PathState::Boundary
|
||||
}
|
||||
Some(_) => {
|
||||
index = 0;
|
||||
PathState::NonWord
|
||||
}
|
||||
}
|
||||
}
|
||||
PathState::Boundary => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if c.is_ascii_alphabetic() => {
|
||||
word.push(c);
|
||||
PathState::PathComponent
|
||||
}
|
||||
Some(c) if is_boundary(c) => PathState::Boundary,
|
||||
Some(_) => PathState::NonWord,
|
||||
},
|
||||
PathState::NonWord => match chars.next() {
|
||||
None => PathState::End,
|
||||
Some(c) if is_boundary(c) => PathState::Boundary,
|
||||
Some(_) => PathState::NonWord,
|
||||
},
|
||||
PathState::End => {
|
||||
if word.len() >= min_length {
|
||||
let list = &mut lists[index];
|
||||
list.entry(word.clone())
|
||||
.and_modify(|count| *count += 1)
|
||||
.or_insert(1);
|
||||
}
|
||||
|
||||
break 'machine;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for i in 0..lists.len() {
|
||||
print!("Word {i}, Count {i},");
|
||||
}
|
||||
println!();
|
||||
|
||||
let mut lines: Vec<Vec<Option<(String, usize)>>> = Vec::new();
|
||||
|
||||
for (i, list) in lists.into_iter().enumerate() {
|
||||
let mut entries = list.into_iter().collect::<Vec<_>>();
|
||||
entries.sort_by(|(_, a), (_, b)| b.partial_cmp(a).unwrap());
|
||||
|
||||
for (j, (word, count)) in entries.into_iter().enumerate() {
|
||||
if let Some(line) = lines.get_mut(j) {
|
||||
while line.len() < i {
|
||||
line.push(None);
|
||||
}
|
||||
line.push(Some((word, count)));
|
||||
} else {
|
||||
let mut line = Vec::new();
|
||||
while line.len() < i {
|
||||
line.push(None);
|
||||
}
|
||||
line.push(Some((word, count)));
|
||||
lines.push(line);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for line in lines.iter() {
|
||||
for cell in line.iter() {
|
||||
if let Some((word, count)) = cell {
|
||||
print!("{},{},", word, count);
|
||||
} else {
|
||||
print!(",,");
|
||||
}
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum State {
|
||||
Begin,
|
||||
NonWord,
|
||||
Word,
|
||||
End,
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
let path = matches
|
||||
.get_one::<PathBuf>("file")
|
||||
.expect("missing required parameter");
|
||||
|
||||
let algorithm = matches
|
||||
.get_one::<Algorithm>("algorithm")
|
||||
.expect("parameter has default");
|
||||
|
||||
let min_length = matches
|
||||
.get_one::<usize>("min-length")
|
||||
.copied()
|
||||
.expect("paramter has default");
|
||||
|
||||
let content = fs::read_to_string(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
|
||||
let mut chars = content.chars();
|
||||
|
||||
if *algorithm == Algorithm::Paths {
|
||||
algorithm_path_components(chars, min_length);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut state = State::Begin;
|
||||
let mut word = String::new();
|
||||
let mut visited = HashMap::new();
|
||||
|
||||
'machine: loop {
|
||||
state = match state {
|
||||
State::Begin => match chars.next() {
|
||||
None => State::End,
|
||||
Some(c) if algorithm.is_start(c) => {
|
||||
word.push(c);
|
||||
State::Word
|
||||
}
|
||||
Some(_) => State::NonWord,
|
||||
},
|
||||
State::End => break 'machine,
|
||||
State::NonWord => match chars.next() {
|
||||
None => State::End,
|
||||
Some(c) if algorithm.is_body(c) => {
|
||||
word.push(c);
|
||||
State::Word
|
||||
}
|
||||
Some(_) => State::NonWord,
|
||||
},
|
||||
State::Word => match chars.next() {
|
||||
None => {
|
||||
if word.len() >= min_length && algorithm.is_length(word.len()) {
|
||||
visited
|
||||
.entry(word.clone())
|
||||
.and_modify(|v| *v += 1)
|
||||
.or_insert(1);
|
||||
}
|
||||
State::End
|
||||
}
|
||||
Some(c) if algorithm.is_body(c) => {
|
||||
word.push(c);
|
||||
State::Word
|
||||
}
|
||||
Some(_) => {
|
||||
if word.len() >= min_length && algorithm.is_length(word.len()) {
|
||||
visited
|
||||
.entry(word.clone())
|
||||
.and_modify(|v| *v += 1)
|
||||
.or_insert(1);
|
||||
}
|
||||
word.clear();
|
||||
State::NonWord
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
let mut entries: Vec<(String, usize)> = visited.into_iter().collect();
|
||||
// Reverse sides during comparison to get "highest to lowest"
|
||||
entries.sort_by(|(_, a), (_, b)| b.partial_cmp(a).unwrap());
|
||||
|
||||
entries
|
||||
.iter()
|
||||
.for_each(|(word, count)| println!("{:016} {}", word, count));
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
use clap::{ArgMatches, Command};
|
||||
use color_eyre::Result;
|
||||
|
||||
mod brute_force_words;
|
||||
mod extract_words;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("experiment")
|
||||
.subcommand_required(true)
|
||||
.about("A collection of utilities and experiments.")
|
||||
.subcommand(brute_force_words::command_definition())
|
||||
.subcommand(extract_words::command_definition())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
match matches.subcommand() {
|
||||
// It's fine to block here, as this is the only thing that's executing on the runtime.
|
||||
// The other option with `spawn_blocking` would require setting up values to be Send+Sync.
|
||||
Some(("brute-force-words", sub_matches)) => brute_force_words::run(ctx, sub_matches),
|
||||
Some(("extract-words", sub_matches)) => extract_words::run(ctx, sub_matches).await,
|
||||
_ => unreachable!(
|
||||
"clap is configured to require a subcommand, and they're all handled above"
|
||||
),
|
||||
}
|
||||
}
|
|
@ -351,6 +351,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
|
|||
},
|
||||
depends: vec![ModDependency::ID(String::from("DMF"))],
|
||||
bundled: true,
|
||||
name_overrides: HashMap::new(),
|
||||
};
|
||||
|
||||
tracing::debug!(?dtmt_cfg);
|
||||
|
|
|
@ -1,18 +1,30 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::{Arg, ArgMatches, Command};
|
||||
use color_eyre::eyre::{self, Context, Result};
|
||||
use color_eyre::Help;
|
||||
use futures::{StreamExt, TryStreamExt};
|
||||
use string_template::Template;
|
||||
use minijinja::Environment;
|
||||
use tokio::fs::{self, DirBuilder};
|
||||
|
||||
const TEMPLATES: [(&str, &str); 5] = [
|
||||
(
|
||||
"dtmt.cfg",
|
||||
r#"id = "{{id}}"
|
||||
r#"//
|
||||
// This is your mod's main configuration file. It tells DTMT how to build the mod,
|
||||
// and DTMM what to display to your users.
|
||||
// Certain files have been pre-filled by the template, the ones commented out (`//`)
|
||||
// are optional.
|
||||
//
|
||||
// A unique identifier (preferably lower case, alphanumeric)
|
||||
id = "{{id}}"
|
||||
// The display name that your users will see.
|
||||
// This doesn't have to be unique, but you still want to avoid being confused with other
|
||||
// mods.
|
||||
name = "{{name}}"
|
||||
// It's good practice to increase this number whenever you publish changes.
|
||||
// It's up to you if you use SemVer or something simpler like `1970-12-24`. It should sort and
|
||||
// compare well, though.
|
||||
version = "0.1.0"
|
||||
// author = ""
|
||||
|
||||
|
@ -32,16 +44,25 @@ categories = [
|
|||
|
||||
// A list of mod IDs that this mod depends on. You can find
|
||||
// those IDs by downloading the mod and extracting its `dtmt.cfg`.
|
||||
// To make your fellow modders' lives easier, publish your own mods' IDs
|
||||
// somewhere visible, such as the Nexusmods page.
|
||||
depends = [
|
||||
DMF
|
||||
]
|
||||
|
||||
// The primary resources that serve as the entry point to your
|
||||
// mod's code. Except for very specific use cases, the generated
|
||||
// values shouldn't be changed.
|
||||
resources = {
|
||||
init = "scripts/mods/{{id}}/init"
|
||||
data = "scripts/mods/{{id}}/data"
|
||||
localization = "scripts/mods/{{id}}/localization"
|
||||
}
|
||||
|
||||
// The list of packages, or bundles, to build.
|
||||
// Each one corresponds to a package definition in the named folder.
|
||||
// For mods that contain only code and/or a few small assets, a single
|
||||
// package will suffice.
|
||||
packages = [
|
||||
"packages/mods/{{id}}"
|
||||
]
|
||||
|
@ -59,7 +80,6 @@ packages = [
|
|||
r#"local mod = get_mod("{{id}}")
|
||||
|
||||
-- Your mod code goes here.
|
||||
-- https://vmf-docs.verminti.de
|
||||
"#,
|
||||
),
|
||||
(
|
||||
|
@ -137,34 +157,45 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
|
|||
|
||||
tracing::debug!(root = %root.display(), name, id);
|
||||
|
||||
let mut data = HashMap::new();
|
||||
data.insert("name", name.as_str());
|
||||
data.insert("id", id.as_str());
|
||||
let render_ctx = minijinja::context!(name => name.as_str(), id => id.as_str());
|
||||
let env = Environment::new();
|
||||
|
||||
let templates = TEMPLATES
|
||||
.iter()
|
||||
.map(|(path_tmpl, content_tmpl)| {
|
||||
let path = Template::new(path_tmpl).render(&data);
|
||||
let content = Template::new(content_tmpl).render(&data);
|
||||
|
||||
(root.join(path), content)
|
||||
env.render_str(path_tmpl, &render_ctx)
|
||||
.wrap_err_with(|| format!("Failed to render template: {}", path_tmpl))
|
||||
.and_then(|path| {
|
||||
env.render_named_str(&path, content_tmpl, &render_ctx)
|
||||
.wrap_err_with(|| format!("Failed to render template '{}'", &path))
|
||||
.map(|content| (root.join(path), content))
|
||||
})
|
||||
})
|
||||
.map(|(path, content)| async move {
|
||||
let dir = path
|
||||
.parent()
|
||||
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
|
||||
.map(|res| async move {
|
||||
match res {
|
||||
Ok((path, content)) => {
|
||||
let dir = path
|
||||
.parent()
|
||||
.ok_or_else(|| eyre::eyre!("invalid root path"))?;
|
||||
|
||||
DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(&dir)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to create directory {}", dir.display()))?;
|
||||
DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(&dir)
|
||||
.await
|
||||
.wrap_err_with(|| {
|
||||
format!("Failed to create directory {}", dir.display())
|
||||
})?;
|
||||
|
||||
tracing::trace!("Writing file {}", path.display());
|
||||
tracing::trace!("Writing file {}", path.display());
|
||||
|
||||
fs::write(&path, content.as_bytes())
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to write content to path {}", path.display()))
|
||||
fs::write(&path, content.as_bytes())
|
||||
.await
|
||||
.wrap_err_with(|| {
|
||||
format!("Failed to write content to path {}", path.display())
|
||||
})
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
});
|
||||
|
||||
futures::stream::iter(templates)
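The hunk above swaps the old `string_template` rendering for `minijinja`, pushing both the template's file path and its contents through the same render context. A hedged sketch of that rendering pattern; the template strings and values here are illustrative, not the exact entries of `TEMPLATES`:

```rust
use minijinja::Environment;

fn main() -> Result<(), minijinja::Error> {
    let env = Environment::new();
    // Same shape as the `render_ctx` built above: `name` and `id` of the new mod.
    let ctx = minijinja::context!(name => "My Mod", id => "my_mod");

    // Both the path and the content are plain strings with {{id}}/{{name}} placeholders.
    let path = env.render_str("packages/mods/{{id}}", &ctx)?;
    let content = env.render_named_str(&path, "id = \"{{id}}\"\nname = \"{{name}}\"\n", &ctx)?;

    println!("{path}:\n{content}");
    Ok(())
}
```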
|
||||
|
|
|
@ -77,17 +77,14 @@ pub(crate) fn command_definition() -> Command {
|
|||
)
|
||||
}
|
||||
|
||||
async fn compile<P1, P2, P3>(
|
||||
#[tracing::instrument]
|
||||
async fn compile(
|
||||
cfg: &ModConfig,
|
||||
out_path: P1,
|
||||
archive_path: P2,
|
||||
game_dir: Arc<Option<P3>>,
|
||||
) -> Result<()>
|
||||
where
|
||||
P1: AsRef<Path> + std::marker::Copy,
|
||||
P2: AsRef<Path>,
|
||||
P3: AsRef<Path>,
|
||||
{
|
||||
out_path: impl AsRef<Path> + std::fmt::Debug,
|
||||
archive_path: impl AsRef<Path> + std::fmt::Debug,
|
||||
game_dir: Arc<Option<impl AsRef<Path> + std::fmt::Debug>>,
|
||||
) -> Result<()> {
|
||||
let out_path = out_path.as_ref();
|
||||
build(cfg, out_path, game_dir)
|
||||
.await
|
||||
.wrap_err("Failed to build bundles")?;
|
||||
|
|
|
@ -21,7 +21,6 @@ mod cmd {
|
|||
pub mod build;
|
||||
pub mod bundle;
|
||||
pub mod dictionary;
|
||||
pub mod experiment;
|
||||
pub mod migrate;
|
||||
pub mod murmur;
|
||||
pub mod new;
|
||||
|
@ -57,7 +56,6 @@ async fn main() -> Result<()> {
|
|||
.subcommand(cmd::build::command_definition())
|
||||
.subcommand(cmd::bundle::command_definition())
|
||||
.subcommand(cmd::dictionary::command_definition())
|
||||
.subcommand(cmd::experiment::command_definition())
|
||||
.subcommand(cmd::migrate::command_definition())
|
||||
.subcommand(cmd::murmur::command_definition())
|
||||
.subcommand(cmd::new::command_definition())
|
||||
|
@ -135,7 +133,6 @@ async fn main() -> Result<()> {
|
|||
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
|
||||
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
|
||||
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
|
||||
Some(("experiment", sub_matches)) => cmd::experiment::run(ctx, sub_matches).await?,
|
||||
Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
|
||||
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
|
||||
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit b40962a61c748756d7da293d9fff26aca019603e
|
||||
Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751
|
|
@ -6,11 +6,11 @@ edition = "2021"
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
ansi_term = "0.12.1"
|
||||
color-eyre = "0.6.2"
|
||||
serde = "1.0.152"
|
||||
steamlocate = "2.0.0-beta.2"
|
||||
time = { version = "0.3.19", features = ["formatting", "local-offset", "macros"] }
|
||||
tracing = "0.1.37"
|
||||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = "0.3.16"
|
||||
ansi_term = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
steamlocate = { workspace = true }
|
||||
time = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use color_eyre::eyre::{OptionExt as _, WrapErr as _};
|
||||
|
@ -67,6 +68,8 @@ pub struct ModConfig {
|
|||
pub depends: Vec<ModDependency>,
|
||||
#[serde(default = "default_true", skip_serializing_if = "is_true")]
|
||||
pub bundled: bool,
|
||||
#[serde(default)]
|
||||
pub name_overrides: HashMap<String, String>,
|
||||
}
|
||||
|
||||
pub const STEAMAPP_ID: u32 = 1361210;
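The new `bundled` field above points serde at `default_true` and `is_true` helpers that are referenced in the attributes but not shown in this hunk. Presumably they follow the usual serde idiom; a hedged sketch of what such helpers look like:

```rust
// Assumed helpers for `#[serde(default = "default_true", skip_serializing_if = "is_true")]`.
// serde calls `default_true` when the field is missing from the config, and skips
// writing the field back out when `is_true` returns true, so the common case
// stays out of the serialized file.
fn default_true() -> bool {
    true
}

fn is_true(value: &bool) -> bool {
    *value
}
```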
|
||||
|
|
|
@ -84,7 +84,7 @@ pub fn create_tracing_subscriber() {
|
|||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
|
||||
|
||||
let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
|
||||
let fmt_layer = fmt::layer().pretty();
|
||||
let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
|
||||
(Some(fmt_layer), None, None)
|
||||
} else {
|
||||
// Creates a layer that
|
||||
|
@ -93,6 +93,7 @@ pub fn create_tracing_subscriber() {
|
|||
// - does not print spans/targets
|
||||
// - only prints time, not date
|
||||
let fmt_layer = fmt::layer()
|
||||
.with_writer(std::io::stderr)
|
||||
.event_format(Formatter)
|
||||
.fmt_fields(debug_fn(format_fields));
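Both branches of this change route formatted log output to stderr, keeping stdout free for actual command output. A minimal sketch of the same idea using only the public `tracing-subscriber` fmt API; the custom `Formatter`/`format_fields` pieces above are left out, and the `env-filter` feature of `tracing-subscriber` is assumed to be enabled:

```rust
use tracing_subscriber::{fmt, EnvFilter};

fn main() {
    // Respect RUST_LOG if set, otherwise default to "info", and write to stderr.
    let filter =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());

    fmt()
        .with_env_filter(filter)
        .with_writer(std::io::stderr)
        .init();

    tracing::info!("logs go to stderr");
    println!("data goes to stdout");
}
```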
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit 5d1a075742395f767c79d9c0d7466c6fb442f106
|
||||
Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241
|
|
@ -12,7 +12,7 @@ regex = "1.7.1"
|
|||
reqwest = { version = "0.12.4" }
|
||||
serde = { version = "1.0.152", features = ["derive"] }
|
||||
serde_json = "1.0.94"
|
||||
thiserror = "1.0.39"
|
||||
thiserror = "2.0.0"
|
||||
time = { version = "0.3.20", features = ["serde"] }
|
||||
tracing = "0.1.37"
|
||||
url = { version = "2.3.1", features = ["serde"] }
|
||||
|
|
|
@ -28,7 +28,7 @@ pub enum Error {
|
|||
HTTP(#[from] reqwest::Error),
|
||||
#[error("invalid URL: {0:?}")]
|
||||
URLParseError(#[from] url::ParseError),
|
||||
#[error("failed to deserialize '{error}': {json}")]
|
||||
#[error("failed to deserialize due to {error}: {json}")]
|
||||
Deserialize {
|
||||
json: String,
|
||||
error: serde_json::Error,
|
||||
|
@ -37,7 +37,7 @@ pub enum Error {
|
|||
InvalidHeaderValue(#[from] InvalidHeaderValue),
|
||||
#[error("this error cannot happen")]
|
||||
Infallible(#[from] Infallible),
|
||||
#[error("invalid NXM URL '{}': {0}", .1.as_str())]
|
||||
#[error("invalid NXM URL '{url}': {0}", url = .1.as_str())]
|
||||
InvalidNXM(&'static str, Url),
|
||||
#[error("{0}")]
|
||||
Custom(String),
|
||||
|
|
|
@ -6,8 +6,8 @@ edition = "2021"
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
color-eyre = "0.6.2"
|
||||
tracing = "0.1.37"
|
||||
color-eyre = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = "0.69.4"
|
||||
bindgen = "0.71.0"
|
||||
|
|
|
@ -7,6 +7,7 @@ use std::ptr;
|
|||
use color_eyre::{eyre, Result};
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[allow(clippy::identity_op)]
|
||||
mod bindings {
|
||||
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
||||
}
|
||||
|
|
|
@ -4,23 +4,23 @@ version = "0.3.0"
|
|||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
bitflags = "2.5.0"
|
||||
byteorder = "1.4.3"
|
||||
color-eyre = "0.6.2"
|
||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||
fastrand = "2.1.0"
|
||||
futures = "0.3.25"
|
||||
futures-util = "0.3.24"
|
||||
glob = "0.3.0"
|
||||
nanorand = "0.7.0"
|
||||
pin-project-lite = "0.2.9"
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
|
||||
oodle = { path = "../../lib/oodle", version = "*" }
|
||||
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
|
||||
tracing = { version = "0.1.37", features = ["async-await"] }
|
||||
tracing-error = "0.2.0"
|
||||
luajit2-sys = { path = "../../lib/luajit2-sys", version = "*" }
|
||||
async-recursion = "1.0.2"
|
||||
path-slash = "0.2.1"
|
||||
async-recursion = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
byteorder = { workspace = true }
|
||||
color-eyre = { workspace = true }
|
||||
csv-async = { workspace = true }
|
||||
fastrand = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
futures-util = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
nanorand = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
pin-project-lite = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-error = { workspace = true }
|
||||
|
|
|
@ -43,6 +43,7 @@ impl<T: FromBinary> FromBinary for Vec<T> {
|
|||
}
|
||||
|
||||
pub mod sync {
|
||||
use std::ffi::CStr;
|
||||
use std::io::{self, Read, Seek, SeekFrom};
|
||||
|
||||
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
|
||||
|
@ -165,25 +166,13 @@ pub mod sync {
|
|||
}
|
||||
|
||||
fn read_string_len(&mut self, len: usize) -> Result<String> {
|
||||
let mut buf = vec![0; len];
|
||||
let res = self
|
||||
.read_exact(&mut buf)
|
||||
.map_err(Report::new)
|
||||
.and_then(|_| {
|
||||
String::from_utf8(buf).map_err(|err| {
|
||||
let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
|
||||
let bytes = format!("{:?}", err.as_bytes());
|
||||
Report::new(err)
|
||||
.with_section(move || bytes.header("Bytes:"))
|
||||
.with_section(move || ascii.header("ASCII:"))
|
||||
})
|
||||
});
|
||||
let pos = self.stream_position();
|
||||
|
||||
let res = read_string_len(self, len);
|
||||
if res.is_ok() {
|
||||
return res;
|
||||
}
|
||||
|
||||
let pos = self.stream_position();
|
||||
if pos.is_ok() {
|
||||
res.with_section(|| {
|
||||
format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
|
||||
|
@ -243,4 +232,22 @@ pub mod sync {
|
|||
|
||||
Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
|
||||
}
|
||||
|
||||
fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
|
||||
let mut buf = vec![0; len];
|
||||
r.read_exact(&mut buf)
|
||||
.wrap_err_with(|| format!("Failed to read {} bytes", len))?;
|
||||
|
||||
let res = match CStr::from_bytes_until_nul(&buf) {
|
||||
Ok(s) => {
|
||||
let s = s.to_str()?;
|
||||
Ok(s.to_string())
|
||||
}
|
||||
Err(_) => String::from_utf8(buf.clone()).map_err(Report::new),
|
||||
};
|
||||
|
||||
res.wrap_err("Invalid binary for UTF8 string")
|
||||
.with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCII:"))
|
||||
.with_section(|| format!("{:x?}", buf).header("Bytes:"))
|
||||
}
|
||||
}
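The new free-standing `read_string_len` above first tries to cut the buffer at a NUL terminator and only falls back to decoding the whole buffer as UTF-8 when no NUL is present. A small, self-contained sketch of that decoding order, without the `color_eyre` report sections used in the real helper:

```rust
use std::ffi::CStr;

/// Decode a fixed-size, possibly NUL-padded string field: prefer the
/// NUL-terminated prefix, fall back to the full buffer as UTF-8.
fn decode_padded(buf: &[u8]) -> Result<String, std::str::Utf8Error> {
    match CStr::from_bytes_until_nul(buf) {
        Ok(s) => s.to_str().map(|s| s.to_string()),
        Err(_) => std::str::from_utf8(buf).map(|s| s.to_string()),
    }
}

fn main() {
    // "lua\0\0\0\0\0"-style padding, as found in fixed-width name fields.
    assert_eq!(decode_padded(b"lua\0\0\0\0\0").unwrap(), "lua");
    // No NUL at all: the whole buffer is taken as-is.
    assert_eq!(decode_padded(b"texture").unwrap(), "texture");
    println!("ok");
}
```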
|
||||
|
|
|
@ -13,21 +13,21 @@ use crate::binary::ToBinary;
|
|||
use crate::murmur::Murmur64;
|
||||
use crate::Bundle;
|
||||
|
||||
use super::file::BundleFileType;
|
||||
use super::filetype::BundleFileType;
|
||||
|
||||
const DATABASE_VERSION: u32 = 0x6;
|
||||
const FILE_VERSION: u32 = 0x4;
|
||||
|
||||
pub struct BundleFile {
|
||||
name: String,
|
||||
stream: String,
|
||||
platform_specific: bool,
|
||||
file_time: u64,
|
||||
pub name: String,
|
||||
pub stream: String,
|
||||
pub platform_specific: bool,
|
||||
pub file_time: u64,
|
||||
}
|
||||
|
||||
pub struct FileName {
|
||||
extension: BundleFileType,
|
||||
name: Murmur64,
|
||||
pub extension: BundleFileType,
|
||||
pub name: Murmur64,
|
||||
}
|
||||
|
||||
pub struct BundleDatabase {
|
||||
|
@ -36,7 +36,34 @@ pub struct BundleDatabase {
|
|||
bundle_contents: HashMap<Murmur64, Vec<FileName>>,
|
||||
}
|
||||
|
||||
// Implements the partial Murmur that's used by the engine to compute bundle resource hashes,
|
||||
// but in a way that the loop can be done outside the function.
|
||||
#[inline(always)]
|
||||
fn add_to_resource_hash(mut k: u64, name: impl Into<u64>) -> u64 {
|
||||
const M: u64 = 0xc6a4a7935bd1e995;
|
||||
const R: u64 = 47;
|
||||
|
||||
let mut h: u64 = name.into();
|
||||
|
||||
k = k.wrapping_mul(M);
|
||||
k ^= k >> R;
|
||||
k = k.wrapping_mul(M);
|
||||
|
||||
h ^= k;
|
||||
k = M.wrapping_mul(h);
|
||||
|
||||
k
|
||||
}
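`add_to_resource_hash` factors the per-name mixing step of the engine's MurmurHash64A out of the loop so that `add_bundle` below can fold it over every file name in a bundle. A hedged usage sketch, with the helper reproduced (simplified to take `u64` directly instead of `impl Into<u64>`) and made-up hash values standing in for real `Murmur64` names:

```rust
// Same mixing step as `add_to_resource_hash` above, reproduced so the sketch
// is self-contained.
fn add_to_resource_hash(mut k: u64, name: u64) -> u64 {
    const M: u64 = 0xc6a4a7935bd1e995;
    const R: u64 = 47;

    let mut h: u64 = name;

    k = k.wrapping_mul(M);
    k ^= k >> R;
    k = k.wrapping_mul(M);

    h ^= k;
    k = M.wrapping_mul(h);

    k
}

fn main() {
    // Hypothetical 64-bit name hashes of the files in one bundle.
    let file_name_hashes = [0x931e336d7646cc26u64, 0xa14e8dfa2cd117e2, 0x0d972bab10b40fd3];

    // Fold the names into a single resource hash, starting from 0 as `add_bundle` does.
    let resource_hash = file_name_hashes
        .iter()
        .fold(0u64, |acc, &name| add_to_resource_hash(acc, name));

    println!("resource hash: {resource_hash:#018x}");
}
```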
|
||||
|
||||
impl BundleDatabase {
|
||||
pub fn bundles(&self) -> &HashMap<Murmur64, Vec<BundleFile>> {
|
||||
&self.stored_files
|
||||
}
|
||||
|
||||
pub fn files(&self) -> &HashMap<Murmur64, Vec<FileName>> {
|
||||
&self.bundle_contents
|
||||
}
|
||||
|
||||
pub fn add_bundle(&mut self, bundle: &Bundle) {
|
||||
let hash = bundle.name().to_murmur64();
|
||||
let name = hash.to_string();
|
||||
|
@ -69,20 +96,26 @@ impl BundleDatabase {
|
|||
}
|
||||
}
|
||||
|
||||
let mut resource_hash = 0;
|
||||
|
||||
for f in bundle.files() {
|
||||
let name = f.base_name().to_murmur64();
|
||||
let file_name = FileName {
|
||||
extension: f.file_type(),
|
||||
name: f.base_name().to_murmur64(),
|
||||
name,
|
||||
};
|
||||
|
||||
// TODO: Compute actual resource hash
|
||||
self.resource_hashes.insert(hash, 0);
|
||||
resource_hash = add_to_resource_hash(resource_hash, name);
|
||||
|
||||
// TODO: Make sure each file name only exists once. Probably best to turn
|
||||
// the `Vec` into a sorted `HashSet`.
|
||||
self.bundle_contents
|
||||
.entry(hash)
|
||||
.or_default()
|
||||
.push(file_name);
|
||||
}
|
||||
|
||||
self.resource_hashes.insert(hash, resource_hash);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -103,7 +136,7 @@ impl FromBinary for BundleDatabase {
|
|||
let mut stored_files = HashMap::with_capacity(num_entries);
|
||||
|
||||
for _ in 0..num_entries {
|
||||
let hash = Murmur64::from(r.read_u64()?);
|
||||
let hash = r.read_u64().map(Murmur64::from)?;
|
||||
|
||||
let num_files = r.read_u32()? as usize;
|
||||
let mut files = Vec::with_capacity(num_files);
|
||||
|
@ -161,7 +194,7 @@ impl FromBinary for BundleDatabase {
|
|||
let mut resource_hashes = HashMap::with_capacity(num_hashes);
|
||||
|
||||
for _ in 0..num_hashes {
|
||||
let name = Murmur64::from(r.read_u64()?);
|
||||
let name = r.read_u64().map(Murmur64::from)?;
|
||||
let hash = r.read_u64()?;
|
||||
|
||||
resource_hashes.insert(name, hash);
|
||||
|
@ -171,14 +204,14 @@ impl FromBinary for BundleDatabase {
|
|||
let mut bundle_contents = HashMap::with_capacity(num_contents);
|
||||
|
||||
for _ in 0..num_contents {
|
||||
let hash = Murmur64::from(r.read_u64()?);
|
||||
let hash = r.read_u64().map(Murmur64::from)?;
|
||||
|
||||
let num_files = r.read_u32()? as usize;
|
||||
let mut files = Vec::with_capacity(num_files);
|
||||
|
||||
for _ in 0..num_files {
|
||||
let extension = BundleFileType::from(r.read_u64()?);
|
||||
let name = Murmur64::from(r.read_u64()?);
|
||||
let extension = r.read_u64().map(BundleFileType::from)?;
|
||||
let name = r.read_u64().map(Murmur64::from)?;
|
||||
|
||||
files.push(FileName { extension, name });
|
||||
}
|
||||
|
|
|
@ -5,407 +5,12 @@ use bitflags::bitflags;
|
|||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{eyre, Result};
|
||||
use futures::future::join_all;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::binary::sync::*;
|
||||
use crate::filetype::*;
|
||||
use crate::murmur::{HashGroup, IdString64, Murmur64};
|
||||
|
||||
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
|
||||
pub enum BundleFileType {
|
||||
Animation,
|
||||
AnimationCurves,
|
||||
Apb,
|
||||
BakedLighting,
|
||||
Bik,
|
||||
BlendSet,
|
||||
Bones,
|
||||
Chroma,
|
||||
CommonPackage,
|
||||
Config,
|
||||
Crypto,
|
||||
Data,
|
||||
Entity,
|
||||
Flow,
|
||||
Font,
|
||||
Ies,
|
||||
Ini,
|
||||
Input,
|
||||
Ivf,
|
||||
Keys,
|
||||
Level,
|
||||
Lua,
|
||||
Material,
|
||||
Mod,
|
||||
MouseCursor,
|
||||
NavData,
|
||||
NetworkConfig,
|
||||
OddleNet,
|
||||
Package,
|
||||
Particles,
|
||||
PhysicsProperties,
|
||||
RenderConfig,
|
||||
RtPipeline,
|
||||
Scene,
|
||||
Shader,
|
||||
ShaderLibrary,
|
||||
ShaderLibraryGroup,
|
||||
ShadingEnvionmentMapping,
|
||||
ShadingEnvironment,
|
||||
Slug,
|
||||
SlugAlbum,
|
||||
SoundEnvironment,
|
||||
SpuJob,
|
||||
StateMachine,
|
||||
StaticPVS,
|
||||
Strings,
|
||||
SurfaceProperties,
|
||||
Texture,
|
||||
TimpaniBank,
|
||||
TimpaniMaster,
|
||||
Tome,
|
||||
Ugg,
|
||||
Unit,
|
||||
Upb,
|
||||
VectorField,
|
||||
Wav,
|
||||
WwiseBank,
|
||||
WwiseDep,
|
||||
WwiseEvent,
|
||||
WwiseMetadata,
|
||||
WwiseStream,
|
||||
Xml,
|
||||
|
||||
Unknown(Murmur64),
|
||||
}
|
||||
|
||||
impl BundleFileType {
|
||||
pub fn ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::AnimationCurves => String::from("animation_curves"),
|
||||
BundleFileType::Animation => String::from("animation"),
|
||||
BundleFileType::Apb => String::from("apb"),
|
||||
BundleFileType::BakedLighting => String::from("baked_lighting"),
|
||||
BundleFileType::Bik => String::from("bik"),
|
||||
BundleFileType::BlendSet => String::from("blend_set"),
|
||||
BundleFileType::Bones => String::from("bones"),
|
||||
BundleFileType::Chroma => String::from("chroma"),
|
||||
BundleFileType::CommonPackage => String::from("common_package"),
|
||||
BundleFileType::Config => String::from("config"),
|
||||
BundleFileType::Crypto => String::from("crypto"),
|
||||
BundleFileType::Data => String::from("data"),
|
||||
BundleFileType::Entity => String::from("entity"),
|
||||
BundleFileType::Flow => String::from("flow"),
|
||||
BundleFileType::Font => String::from("font"),
|
||||
BundleFileType::Ies => String::from("ies"),
|
||||
BundleFileType::Ini => String::from("ini"),
|
||||
BundleFileType::Input => String::from("input"),
|
||||
BundleFileType::Ivf => String::from("ivf"),
|
||||
BundleFileType::Keys => String::from("keys"),
|
||||
BundleFileType::Level => String::from("level"),
|
||||
BundleFileType::Lua => String::from("lua"),
|
||||
BundleFileType::Material => String::from("material"),
|
||||
BundleFileType::Mod => String::from("mod"),
|
||||
BundleFileType::MouseCursor => String::from("mouse_cursor"),
|
||||
BundleFileType::NavData => String::from("nav_data"),
|
||||
BundleFileType::NetworkConfig => String::from("network_config"),
|
||||
BundleFileType::OddleNet => String::from("oodle_net"),
|
||||
BundleFileType::Package => String::from("package"),
|
||||
BundleFileType::Particles => String::from("particles"),
|
||||
BundleFileType::PhysicsProperties => String::from("physics_properties"),
|
||||
BundleFileType::RenderConfig => String::from("render_config"),
|
||||
BundleFileType::RtPipeline => String::from("rt_pipeline"),
|
||||
BundleFileType::Scene => String::from("scene"),
|
||||
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
|
||||
BundleFileType::ShaderLibrary => String::from("shader_library"),
|
||||
BundleFileType::Shader => String::from("shader"),
|
||||
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
|
||||
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
|
||||
BundleFileType::SlugAlbum => String::from("slug_album"),
|
||||
BundleFileType::Slug => String::from("slug"),
|
||||
BundleFileType::SoundEnvironment => String::from("sound_environment"),
|
||||
BundleFileType::SpuJob => String::from("spu_job"),
|
||||
BundleFileType::StateMachine => String::from("state_machine"),
|
||||
BundleFileType::StaticPVS => String::from("static_pvs"),
|
||||
BundleFileType::Strings => String::from("strings"),
|
||||
BundleFileType::SurfaceProperties => String::from("surface_properties"),
|
||||
BundleFileType::Texture => String::from("texture"),
|
||||
BundleFileType::TimpaniBank => String::from("timpani_bank"),
|
||||
BundleFileType::TimpaniMaster => String::from("timpani_master"),
|
||||
BundleFileType::Tome => String::from("tome"),
|
||||
BundleFileType::Ugg => String::from("ugg"),
|
||||
BundleFileType::Unit => String::from("unit"),
|
||||
BundleFileType::Upb => String::from("upb"),
|
||||
BundleFileType::VectorField => String::from("vector_field"),
|
||||
BundleFileType::Wav => String::from("wav"),
|
||||
BundleFileType::WwiseBank => String::from("wwise_bank"),
|
||||
BundleFileType::WwiseDep => String::from("wwise_dep"),
|
||||
BundleFileType::WwiseEvent => String::from("wwise_event"),
|
||||
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
|
||||
BundleFileType::WwiseStream => String::from("wwise_stream"),
|
||||
BundleFileType::Xml => String::from("xml"),
|
||||
|
||||
BundleFileType::Unknown(s) => format!("{s:016X}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn decompiled_ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::Texture => String::from("dds"),
|
||||
BundleFileType::WwiseBank => String::from("bnk"),
|
||||
BundleFileType::WwiseStream => String::from("ogg"),
|
||||
_ => self.ext_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash(&self) -> Murmur64 {
|
||||
Murmur64::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for BundleFileType {
|
||||
type Err = color_eyre::Report;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let val = match s {
|
||||
"animation_curves" => BundleFileType::AnimationCurves,
|
||||
"animation" => BundleFileType::Animation,
|
||||
"apb" => BundleFileType::Apb,
|
||||
"baked_lighting" => BundleFileType::BakedLighting,
|
||||
"bik" => BundleFileType::Bik,
|
||||
"blend_set" => BundleFileType::BlendSet,
|
||||
"bones" => BundleFileType::Bones,
|
||||
"chroma" => BundleFileType::Chroma,
|
||||
"common_package" => BundleFileType::CommonPackage,
|
||||
"config" => BundleFileType::Config,
|
||||
"crypto" => BundleFileType::Crypto,
|
||||
"data" => BundleFileType::Data,
|
||||
"entity" => BundleFileType::Entity,
|
||||
"flow" => BundleFileType::Flow,
|
||||
"font" => BundleFileType::Font,
|
||||
"ies" => BundleFileType::Ies,
|
||||
"ini" => BundleFileType::Ini,
|
||||
"input" => BundleFileType::Input,
|
||||
"ivf" => BundleFileType::Ivf,
|
||||
"keys" => BundleFileType::Keys,
|
||||
"level" => BundleFileType::Level,
|
||||
"lua" => BundleFileType::Lua,
|
||||
"material" => BundleFileType::Material,
|
||||
"mod" => BundleFileType::Mod,
|
||||
"mouse_cursor" => BundleFileType::MouseCursor,
|
||||
"nav_data" => BundleFileType::NavData,
|
||||
"network_config" => BundleFileType::NetworkConfig,
|
||||
"oodle_net" => BundleFileType::OddleNet,
|
||||
"package" => BundleFileType::Package,
|
||||
"particles" => BundleFileType::Particles,
|
||||
"physics_properties" => BundleFileType::PhysicsProperties,
|
||||
"render_config" => BundleFileType::RenderConfig,
|
||||
"rt_pipeline" => BundleFileType::RtPipeline,
|
||||
"scene" => BundleFileType::Scene,
|
||||
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
|
||||
"shader_library" => BundleFileType::ShaderLibrary,
|
||||
"shader" => BundleFileType::Shader,
|
||||
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
|
||||
"shading_environment" => BundleFileType::ShadingEnvironment,
|
||||
"slug_album" => BundleFileType::SlugAlbum,
|
||||
"slug" => BundleFileType::Slug,
|
||||
"sound_environment" => BundleFileType::SoundEnvironment,
|
||||
"spu_job" => BundleFileType::SpuJob,
|
||||
"state_machine" => BundleFileType::StateMachine,
|
||||
"static_pvs" => BundleFileType::StaticPVS,
|
||||
"strings" => BundleFileType::Strings,
|
||||
"surface_properties" => BundleFileType::SurfaceProperties,
|
||||
"texture" => BundleFileType::Texture,
|
||||
"timpani_bank" => BundleFileType::TimpaniBank,
|
||||
"timpani_master" => BundleFileType::TimpaniMaster,
|
||||
"tome" => BundleFileType::Tome,
|
||||
"ugg" => BundleFileType::Ugg,
|
||||
"unit" => BundleFileType::Unit,
|
||||
"upb" => BundleFileType::Upb,
|
||||
"vector_field" => BundleFileType::VectorField,
|
||||
"wav" => BundleFileType::Wav,
|
||||
"wwise_bank" => BundleFileType::WwiseBank,
|
||||
"wwise_dep" => BundleFileType::WwiseDep,
|
||||
"wwise_event" => BundleFileType::WwiseEvent,
|
||||
"wwise_metadata" => BundleFileType::WwiseMetadata,
|
||||
"wwise_stream" => BundleFileType::WwiseStream,
|
||||
"xml" => BundleFileType::Xml,
|
||||
s => eyre::bail!("Unknown type string '{}'", s),
|
||||
};
|
||||
|
||||
Ok(val)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for BundleFileType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let value = self.ext_name();
|
||||
value.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for BundleFileType {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
Self::from(Into::<u64>::into(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for BundleFileType {
|
||||
fn from(hash: u64) -> BundleFileType {
|
||||
match hash {
|
||||
0x931e336d7646cc26 => BundleFileType::Animation,
|
||||
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
|
||||
0x3eed05ba83af5090 => BundleFileType::Apb,
|
||||
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
|
||||
0xaa5965f03029fa18 => BundleFileType::Bik,
|
||||
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
|
||||
0x18dead01056b72e9 => BundleFileType::Bones,
|
||||
0xb7893adf7567506a => BundleFileType::Chroma,
|
||||
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
|
||||
0x82645835e6b73232 => BundleFileType::Config,
|
||||
0x69108ded1e3e634b => BundleFileType::Crypto,
|
||||
0x8fd0d44d20650b68 => BundleFileType::Data,
|
||||
0x9831ca893b0d087d => BundleFileType::Entity,
|
||||
0x92d3ee038eeb610d => BundleFileType::Flow,
|
||||
0x9efe0a916aae7880 => BundleFileType::Font,
|
||||
0x8f7d5a2c0f967655 => BundleFileType::Ies,
|
||||
0xd526a27da14f1dc5 => BundleFileType::Ini,
|
||||
0x2bbcabe5074ade9e => BundleFileType::Input,
|
||||
0xfa4a8e091a91201e => BundleFileType::Ivf,
|
||||
0xa62f9297dc969e85 => BundleFileType::Keys,
|
||||
0x2a690fd348fe9ac5 => BundleFileType::Level,
|
||||
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
|
||||
0xeac0b497876adedf => BundleFileType::Material,
|
||||
0x3fcdd69156a46417 => BundleFileType::Mod,
|
||||
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
|
||||
0x169de9566953d264 => BundleFileType::NavData,
|
||||
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
|
||||
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
|
||||
0xad9c6d9ed1e5e77a => BundleFileType::Package,
|
||||
0xa8193123526fad64 => BundleFileType::Particles,
|
||||
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
|
||||
0x27862fe24795319c => BundleFileType::RenderConfig,
|
||||
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
|
||||
0x9d0a795bfe818d19 => BundleFileType::Scene,
|
||||
0xcce8d5b5f5ae333f => BundleFileType::Shader,
|
||||
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
|
||||
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
|
||||
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
|
||||
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
|
||||
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
|
||||
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
|
||||
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
|
||||
0xf97af9983c05b950 => BundleFileType::SpuJob,
|
||||
0xa486d4045106165c => BundleFileType::StateMachine,
|
||||
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
|
||||
0x0d972bab10b40fd3 => BundleFileType::Strings,
|
||||
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
|
||||
0xcd4238c6a0c69e32 => BundleFileType::Texture,
|
||||
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
|
||||
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
|
||||
0x19c792357c99f49b => BundleFileType::Tome,
|
||||
0x712d6e3dd1024c9c => BundleFileType::Ugg,
|
||||
0xe0a48d0be9a7453f => BundleFileType::Unit,
|
||||
0xa99510c6e86dd3c2 => BundleFileType::Upb,
|
||||
0xf7505933166d6755 => BundleFileType::VectorField,
|
||||
0x786f65c00a816b19 => BundleFileType::Wav,
|
||||
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
|
||||
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
|
||||
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
|
||||
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
|
||||
0x504b55235d21440e => BundleFileType::WwiseStream,
|
||||
0x76015845a6003765 => BundleFileType::Xml,
|
||||
|
||||
_ => BundleFileType::Unknown(Murmur64::from(hash)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BundleFileType> for u64 {
|
||||
fn from(t: BundleFileType) -> u64 {
|
||||
match t {
|
||||
BundleFileType::Animation => 0x931e336d7646cc26,
|
||||
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
|
||||
BundleFileType::Apb => 0x3eed05ba83af5090,
|
||||
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
|
||||
BundleFileType::Bik => 0xaa5965f03029fa18,
|
||||
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
|
||||
BundleFileType::Bones => 0x18dead01056b72e9,
|
||||
BundleFileType::Chroma => 0xb7893adf7567506a,
|
||||
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
|
||||
BundleFileType::Config => 0x82645835e6b73232,
|
||||
BundleFileType::Crypto => 0x69108ded1e3e634b,
|
||||
BundleFileType::Data => 0x8fd0d44d20650b68,
|
||||
BundleFileType::Entity => 0x9831ca893b0d087d,
|
||||
BundleFileType::Flow => 0x92d3ee038eeb610d,
|
||||
BundleFileType::Font => 0x9efe0a916aae7880,
|
||||
BundleFileType::Ies => 0x8f7d5a2c0f967655,
|
||||
BundleFileType::Ini => 0xd526a27da14f1dc5,
|
||||
BundleFileType::Input => 0x2bbcabe5074ade9e,
|
||||
BundleFileType::Ivf => 0xfa4a8e091a91201e,
|
||||
BundleFileType::Keys => 0xa62f9297dc969e85,
|
||||
BundleFileType::Level => 0x2a690fd348fe9ac5,
|
||||
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
|
||||
BundleFileType::Material => 0xeac0b497876adedf,
|
||||
BundleFileType::Mod => 0x3fcdd69156a46417,
|
||||
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
|
||||
BundleFileType::NavData => 0x169de9566953d264,
|
||||
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
|
||||
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
|
||||
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
|
||||
BundleFileType::Particles => 0xa8193123526fad64,
|
||||
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
|
||||
BundleFileType::RenderConfig => 0x27862fe24795319c,
|
||||
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
|
||||
BundleFileType::Scene => 0x9d0a795bfe818d19,
|
||||
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
|
||||
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
|
||||
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
|
||||
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
|
||||
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
|
||||
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
|
||||
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
|
||||
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
|
||||
BundleFileType::SpuJob => 0xf97af9983c05b950,
|
||||
BundleFileType::StateMachine => 0xa486d4045106165c,
|
||||
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
|
||||
BundleFileType::Strings => 0x0d972bab10b40fd3,
|
||||
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
|
||||
BundleFileType::Texture => 0xcd4238c6a0c69e32,
|
||||
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
|
||||
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
|
||||
BundleFileType::Tome => 0x19c792357c99f49b,
|
||||
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
|
||||
BundleFileType::Unit => 0xe0a48d0be9a7453f,
|
||||
BundleFileType::Upb => 0xa99510c6e86dd3c2,
|
||||
BundleFileType::VectorField => 0xf7505933166d6755,
|
||||
BundleFileType::Wav => 0x786f65c00a816b19,
|
||||
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
|
||||
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
|
||||
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
|
||||
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
|
||||
BundleFileType::WwiseStream => 0x504b55235d21440e,
|
||||
BundleFileType::Xml => 0x76015845a6003765,
|
||||
|
||||
BundleFileType::Unknown(hash) => hash.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<BundleFileType> for Murmur64 {
|
||||
fn from(t: BundleFileType) -> Murmur64 {
|
||||
let hash: u64 = t.into();
|
||||
Murmur64::from(hash)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for BundleFileType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.ext_name())
|
||||
}
|
||||
}
|
||||
use super::filetype::BundleFileType;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BundleFileHeader {
|
||||
|
@ -515,7 +120,7 @@ pub struct BundleFile {
|
|||
}
|
||||
|
||||
impl BundleFile {
|
||||
pub fn new(name: String, file_type: BundleFileType) -> Self {
|
||||
pub fn new(name: impl Into<IdString64>, file_type: BundleFileType) -> Self {
|
||||
Self {
|
||||
file_type,
|
||||
name: name.into(),
|
||||
|
@ -647,20 +252,15 @@ impl BundleFile {
|
|||
Ok(w.into_inner())
|
||||
}
|
||||
|
||||
#[tracing::instrument(name = "File::from_sjson", skip(sjson))]
|
||||
pub async fn from_sjson<P, S>(
|
||||
name: String,
|
||||
#[tracing::instrument("File::from_sjson", skip(sjson, name), fields(name = %name.display()))]
|
||||
pub async fn from_sjson(
|
||||
name: IdString64,
|
||||
file_type: BundleFileType,
|
||||
sjson: S,
|
||||
root: P,
|
||||
) -> Result<Self>
|
||||
where
|
||||
P: AsRef<Path> + std::fmt::Debug,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
sjson: impl AsRef<str>,
|
||||
root: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<Self> {
|
||||
match file_type {
|
||||
BundleFileType::Lua => lua::compile(name.clone(), sjson)
|
||||
.wrap_err_with(|| format!("Failed to compile Lua file '{}'", name)),
|
||||
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
|
||||
BundleFileType::Unknown(_) => {
|
||||
eyre::bail!("Unknown file type. Cannot compile from SJSON");
|
||||
}
|
||||
|
@ -699,10 +299,7 @@ impl BundleFile {
|
|||
s
|
||||
}
|
||||
|
||||
pub fn matches_name<S>(&self, name: S) -> bool
|
||||
where
|
||||
S: Into<IdString64>,
|
||||
{
|
||||
pub fn matches_name(&self, name: impl Into<IdString64>) -> bool {
|
||||
let name = name.into();
|
||||
if self.name == name {
|
||||
return true;
|
||||
|
|
400
lib/sdk/src/bundle/filetype.rs
Normal file
|
@ -0,0 +1,400 @@
|
|||
use color_eyre::{eyre, Result};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::murmur::Murmur64;
|
||||
|
||||
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
|
||||
pub enum BundleFileType {
|
||||
Animation,
|
||||
AnimationCurves,
|
||||
Apb,
|
||||
BakedLighting,
|
||||
Bik,
|
||||
BlendSet,
|
||||
Bones,
|
||||
Chroma,
|
||||
CommonPackage,
|
||||
Config,
|
||||
Crypto,
|
||||
Data,
|
||||
Entity,
|
||||
Flow,
|
||||
Font,
|
||||
Ies,
|
||||
Ini,
|
||||
Input,
|
||||
Ivf,
|
||||
Keys,
|
||||
Level,
|
||||
Lua,
|
||||
Material,
|
||||
Mod,
|
||||
MouseCursor,
|
||||
NavData,
|
||||
NetworkConfig,
|
||||
OddleNet,
|
||||
Package,
|
||||
Particles,
|
||||
PhysicsProperties,
|
||||
RenderConfig,
|
||||
RtPipeline,
|
||||
Scene,
|
||||
Shader,
|
||||
ShaderLibrary,
|
||||
ShaderLibraryGroup,
|
||||
ShadingEnvionmentMapping,
|
||||
ShadingEnvironment,
|
||||
Slug,
|
||||
SlugAlbum,
|
||||
SoundEnvironment,
|
||||
SpuJob,
|
||||
StateMachine,
|
||||
StaticPVS,
|
||||
Strings,
|
||||
SurfaceProperties,
|
||||
Texture,
|
||||
TimpaniBank,
|
||||
TimpaniMaster,
|
||||
Tome,
|
||||
Ugg,
|
||||
Unit,
|
||||
Upb,
|
||||
VectorField,
|
||||
Wav,
|
||||
WwiseBank,
|
||||
WwiseDep,
|
||||
WwiseEvent,
|
||||
WwiseMetadata,
|
||||
WwiseStream,
|
||||
Xml,
|
||||
|
||||
Unknown(Murmur64),
|
||||
}
|
||||
|
||||
impl BundleFileType {
|
||||
pub fn ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::AnimationCurves => String::from("animation_curves"),
|
||||
BundleFileType::Animation => String::from("animation"),
|
||||
BundleFileType::Apb => String::from("apb"),
|
||||
BundleFileType::BakedLighting => String::from("baked_lighting"),
|
||||
BundleFileType::Bik => String::from("bik"),
|
||||
BundleFileType::BlendSet => String::from("blend_set"),
|
||||
BundleFileType::Bones => String::from("bones"),
|
||||
BundleFileType::Chroma => String::from("chroma"),
|
||||
BundleFileType::CommonPackage => String::from("common_package"),
|
||||
BundleFileType::Config => String::from("config"),
|
||||
BundleFileType::Crypto => String::from("crypto"),
|
||||
BundleFileType::Data => String::from("data"),
|
||||
BundleFileType::Entity => String::from("entity"),
|
||||
BundleFileType::Flow => String::from("flow"),
|
||||
BundleFileType::Font => String::from("font"),
|
||||
BundleFileType::Ies => String::from("ies"),
|
||||
BundleFileType::Ini => String::from("ini"),
|
||||
BundleFileType::Input => String::from("input"),
|
||||
BundleFileType::Ivf => String::from("ivf"),
|
||||
BundleFileType::Keys => String::from("keys"),
|
||||
BundleFileType::Level => String::from("level"),
|
||||
BundleFileType::Lua => String::from("lua"),
|
||||
BundleFileType::Material => String::from("material"),
|
||||
BundleFileType::Mod => String::from("mod"),
|
||||
BundleFileType::MouseCursor => String::from("mouse_cursor"),
|
||||
BundleFileType::NavData => String::from("nav_data"),
|
||||
BundleFileType::NetworkConfig => String::from("network_config"),
|
||||
BundleFileType::OddleNet => String::from("oodle_net"),
|
||||
BundleFileType::Package => String::from("package"),
|
||||
BundleFileType::Particles => String::from("particles"),
|
||||
BundleFileType::PhysicsProperties => String::from("physics_properties"),
|
||||
BundleFileType::RenderConfig => String::from("render_config"),
|
||||
BundleFileType::RtPipeline => String::from("rt_pipeline"),
|
||||
BundleFileType::Scene => String::from("scene"),
|
||||
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
|
||||
BundleFileType::ShaderLibrary => String::from("shader_library"),
|
||||
BundleFileType::Shader => String::from("shader"),
|
||||
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
|
||||
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
|
||||
BundleFileType::SlugAlbum => String::from("slug_album"),
|
||||
BundleFileType::Slug => String::from("slug"),
|
||||
BundleFileType::SoundEnvironment => String::from("sound_environment"),
|
||||
BundleFileType::SpuJob => String::from("spu_job"),
|
||||
BundleFileType::StateMachine => String::from("state_machine"),
|
||||
BundleFileType::StaticPVS => String::from("static_pvs"),
|
||||
BundleFileType::Strings => String::from("strings"),
|
||||
BundleFileType::SurfaceProperties => String::from("surface_properties"),
|
||||
BundleFileType::Texture => String::from("texture"),
|
||||
BundleFileType::TimpaniBank => String::from("timpani_bank"),
|
||||
BundleFileType::TimpaniMaster => String::from("timpani_master"),
|
||||
BundleFileType::Tome => String::from("tome"),
|
||||
BundleFileType::Ugg => String::from("ugg"),
|
||||
BundleFileType::Unit => String::from("unit"),
|
||||
BundleFileType::Upb => String::from("upb"),
|
||||
BundleFileType::VectorField => String::from("vector_field"),
|
||||
BundleFileType::Wav => String::from("wav"),
|
||||
BundleFileType::WwiseBank => String::from("wwise_bank"),
|
||||
BundleFileType::WwiseDep => String::from("wwise_dep"),
|
||||
BundleFileType::WwiseEvent => String::from("wwise_event"),
|
||||
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
|
||||
BundleFileType::WwiseStream => String::from("wwise_stream"),
|
||||
BundleFileType::Xml => String::from("xml"),
|
||||
|
||||
BundleFileType::Unknown(s) => format!("{s:016X}"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn decompiled_ext_name(&self) -> String {
|
||||
match self {
|
||||
BundleFileType::Texture => String::from("dds"),
|
||||
BundleFileType::WwiseBank => String::from("bnk"),
|
||||
BundleFileType::WwiseStream => String::from("ogg"),
|
||||
_ => self.ext_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash(&self) -> Murmur64 {
|
||||
Murmur64::from(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for BundleFileType {
|
||||
type Err = color_eyre::Report;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let val = match s {
|
||||
"animation_curves" => BundleFileType::AnimationCurves,
|
||||
"animation" => BundleFileType::Animation,
|
||||
"apb" => BundleFileType::Apb,
|
||||
"baked_lighting" => BundleFileType::BakedLighting,
|
||||
"bik" => BundleFileType::Bik,
|
||||
"blend_set" => BundleFileType::BlendSet,
|
||||
"bones" => BundleFileType::Bones,
|
||||
"chroma" => BundleFileType::Chroma,
|
||||
"common_package" => BundleFileType::CommonPackage,
|
||||
"config" => BundleFileType::Config,
|
||||
"crypto" => BundleFileType::Crypto,
|
||||
"data" => BundleFileType::Data,
|
||||
"entity" => BundleFileType::Entity,
|
||||
"flow" => BundleFileType::Flow,
|
||||
"font" => BundleFileType::Font,
|
||||
"ies" => BundleFileType::Ies,
|
||||
"ini" => BundleFileType::Ini,
|
||||
"input" => BundleFileType::Input,
|
||||
"ivf" => BundleFileType::Ivf,
|
||||
"keys" => BundleFileType::Keys,
|
||||
"level" => BundleFileType::Level,
|
||||
"lua" => BundleFileType::Lua,
|
||||
"material" => BundleFileType::Material,
|
||||
"mod" => BundleFileType::Mod,
|
||||
"mouse_cursor" => BundleFileType::MouseCursor,
|
||||
"nav_data" => BundleFileType::NavData,
|
||||
"network_config" => BundleFileType::NetworkConfig,
|
||||
"oodle_net" => BundleFileType::OddleNet,
|
||||
"package" => BundleFileType::Package,
|
||||
"particles" => BundleFileType::Particles,
|
||||
"physics_properties" => BundleFileType::PhysicsProperties,
|
||||
"render_config" => BundleFileType::RenderConfig,
|
||||
"rt_pipeline" => BundleFileType::RtPipeline,
|
||||
"scene" => BundleFileType::Scene,
|
||||
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
|
||||
"shader_library" => BundleFileType::ShaderLibrary,
|
||||
"shader" => BundleFileType::Shader,
|
||||
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
|
||||
"shading_environment" => BundleFileType::ShadingEnvironment,
|
||||
"slug_album" => BundleFileType::SlugAlbum,
|
||||
"slug" => BundleFileType::Slug,
|
||||
"sound_environment" => BundleFileType::SoundEnvironment,
|
||||
"spu_job" => BundleFileType::SpuJob,
|
||||
"state_machine" => BundleFileType::StateMachine,
|
||||
"static_pvs" => BundleFileType::StaticPVS,
|
||||
"strings" => BundleFileType::Strings,
|
||||
"surface_properties" => BundleFileType::SurfaceProperties,
|
||||
"texture" => BundleFileType::Texture,
|
||||
"timpani_bank" => BundleFileType::TimpaniBank,
|
||||
"timpani_master" => BundleFileType::TimpaniMaster,
|
||||
"tome" => BundleFileType::Tome,
|
||||
"ugg" => BundleFileType::Ugg,
|
||||
"unit" => BundleFileType::Unit,
|
||||
"upb" => BundleFileType::Upb,
|
||||
"vector_field" => BundleFileType::VectorField,
|
||||
"wav" => BundleFileType::Wav,
|
||||
"wwise_bank" => BundleFileType::WwiseBank,
|
||||
"wwise_dep" => BundleFileType::WwiseDep,
|
||||
"wwise_event" => BundleFileType::WwiseEvent,
|
||||
"wwise_metadata" => BundleFileType::WwiseMetadata,
|
||||
"wwise_stream" => BundleFileType::WwiseStream,
|
||||
"xml" => BundleFileType::Xml,
|
||||
s => eyre::bail!("Unknown type string '{}'", s),
|
||||
};
|
||||
|
||||
Ok(val)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for BundleFileType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let value = self.ext_name();
|
||||
value.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Murmur64> for BundleFileType {
|
||||
fn from(value: Murmur64) -> Self {
|
||||
Self::from(Into::<u64>::into(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for BundleFileType {
|
||||
fn from(hash: u64) -> BundleFileType {
|
||||
match hash {
|
||||
0x931e336d7646cc26 => BundleFileType::Animation,
|
||||
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
|
||||
0x3eed05ba83af5090 => BundleFileType::Apb,
|
||||
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
|
||||
0xaa5965f03029fa18 => BundleFileType::Bik,
|
||||
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
|
||||
0x18dead01056b72e9 => BundleFileType::Bones,
|
||||
0xb7893adf7567506a => BundleFileType::Chroma,
|
||||
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
|
||||
0x82645835e6b73232 => BundleFileType::Config,
|
||||
0x69108ded1e3e634b => BundleFileType::Crypto,
|
||||
0x8fd0d44d20650b68 => BundleFileType::Data,
|
||||
0x9831ca893b0d087d => BundleFileType::Entity,
|
||||
0x92d3ee038eeb610d => BundleFileType::Flow,
|
||||
0x9efe0a916aae7880 => BundleFileType::Font,
|
||||
0x8f7d5a2c0f967655 => BundleFileType::Ies,
|
||||
0xd526a27da14f1dc5 => BundleFileType::Ini,
|
||||
0x2bbcabe5074ade9e => BundleFileType::Input,
|
||||
0xfa4a8e091a91201e => BundleFileType::Ivf,
|
||||
0xa62f9297dc969e85 => BundleFileType::Keys,
|
||||
0x2a690fd348fe9ac5 => BundleFileType::Level,
|
||||
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
|
||||
0xeac0b497876adedf => BundleFileType::Material,
|
||||
0x3fcdd69156a46417 => BundleFileType::Mod,
|
||||
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
|
||||
0x169de9566953d264 => BundleFileType::NavData,
|
||||
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
|
||||
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
|
||||
0xad9c6d9ed1e5e77a => BundleFileType::Package,
|
||||
0xa8193123526fad64 => BundleFileType::Particles,
|
||||
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
|
||||
0x27862fe24795319c => BundleFileType::RenderConfig,
|
||||
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
|
||||
0x9d0a795bfe818d19 => BundleFileType::Scene,
|
||||
0xcce8d5b5f5ae333f => BundleFileType::Shader,
|
||||
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
|
||||
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
|
||||
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
|
||||
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
|
||||
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
|
||||
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
|
||||
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
|
||||
0xf97af9983c05b950 => BundleFileType::SpuJob,
|
||||
0xa486d4045106165c => BundleFileType::StateMachine,
|
||||
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
|
||||
0x0d972bab10b40fd3 => BundleFileType::Strings,
|
||||
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
|
||||
0xcd4238c6a0c69e32 => BundleFileType::Texture,
|
||||
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
|
||||
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
|
||||
0x19c792357c99f49b => BundleFileType::Tome,
|
||||
0x712d6e3dd1024c9c => BundleFileType::Ugg,
|
||||
0xe0a48d0be9a7453f => BundleFileType::Unit,
|
||||
0xa99510c6e86dd3c2 => BundleFileType::Upb,
|
||||
0xf7505933166d6755 => BundleFileType::VectorField,
|
||||
0x786f65c00a816b19 => BundleFileType::Wav,
|
||||
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
|
||||
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
|
||||
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
|
||||
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
|
||||
0x504b55235d21440e => BundleFileType::WwiseStream,
|
||||
0x76015845a6003765 => BundleFileType::Xml,
|
||||
|
||||
_ => BundleFileType::Unknown(Murmur64::from(hash)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BundleFileType> for u64 {
|
||||
fn from(t: BundleFileType) -> u64 {
|
||||
match t {
|
||||
BundleFileType::Animation => 0x931e336d7646cc26,
|
||||
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
|
||||
BundleFileType::Apb => 0x3eed05ba83af5090,
|
||||
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
|
||||
BundleFileType::Bik => 0xaa5965f03029fa18,
|
||||
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
|
||||
BundleFileType::Bones => 0x18dead01056b72e9,
|
||||
BundleFileType::Chroma => 0xb7893adf7567506a,
|
||||
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
|
||||
BundleFileType::Config => 0x82645835e6b73232,
|
||||
BundleFileType::Crypto => 0x69108ded1e3e634b,
|
||||
BundleFileType::Data => 0x8fd0d44d20650b68,
|
||||
BundleFileType::Entity => 0x9831ca893b0d087d,
|
||||
BundleFileType::Flow => 0x92d3ee038eeb610d,
|
||||
BundleFileType::Font => 0x9efe0a916aae7880,
|
||||
BundleFileType::Ies => 0x8f7d5a2c0f967655,
|
||||
BundleFileType::Ini => 0xd526a27da14f1dc5,
|
||||
BundleFileType::Input => 0x2bbcabe5074ade9e,
|
||||
BundleFileType::Ivf => 0xfa4a8e091a91201e,
|
||||
BundleFileType::Keys => 0xa62f9297dc969e85,
|
||||
BundleFileType::Level => 0x2a690fd348fe9ac5,
|
||||
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
|
||||
BundleFileType::Material => 0xeac0b497876adedf,
|
||||
BundleFileType::Mod => 0x3fcdd69156a46417,
|
||||
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
|
||||
BundleFileType::NavData => 0x169de9566953d264,
|
||||
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
|
||||
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
|
||||
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
|
||||
BundleFileType::Particles => 0xa8193123526fad64,
|
||||
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
|
||||
BundleFileType::RenderConfig => 0x27862fe24795319c,
|
||||
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
|
||||
BundleFileType::Scene => 0x9d0a795bfe818d19,
|
||||
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
|
||||
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
|
||||
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
|
||||
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
|
||||
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
|
||||
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
|
||||
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
|
||||
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
|
||||
BundleFileType::SpuJob => 0xf97af9983c05b950,
|
||||
BundleFileType::StateMachine => 0xa486d4045106165c,
|
||||
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
|
||||
BundleFileType::Strings => 0x0d972bab10b40fd3,
|
||||
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
|
||||
BundleFileType::Texture => 0xcd4238c6a0c69e32,
|
||||
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
|
||||
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
|
||||
BundleFileType::Tome => 0x19c792357c99f49b,
|
||||
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
|
||||
BundleFileType::Unit => 0xe0a48d0be9a7453f,
|
||||
BundleFileType::Upb => 0xa99510c6e86dd3c2,
|
||||
BundleFileType::VectorField => 0xf7505933166d6755,
|
||||
BundleFileType::Wav => 0x786f65c00a816b19,
|
||||
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
|
||||
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
|
||||
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
|
||||
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
|
||||
BundleFileType::WwiseStream => 0x504b55235d21440e,
|
||||
BundleFileType::Xml => 0x76015845a6003765,
|
||||
|
||||
BundleFileType::Unknown(hash) => hash.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<BundleFileType> for Murmur64 {
|
||||
fn from(t: BundleFileType) -> Murmur64 {
|
||||
let hash: u64 = t.into();
|
||||
Murmur64::from(hash)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for BundleFileType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.ext_name())
|
||||
}
|
||||
}
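With `BundleFileType` moved into its own `filetype` module, the conversions above give a round-trip between extension names, the enum, and the 64-bit hashes stored in bundles. A hedged sketch of a unit test that could sit at the bottom of this module; it only uses items and hash values shown above, and assumes it is placed inside `filetype.rs` so that `super::*` resolves:

```rust
#[cfg(test)]
mod tests {
    use std::str::FromStr;

    use super::*;

    #[test]
    fn lua_round_trips_between_name_and_hash() {
        // Extension name -> enum, via the FromStr impl.
        let t = BundleFileType::from_str("lua").expect("known extension");

        // Enum -> engine hash, via `From<BundleFileType> for u64`.
        let hash: u64 = t.into();
        assert_eq!(hash, 0xa14e8dfa2cd117e2);

        // Hash -> enum -> extension name again.
        let back = BundleFileType::from(hash);
        assert_eq!(back, BundleFileType::Lua);
        assert_eq!(back.to_string(), "lua");
    }
}
```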
|
|
@ -12,8 +12,10 @@ use crate::murmur::{HashGroup, IdString64, Murmur64};
|
|||
|
||||
pub(crate) mod database;
|
||||
pub(crate) mod file;
|
||||
pub(crate) mod filetype;
|
||||
|
||||
pub use file::{BundleFile, BundleFileType, BundleFileVariant};
|
||||
pub use file::{BundleFile, BundleFileVariant};
|
||||
pub use filetype::BundleFileType;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
|
||||
enum BundleFormat {
|
||||
|
|
|
@ -15,6 +15,7 @@ use tokio::fs;
|
|||
use crate::binary::sync::ReadExt;
|
||||
use crate::binary::sync::WriteExt;
|
||||
use crate::bundle::file::{BundleFileVariant, UserFile};
|
||||
use crate::murmur::IdString64;
|
||||
use crate::{BundleFile, BundleFileType};
|
||||
|
||||
const BITSQUID_LUAJIT_HEADER: u32 = 0x8253461B;
|
||||
|
@ -117,17 +118,13 @@ where
|
|||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub fn compile<S, C>(name: S, code: C) -> Result<BundleFile>
|
||||
where
|
||||
S: Into<String>,
|
||||
C: AsRef<str>,
|
||||
{
|
||||
pub fn compile(name: impl Into<IdString64>, code: impl AsRef<str>) -> Result<BundleFile> {
|
||||
let name = name.into();
|
||||
let code = code.as_ref();
|
||||
|
||||
tracing::trace!(
|
||||
"Compiling '{}', {} bytes of code",
|
||||
name,
|
||||
name.display(),
|
||||
code.as_bytes().len()
|
||||
);
|
||||
|
||||
|
@ -135,8 +132,8 @@ where
|
|||
let state = lua::luaL_newstate();
|
||||
lua::luaL_openlibs(state);
|
||||
|
||||
let name = CString::new(format!("@{name}").into_bytes())
|
||||
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name))?;
|
||||
let name = CString::new(format!("@{}", name.display()).into_bytes())
|
||||
.wrap_err_with(|| format!("Cannot convert name into CString: {}", name.display()))?;
|
||||
match lua::luaL_loadbuffer(
|
||||
state,
|
||||
code.as_ptr() as _,
|
||||
|
|
|
@ -7,13 +7,22 @@ use std::str::FromStr;
|
|||
use async_recursion::async_recursion;
|
||||
use color_eyre::eyre::{self, Context};
|
||||
use color_eyre::Result;
|
||||
use path_slash::PathBufExt;
|
||||
use tokio::fs;
|
||||
|
||||
use crate::binary::sync::{ReadExt, WriteExt};
|
||||
use crate::bundle::file::{BundleFileType, UserFile};
|
||||
use crate::murmur::{HashGroup, Murmur64};
|
||||
use crate::bundle::file::UserFile;
|
||||
use crate::bundle::filetype::BundleFileType;
|
||||
use crate::murmur::{HashGroup, IdString64, Murmur64};
|
||||
|
||||
/// Resolves a relative path that might contain wildcards into a list of
|
||||
/// paths that exist on disk and match that wildcard.
|
||||
/// This is similar to globbing in Unix shells, but with far fewer features.
///
/// The only wildcard character allowed is `*`, and only at the end of the string,
|
||||
/// where it matches all files recursively in that directory.
|
||||
///
|
||||
/// `t` is an optional extension name, that may be used to force a wildcard
|
||||
/// path to only match that file type `t`.
|
||||
#[tracing::instrument]
|
||||
#[async_recursion]
|
||||
async fn resolve_wildcard<P1, P2>(
|
||||
|
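The doc comment above pins down the only supported pattern: a single `*` at the very end, matching everything under that prefix recursively. A standalone sketch of that rule, for illustration only; this helper is not part of `resolve_wildcard`:

// Illustrative only: checks whether `path` falls under a trailing-`*` pattern.
fn matches_trailing_wildcard(pattern: &str, path: &str) -> bool {
    match pattern.strip_suffix('*') {
        // "content/fonts/*" matches anything below "content/fonts/".
        Some(prefix) => path.starts_with(prefix),
        // Without a wildcard, the pattern must match exactly.
        None => pattern == path,
    }
}

#[test]
fn trailing_wildcard_examples() {
    assert!(matches_trailing_wildcard("content/fonts/*", "content/fonts/arial.font"));
    assert!(matches_trailing_wildcard("content/fonts/*", "content/fonts/sub/dir/foo.font"));
    assert!(!matches_trailing_wildcard("content/fonts/*", "content/textures/foo.texture"));
}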
@ -90,12 +99,12 @@
    Ok(paths)
}

type PackageType = HashMap<BundleFileType, HashSet<PathBuf>>;
type PackageType = HashMap<BundleFileType, HashSet<String>>;
type PackageDefinition = HashMap<String, HashSet<String>>;

#[derive(Default)]
pub struct Package {
    _name: String,
    _name: IdString64,
    _root: PathBuf,
    inner: PackageType,
    flags: u8,
@ -116,9 +125,9 @@ impl DerefMut for Package {
}

impl Package {
    pub fn new(name: String, root: PathBuf) -> Self {
    pub fn new(name: impl Into<IdString64>, root: PathBuf) -> Self {
        Self {
            _name: name,
            _name: name.into(),
            _root: root,
            inner: Default::default(),
            flags: 1,
@ -129,17 +138,22 @@ impl Package {
        self.values().fold(0, |total, files| total + files.len())
    }

    pub fn add_file<P: Into<PathBuf>>(&mut self, file_type: BundleFileType, name: P) {
    pub fn add_file(&mut self, file_type: BundleFileType, name: impl Into<String>) {
        self.inner.entry(file_type).or_default().insert(name.into());
    }

    #[tracing::instrument("Package::from_sjson", skip(sjson), fields(sjson_len = sjson.as_ref().len()))]
    pub async fn from_sjson<P, S>(sjson: S, name: String, root: P) -> Result<Self>
    pub async fn from_sjson<P, S>(
        sjson: S,
        name: impl Into<IdString64> + std::fmt::Debug,
        root: P,
    ) -> Result<Self>
    where
        P: AsRef<Path> + std::fmt::Debug,
        S: AsRef<str>,
    {
        let root = root.as_ref();
        let name = name.into();
        let definition: PackageDefinition = serde_sjson::from_str(sjson.as_ref())?;
        let mut inner: PackageType = Default::default();

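Taken together with `Package::new` above, the looser bounds mean callers no longer juggle String and PathBuf conversions themselves. A small sketch, assumed to live in the same module as `Package`; the package name and file path are invented:

use std::path::PathBuf;

use crate::bundle::filetype::BundleFileType;

// Hypothetical builder code for illustration only.
fn example_package() -> Package {
    // `name` accepts anything `Into<IdString64>`, e.g. an owned String.
    let mut pkg = Package::new(String::from("packages/my_mod"), PathBuf::from("mods/my_mod"));

    // Files are now tracked as plain `String` names rather than `PathBuf`s,
    // so slash handling stays in one place when the package is hashed.
    pkg.add_file(BundleFileType::Xml, "content/settings/my_mod");
    pkg
}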
@ -173,7 +187,11 @@ impl Package {
                    continue;
                };

                inner.entry(t).or_default().insert(path);
                tracing::debug!("Adding file {}", path.display());
                inner
                    .entry(t)
                    .or_default()
                    .insert(path.display().to_string());
            }
        }
    }
@ -192,11 +210,9 @@ impl Package {
    pub fn to_sjson(&self) -> Result<String> {
        let mut map: PackageDefinition = Default::default();

        for (t, paths) in self.iter() {
            for path in paths.iter() {
                map.entry(t.ext_name())
                    .or_default()
                    .insert(path.display().to_string());
        for (t, names) in self.iter() {
            for name in names.iter() {
                map.entry(t.ext_name()).or_default().insert(name.clone());
            }
        }

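`PackageDefinition` maps an extension name to a set of resource names, which is the shape of an on-disk package file. A hedged sketch of the round trip through `from_sjson` and `to_sjson`, assumed to run in the same module as `Package` under a tokio runtime; the SJSON body and names are illustrative, and `from_sjson` only keeps entries whose files actually resolve under `root`:

use color_eyre::Result;

// Illustrative definition: one resource type, one resource name.
const DEFINITION: &str = r#"
lua = [
    "scripts/mods/my_mod/init"
]
"#;

async fn round_trip() -> Result<()> {
    // `name` only needs `Into<IdString64>`; `root` is where wildcards resolve.
    let pkg = Package::from_sjson(DEFINITION, String::from("packages/my_mod"), "mods/my_mod").await?;

    // `to_sjson` rebuilds the same `type -> [names]` map from `ext_name()`.
    let text = pkg.to_sjson()?;
    println!("{text}");
    Ok(())
}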
@ -222,11 +238,11 @@ impl Package {
        for _ in 0..file_count {
            let t = BundleFileType::from(r.read_u64()?);
            let hash = Murmur64::from(r.read_u64()?);
            let path = ctx.lookup_hash(hash, HashGroup::Filename);
            let name = ctx.lookup_hash(hash, HashGroup::Filename);
            inner
                .entry(t)
                .or_default()
                .insert(PathBuf::from(path.display().to_string()));
                .insert(name.display().to_string());
        }

        let flags = r.read_u8()?;
@ -239,7 +255,7 @@

        let pkg = Self {
            inner,
            _name: name,
            _name: name.into(),
            _root: PathBuf::new(),
            flags,
        };
@ -255,12 +271,10 @@
        w.write_u32(0x2b)?;
        w.write_u32(self.values().flatten().count() as u32)?;

        for (t, paths) in self.iter() {
            for path in paths.iter() {
        for (t, names) in self.iter() {
            for name in names.iter() {
                w.write_u64(t.hash().into())?;

                let hash = Murmur64::hash(path.to_slash_lossy().as_bytes());
                w.write_u64(hash.into())?;
                w.write_u64(Murmur64::hash(name.as_bytes()).into())?;
            }
        }

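The writing side above now hashes the stored name directly, so whatever string was inserted is exactly what gets murmur-hashed into the bundle; the old code first went through a PathBuf and slash normalization. A short sketch of the invariant this relies on, assumed to sit in this crate:

use crate::murmur::Murmur64;

#[test]
fn name_hash_depends_on_slashes() {
    // The bundle format hashes the exact bytes of the stored name, so names
    // must be kept with forward slashes (which `IdString64::from_path` and
    // the package parsing above both ensure).
    let forward = Murmur64::hash("content/textures/icon".as_bytes());
    let backward = Murmur64::hash(r"content\textures\icon".as_bytes());
    assert_ne!(forward, backward);
}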
@ -280,17 +294,11 @@
    Ok(vec![UserFile::new(s.into_bytes())])
}

// #[tracing::instrument(skip_all)]
// pub fn compile(_ctx: &crate::Context, data: String) -> Result<Vec<u8>> {
//     let pkg = Package::from_sjson(data)?;
//     pkg.to_binary()
// }

#[cfg(test)]
mod test {
    use std::path::PathBuf;

    use crate::BundleFileType;
    use crate::bundle::filetype::BundleFileType;

    use super::resolve_wildcard;
    use super::Package;
@ -147,14 +147,14 @@ impl Dictionary {
        Ok(())
    }

    pub fn add(&mut self, value: String, group: HashGroup) {
        let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64));
        let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED));
    pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
        let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
        let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));

        let entry = Entry {
            long,
            short,
            value,
            value: String::from_utf8_lossy(value.as_ref()).to_string(),
            group,
        };
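`add` now borrows raw bytes, so callers can pass &str, String, or byte slices without an up-front allocation; the stored display value falls back to a lossy UTF-8 conversion. A hedged sketch, assumed to live in the same module as `Dictionary` with `HashGroup` in scope; the values are invented:

// Hypothetical usage for illustration only.
fn fill(dict: &mut Dictionary) {
    // &str and String both satisfy `AsRef<[u8]>` now.
    dict.add("scripts/mods/my_mod/init", HashGroup::Filename);
    dict.add(String::from("content/textures/my_mod/icon"), HashGroup::Filename);

    // Raw bytes work too; non-UTF-8 input is kept lossily for display purposes.
    dict.add(&[0xFF, 0xFE, 0x2F][..], HashGroup::Filename);
}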
162
lib/sdk/src/murmur/idstring32.rs
Normal file
@ -0,0 +1,162 @@
use std::fmt;

use serde::{Deserializer, Serializer};

use super::Murmur32;

// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when wanting to write back the read bundle, as the hashes need to
// stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringyfied hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString32 {
    Hash(Murmur32),
    String(String),
}

impl IdString32 {
    pub fn to_murmur32(&self) -> Murmur32 {
        match self {
            Self::Hash(hash) => *hash,
            Self::String(s) => Murmur32::hash(s.as_bytes()),
        }
    }

    pub fn display(&self) -> IdString32Display {
        let s = match self {
            IdString32::Hash(hash) => hash.to_string(),
            IdString32::String(s) => s.clone(),
        };

        IdString32Display(s)
    }

    pub fn is_string(&self) -> bool {
        match self {
            IdString32::Hash(_) => false,
            IdString32::String(_) => true,
        }
    }

    pub fn is_hash(&self) -> bool {
        match self {
            IdString32::Hash(_) => true,
            IdString32::String(_) => false,
        }
    }
}

impl From<String> for IdString32 {
    fn from(value: String) -> Self {
        Self::String(value)
    }
}

impl From<u32> for IdString32 {
    fn from(value: u32) -> Self {
        Self::Hash(value.into())
    }
}

impl From<IdString32> for u32 {
    fn from(value: IdString32) -> Self {
        value.to_murmur32().into()
    }
}

impl From<Murmur32> for IdString32 {
    fn from(value: Murmur32) -> Self {
        Self::Hash(value)
    }
}

impl From<IdString32> for Murmur32 {
    fn from(value: IdString32) -> Self {
        value.to_murmur32()
    }
}

impl PartialEq for IdString32 {
    fn eq(&self, other: &Self) -> bool {
        self.to_murmur32() == other.to_murmur32()
    }
}

impl std::hash::Hash for IdString32 {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        state.write_u32(self.to_murmur32().into());
    }
}

impl serde::Serialize for IdString32 {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_u32(self.to_murmur32().into())
    }
}

struct IdString32Visitor;

impl<'de> serde::de::Visitor<'de> for IdString32Visitor {
    type Value = IdString32;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an u32 or a string")
    }

    fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString32::Hash(value.into()))
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString32::String(v.to_string()))
    }

    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString32::String(v))
    }
}

impl<'de> serde::Deserialize<'de> for IdString32 {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_u32(IdString32Visitor)
    }
}

pub struct IdString32Display(String);

impl std::fmt::Display for IdString32Display {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl std::fmt::UpperHex for IdString32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::UpperHex::fmt(&self.to_murmur32(), f)
    }
}

impl std::fmt::LowerHex for IdString32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::LowerHex::fmt(&self.to_murmur32(), f)
    }
}
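The point of the enum is that a parsed string and a bare hash compare (and hash) identically, so either form can live in the same maps and bundles. A small illustration, assuming it runs inside this crate with the murmur re-exports shown later in the diff; the value is invented:

use crate::murmur::{IdString32, Murmur32};

#[test]
fn string_and_hash_agree() {
    let by_name = IdString32::from(String::from("texture_atlas"));
    let by_hash = IdString32::from(Murmur32::hash("texture_atlas".as_bytes()));

    // `PartialEq` and `Hash` both go through `to_murmur32()`, so the two
    // representations are interchangeable as map keys.
    assert_eq!(by_name, by_hash);
    assert!(by_name.is_string());
    assert!(by_hash.is_hash());
}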
175
lib/sdk/src/murmur/idstring64.rs
Normal file
@ -0,0 +1,175 @@
use std::{fmt, path::Path};

use path_slash::PathExt as _;
use serde::{Deserializer, Serializer};

use super::Murmur64;

// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when wanting to write back the read bundle, as the hashes need to
// stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringyfied hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString64 {
    Hash(Murmur64),
    String(String),
}

impl IdString64 {
    pub fn to_murmur64(&self) -> Murmur64 {
        match self {
            Self::Hash(hash) => *hash,
            Self::String(s) => Murmur64::hash(s.as_bytes()),
        }
    }

    pub fn display(&self) -> IdString64Display {
        let s = match self {
            IdString64::Hash(hash) => hash.to_string(),
            IdString64::String(s) => s.clone(),
        };

        IdString64Display(s)
    }

    pub fn is_string(&self) -> bool {
        match self {
            IdString64::Hash(_) => false,
            IdString64::String(_) => true,
        }
    }

    pub fn is_hash(&self) -> bool {
        match self {
            IdString64::Hash(_) => true,
            IdString64::String(_) => false,
        }
    }

    // Would love to have this as a proper `impl From`, but
    // rustc will complain that it overlaps with the `impl From<Into<String>>`.
    pub fn from_path(p: impl AsRef<Path>) -> Self {
        Self::String(p.as_ref().to_slash_lossy().to_string())
    }
}

impl From<String> for IdString64 {
    fn from(value: String) -> Self {
        Self::String(value)
    }
}

impl From<u64> for IdString64 {
    fn from(value: u64) -> Self {
        Self::Hash(value.into())
    }
}

impl From<Murmur64> for IdString64 {
    fn from(value: Murmur64) -> Self {
        Self::Hash(value)
    }
}

impl From<IdString64> for Murmur64 {
    fn from(value: IdString64) -> Self {
        value.to_murmur64()
    }
}

impl From<IdString64> for u64 {
    fn from(value: IdString64) -> Self {
        value.to_murmur64().into()
    }
}

impl Default for IdString64 {
    fn default() -> Self {
        Self::Hash(0.into())
    }
}

impl PartialEq for IdString64 {
    fn eq(&self, other: &Self) -> bool {
        self.to_murmur64() == other.to_murmur64()
    }
}

impl std::hash::Hash for IdString64 {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        state.write_u64(self.to_murmur64().into());
    }
}

impl serde::Serialize for IdString64 {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_u64(self.to_murmur64().into())
    }
}

struct IdString64Visitor;

impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
    type Value = IdString64;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an u64 or a string")
    }

    fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString64::Hash(value.into()))
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString64::String(v.to_string()))
    }

    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString64::String(v))
    }
}

impl<'de> serde::Deserialize<'de> for IdString64 {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_u64(IdString64Visitor)
    }
}

pub struct IdString64Display(String);

impl std::fmt::Display for IdString64Display {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl std::fmt::UpperHex for IdString64 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
    }
}

impl std::fmt::LowerHex for IdString64 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
    }
}
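Compared to the removed blanket `From<impl Into<String>>`, the 64-bit variant keeps an explicit `from_path` constructor so names derived from paths always carry forward slashes (on Windows, backslashes are converted). A sketch of the intended behavior, with an invented path:

use std::path::Path;

use crate::murmur::IdString64;

#[test]
fn from_path_keeps_forward_slashes() {
    let id = IdString64::from_path(Path::new("scripts/mods/my_mod/init"));
    assert!(id.is_string());
    // Equality is hash-based, so the path form and the plain string agree.
    assert_eq!(id, IdString64::from(String::from("scripts/mods/my_mod/init")));
}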
@ -8,6 +8,8 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer};
mod dictionary;
// Currently unused
// mod murmurhash32;
mod idstring32;
mod idstring64;
mod murmurhash64;
mod types;
mod util;
@ -15,6 +17,8 @@ mod util;
pub const SEED: u32 = 0;

pub use dictionary::{Dictionary, Entry, HashGroup};
pub use idstring32::*;
pub use idstring64::*;
pub use murmurhash64::hash;
pub use murmurhash64::hash32;
pub use murmurhash64::hash_inverse as inverse;
@ -119,4 +119,9 @@ fn test_hash() {
}

#[test]
fn test_inverse() {}
fn test_inverse() {
    let h = hash("lua".as_bytes(), crate::murmur::SEED as u64);
    let inv = hash_inverse(h, crate::murmur::SEED as u64);
    assert_eq!(h, hash(&inv.to_le_bytes(), crate::murmur::SEED as u64));
    assert_ne!(h, hash(&inv.to_be_bytes(), crate::murmur::SEED as u64));
}
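The fleshed-out test documents what `hash_inverse` promises: a 64-bit preimage whose little-endian bytes feed back through `hash` to the original value, while the big-endian bytes do not. A hedged restatement of that property, assumed to sit in the same module as `hash` and `hash_inverse`:

// Illustrative helper mirroring the test above: checks that `inv` really is
// a little-endian preimage of `h` under the same seed.
fn is_preimage(h: u64, inv: u64, seed: u64) -> bool {
    hash(&inv.to_le_bytes(), seed) == h
}

#[test]
fn preimage_of_arbitrary_value() {
    let seed = crate::murmur::SEED as u64;
    let h = hash(b"bundle", seed);
    assert!(is_preimage(h, hash_inverse(h, seed), seed));
}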
@ -150,6 +150,12 @@ impl fmt::UpperHex for Murmur32 {
    }
}

impl fmt::LowerHex for Murmur32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

impl fmt::Display for Murmur32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:08X}", self)
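With both hex impls in place, Murmur32 can be rendered through `Display` (eight zero-padded uppercase digits) or explicitly as lowercase hex. A small formatting sketch with an invented value, assumed to run inside this crate:

use crate::murmur::Murmur32;

#[test]
fn formats_as_hex() {
    let hash = Murmur32::from(0x00C0FFEEu32);
    // `Display` pads to eight uppercase hex digits via "{:08X}".
    assert_eq!(hash.to_string(), "00C0FFEE");
    // `LowerHex` delegates to the inner u32, so standard width flags apply.
    assert_eq!(format!("{hash:08x}"), "00c0ffee");
}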
@ -218,148 +224,3 @@ impl<'de> Deserialize<'de> for Murmur32 {
        deserializer.deserialize_any(Self(0))
    }
}

// This type encodes the fact that when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when wanting to write back the read bundle, as the hashes need to
// stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringyfied hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString64 {
    Hash(Murmur64),
    String(String),
}

impl IdString64 {
    pub fn to_murmur64(&self) -> Murmur64 {
        match self {
            Self::Hash(hash) => *hash,
            Self::String(s) => Murmur64::hash(s.as_bytes()),
        }
    }

    pub fn display(&self) -> IdString64Display {
        let s = match self {
            IdString64::Hash(hash) => hash.to_string(),
            IdString64::String(s) => s.clone(),
        };

        IdString64Display(s)
    }

    pub fn is_string(&self) -> bool {
        match self {
            IdString64::Hash(_) => false,
            IdString64::String(_) => true,
        }
    }

    pub fn is_hash(&self) -> bool {
        match self {
            IdString64::Hash(_) => true,
            IdString64::String(_) => false,
        }
    }
}

impl<S: Into<String>> From<S> for IdString64 {
    fn from(value: S) -> Self {
        Self::String(value.into())
    }
}

impl From<Murmur64> for IdString64 {
    fn from(value: Murmur64) -> Self {
        Self::Hash(value)
    }
}

impl From<IdString64> for Murmur64 {
    fn from(value: IdString64) -> Self {
        value.to_murmur64()
    }
}

impl PartialEq for IdString64 {
    fn eq(&self, other: &Self) -> bool {
        self.to_murmur64() == other.to_murmur64()
    }
}

impl std::hash::Hash for IdString64 {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        state.write_u64(self.to_murmur64().into());
    }
}

impl serde::Serialize for IdString64 {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_u64(self.to_murmur64().into())
    }
}

struct IdString64Visitor;

impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
    type Value = IdString64;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an u64 or a string")
    }

    fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString64::Hash(value.into()))
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString64::String(v.to_string()))
    }

    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        Ok(IdString64::String(v))
    }
}

impl<'de> serde::Deserialize<'de> for IdString64 {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_u64(IdString64Visitor)
    }
}

pub struct IdString64Display(String);

impl std::fmt::Display for IdString64Display {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl std::fmt::UpperHex for IdString64 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
    }
}

impl std::fmt::LowerHex for IdString64 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
    }
}