Compare commits

8 commits: master...feat/textu

Author | SHA1 | Date
--- | --- | ---
 | 04b6a43f9a |
 | cbb3709c89 |
 | 94af8862e8 |
 | 9f849ab3ec |
 | 63fb0a1c08 |
 | 58071958d2 |
 | 67f313107e |
 | db27dd9f39 |

21 changed files with 1689 additions and 93 deletions

Cargo.lock (generated, 120 changed lines)
|
@ -249,9 +249,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.72.0"
|
||||
version = "0.71.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4f72209734318d0b619a5e0f5129918b848c416e122a3c4ce054e03cb87b726f"
|
||||
checksum = "5f58bf3d7db68cfbac37cfc485a8d711e87e064c3d0fe0435b92f7a407f9d6b3"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"cexpr",
|
||||
|
@ -426,9 +426,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.40"
|
||||
version = "4.5.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
|
||||
checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
|
@ -436,9 +436,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.40"
|
||||
version = "4.5.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
|
||||
checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
@ -450,9 +450,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.40"
|
||||
version = "4.5.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
|
||||
checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
|
@ -581,13 +581,13 @@ checksum = "9226dbc05df4fb986f48d730b001532580883c4c06c5d1c213f4b34c1c157178"
|
|||
|
||||
[[package]]
|
||||
name = "confy"
|
||||
version = "1.0.0"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f29222b549d4e3ded127989d523da9e928918d0d0d7f7c1690b439d0d538bae9"
|
||||
checksum = "45b1f4c00870f07dc34adcac82bb6a72cc5aabca8536ba1797e01df51d2ce9a0"
|
||||
dependencies = [
|
||||
"directories",
|
||||
"serde",
|
||||
"thiserror 2.0.12",
|
||||
"thiserror 1.0.63",
|
||||
"toml 0.8.19",
|
||||
]
|
||||
|
||||
|
@ -766,9 +766,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "directories"
|
||||
version = "6.0.0"
|
||||
version = "5.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
|
||||
checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
|
||||
dependencies = [
|
||||
"dirs-sys",
|
||||
]
|
||||
|
@ -785,14 +785,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "dirs-sys"
|
||||
version = "0.5.0"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
|
||||
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"option-ext",
|
||||
"redox_users 0.5.0",
|
||||
"windows-sys 0.59.0",
|
||||
"redox_users",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -802,7 +802,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"redox_users 0.4.6",
|
||||
"redox_users",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
|
@ -1118,6 +1118,7 @@ checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
|
|||
dependencies = [
|
||||
"crc32fast",
|
||||
"libz-rs-sys",
|
||||
"libz-sys",
|
||||
"miniz_oxide 0.8.8",
|
||||
]
|
||||
|
||||
|
@ -2184,7 +2185,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets 0.52.6",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2207,6 +2208,17 @@ dependencies = [
|
|||
"zlib-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libz-sys"
|
||||
version = "1.1.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.14"
|
||||
|
@ -2369,9 +2381,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "nanorand"
|
||||
version = "0.8.0"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6e3d189da485332e96ba8a5ef646a311871abd7915bf06ac848a9117f19cf6e4"
|
||||
checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3"
|
||||
|
||||
[[package]]
|
||||
name = "native-tls"
|
||||
|
@ -2500,6 +2512,17 @@ version = "0.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
|
||||
|
||||
[[package]]
|
||||
name = "num-derive"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.19"
|
||||
|
@ -2546,7 +2569,7 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
|||
name = "oodle"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bindgen 0.72.0",
|
||||
"bindgen 0.71.1",
|
||||
"color-eyre",
|
||||
"tracing",
|
||||
]
|
||||
|
@ -3020,17 +3043,6 @@ dependencies = [
|
|||
"thiserror 1.0.63",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
|
||||
dependencies = [
|
||||
"getrandom 0.2.15",
|
||||
"libredox",
|
||||
"thiserror 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
|
@ -3077,9 +3089,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
|||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.12.20"
|
||||
version = "0.12.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813"
|
||||
checksum = "e98ff6b0dbbe4d5a37318f433d4fc82babd21631f194d370409ceb2e40b2f0b5"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
|
@ -3093,10 +3105,12 @@ dependencies = [
|
|||
"hyper-rustls",
|
||||
"hyper-tls",
|
||||
"hyper-util",
|
||||
"ipnet",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime",
|
||||
"native-tls",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"rustls-pki-types",
|
||||
|
@ -3350,16 +3364,20 @@ dependencies = [
|
|||
"color-eyre",
|
||||
"csv-async",
|
||||
"fastrand",
|
||||
"flate2",
|
||||
"futures",
|
||||
"futures-util",
|
||||
"glob",
|
||||
"luajit2-sys",
|
||||
"nanorand",
|
||||
"num-derive",
|
||||
"num-traits",
|
||||
"oodle",
|
||||
"path-slash",
|
||||
"pin-project-lite",
|
||||
"serde",
|
||||
"serde_sjson",
|
||||
"strum",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tracing",
|
||||
|
@ -3621,6 +3639,28 @@ version = "0.11.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "strum"
|
||||
version = "0.26.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
|
||||
dependencies = [
|
||||
"strum_macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
version = "0.26.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
version = "2.6.1"
|
||||
|
@ -4013,9 +4053,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tower-http"
|
||||
version = "0.6.5"
|
||||
version = "0.6.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5cc2d9e086a412a451384326f521c8123a99a466b329941a9403696bff9b0da2"
|
||||
checksum = "0fdb0c213ca27a9f57ab69ddb290fd80d970922355b83ae380b395d3986b8a2e"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"bytes",
|
||||
|
@ -4581,7 +4621,7 @@ version = "0.1.9"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5003,9 +5043,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "4.1.0"
|
||||
version = "3.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af7dcdb4229c0e79c2531a24de7726a0e980417a74fb4d030a35f535665439a0"
|
||||
checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308"
|
||||
dependencies = [
|
||||
"arbitrary",
|
||||
"bzip2",
|
||||
|
Cargo.toml (10 changed lines)

@@ -22,12 +22,13 @@ clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "str
cli-table = { version = "0.5.0", default-features = false, features = ["derive"] }
color-eyre = { path = "lib/color-eyre" }
colors-transform = "0.2.11"
confy = "1.0.0"
confy = "0.6.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
druid-widget-nursery = "0.1"
dtmt-shared = { path = "lib/dtmt-shared" }
fastrand = "2.1.0"
flate2 = { version = "1.0.30", features = ["zlib"] }
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
@@ -35,8 +36,10 @@ interprocess = "2.1.0"
lazy_static = "1.4.0"
luajit2-sys = { path = "lib/luajit2-sys" }
minijinja = { version = "2.0.1", default-features = false, features = ["serde"] }
nanorand = "0.8.0"
nanorand = "0.7.0"
nexusmods = { path = "lib/nexusmods" }
num-derive = "0.4.2"
num-traits = "0.2.19"
notify = "8.0.0"
oodle = { path = "lib/oodle" }
open = "5.0.1"
@@ -49,6 +52,7 @@ serde = { version = "1.0.152", features = ["derive", "rc"] }
serde_sjson = "1.2.1"
steamlocate = "2.0.0-beta.2"
strip-ansi-escapes = "0.2.0"
strum = { version = "0.26.3", features = ["derive", "strum_macros"] }
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
@@ -56,7 +60,7 @@ tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
usvg = "0.25.0"
zip = { version = "4.0.0", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
zip = { version = "3.0.0", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }

[profile.dev.package.backtrace]
opt-level = 3
@@ -275,7 +275,13 @@ struct ExtractOptions<'a> {

#[tracing::instrument(
    skip(ctx, options),
    fields(decompile = options.decompile, flatten = options.flatten, dry_run = options.dry_run)
    fields(
        bundle_name = tracing::field::Empty,
        bundle_hash = tracing::field::Empty,
        decompile = options.decompile,
        flatten = options.flatten,
        dry_run = options.dry_run,
    )
)]
async fn extract_bundle<P1, P2>(
    ctx: Arc<sdk::Context>,
@@ -287,9 +293,42 @@ where
    P1: AsRef<Path> + std::fmt::Debug,
    P2: AsRef<Path> + std::fmt::Debug,
{
    let ctx = if ctx.game_dir.is_some() {
        tracing::debug!(
            "Got game directory from config: {}",
            ctx.game_dir.as_ref().unwrap().display()
        );

        ctx
    } else {
        let game_dir = path
            .as_ref()
            .parent()
            .and_then(|parent| parent.parent())
            .map(|p| p.to_path_buf());

        tracing::info!(
            "No game directory configured, guessing from bundle path: {:?}",
            game_dir
        );

        Arc::new(sdk::Context {
            game_dir,
            lookup: Arc::clone(&ctx.lookup),
            ljd: ctx.ljd.clone(),
            revorb: ctx.revorb.clone(),
            ww2ogg: ctx.ww2ogg.clone(),
        })
    };

    let bundle = {
        let data = fs::read(path.as_ref()).await?;
        let name = Bundle::get_name_from_path(&ctx, path.as_ref());
        {
            let span = tracing::span::Span::current();
            span.record("bundle_hash", format!("{:X}", name));
            span.record("bundle_name", name.display().to_string());
        }
        Bundle::from_binary(&ctx, name, data)?
    };
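The hunk above declares `bundle_name` and `bundle_hash` as `tracing::field::Empty` on the instrumented span and fills them in via `Span::current().record(...)` once the bundle name is known. A minimal, self-contained sketch of that pattern (the `process` function, its argument, and the subscriber setup are illustrative, not part of this diff):

```rust
// Sketch only: demonstrates pre-declared span fields that are recorded later.
// Assumes the `tracing` and `tracing-subscriber` crates.
#[tracing::instrument(skip(path), fields(bundle_name = tracing::field::Empty))]
fn process(path: &str) {
    // Stand-in for the real name lookup done in `extract_bundle`.
    let name = path.to_uppercase();

    // Fill the placeholder field now that the value exists.
    tracing::Span::current().record("bundle_name", name.as_str());
    tracing::info!("processing bundle");
}

fn main() {
    tracing_subscriber::fmt().init();
    process("content/some_bundle");
}
```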
@@ -1,4 +1,5 @@
use std::path::PathBuf;
use std::sync::Arc;

use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
use cli_table::{print_stdout, WithTitle};
@@ -156,6 +157,8 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
        BufReader::new(Box::new(f))
    };

    let lookup = Arc::make_mut(&mut ctx.lookup);

    let group = sdk::murmur::HashGroup::from(*group);

    let mut added = 0;
@@ -165,15 +168,15 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
    let total = {
        for line in lines.into_iter() {
            let value = line?;
            if ctx.lookup.find(&value, group).is_some() {
            if lookup.find(&value, group).is_some() {
                skipped += 1;
            } else {
                ctx.lookup.add(value, group);
                lookup.add(value, group);
                added += 1;
            }
        }

        ctx.lookup.len()
        lookup.len()
    };

    let out_path = matches
@@ -190,7 +193,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
        })
        .with_section(|| out_path.display().to_string().header("Path:"))?;

    ctx.lookup
    lookup
        .to_csv(f)
        .await
        .wrap_err("Failed to write dictionary to disk")?;
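This only compiles because `sdk::Context::lookup` becomes an `Arc<Dictionary>` in the `lib/sdk` context change further down; `Arc::make_mut` hands the command a plain mutable reference for the duration of the import instead of going through `ctx.lookup` on every line. A small sketch of that copy-on-write pattern with a stand-in type:

```rust
use std::sync::Arc;

// Stand-in for the SDK's `Dictionary`; only used to illustrate `Arc::make_mut`.
#[derive(Clone, Default)]
struct Dictionary {
    entries: Vec<String>,
}

fn main() {
    let mut lookup = Arc::new(Dictionary::default());

    // Clones the inner value only if other `Arc` handles exist, then yields `&mut`.
    let inner = Arc::make_mut(&mut lookup);
    inner.entries.push("some_hash_source".to_string());
    inner.entries.push("another_hash_source".to_string());

    assert_eq!(lookup.entries.len(), 2);
}
```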
crates/dtmt/src/cmd/experiment/mod.rs (new file, 21 lines)

@@ -0,0 +1,21 @@
use clap::{ArgMatches, Command};
use color_eyre::Result;

mod texture_meta;

pub(crate) fn command_definition() -> Command {
    Command::new("experiment")
        .subcommand_required(true)
        .about("A collection of utilities and experiments.")
        .subcommand(texture_meta::command_definition())
}

#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
    match matches.subcommand() {
        Some(("texture-meta", sub_matches)) => texture_meta::run(ctx, sub_matches).await,
        _ => unreachable!(
            "clap is configured to require a subcommand, and they're all handled above"
        ),
    }
}
crates/dtmt/src/cmd/experiment/texture_meta.rs (new file, 121 lines)

@@ -0,0 +1,121 @@
use std::path::PathBuf;
use std::sync::Arc;

use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::Context;
use color_eyre::Result;
use futures_util::StreamExt;
use sdk::{Bundle, BundleFileType};
use tokio::fs;

use crate::cmd::util::resolve_bundle_paths;

pub(crate) fn command_definition() -> Command {
    Command::new("texture-meta")
        .about(
            "Iterates over the provided bundles and lists certain meta data.
Primarily intended to help spot patterns between dependend data fields and values.",
        )
        .arg(
            Arg::new("bundle")
                .required(true)
                .action(ArgAction::Append)
                .value_parser(value_parser!(PathBuf))
                .help(
                    "Path to the bundle(s) to read. If this points to a directory instead \
                     of a file, all files in that directory will be checked.",
                ),
        )
    // TODO: Maybe provide JSON and CSV
    // TODO: Maybe allow toggling certain fields
}

#[tracing::instrument(skip(ctx))]
async fn handle_bundle(ctx: &sdk::Context, path: &PathBuf) -> Result<()> {
    let bundle = {
        let binary = fs::read(path).await?;
        let name = Bundle::get_name_from_path(ctx, path);
        Bundle::from_binary(ctx, name, binary)?
    };

    let bundle_dir = ctx
        .game_dir
        .as_deref()
        .map(|dir| dir.join("bundle"))
        .or_else(|| path.parent().map(|p| p.to_path_buf()))
        .unwrap_or_default();

    for f in bundle.files().iter() {
        if f.file_type() != BundleFileType::Texture {
            continue;
        }

        for (i, v) in f.variants().iter().enumerate() {
            let data_file_name = v.data_file_name();

            let data_file_length = if let Some(file_name) = data_file_name {
                let path = bundle_dir.join(file_name);

                match fs::metadata(&path).await {
                    Ok(meta) => meta.len(),
                    Err(err) => {
                        return Err(err).wrap_err_with(|| {
                            format!("Failed to open data file {}", path.display())
                        })
                    }
                }
            } else {
                0
            };

            println!(
                "{},{},{},{},{:b},{},{},{:?},{},{:#010b}",
                bundle.name().display(),
                f.name(false, None),
                f.file_type().ext_name(),
                i,
                v.property(),
                v.data().len(),
                v.external(),
                data_file_name,
                data_file_length,
                v.unknown_1(),
            );
        }
    }

    Ok(())
}

#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
    let bundles = matches
        .get_many::<PathBuf>("bundle")
        .unwrap_or_default()
        .cloned();

    let paths = resolve_bundle_paths(bundles);

    let ctx = Arc::new(ctx);

    println!(
        "Bundle Name,File Name,File Type,Variant,Property,Bundle Data Length,External,Data File,Data File Length,Unknown 1"
    );

    paths
        .for_each_concurrent(10, |p| async {
            let ctx = ctx.clone();
            async move {
                if let Err(err) = handle_bundle(&ctx, &p)
                    .await
                    .wrap_err_with(|| format!("Failed to list contents of bundle {}", p.display()))
                {
                    tracing::error!("Failed to handle bundle: {}", format!("{:#}", err));
                }
            }
            .await;
        })
        .await;

    Ok(())
}
@@ -12,6 +12,7 @@ use clap::value_parser;
use clap::{command, Arg};
use color_eyre::eyre;
use color_eyre::eyre::{Context, Result};
use sdk::murmur::Dictionary;
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::BufReader;
@@ -21,6 +22,7 @@ mod cmd {
    pub mod build;
    pub mod bundle;
    pub mod dictionary;
    pub mod experiment;
    pub mod migrate;
    pub mod murmur;
    pub mod new;
@@ -67,6 +69,7 @@ async fn main() -> Result<()> {
        .subcommand(cmd::build::command_definition())
        .subcommand(cmd::bundle::command_definition())
        .subcommand(cmd::dictionary::command_definition())
        .subcommand(cmd::experiment::command_definition())
        .subcommand(cmd::migrate::command_definition())
        .subcommand(cmd::murmur::command_definition())
        .subcommand(cmd::new::command_definition())
@@ -107,8 +110,9 @@ async fn main() -> Result<()> {

            let r = BufReader::new(f);
            let mut ctx = ctx.write().await;
            if let Err(err) = ctx.lookup.from_csv(r).await {
                tracing::error!("{:#}", err);
            match Dictionary::from_csv(r).await {
                Ok(lookup) => ctx.lookup = Arc::new(lookup),
                Err(err) => tracing::error!("{:#}", err),
            }
        })
    };
@@ -144,6 +148,7 @@ async fn main() -> Result<()> {
        Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
        Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
        Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
        Some(("experiment", sub_matches)) => cmd::experiment::run(ctx, sub_matches).await?,
        Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
        Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
        Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,
@@ -10,4 +10,4 @@ color-eyre = { workspace = true }
tracing = { workspace = true }

[build-dependencies]
bindgen = "0.72.0"
bindgen = "0.71.0"
@@ -52,6 +52,7 @@ impl From<OodleLZ_CheckCRC> for bindings::OodleLZ_CheckCRC {
#[tracing::instrument(skip(data))]
pub fn decompress<I>(
    data: I,
    out_size: usize,
    fuzz_safe: OodleLZ_FuzzSafe,
    check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
@@ -59,7 +60,7 @@ where
    I: AsRef<[u8]>,
{
    let data = data.as_ref();
    let mut out = vec![0; CHUNK_SIZE];
    let mut out = vec![0; out_size];

    let verbosity = if tracing::enabled!(tracing::Level::INFO) {
        bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Minimal
@@ -10,16 +10,20 @@ byteorder = { workspace = true }
color-eyre = { workspace = true }
csv-async = { workspace = true }
fastrand = { workspace = true }
flate2 = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
glob = { workspace = true }
luajit2-sys = { workspace = true }
nanorand = { workspace = true }
num-derive = { workspace = true }
num-traits = { workspace = true }
oodle = { workspace = true }
path-slash = { workspace = true }
pin-project-lite = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
strum = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }
@@ -42,12 +42,32 @@ impl<T: FromBinary> FromBinary for Vec<T> {
    }
}

pub fn flags_from_bits<T: bitflags::Flags>(bits: T::Bits) -> T
where
    <T as bitflags::Flags>::Bits: std::fmt::Binary,
{
    if let Some(flags) = T::from_bits(bits) {
        flags
    } else {
        let unknown = bits & !T::all().bits();

        tracing::warn!(
            "Unknown bits found for '{}': known = {:0b}, unknown = {:0b}",
            std::any::type_name::<T>(),
            T::all().bits(),
            unknown
        );

        T::from_bits_truncate(bits)
    }
}

pub mod sync {
    use std::ffi::CStr;
    use std::io::{self, Read, Seek, SeekFrom};
    use std::io::{self, Read, Seek, SeekFrom, Write};

    use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
    use color_eyre::eyre::WrapErr;
    use color_eyre::eyre::{self, WrapErr};
    use color_eyre::{Help, Report, Result, SectionExt};

    macro_rules! make_read {
@@ -123,15 +143,16 @@ pub mod sync {
        };
    }

    pub trait ReadExt: ReadBytesExt + Seek {
    pub trait ReadExt: Read + Seek {
        fn read_u8(&mut self) -> io::Result<u8> {
            ReadBytesExt::read_u8(self)
        }

        make_read!(read_u16, read_u16_le, u16);
        make_read!(read_u32, read_u32_le, u32);
        make_read!(read_u64, read_u64_le, u64);

        make_skip!(skip_u8, read_u8, u8);
        make_skip!(skip_u16, read_u16, u16);
        make_skip!(skip_u32, read_u32, u32);

        // Implementation based on https://en.wikipedia.com/wiki/LEB128
@@ -181,9 +202,17 @@ pub mod sync {
                res
            }
        }

        fn read_bool(&mut self) -> Result<bool> {
            match ReadExt::read_u8(self)? {
                0 => Ok(false),
                1 => Ok(true),
                v => eyre::bail!("Invalid value for boolean '{}'", v),
            }
        }
    }

    pub trait WriteExt: WriteBytesExt + Seek {
    pub trait WriteExt: Write + Seek {
        fn write_u8(&mut self, val: u8) -> io::Result<()> {
            WriteBytesExt::write_u8(self, val)
        }
@@ -191,6 +220,10 @@ pub mod sync {
        make_write!(write_u32, write_u32_le, u32);
        make_write!(write_u64, write_u64_le, u64);

        fn write_bool(&mut self, val: bool) -> io::Result<()> {
            WriteBytesExt::write_u8(self, if val { 1 } else { 0 })
        }

        fn write_padding(&mut self) -> io::Result<usize> {
            let pos = self.stream_position()?;
            let size = 16 - (pos % 16) as usize;
@@ -207,8 +240,8 @@ pub mod sync {
        }
    }

    impl<R: ReadBytesExt + Seek + ?Sized> ReadExt for R {}
    impl<W: WriteBytesExt + Seek + ?Sized> WriteExt for W {}
    impl<R: Read + Seek + ?Sized> ReadExt for R {}
    impl<W: Write + Seek + ?Sized> WriteExt for W {}

    pub(crate) fn _read_up_to<R>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize>
    where
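The new `flags_from_bits` helper keeps parsing when a value contains bits that no known flag covers: it logs the unknown bits and falls back to `from_bits_truncate`. A hedged sketch of the same idea against the `bitflags` 2.x API, using a made-up `DemoFlags` type (real call sites use types such as the texture flags defined later in this diff):

```rust
use bitflags::bitflags;

bitflags! {
    // Illustrative flag set, not one defined by the SDK.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct DemoFlags: u32 {
        const STREAMABLE = 1 << 0;
        const SRGB = 1 << 8;
    }
}

// Mirrors the intent of `binary::flags_from_bits`: keep the known bits,
// report the unknown ones instead of failing the whole parse.
fn flags_from_bits_lossy(bits: u32) -> DemoFlags {
    DemoFlags::from_bits(bits).unwrap_or_else(|| {
        let unknown = bits & !DemoFlags::all().bits();
        eprintln!("unknown bits: {unknown:#b}");
        DemoFlags::from_bits_truncate(bits)
    })
}

fn main() {
    // Bit 4 is not a known flag, so it is reported and dropped.
    let flags = flags_from_bits_lossy(0b0001_0001);
    assert_eq!(flags, DemoFlags::STREAMABLE);
}
```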
@@ -15,17 +15,18 @@ use super::filetype::BundleFileType;
#[derive(Debug)]
struct BundleFileHeader {
    variant: u32,
    unknown_1: u8,
    external: bool,
    size: usize,
    unknown_1: u8,
    len_data_file_name: usize,
}

#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct BundleFileVariant {
    property: u32,
    data: Vec<u8>,
    data_file_name: Option<String>,
    // Seems to be related to whether there is a data path.
    external: bool,
    unknown_1: u8,
}

@@ -39,6 +40,7 @@ impl BundleFileVariant {
            property: 0,
            data: Vec::new(),
            data_file_name: None,
            external: false,
            unknown_1: 0,
        }
    }
@@ -63,21 +65,30 @@ impl BundleFileVariant {
        self.data_file_name.as_ref()
    }

    pub fn external(&self) -> bool {
        self.external
    }

    pub fn unknown_1(&self) -> u8 {
        self.unknown_1
    }

    #[tracing::instrument(skip_all)]
    fn read_header<R>(r: &mut R) -> Result<BundleFileHeader>
    where
        R: Read + Seek,
    {
        let variant = r.read_u32()?;
        let unknown_1 = r.read_u8()?;
        let external = r.read_bool()?;
        let size = r.read_u32()? as usize;
        r.skip_u8(1)?;
        let unknown_1 = r.read_u8()?;
        let len_data_file_name = r.read_u32()? as usize;

        Ok(BundleFileHeader {
            size,
            unknown_1,
            external,
            variant,
            unknown_1,
            len_data_file_name,
        })
    }
@@ -88,7 +99,7 @@ impl BundleFileVariant {
        W: Write + Seek,
    {
        w.write_u32(self.property)?;
        w.write_u8(self.unknown_1)?;
        w.write_bool(self.external)?;

        let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0);

@@ -106,6 +117,26 @@ impl BundleFileVariant {
    }
}

impl std::fmt::Debug for BundleFileVariant {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut out = f.debug_struct("BundleFileVariant");
        out.field("property", &self.property);

        if self.data.len() <= 5 {
            out.field("data", &format!("{:x?}", &self.data));
        } else {
            out.field(
                "data",
                &format!("{:x?}.. ({} bytes)", &self.data[..5], &self.data.len()),
            );
        }

        out.field("data_file_name", &self.data_file_name)
            .field("external", &self.external)
            .finish()
    }
}

bitflags! {
    #[derive(Default, Clone, Copy, Debug)]
    pub struct Properties: u32 {
@@ -204,6 +235,7 @@ impl BundleFile {
            let s = r
                .read_string_len(header.len_data_file_name)
                .wrap_err("Failed to read data file name")?;

            Some(s)
        } else {
            None
@@ -216,6 +248,7 @@ impl BundleFile {
            property: header.variant,
            data,
            data_file_name,
            external: header.external,
            unknown_1: header.unknown_1,
        };

@@ -243,7 +276,7 @@ impl BundleFile {

        for variant in self.variants.iter() {
            w.write_u32(variant.property())?;
            w.write_u8(variant.unknown_1)?;
            w.write_bool(variant.external)?;

            let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0);

@@ -277,6 +310,9 @@ impl BundleFile {
    ) -> Result<Self> {
        match file_type {
            BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
            BundleFileType::Texture => texture::compile(name, sjson, root)
                .await
                .wrap_err("Failed to compile Texture file"),
            BundleFileType::Unknown(_) => {
                eyre::bail!("Unknown file type. Cannot compile from SJSON");
            }
@@ -359,18 +395,16 @@ impl BundleFile {
        Ok(files)
    }

    #[tracing::instrument(name = "File::decompiled", skip_all)]
    #[tracing::instrument(
        name = "File::decompiled",
        skip_all,
        fields(file = self.name(false, None), file_type = self.file_type().ext_name(), variants = self.variants.len())
    )]
    pub async fn decompiled(&self, ctx: &crate::Context) -> Result<Vec<UserFile>> {
        let file_type = self.file_type();

        if tracing::enabled!(tracing::Level::DEBUG) {
            tracing::debug!(
                name = self.name(true, None),
                variants = self.variants.len(),
                "Attempting to decompile"
            );
        }

        // The `Strings` type handles all variants combined.
        // For the other ones, each variant will be its own file.
        if file_type == BundleFileType::Strings {
            return strings::decompile(ctx, &self.variants);
        }
@@ -386,6 +420,7 @@ impl BundleFile {
            let res = match file_type {
                BundleFileType::Lua => lua::decompile(ctx, data).await,
                BundleFileType::Package => package::decompile(ctx, name.clone(), data),
                BundleFileType::Texture => texture::decompile(ctx, name.clone(), variant).await,
                _ => {
                    tracing::debug!("Can't decompile, unknown file type");
                    Ok(vec![UserFile::with_name(data.to_vec(), name.clone())])
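The hand-written `Debug` impl above trims `data` to a five-byte hex preview plus the total length, so trace output stays readable even for multi-megabyte variants. The same trick in isolation, with a made-up `Blob` type:

```rust
// Sketch of the truncated-Debug pattern used by `BundleFileVariant` above.
struct Blob {
    data: Vec<u8>,
}

impl std::fmt::Debug for Blob {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut out = f.debug_struct("Blob");

        if self.data.len() <= 5 {
            out.field("data", &format!("{:x?}", &self.data));
        } else {
            out.field(
                "data",
                &format!("{:x?}.. ({} bytes)", &self.data[..5], self.data.len()),
            );
        }

        out.finish()
    }
}

fn main() {
    let blob = Blob { data: vec![0u8; 1024] };
    println!("{blob:?}"); // Blob { data: "[0, 0, 0, 0, 0].. (1024 bytes)" }
}
```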
@@ -1,4 +1,5 @@
use color_eyre::{eyre, Result};
use color_eyre::eyre;
use color_eyre::Result;
use serde::Serialize;

use crate::murmur::Murmur64;
@@ -162,6 +162,7 @@ impl Bundle {
            // TODO: Optimize to not reallocate?
            let mut raw_buffer = oodle::decompress(
                &compressed_buffer,
                oodle::CHUNK_SIZE,
                OodleLZ_FuzzSafe::No,
                OodleLZ_CheckCRC::No,
            )
@@ -359,6 +360,7 @@ where
        // TODO: Optimize to not reallocate?
        let mut raw_buffer = oodle::decompress(
            &compressed_buffer,
            oodle::CHUNK_SIZE,
            OodleLZ_FuzzSafe::No,
            OodleLZ_CheckCRC::No,
        )?;
@@ -1,8 +1,11 @@
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use std::{ffi::OsString, path::PathBuf};
use std::sync::Arc;

use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64};
use crate::murmur::{Dictionary, HashGroup, IdString32, IdString64, Murmur32, Murmur64};

#[derive(Clone)]
pub struct CmdLine {
    cmd: OsString,
    args: Vec<OsString>,
@@ -52,7 +55,7 @@ impl From<&CmdLine> for Command {
}

pub struct Context {
    pub lookup: Dictionary,
    pub lookup: Arc<Dictionary>,
    pub ljd: Option<CmdLine>,
    pub revorb: Option<String>,
    pub ww2ogg: Option<String>,
@@ -62,7 +65,7 @@ pub struct Context {
impl Context {
    pub fn new() -> Self {
        Self {
            lookup: Dictionary::new(),
            lookup: Arc::new(Dictionary::new()),
            ljd: None,
            revorb: None,
            ww2ogg: None,
@@ -84,17 +87,17 @@ impl Context {
        }
    }

    pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> String
    pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> IdString32
    where
        M: Into<Murmur32>,
    {
        let hash = hash.into();
        if let Some(s) = self.lookup.lookup_short(hash, group) {
            tracing::debug!(%hash, string = s, "Murmur32 lookup successful");
            s.to_owned()
            s.to_string().into()
        } else {
            tracing::debug!(%hash, "Murmur32 lookup failed");
            format!("{hash:08X}")
            hash.into()
        }
    }
}
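`lookup_hash_short` now returns an `IdString32` instead of a `String`, so the caller keeps the raw hash around when the dictionary has no entry for it. The real type lives in `lib/sdk/src/murmur` and is not shown in this diff; a rough, illustrative sketch of the shape such a type can take:

```rust
use std::fmt;

// Illustrative only; the SDK's `IdString32` may differ in detail.
#[derive(Clone, Debug)]
enum IdString32 {
    String(String),
    Hash(u32),
}

impl From<u32> for IdString32 {
    fn from(hash: u32) -> Self {
        Self::Hash(hash)
    }
}

impl fmt::Display for IdString32 {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::String(s) => write!(f, "{s}"),
            Self::Hash(hash) => write!(f, "{hash:08X}"),
        }
    }
}

fn main() {
    let resolved = IdString32::String("default".to_string());
    let unresolved = IdString32::from(0xDEADBEEF_u32);
    println!("{resolved} / {unresolved}"); // default / DEADBEEF
}
```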
@@ -1,3 +1,4 @@
pub mod lua;
pub mod package;
pub mod strings;
pub mod texture;
@@ -5,7 +5,7 @@ use color_eyre::{Report, Result};

use crate::binary::sync::ReadExt;
use crate::bundle::file::{BundleFileVariant, UserFile};
use crate::murmur::HashGroup;
use crate::murmur::{HashGroup, IdString32};

#[derive(Copy, Clone, PartialEq, Eq, Hash, serde::Serialize)]
#[serde(untagged)]
@@ -26,7 +26,7 @@ impl Language {
}

#[derive(serde::Serialize)]
pub struct Strings(HashMap<String, HashMap<Language, String>>);
pub struct Strings(HashMap<IdString32, HashMap<Language, String>>);

#[inline(always)]
fn read_string<R>(r: R) -> Result<String>
@@ -46,7 +46,7 @@ where
impl Strings {
    #[tracing::instrument(skip_all, fields(languages = variants.len()))]
    pub fn from_variants(ctx: &crate::Context, variants: &[BundleFileVariant]) -> Result<Self> {
        let mut map: HashMap<String, HashMap<Language, String>> = HashMap::new();
        let mut map: HashMap<IdString32, HashMap<Language, String>> = HashMap::new();

        for (i, variant) in variants.iter().enumerate() {
            let _span = tracing::trace_span!("variant {}", i);
lib/sdk/src/filetype/texture.rs (new file, 741 lines)

@@ -0,0 +1,741 @@
|
|||
use std::io::{Cursor, Read, Seek, SeekFrom, Write as _};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use bitflags::bitflags;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{eyre, SectionExt};
|
||||
use color_eyre::{Help, Result};
|
||||
use flate2::read::ZlibDecoder;
|
||||
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::fs;
|
||||
|
||||
use crate::binary::sync::{ReadExt, WriteExt};
|
||||
use crate::bundle::file::UserFile;
|
||||
use crate::murmur::{HashGroup, IdString32, IdString64};
|
||||
use crate::{binary, BundleFile, BundleFileType, BundleFileVariant};
|
||||
|
||||
mod dds;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinition {
|
||||
common: TextureDefinitionPlatform,
|
||||
// Stingray supports per-platform sections here, where you can create overrides with the same
|
||||
// values as in `common`. But since we only support PC, we don't need to implement
|
||||
// that.
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinitionPlatform {
|
||||
input: TextureDefinitionInput,
|
||||
output: TextureDefinitionOutput,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinitionInput {
|
||||
filename: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinitionOutput {
|
||||
category: String,
|
||||
}
|
||||
|
||||
bitflags! {
|
||||
#[derive(Clone, Copy, Debug, Default)]
|
||||
struct TextureFlags: u32 {
|
||||
const STREAMABLE = 0b0000_0001;
|
||||
const UNKNOWN = 1 << 1;
|
||||
const SRGB = 1 << 8;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default)]
|
||||
struct TextureHeaderMipInfo {
|
||||
offset: usize,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
struct TextureHeader {
|
||||
flags: TextureFlags,
|
||||
n_streamable_mipmaps: usize,
|
||||
width: usize,
|
||||
height: usize,
|
||||
mip_infos: [TextureHeaderMipInfo; 16],
|
||||
meta_size: usize,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for TextureHeader {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("TextureHeader")
|
||||
.field("flags", &self.flags)
|
||||
.field("n_streamable_mipmaps", &self.n_streamable_mipmaps)
|
||||
.field("width", &self.width)
|
||||
.field("height", &self.height)
|
||||
.field("mip_infos", &{
|
||||
let mut s = self
|
||||
.mip_infos
|
||||
.iter()
|
||||
.fold(String::from("["), |mut s, info| {
|
||||
s.push_str(&format!("{}/{}, ", info.offset, info.size));
|
||||
s
|
||||
});
|
||||
s.push(']');
|
||||
s
|
||||
})
|
||||
.field("meta_size", &self.meta_size)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl TextureHeader {
|
||||
#[tracing::instrument(skip(r))]
|
||||
fn from_binary(mut r: impl ReadExt) -> Result<Self> {
|
||||
let flags = r.read_u32().map(binary::flags_from_bits)?;
|
||||
let n_streamable_mipmaps = r.read_u32()? as usize;
|
||||
let width = r.read_u32()? as usize;
|
||||
let height = r.read_u32()? as usize;
|
||||
|
||||
let mut mip_infos = [TextureHeaderMipInfo::default(); 16];
|
||||
|
||||
for info in mip_infos.iter_mut() {
|
||||
info.offset = r.read_u32()? as usize;
|
||||
info.size = r.read_u32()? as usize;
|
||||
}
|
||||
|
||||
let meta_size = r.read_u32()? as usize;
|
||||
|
||||
Ok(Self {
|
||||
flags,
|
||||
n_streamable_mipmaps,
|
||||
width,
|
||||
height,
|
||||
mip_infos,
|
||||
meta_size,
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(w))]
|
||||
fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
|
||||
eyre::ensure!(
|
||||
self.flags.is_empty() && self.n_streamable_mipmaps == 0,
|
||||
"Only textures are supported where `flags == 0` and `n_streamable_mipmaps == 0`."
|
||||
);
|
||||
|
||||
w.write_u32(self.flags.bits())?;
|
||||
w.write_u32(self.n_streamable_mipmaps as u32)?;
|
||||
w.write_u32(self.width as u32)?;
|
||||
w.write_u32(self.height as u32)?;
|
||||
|
||||
for info in self.mip_infos {
|
||||
w.write_u32(info.offset as u32)?;
|
||||
w.write_u32(info.size as u32)?;
|
||||
}
|
||||
|
||||
// TODO: For now we write `0` here, until the meta section is figured out
|
||||
w.write_u32(0)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Texture {
|
||||
header: TextureHeader,
|
||||
data: Vec<u8>,
|
||||
stream: Option<Vec<u8>>,
|
||||
category: IdString32,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Texture {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut out = f.debug_struct("Texture");
|
||||
out.field("header", &self.header);
|
||||
|
||||
if self.data.len() <= 5 {
|
||||
out.field("data", &format!("{:x?}", &self.data));
|
||||
} else {
|
||||
out.field(
|
||||
"data",
|
||||
&format!("{:x?}.. ({} bytes)", &self.data[..5], &self.data.len()),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(stream) = self.stream.as_ref() {
|
||||
if stream.len() <= 5 {
|
||||
out.field("stream", &format!("{:x?}", &stream));
|
||||
} else {
|
||||
out.field(
|
||||
"stream",
|
||||
&format!("{:x?}.. ({} bytes)", &stream[..5], &stream.len()),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
out.field("stream", &"None");
|
||||
}
|
||||
|
||||
out.field("category", &self.category).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Texture {
|
||||
#[tracing::instrument(skip(data, chunks))]
|
||||
fn decompress_stream_data(mut data: impl Read, chunks: impl AsRef<[usize]>) -> Result<Vec<u8>> {
|
||||
const RAW_SIZE: usize = 0x10000;
|
||||
|
||||
let chunks = chunks.as_ref();
|
||||
|
||||
let max_size = chunks.iter().max().copied().unwrap_or(RAW_SIZE);
|
||||
let mut read_buf = vec![0; max_size];
|
||||
|
||||
let mut stream_raw = Vec::with_capacity(chunks.iter().sum());
|
||||
let mut last = 0;
|
||||
|
||||
for offset_next in chunks {
|
||||
let size = offset_next - last;
|
||||
|
||||
let span = tracing::info_span!(
|
||||
"stream chunk",
|
||||
num_chunks = chunks.len(),
|
||||
chunk_size_comp = size,
|
||||
offset = last
|
||||
);
|
||||
let _enter = span.enter();
|
||||
|
||||
let buf = &mut read_buf[0..size];
|
||||
data.read_exact(buf)
|
||||
.wrap_err("Failed to read chunk from stream file")?;
|
||||
|
||||
let raw = oodle::decompress(buf, RAW_SIZE, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No)
|
||||
.wrap_err("Failed to decompress stream chunk")?;
|
||||
eyre::ensure!(
|
||||
raw.len() == RAW_SIZE,
|
||||
"Invalid chunk length after decompression"
|
||||
);
|
||||
|
||||
stream_raw.extend_from_slice(&raw);
|
||||
|
||||
last = *offset_next;
|
||||
}
|
||||
Ok(stream_raw)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(data), fields(data_len = data.as_ref().len()))]
|
||||
fn reorder_stream_mipmap(
|
||||
data: impl AsRef<[u8]>,
|
||||
bits_per_block: usize,
|
||||
bytes_per_block: usize,
|
||||
block_size: usize,
|
||||
pitch: usize,
|
||||
) -> Result<Vec<u8>> {
|
||||
const CHUNK_SIZE: usize = 0x10000;
|
||||
let data = data.as_ref();
|
||||
|
||||
let mut out = Vec::with_capacity(data.len());
|
||||
let mut window = vec![0u8; pitch * 64];
|
||||
|
||||
let row_size = bits_per_block * block_size;
|
||||
tracing::Span::current().record("row_size", row_size);
|
||||
|
||||
eyre::ensure!(
|
||||
data.len() % CHUNK_SIZE == 0,
|
||||
"Stream data does not divide evenly into chunks"
|
||||
);
|
||||
|
||||
for (i, chunk) in data.chunks_exact(CHUNK_SIZE).enumerate() {
|
||||
let chunk_x = (i % bytes_per_block) * row_size;
|
||||
|
||||
let span = tracing::trace_span!("chunk", i, chunk_x = chunk_x);
|
||||
let _guard = span.enter();
|
||||
|
||||
if i > 0 && i % bytes_per_block == 0 {
|
||||
out.extend_from_slice(&window);
|
||||
}
|
||||
|
||||
for (j, row) in chunk.chunks_exact(row_size).enumerate() {
|
||||
let start = chunk_x + j * pitch;
|
||||
let end = start + row_size;
|
||||
tracing::trace!("{i}/{j} at {}:{}", start, end);
|
||||
window[start..end].copy_from_slice(row);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
#[tracing::instrument(
|
||||
"Texture::from_binary",
|
||||
skip(ctx, r, stream_r),
|
||||
fields(
|
||||
compression_type = tracing::field::Empty,
|
||||
compressed_size = tracing::field::Empty,
|
||||
uncompressed_size = tracing::field::Empty,
|
||||
)
|
||||
)]
|
||||
fn from_binary(
|
||||
ctx: &crate::Context,
|
||||
mut r: impl Read + Seek,
|
||||
mut stream_r: Option<impl Read>,
|
||||
) -> Result<Self> {
|
||||
let compression_type = r.read_u32()?;
|
||||
let compressed_size = r.read_u32()? as usize;
|
||||
let uncompressed_size = r.read_u32()? as usize;
|
||||
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("compression_type", compression_type);
|
||||
span.record("compressed_size", compressed_size);
|
||||
span.record("uncompressed_size", uncompressed_size);
|
||||
}
|
||||
|
||||
let mut comp_buf = vec![0; compressed_size];
|
||||
r.read_exact(&mut comp_buf)?;
|
||||
|
||||
let out_buf = match compression_type {
|
||||
// Uncompressed
|
||||
// This one never seems to contain the additional `TextureHeader` metadata,
|
||||
// so we return early in this branch.
|
||||
0 => {
|
||||
eyre::ensure!(
|
||||
compressed_size == 0 && uncompressed_size == 0,
|
||||
"Cannot handle texture with compression_type == 0, but buffer sizes > 0"
|
||||
);
|
||||
tracing::trace!("Found raw texture");
|
||||
|
||||
let pos = r.stream_position()?;
|
||||
let end = {
|
||||
r.seek(SeekFrom::End(0))?;
|
||||
let end = r.stream_position()?;
|
||||
r.seek(SeekFrom::Start(pos))?;
|
||||
end
|
||||
};
|
||||
|
||||
// Reads until the last u32.
|
||||
let mut data = vec![0u8; (end - pos - 4) as usize];
|
||||
r.read_exact(&mut data)?;
|
||||
|
||||
let category = r.read_u32().map(IdString32::from)?;
|
||||
|
||||
return Ok(Self {
|
||||
header: TextureHeader::default(),
|
||||
data,
|
||||
stream: None,
|
||||
category,
|
||||
});
|
||||
}
|
||||
1 => oodle::decompress(
|
||||
comp_buf,
|
||||
uncompressed_size,
|
||||
OodleLZ_FuzzSafe::No,
|
||||
OodleLZ_CheckCRC::No,
|
||||
)?,
|
||||
2 => {
|
||||
let mut decoder = ZlibDecoder::new(comp_buf.as_slice());
|
||||
let mut buf = Vec::with_capacity(uncompressed_size);
|
||||
|
||||
decoder.read_to_end(&mut buf)?;
|
||||
buf
|
||||
}
|
||||
_ => eyre::bail!(
|
||||
"Unknown compression type for texture '{}'",
|
||||
compression_type
|
||||
),
|
||||
};
|
||||
|
||||
eyre::ensure!(
|
||||
out_buf.len() == uncompressed_size,
|
||||
"Length of decompressed buffer did not match expected value. Expected {}, got {}",
|
||||
uncompressed_size,
|
||||
out_buf.len()
|
||||
);
|
||||
|
||||
// No idea what this number is supposed to mean.
|
||||
// Even the game engine just skips this one.
|
||||
r.skip_u32(0x43)?;
|
||||
|
||||
let header = TextureHeader::from_binary(&mut r)?;
|
||||
|
||||
eyre::ensure!(
|
||||
header.meta_size == 0 || stream_r.is_some(),
|
||||
"Compression chunks and stream file don't match up. meta_size = {}, has_stream = {}",
|
||||
header.meta_size,
|
||||
stream_r.is_some()
|
||||
);
|
||||
|
||||
let stream = if let Some(stream_r) = stream_r.as_mut() {
|
||||
// Number of compression chunks in the stream file
|
||||
let num_chunks = r.read_u32()? as usize;
|
||||
r.skip_u16(0)?;
|
||||
|
||||
{
|
||||
let num_chunks_1 = r.read_u16()? as usize;
|
||||
|
||||
eyre::ensure!(
|
||||
num_chunks == num_chunks_1,
|
||||
"Chunk numbers don't match. first = {}, second = {}",
|
||||
num_chunks,
|
||||
num_chunks_1
|
||||
);
|
||||
}
|
||||
|
||||
let mut chunks = Vec::with_capacity(num_chunks);
|
||||
|
||||
for _ in 0..num_chunks {
|
||||
chunks.push(r.read_u32()? as usize);
|
||||
}
|
||||
|
||||
let stream_raw = Self::decompress_stream_data(stream_r, chunks)
|
||||
.wrap_err("Failed to decompress stream data")?;
|
||||
|
||||
Some(stream_raw)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let category = ctx.lookup_hash_short(r.read_u32()?, HashGroup::TextureCategory);
|
||||
|
||||
Ok(Self {
|
||||
category,
|
||||
header,
|
||||
data: out_buf,
|
||||
stream,
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(w))]
|
||||
fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
|
||||
let compression_type = 1;
|
||||
w.write_u32(compression_type)?;
|
||||
|
||||
let comp_buf = oodle::compress(&self.data).wrap_err("Failed to compress DDS data")?;
|
||||
|
||||
w.write_u32(comp_buf.len() as u32)?;
|
||||
w.write_u32(self.data.len() as u32)?;
|
||||
w.write_all(&comp_buf)?;
|
||||
|
||||
// Unknown field, which the engine seems to ignore.
|
||||
// All game files have the same value here, so we just mirror that.
|
||||
w.write_u32(0x43)?;
|
||||
|
||||
self.header.to_binary(&mut w)?;
|
||||
|
||||
w.write_u32(self.category.to_murmur32().into())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
fn to_sjson(&self, filename: String) -> Result<String> {
|
||||
let texture = TextureDefinition {
|
||||
common: TextureDefinitionPlatform {
|
||||
input: TextureDefinitionInput { filename },
|
||||
output: TextureDefinitionOutput {
|
||||
category: self.category.display().to_string(),
|
||||
},
|
||||
},
|
||||
};
|
||||
serde_sjson::to_string(&texture).wrap_err("Failed to serialize texture definition")
|
||||
}
|
||||
|
||||
#[tracing::instrument(fields(
|
||||
dds_header = tracing::field::Empty,
|
||||
dx10_header = tracing::field::Empty,
|
||||
image_format = tracing::field::Empty,
|
||||
))]
|
||||
fn create_dds_user_file(&self, name: String) -> Result<UserFile> {
|
||||
let mut data = Cursor::new(&self.data);
|
||||
let mut dds_header =
|
||||
dds::DDSHeader::from_binary(&mut data).wrap_err("Failed to read DDS header")?;
|
||||
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("dds_header", format!("{:?}", dds_header));
|
||||
}
|
||||
|
||||
if !dds_header.pixel_format.flags.contains(dds::DDPF::FOURCC) {
|
||||
tracing::debug!("Found DDS without FourCC. Dumping raw data");
|
||||
return Ok(UserFile::with_name(self.data.clone(), name));
|
||||
}
|
||||
|
||||
// eyre::ensure!(
|
||||
// dds_header.pixel_format.four_cc == dds::FourCC::DX10,
|
||||
// "Only DX10 textures are currently supported. FourCC == {}",
|
||||
// dds_header.pixel_format.four_cc,
|
||||
// );
|
||||
|
||||
let dx10_header =
|
||||
dds::Dx10Header::from_binary(&mut data).wrap_err("Failed to read DX10 header")?;
|
||||
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("dx10_header", format!("{:?}", dx10_header));
|
||||
}
|
||||
|
||||
// match dx10_header.dxgi_format {
|
||||
// DXGIFormat::BC1_UNORM
|
||||
// | DXGIFormat::BC3_UNORM
|
||||
// | DXGIFormat::BC4_UNORM
|
||||
// | DXGIFormat::BC5_UNORM
|
||||
// | DXGIFormat::BC6H_UF16
|
||||
// | DXGIFormat::BC7_UNORM => {}
|
||||
// _ => {
|
||||
// eyre::bail!(
|
||||
// "Unsupported DXGI format: {} (0x{:0X})",
|
||||
// dx10_header.dxgi_format,
|
||||
// dx10_header.dxgi_format.to_u32().unwrap_or_default()
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
|
||||
let stingray_image_format = dds::stripped_format_from_header(&dds_header, &dx10_header)?;
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("image_format", format!("{:?}", stingray_image_format));
|
||||
}
|
||||
|
||||
// eyre::ensure!(
|
||||
// stingray_image_format.image_type == ImageType::Image2D,
|
||||
// "Unsupported image type: {}",
|
||||
// stingray_image_format.image_type,
|
||||
// );
|
||||
|
||||
let block_size = 4 * dds_header.pitch_or_linear_size / dds_header.width;
|
||||
let bits_per_block: usize = match block_size {
|
||||
8 => 128,
|
||||
16 => 64,
|
||||
block_size => eyre::bail!("Unsupported block size {}", block_size),
|
||||
};
|
||||
|
||||
let pitch = self.header.width / 4 * block_size;
|
||||
let bytes_per_block = self.header.width / bits_per_block / 4;
|
||||
|
||||
tracing::debug!(
|
||||
"block_size = {} | pitch = {} | bits_per_block = {} | bytes_per_block = {}",
|
||||
block_size,
|
||||
pitch,
|
||||
bits_per_block,
|
||||
bytes_per_block
|
||||
);
|
||||
|
||||
let mut out_data = Cursor::new(Vec::with_capacity(self.data.len()));
|
||||
|
||||
// Currently, we only extract the largest mipmap,
|
||||
// so we need to set the dimensions accordingly, and remove the
|
||||
// flag.
|
||||
dds_header.width = self.header.width;
|
||||
dds_header.height = self.header.height;
|
||||
dds_header.mipmap_count = 0;
|
||||
dds_header.flags &= !dds::DDSD::MIPMAPCOUNT;
|
||||
|
||||
dds_header
|
||||
.to_binary(&mut out_data)
|
||||
.wrap_err("Failed to write DDS header")?;
|
||||
|
||||
dx10_header
|
||||
.to_binary(&mut out_data)
|
||||
.wrap_err("Failed to write DX10 header")?;
|
||||
|
||||
// If there is stream data, we build the mipmap data from it.
|
||||
// If not, we take whatever is left in the bundle file.
|
||||
if let Some(stream) = &self.stream {
|
||||
let data = Self::reorder_stream_mipmap(
|
||||
stream,
|
||||
bits_per_block,
|
||||
bytes_per_block,
|
||||
block_size,
|
||||
pitch,
|
||||
)
|
||||
.wrap_err("Failed to reorder stream chunks")?;
|
||||
|
||||
out_data
|
||||
.write_all(&data)
|
||||
.wrap_err("Failed to write streamed mipmap data")?;
|
||||
} else {
|
||||
let (_, remaining) = data.split();
|
||||
out_data
|
||||
.write_all(remaining)
|
||||
.wrap_err("Failed to write texture data")?;
|
||||
};
|
||||
|
||||
Ok(UserFile::with_name(out_data.into_inner(), name))
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
fn to_user_files(&self, name: String) -> Result<Vec<UserFile>> {
|
||||
let mut files = Vec::with_capacity(2);
|
||||
|
||||
{
|
||||
let data = self.to_sjson(name.clone())?.as_bytes().to_vec();
|
||||
let name = PathBuf::from(&name)
|
||||
.with_extension("texture")
|
||||
.display()
|
||||
.to_string();
|
||||
files.push(UserFile::with_name(data, name));
|
||||
}
|
||||
|
||||
// For debugging purposes, also extract the raw files
|
||||
if cfg!(debug_assertions) {
|
||||
if let Some(stream) = &self.stream {
|
||||
let stream_name = PathBuf::from(&name).with_extension("stream");
|
||||
files.push(UserFile::with_name(
|
||||
stream.clone(),
|
||||
stream_name.display().to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let name = PathBuf::from(&name)
|
||||
.with_extension("raw.dds")
|
||||
.display()
|
||||
.to_string();
|
||||
files.push(UserFile::with_name(self.data.clone(), name));
|
||||
}
|
||||
|
||||
match self
|
||||
.create_dds_user_file(name)
|
||||
.wrap_err("Failed to create DDS file")
|
||||
{
|
||||
Ok(dds) => files.push(dds),
|
||||
Err(err) => {
|
||||
if cfg!(debug_assertions) {
|
||||
tracing::error!(
|
||||
"{:?}",
|
||||
err.with_section(|| {
|
||||
"Running in debug mode, continuing to produce raw files".header("Note:")
|
||||
})
|
||||
);
|
||||
} else {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(files)
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(ctx, data, stream_data), fields(data_len = data.as_ref().len()))]
pub(crate) async fn decompile_data(
    ctx: &crate::Context,
    name: String,
    data: impl AsRef<[u8]>,
    stream_data: Option<impl AsRef<[u8]>>,
) -> Result<Vec<UserFile>> {
    let mut r = Cursor::new(data);
    let mut stream_r = stream_data.map(Cursor::new);

    let texture = Texture::from_binary(ctx, &mut r, stream_r.as_mut())?;
    texture
        .to_user_files(name)
        .wrap_err("Failed to build user files")
}

#[tracing::instrument(skip(ctx))]
pub(crate) async fn decompile(
    ctx: &crate::Context,
    name: String,
    variant: &BundleFileVariant,
) -> Result<Vec<UserFile>> {
    let data_file = variant.data_file_name().map(|name| match &ctx.game_dir {
        Some(dir) => dir.join("bundle").join(name),
        None => PathBuf::from("bundle").join(name),
    });

    if variant.external() {
        let Some(path) = data_file else {
            eyre::bail!("File is marked external but has no data file name");
        };

        tracing::debug!(
            "Decompiling texture from external file '{}'",
            path.display()
        );

        let data = fs::read(&path)
            .await
            .wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
            .with_suggestion(|| {
                "Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
            })?;

        decompile_data(ctx, name, data, None::<&[u8]>).await
    } else {
        tracing::debug!("Decompiling texture from bundle data");

        let stream_data = match data_file {
            Some(path) => {
                let data = fs::read(&path)
                    .await
                    .wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
                    .with_suggestion(|| {
                        "Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
                    })?;
                Some(data)
            }
            None => None,
        };

        decompile_data(ctx, name, variant.data(), stream_data).await
    }
}

#[tracing::instrument(skip(sjson, name), fields(sjson_len = sjson.as_ref().len(), name = %name.display()))]
pub async fn compile(
    name: IdString64,
    sjson: impl AsRef<str>,
    root: impl AsRef<Path> + std::fmt::Debug,
) -> Result<BundleFile> {
    let definitions: TextureDefinition = serde_sjson::from_str(sjson.as_ref())
        .wrap_err("Failed to deserialize SJSON")
        .with_section(|| sjson.as_ref().to_string().header("SJSON:"))?;

    let dds = {
        let path = root.as_ref().join(definitions.common.input.filename);
        fs::read(&path)
            .await
            .wrap_err_with(|| format!("Failed to read DDS file '{}'", path.display()))?
    };

    let (width, height) = {
        let mut r = Cursor::new(&dds);

        let magic = r.read_u32()?;
        eyre::ensure!(
            magic == 0x20534444,
            "Invalid magic bytes for DDS. Expected 0x20534444, got {:08x}",
            magic
        );

        // Skip `dwSize` and `dwFlags` to reach `dwHeight` and `dwWidth`
        r.seek(SeekFrom::Current(2 * 4))?;

        let height = r.read_u32()? as usize;
        let width = r.read_u32()? as usize;

        (width, height)
    };

    let mut w = Cursor::new(Vec::new());

    let texture = Texture {
        header: TextureHeader {
            // As long as we can't handle mipmaps, these two need to be `0`
            flags: TextureFlags::empty(),
            n_streamable_mipmaps: 0,
            width,
            height,
            mip_infos: [TextureHeaderMipInfo::default(); 16],
            meta_size: 0,
        },
        data: dds,
        stream: None,
        category: IdString32::String(definitions.common.output.category),
    };
    texture.to_binary(&mut w)?;

    let mut variant = BundleFileVariant::new();
    variant.set_data(w.into_inner());

    let mut file = BundleFile::new(name, BundleFileType::Texture);
    file.add_variant(variant);

    Ok(file)
}
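
As a point of reference for the dimension peek in `compile` above, a standard DDS file starts with the `DDS ` magic followed by dwSize, dwFlags, dwHeight and dwWidth, all little-endian u32. A minimal std-only sketch; `peek_dds_dimensions` is a made-up helper name, not part of this crate:

use std::io::{Cursor, Read, Seek, SeekFrom};

/// Sketch: read width/height straight out of a standard DDS header.
fn peek_dds_dimensions(dds: &[u8]) -> std::io::Result<(u32, u32)> {
    let mut r = Cursor::new(dds);

    let mut buf = [0u8; 4];
    r.read_exact(&mut buf)?;
    assert_eq!(u32::from_le_bytes(buf), 0x2053_4444, "missing `DDS ` magic");

    // Skip dwSize and dwFlags.
    r.seek(SeekFrom::Current(2 * 4))?;

    // dwHeight comes before dwWidth in the header.
    r.read_exact(&mut buf)?;
    let height = u32::from_le_bytes(buf);
    r.read_exact(&mut buf)?;
    let width = u32::from_le_bytes(buf);

    Ok((width, height))
}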

529  lib/sdk/src/filetype/texture/dds.rs  Normal file

@ -0,0 +1,529 @@
use std::io::SeekFrom;

use bitflags::bitflags;
use color_eyre::eyre::Context as _;
use color_eyre::eyre::{self, OptionExt as _};
use color_eyre::Result;
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::{FromPrimitive as _, ToPrimitive as _};

use crate::binary;
use crate::binary::sync::{ReadExt, WriteExt};

const MAGIC_DDS: u32 = 0x20534444;

bitflags! {
    #[derive(Clone, Copy, Debug)]
    pub struct DDSD: u32 {
        /// Required
        const CAPS = 0x1;
        /// Required
        const HEIGHT = 0x2;
        /// Required
        const WIDTH = 0x4;
        /// Pitch for an uncompressed texture
        const PITCH = 0x8;
        /// Required
        const PIXELFORMAT = 0x1000;
        /// Required in a mipmapped texture
        const MIPMAPCOUNT = 0x20000;
        /// Pitch for a compressed texture
        const LINEARSIZE = 0x80000;
        /// Required in a depth texture
        const DEPTH = 0x800000;
    }

    #[derive(Clone, Copy, Debug)]
    pub struct DDSCAPS: u32 {
        const COMPLEX = 0x8;
        const MIPMAP = 0x400000;
        const TEXTURE = 0x1000;
    }

    #[derive(Clone, Copy, Debug)]
    pub struct DDSCAPS2: u32 {
        const CUBEMAP = 0x200;
        const CUBEMAP_POSITIVEX = 0x400;
        const CUBEMAP_NEGATIVEX = 0x800;
        const CUBEMAP_POSITIVEY = 0x1000;
        const CUBEMAP_NEGATIVEY = 0x2000;
        const CUBEMAP_POSITIVEZ = 0x4000;
        const CUBEMAP_NEGATIVEZ = 0x8000;
        const VOLUME = 0x200000;

        const CUBEMAP_ALLFACES = Self::CUBEMAP_POSITIVEX.bits()
            | Self::CUBEMAP_NEGATIVEX.bits()
            | Self::CUBEMAP_POSITIVEY.bits()
            | Self::CUBEMAP_NEGATIVEY.bits()
            | Self::CUBEMAP_POSITIVEZ.bits()
            | Self::CUBEMAP_NEGATIVEZ.bits();
    }

    #[derive(Clone, Copy, Debug)]
    pub struct DDPF: u32 {
        const ALPHAPIXELS = 0x1;
        const ALPHA = 0x2;
        const FOURCC = 0x4;
        const RGB = 0x40;
        const YUV = 0x200;
        const LUMINANCE = 0x20000;
    }

    #[derive(Clone, Copy, Debug)]
    pub struct DdsResourceMiscFlags: u32 {
        const TEXTURECUBE = 0x4;
    }
}
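
For orientation, this is roughly the `dwFlags` combination a plain, non-mipmapped compressed 2D texture carries according to the DDS documentation. A small sketch assuming the `DDSD` type above is in scope; the helper name is made up:

/// Sketch: the flag set for a basic compressed 2D texture without mipmaps.
fn basic_compressed_2d_flags() -> DDSD {
    // CAPS, HEIGHT, WIDTH and PIXELFORMAT are always required; LINEARSIZE marks
    // `pitch_or_linear_size` as the byte size of the top-level mip.
    DDSD::CAPS | DDSD::HEIGHT | DDSD::WIDTH | DDSD::PIXELFORMAT | DDSD::LINEARSIZE
}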

#[derive(Clone, Copy, Debug, PartialEq, Eq, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum D3D10ResourceDimension {
    Unknown = 0,
    Buffer = 1,
    Texture1D = 2,
    Texture2D = 3,
    Texture3D = 4,
}

#[allow(clippy::upper_case_acronyms)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, strum::Display, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum DXGIFormat {
    UNKNOWN = 0,
    R32G32B32A32_TYPELESS = 1,
    R32G32B32A32_FLOAT = 2,
    R32G32B32A32_UINT = 3,
    R32G32B32A32_SINT = 4,
    R32G32B32_TYPELESS = 5,
    R32G32B32_FLOAT = 6,
    R32G32B32_UINT = 7,
    R32G32B32_SINT = 8,
    R16G16B16A16_TYPELESS = 9,
    R16G16B16A16_FLOAT = 10,
    R16G16B16A16_UNORM = 11,
    R16G16B16A16_UINT = 12,
    R16G16B16A16_SNORM = 13,
    R16G16B16A16_SINT = 14,
    R32G32_TYPELESS = 15,
    R32G32_FLOAT = 16,
    R32G32_UINT = 17,
    R32G32_SINT = 18,
    R32G8X24_TYPELESS = 19,
    D32_FLOAT_S8X24_UINT = 20,
    R32_FLOAT_X8X24_TYPELESS = 21,
    X32_TYPELESS_G8X24_UINT = 22,
    R10G10B10A2_TYPELESS = 23,
    R10G10B10A2_UNORM = 24,
    R10G10B10A2_UINT = 25,
    R11G11B10_FLOAT = 26,
    R8G8B8A8_TYPELESS = 27,
    R8G8B8A8_UNORM = 28,
    R8G8B8A8_UNORM_SRGB = 29,
    R8G8B8A8_UINT = 30,
    R8G8B8A8_SNORM = 31,
    R8G8B8A8_SINT = 32,
    R16G16_TYPELESS = 33,
    R16G16_FLOAT = 34,
    R16G16_UNORM = 35,
    R16G16_UINT = 36,
    R16G16_SNORM = 37,
    R16G16_SINT = 38,
    R32_TYPELESS = 39,
    D32_FLOAT = 40,
    R32_FLOAT = 41,
    R32_UINT = 42,
    R32_SINT = 43,
    R24G8_TYPELESS = 44,
    D24_UNORM_S8_UINT = 45,
    R24_UNORM_X8_TYPELESS = 46,
    X24_TYPELESS_G8_UINT = 47,
    R8G8_TYPELESS = 48,
    R8G8_UNORM = 49,
    R8G8_UINT = 50,
    R8G8_SNORM = 51,
    R8G8_SINT = 52,
    R16_TYPELESS = 53,
    R16_FLOAT = 54,
    D16_UNORM = 55,
    R16_UNORM = 56,
    R16_UINT = 57,
    R16_SNORM = 58,
    R16_SINT = 59,
    R8_TYPELESS = 60,
    R8_UNORM = 61,
    R8_UINT = 62,
    R8_SNORM = 63,
    R8_SINT = 64,
    A8_UNORM = 65,
    R1_UNORM = 66,
    R9G9B9E5_SHAREDEXP = 67,
    R8G8_B8G8_UNORM = 68,
    G8R8_G8B8_UNORM = 69,
    BC1_TYPELESS = 70,
    BC1_UNORM = 71,
    BC1_UNORM_SRGB = 72,
    BC2_TYPELESS = 73,
    BC2_UNORM = 74,
    BC2_UNORM_SRGB = 75,
    BC3_TYPELESS = 76,
    BC3_UNORM = 77,
    BC3_UNORM_SRGB = 78,
    BC4_TYPELESS = 79,
    BC4_UNORM = 80,
    BC4_SNORM = 81,
    BC5_TYPELESS = 82,
    BC5_UNORM = 83,
    BC5_SNORM = 84,
    B5G6R5_UNORM = 85,
    B5G5R5A1_UNORM = 86,
    B8G8R8A8_UNORM = 87,
    B8G8R8X8_UNORM = 88,
    R10G10B10_XR_BIAS_A2_UNORM = 89,
    B8G8R8A8_TYPELESS = 90,
    B8G8R8A8_UNORM_SRGB = 91,
    B8G8R8X8_TYPELESS = 92,
    B8G8R8X8_UNORM_SRGB = 93,
    BC6H_TYPELESS = 94,
    BC6H_UF16 = 95,
    BC6H_SF16 = 96,
    BC7_TYPELESS = 97,
    BC7_UNORM = 98,
    BC7_UNORM_SRGB = 99,
    AYUV = 100,
    Y410 = 101,
    Y416 = 102,
    NV12 = 103,
    P010 = 104,
    P016 = 105,
    OPAQUE = 106,
    YUY2 = 107,
    Y210 = 108,
    Y216 = 109,
    NV11 = 110,
    AI44 = 111,
    IA44 = 112,
    P8 = 113,
    A8P8 = 114,
    B4G4R4A4_UNORM = 115,
    P208 = 130,
    V208 = 131,
    V408 = 132,
    SAMPLER_FEEDBACK_MIN_MIP_OPAQUE,
    SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE,
}

#[derive(Clone, Copy, Debug)]
pub struct Dx10Header {
    /// Resource data formats, including fully-typed and typeless formats.
    /// See https://learn.microsoft.com/en-us/windows/win32/api/dxgiformat/ne-dxgiformat-dxgi_format
    pub dxgi_format: DXGIFormat,
    pub resource_dimension: D3D10ResourceDimension,
    pub misc_flag: DdsResourceMiscFlags,
    pub array_size: usize,
    pub misc_flags2: u32,
}

impl Dx10Header {
    #[tracing::instrument("Dx10Header::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        let dxgi_format = r
            .read_u32()
            .map(|val| DXGIFormat::from_u32(val).unwrap_or(DXGIFormat::UNKNOWN))?;
        let resource_dimension = r.read_u32().map(|val| {
            D3D10ResourceDimension::from_u32(val).unwrap_or(D3D10ResourceDimension::Unknown)
        })?;
        let misc_flag = r.read_u32().map(binary::flags_from_bits)?;
        let array_size = r.read_u32()? as usize;
        let misc_flags2 = r.read_u32()?;

        Ok(Self {
            dxgi_format,
            resource_dimension,
            misc_flag,
            array_size,
            misc_flags2,
        })
    }

    #[tracing::instrument("Dx10Header::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        w.write_u32(
            self.dxgi_format
                .to_u32()
                .ok_or_eyre("DXGIFormat should fit in a u32")?,
        )?;
        w.write_u32(
            self.resource_dimension
                .to_u32()
                .ok_or_eyre("D3D10ResourceDimension should fit in a u32")?,
        )?;
        w.write_u32(self.misc_flag.bits())?;
        w.write_u32(self.array_size as u32)?;
        w.write_u32(self.misc_flags2)?;

        Ok(())
    }
}
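
A quick round-trip sketch for the header above, assuming (as the rest of this diff does) that the crate's `ReadExt`/`WriteExt` traits are implemented for `std::io::Cursor`; the concrete field values are illustrative only:

/// Sketch: write a Dx10Header into an in-memory buffer and parse it back.
fn dx10_header_roundtrip() -> Result<()> {
    let header = Dx10Header {
        dxgi_format: DXGIFormat::BC7_UNORM,
        resource_dimension: D3D10ResourceDimension::Texture2D,
        misc_flag: DdsResourceMiscFlags::empty(),
        array_size: 1,
        misc_flags2: 0,
    };

    let mut w = std::io::Cursor::new(Vec::new());
    header.to_binary(&mut w)?;

    let mut r = std::io::Cursor::new(w.into_inner());
    let parsed = Dx10Header::from_binary(&mut r)?;
    assert_eq!(parsed.array_size, 1);

    Ok(())
}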

#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum FourCC {
    Empty = u32::MAX,
    DXT1 = 0x31545844,
    DXT2 = 0x33545844,
    DXT5 = 0x35545844,
    AXI1 = 0x31495441,
    AXI2 = 0x32495441,
    DX10 = 0x30315844,
    D3D_A16B16G16R16 = 0x24,
    D3D_R16F = 0x6F,
    D3D_G16R16F = 0x70,
    D3D_A16B16G16R16F = 0x71,
    D3D_R32F = 0x72,
    D3D_G32R32F = 0x73,
    D3D_A32B32G32R32F = 0x74,
}
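
The numeric values above are simply the four ASCII characters of each code packed into a little-endian u32. A std-only sketch to illustrate:

/// Sketch: a FourCC is its four ASCII bytes read as a little-endian u32.
fn fourcc(code: &[u8; 4]) -> u32 {
    u32::from_le_bytes(*code)
}

fn main() {
    assert_eq!(fourcc(b"DDS "), 0x2053_4444); // the file magic
    assert_eq!(fourcc(b"DXT1"), 0x3154_5844);
    assert_eq!(fourcc(b"DX10"), 0x3031_5844);
}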

#[derive(Clone, Copy, Debug)]
pub struct DDSPixelFormat {
    pub flags: DDPF,
    pub four_cc: FourCC,
    pub rgb_bit_count: u32,
    pub r_bit_mask: u32,
    pub g_bit_mask: u32,
    pub b_bit_mask: u32,
    pub a_bit_mask: u32,
}

impl DDSPixelFormat {
    #[tracing::instrument("DDSPixelFormat::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        let size = r.read_u32()? as usize;
        eyre::ensure!(
            size == 32,
            "Invalid structure size. Got 0x{:0X}, expected 0x20",
            size
        );

        let flags: DDPF = r.read_u32().map(binary::flags_from_bits)?;

        let four_cc = if flags.contains(DDPF::FOURCC) {
            r.read_u32().and_then(|bytes| {
                FourCC::from_u32(bytes).ok_or_eyre(format!("Unknown FourCC value: {:08X}", bytes))
            })?
        } else {
            r.skip_u32(0)?;
            FourCC::Empty
        };

        let rgb_bit_count = r.read_u32()?;
        let r_bit_mask = r.read_u32()?;
        let g_bit_mask = r.read_u32()?;
        let b_bit_mask = r.read_u32()?;
        let a_bit_mask = r.read_u32()?;

        Ok(Self {
            flags,
            four_cc,
            rgb_bit_count,
            r_bit_mask,
            g_bit_mask,
            b_bit_mask,
            a_bit_mask,
        })
    }

    #[tracing::instrument("DDSPixelFormat::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        // Structure size
        w.write_u32(32)?;

        w.write_u32(self.flags.bits())?;
        w.write_u32(self.four_cc.to_u32().unwrap_or_default())?;
        w.write_u32(self.rgb_bit_count)?;
        w.write_u32(self.r_bit_mask)?;
        w.write_u32(self.g_bit_mask)?;
        w.write_u32(self.b_bit_mask)?;
        w.write_u32(self.a_bit_mask)?;

        Ok(())
    }
}

#[derive(Clone, Copy, Debug)]
pub struct DDSHeader {
    /// Flags to indicate which members contain valid data.
    pub flags: DDSD,
    pub height: usize,
    pub width: usize,
    pub pitch_or_linear_size: usize,
    pub depth: usize,
    pub mipmap_count: usize,
    pub pixel_format: DDSPixelFormat,
    pub caps: DDSCAPS,
    pub caps_2: DDSCAPS2,
}

impl DDSHeader {
    #[tracing::instrument("DDSHeader::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        r.skip_u32(MAGIC_DDS).wrap_err("Invalid magic bytes")?;

        let size = r.read_u32()?;
        eyre::ensure!(
            size == 124,
            "Invalid structure size. Got 0x{:0X}, expected 0x7C",
            size
        );

        let flags = r.read_u32().map(binary::flags_from_bits)?;
        let height = r.read_u32()? as usize;
        let width = r.read_u32()? as usize;
        let pitch_or_linear_size = r.read_u32()? as usize;
        let depth = r.read_u32()? as usize;
        let mipmap_count = r.read_u32()? as usize;

        // Skip reserved bytes
        r.seek(SeekFrom::Current(11 * 4))?;

        let pixel_format = DDSPixelFormat::from_binary(&mut r)?;
        let caps = r.read_u32().map(binary::flags_from_bits)?;
        let caps_2 = r.read_u32().map(binary::flags_from_bits)?;

        // Skip unused and reserved bytes
        r.seek(SeekFrom::Current(3 * 4))?;

        Ok(Self {
            flags,
            height,
            width,
            pitch_or_linear_size,
            depth,
            mipmap_count,
            pixel_format,
            caps,
            caps_2,
        })
    }

    #[tracing::instrument("DDSHeader::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        w.write_u32(MAGIC_DDS)?;

        // Structure size in bytes
        w.write_u32(124)?;
        w.write_u32(self.flags.bits())?;
        w.write_u32(self.height as u32)?;
        w.write_u32(self.width as u32)?;
        w.write_u32(self.pitch_or_linear_size as u32)?;
        w.write_u32(self.depth as u32)?;
        w.write_u32(self.mipmap_count as u32)?;

        w.write_all(&[0u8; 11 * 4])?;

        self.pixel_format.to_binary(&mut w)?;
        w.write_u32(self.caps.bits())?;
        w.write_u32(self.caps_2.bits())?;

        w.write_all(&[0u8; 3 * 4])?;

        Ok(())
    }
}
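
How the two headers chain together when parsing a complete file: per the DDS documentation, the DX10 extension header only follows the base header when the pixel format's FourCC is literally "DX10". A hedged sketch; `read_dds_headers` is a hypothetical helper, not part of this module:

/// Sketch: parse the base header, then the DX10 header if (and only if) it is present.
fn read_dds_headers(mut r: impl ReadExt) -> Result<(DDSHeader, Option<Dx10Header>)> {
    let header = DDSHeader::from_binary(&mut r)?;

    let dx10 = if header.pixel_format.four_cc == FourCC::DX10 {
        Some(Dx10Header::from_binary(&mut r)?)
    } else {
        None
    };

    Ok((header, dx10))
}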

#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display)]
#[repr(u32)]
pub enum ImageType {
    Image2D = 0,
    Image3D = 1,
    ImageCube = 2,
    Unknown = 3,
    Image2dArray = 4,
    ImagecubeArray = 5,
}

/// A stripped version of `ImageType` that contains just the data needed
/// to read a DDS image stream.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub struct StrippedImageFormat {
    pub image_type: ImageType,
    pub width: usize,
    pub height: usize,
    pub layers: usize,
    pub mip_levels: usize,
}

// This is a stripped down version of the logic that the engine implements to fill
// `stingray::ImageFormat`. With the `type` field we need to distinguish between `IMAGE3D`
// and everything else, and we need the various dimensions filled to calculate the chunks.
pub fn stripped_format_from_header(
    dds_header: &DDSHeader,
    dx10_header: &Dx10Header,
) -> Result<StrippedImageFormat> {
    let mut image_format = StrippedImageFormat {
        image_type: ImageType::Unknown,
        width: dds_header.width,
        height: dds_header.height,
        layers: 0,
        mip_levels: 0,
    };

    if dds_header.mipmap_count > 0 {
        image_format.mip_levels = dds_header.mipmap_count;
    } else {
        image_format.mip_levels = 1;
    }

    // INFO: In the engine, the next two sections only run conditionally, based on a number
    // of checks against "fourcc" and other fields. But it might be fine to apply them
    // unconditionally here, as they seem universal to DDS.
    // Will have to check how it plays out with actual assets.

    if dds_header.caps_2.contains(DDSCAPS2::CUBEMAP) {
        image_format.image_type = ImageType::ImageCube;
        image_format.layers = 6;
    } else if dds_header.caps_2.contains(DDSCAPS2::VOLUME) {
        image_format.image_type = ImageType::Image3D;
        image_format.layers = dds_header.depth;
    } else {
        image_format.image_type = ImageType::Image2D;
        image_format.layers = 1;
    }

    if dx10_header.resource_dimension == D3D10ResourceDimension::Texture2D {
        if dx10_header
            .misc_flag
            .contains(DdsResourceMiscFlags::TEXTURECUBE)
        {
            image_format.image_type = ImageType::ImageCube;
            if dx10_header.array_size > 1 {
                image_format.layers = dx10_header.array_size;
            } else {
                image_format.layers = 6;
            }
        } else {
            image_format.image_type = ImageType::Image2D;
            image_format.layers = dx10_header.array_size;
        }
    } else if dx10_header.resource_dimension == D3D10ResourceDimension::Texture3D {
        image_format.image_type = ImageType::Image3D;
        image_format.layers = dds_header.depth;
    }

    if dx10_header.array_size > 1 {
        match image_format.image_type {
            ImageType::Image2D => image_format.image_type = ImageType::Image2dArray,
            ImageType::ImageCube => image_format.image_type = ImageType::ImagecubeArray,
            ImageType::Image3D => {
                eyre::bail!("3D arrays are not a supported image format")
            }
            _ => {}
        }
    }

    Ok(image_format)
}
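
A worked example of the mapping above for a DX10 cubemap; all field values are made up for illustration and `cubemap_format_example` is not part of this diff:

/// Sketch: a 512x512 BC7 cubemap with 10 mips should map to ImageCube with 6 layers.
fn cubemap_format_example() -> Result<StrippedImageFormat> {
    let dds_header = DDSHeader {
        flags: DDSD::CAPS | DDSD::HEIGHT | DDSD::WIDTH | DDSD::PIXELFORMAT | DDSD::MIPMAPCOUNT,
        height: 512,
        width: 512,
        pitch_or_linear_size: 0,
        depth: 0,
        mipmap_count: 10,
        pixel_format: DDSPixelFormat {
            flags: DDPF::FOURCC,
            four_cc: FourCC::DX10,
            rgb_bit_count: 0,
            r_bit_mask: 0,
            g_bit_mask: 0,
            b_bit_mask: 0,
            a_bit_mask: 0,
        },
        caps: DDSCAPS::TEXTURE | DDSCAPS::COMPLEX | DDSCAPS::MIPMAP,
        caps_2: DDSCAPS2::CUBEMAP | DDSCAPS2::CUBEMAP_ALLFACES,
    };
    let dx10_header = Dx10Header {
        dxgi_format: DXGIFormat::BC7_UNORM,
        resource_dimension: D3D10ResourceDimension::Texture2D,
        misc_flag: DdsResourceMiscFlags::TEXTURECUBE,
        array_size: 1,
        misc_flags2: 0,
    };

    // Expected result: ImageType::ImageCube, 6 layers, 10 mip levels.
    stripped_format_from_header(&dds_header, &dx10_header)
}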

@ -1,3 +1,4 @@
#![feature(cursor_split)]
#![feature(test)]

mod binary;

@ -12,12 +12,19 @@ pub enum HashGroup {
    Filename,
    Filetype,
    Strings,
    TextureCategory,
    Other,
}

impl HashGroup {
    pub fn all() -> [Self; 3] {
        [Self::Filename, Self::Filetype, Self::Other]
    pub fn all() -> [Self; 5] {
        [
            Self::Filename,
            Self::Filetype,
            Self::Strings,
            Self::TextureCategory,
            Self::Other,
        ]
    }
}

@ -27,6 +34,7 @@ impl std::fmt::Display for HashGroup {
            HashGroup::Filename => write!(f, "filename"),
            HashGroup::Filetype => write!(f, "filetype"),
            HashGroup::Strings => write!(f, "strings"),
            HashGroup::TextureCategory => write!(f, "texture-category"),
            HashGroup::Other => write!(f, "other"),
        }
    }

@ -48,6 +56,7 @@ struct Row {
    group: HashGroup,
}

#[derive(Clone)]
pub struct Entry {
    value: String,
    long: Murmur64,

@ -73,6 +82,7 @@ impl Entry {
    }
}

#[derive(Clone)]
pub struct Dictionary {
    entries: Vec<Entry>,
}

@ -88,10 +98,12 @@ impl Dictionary {
        Self { entries: vec![] }
    }

    pub async fn from_csv<R>(&mut self, r: R) -> Result<()>
    pub async fn from_csv<R>(r: R) -> Result<Self>
    where
        R: AsyncRead + std::marker::Unpin + std::marker::Send,
    {
        let mut entries = vec![];

        let r = AsyncDeserializer::from_reader(r);
        let mut records = r.into_deserialize::<Row>();

@ -112,10 +124,10 @@ impl Dictionary {
                group: record.group,
            };

            self.entries.push(entry);
            entries.push(entry);
        }

        Ok(())
        Ok(Self { entries })
    }

    pub async fn to_csv<W>(&self, w: W) -> Result<()>

@ -161,7 +173,7 @@ impl Dictionary {
        self.entries.push(entry);
    }

    pub fn find(&mut self, value: &String, group: HashGroup) -> Option<&Entry> {
    pub fn find(&self, value: &String, group: HashGroup) -> Option<&Entry> {
        self.entries
            .iter()
            .find(|e| e.value == *value && e.group == group)