Compare commits

..

14 commits

Author SHA1 Message Date
7384d3359c
Only use texture files for texture-meta command
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
2025-03-06 13:32:47 +01:00
5f8016c655
Use macro to generate file type enum and impls
Due to the large number of variants, and the different kind of values
connected to each variant (hash, extension name) being scattered
across the various `impl` blocks, the file became rather convoluted.

While I don't generally like the indirection of macros or meta
programming, it's not that bad with Rust, thanks to Rust Analyzer being
able to attach diagnostics to the source inside the macro definition,
and the ability to generate the macro's output for validation.

Therefore, the new macro allows putting all data used for this enum
definition into a single block.
2025-02-21 15:44:10 +01:00
ea2368c532
Switch to cursor_split feature
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/msvc Build for the target platform: msvc
build/linux Build for the target platform: linux
The `cursor_remaining` feature was replaced by that:
https://github.com/rust-lang/rust/pull/109174
2025-02-19 11:14:13 +01:00
ff6fd948c8
sdk: Remove unused function 2025-02-19 11:14:11 +01:00
572778f210
dtmt: Fix file injection
I ended up wrapping the raw data in a `BundleFile` twice.
I also made '--compile' the default, as it should be much less often
that raw data needs to be inserted. Even files that are essentially raw
binary blobs, like `.wwise_event`, still have some custom fields that
need to be accounted for.
2025-02-19 11:14:09 +01:00
4bf97e9f19
Add cmdline to tracing output
Can come in handy when other people report problems and show the error
message or full log, but not the command line.

Setting that span to `level = "error"` ensures that it won't be disabled
by level filters.
2025-02-19 11:14:07 +01:00
6f6df14bfc
Refactor code for file injection 2025-02-19 11:14:05 +01:00
bcbc005df7
Implement more texture formats
The second compression method found in the game's code seems to be Zlib,
but it doesn't seem to be used in the game files. What does get used is
a compression type of `0`, which appears to be uncompressed data.

For DDS formats, all the ones that are currently used in the game
files can be emitted as is. Though for some of them, other tools might
not be able to display them.
2025-02-19 11:14:04 +01:00
233389ebb1
sdk: Implement decompiling streamed mipmaps
For now, we only extract the largest mipmap.
2025-02-19 11:13:59 +01:00
30b9a93fa3
Reverse DDSImage::load
Decompiling the game binary shows a rather elaborate algorithm to load
DDS images from binary. Though comparing it to Microsoft's documentation
on DDS, most of it seems to be pretty standard handling.

However, we don't actually need all of it. The part about calculating
pitch and reading blocks only accesses a subset of the `ImageFormat`
struct, so we can strip our implementation to just that.
2025-02-19 11:12:11 +01:00
84cb6ff985
sdk: Add decompiled SJSON texture file
In addition to the actual image file, also write a `.texture` engine
file.
2025-02-19 11:12:09 +01:00
0e2f0d4409
sdk: Add dictionary group for texture categories 2025-02-19 11:12:08 +01:00
126d3c743d
dtmt: Add option to compile file when injecting 2025-02-19 11:12:06 +01:00
ea7886b08f
feat(sdk): Implement partial texture decompilation 2025-02-19 11:12:05 +01:00
25 changed files with 2094 additions and 523 deletions

View file

@ -125,6 +125,8 @@ jobs:
vars:
pr: ""
target: msvc
gitea_url: http://forgejo:3000
gitea_api_key: ((gitea_api_key))
- load_var: version_number
reveal: true
@ -140,21 +142,10 @@ jobs:
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/*.exe
- artifact/*.exe.sha256
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: master
fail_fast: true
override: true
globs:
- artifact/*.exe
- artifact/*.exe.sha256
- artifact/*.sha256
- name: build-linux
on_success:
@ -211,20 +202,5 @@ jobs:
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/dtmm.sha256
- artifact/dtmt.sha256
- put: package
resource: gitea-package
no_get: true
inputs:
- artifact
params:
version: master
fail_fast: true
override: true
globs:
- artifact/dtmt
- artifact/dtmm
- artifact/dtmm.sha256
- artifact/dtmt.sha256
- artifact/*.exe
- artifact/*.sha256

View file

@ -24,10 +24,8 @@ PR=${PR:-}
if [ -n "$PR" ]; then
title "PR: $(echo "$PR" | jq '.number') - $(echo "$PR" | jq '.title')"
ref="pr-$(echo "$PR" | jq '.number')-$(git rev-parse --short "$(cat .git/ref || echo "HEAD")" 2>/dev/null || echo 'manual')"
elif [ -f ".git/branch"]; then
ref=$(cat .git/branch)-$(git rev-parse --short $ref)
else
ref=$(git rev-parse --short "$(cat .git/ref || echo "HEAD")")
ref=$(git describe --tags)
fi
title "Version: '$ref'"

View file

@ -22,6 +22,7 @@ caches:
params:
CI: "true"
TARGET: ((target))
GITEA_API_KEY: ((gitea_api_key))
PR: ((pr))
OUTPUT: artifact

View file

@ -7,9 +7,5 @@
":rebaseStalePrs"
],
"prConcurrentLimit": 10,
"branchPrefix": "renovate/",
"baseBranches": [
"$default",
"/^release\\/.*/"
]
"branchPrefix": "renovate/"
}

55
Cargo.lock generated
View file

@ -1123,6 +1123,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c0596c1eac1f9e04ed902702e9878208b336edc9d6fddc8a48387349bab3666"
dependencies = [
"crc32fast",
"libz-sys",
"miniz_oxide 0.8.0",
]
@ -2036,6 +2037,17 @@ dependencies = [
"redox_syscall",
]
[[package]]
name = "libz-sys"
version = "1.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e143b5e666b2695d28f6bca6497720813f699c9602dd7f5cac91008b8ada7f9"
dependencies = [
"cc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "linux-raw-sys"
version = "0.4.14"
@ -2308,6 +2320,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-derive"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -3066,6 +3089,12 @@ dependencies = [
"untrusted",
]
[[package]]
name = "rustversion"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6"
[[package]]
name = "rustybuzz"
version = "0.6.0"
@ -3152,16 +3181,20 @@ dependencies = [
"color-eyre",
"csv-async",
"fastrand",
"flate2",
"futures",
"futures-util",
"glob",
"luajit2-sys",
"nanorand",
"num-derive",
"num-traits",
"oodle",
"path-slash",
"pin-project-lite",
"serde",
"serde_sjson",
"strum",
"tokio",
"tokio-stream",
"tracing",
@ -3421,6 +3454,28 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.90",
]
[[package]]
name = "subtle"
version = "2.6.1"

View file

@ -29,6 +29,7 @@ druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "b
druid-widget-nursery = "0.1"
dtmt-shared = { path = "lib/dtmt-shared" }
fastrand = "2.1.0"
flate2 = { version = "1.0.30", features = ["zlib"] }
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
@ -38,6 +39,8 @@ luajit2-sys = { path = "lib/luajit2-sys" }
minijinja = { version = "2.0.1", default-features = false }
nanorand = "0.7.0"
nexusmods = { path = "lib/nexusmods" }
num-derive = "0.4.2"
num-traits = "0.2.19"
notify = "8.0.0"
oodle = { path = "lib/oodle" }
open = "5.0.1"
@ -50,6 +53,7 @@ serde = { version = "1.0.152", features = ["derive", "rc"] }
serde_sjson = { path = "lib/serde_sjson" }
steamlocate = "2.0.0-beta.2"
strip-ansi-escapes = "0.2.0"
strum = { version = "0.26.3", features = ["derive", "strum_macros"] }
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }

View file

@ -275,7 +275,13 @@ struct ExtractOptions<'a> {
#[tracing::instrument(
skip(ctx, options),
fields(decompile = options.decompile, flatten = options.flatten, dry_run = options.dry_run)
fields(
bundle_name = tracing::field::Empty,
bundle_hash = tracing::field::Empty,
decompile = options.decompile,
flatten = options.flatten,
dry_run = options.dry_run,
)
)]
async fn extract_bundle<P1, P2>(
ctx: Arc<sdk::Context>,
@ -287,9 +293,42 @@ where
P1: AsRef<Path> + std::fmt::Debug,
P2: AsRef<Path> + std::fmt::Debug,
{
let ctx = if ctx.game_dir.is_some() {
tracing::debug!(
"Got game directory from config: {}",
ctx.game_dir.as_ref().unwrap().display()
);
ctx
} else {
let game_dir = path
.as_ref()
.parent()
.and_then(|parent| parent.parent())
.map(|p| p.to_path_buf());
tracing::info!(
"No game directory configured, guessing from bundle path: {:?}",
game_dir
);
Arc::new(sdk::Context {
game_dir,
lookup: Arc::clone(&ctx.lookup),
ljd: ctx.ljd.clone(),
revorb: ctx.revorb.clone(),
ww2ogg: ctx.ww2ogg.clone(),
})
};
let bundle = {
let data = fs::read(path.as_ref()).await?;
let name = Bundle::get_name_from_path(&ctx, path.as_ref());
{
let span = tracing::span::Span::current();
span.record("bundle_hash", format!("{:X}", name));
span.record("bundle_name", name.display().to_string());
}
Bundle::from_binary(&ctx, name, data)?
};

View file

@ -1,31 +1,69 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::str::FromStr as _;
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::{self, Context, OptionExt, Result};
use color_eyre::Help;
use sdk::Bundle;
use tokio::fs::{self, File};
use tokio::io::AsyncReadExt;
use path_slash::PathBufExt as _;
use sdk::filetype::texture;
use sdk::murmur::IdString64;
use sdk::{Bundle, BundleFile, BundleFileType};
use tokio::fs;
pub(crate) fn command_definition() -> Command {
Command::new("inject")
.about("Inject a file into a bundle.")
.arg(
Arg::new("replace")
.help("The name of a file in the bundle whose content should be replaced.")
.short('r')
.long("replace"),
)
.subcommand_required(true)
.about("Inject a file into a bundle.\n\
Raw binary data can be used to directly replace the file's variant data blob without affecting the metadata.\n\
Alternatively, a compiler format may be specified, and a complete bundle file is created.")
.arg(
Arg::new("output")
.help(
"The path to write the changed bundle to. \
If omitted, the input bundle will be overwritten.",
If omitted, the input bundle will be overwritten.\n\
Remember to add a `.patch_<NUMBER>` suffix if you also use '--patch'.",
)
.short('o')
.long("output")
.value_parser(value_parser!(PathBuf)),
)
.arg(
Arg::new("patch")
.help("Create a patch bundle. Optionally, a patch NUMBER may be specified as \
'--patch=123'.\nThe maximum number is 999, the default is 1.\n\
If `--output` is not specified, the `.patch_<NUMBER>` suffix is added to \
the given bundle name.")
.short('p')
.long("patch")
.num_args(0..=1)
.require_equals(true)
.default_missing_value("1")
.value_name("NUMBER")
.value_parser(value_parser!(u16))
)
.arg(
Arg::new("type")
.help("Compile the new file as the given TYPE. If omitted, the file type \
is guessed from the file extension.")
.value_name("TYPE")
)
.subcommand(
Command::new("replace")
.about("Replace an existing file in the bundle")
.arg(
Arg::new("variant")
.help("In combination with '--raw', specify the variant index to replace.")
.long("variant")
.default_value("0")
.value_parser(value_parser!(u8))
)
.arg(
Arg::new("raw")
.help("Insert the given file as raw binary data.\n\
Cannot be used with '--patch'.")
.long("raw")
.action(ArgAction::SetTrue)
)
.arg(
Arg::new("bundle")
.help("Path to the bundle to inject the file into.")
@ -33,80 +71,233 @@ pub(crate) fn command_definition() -> Command {
.value_parser(value_parser!(PathBuf)),
)
.arg(
Arg::new("file")
Arg::new("bundle-file")
.help("The name of a file in the bundle whose content should be replaced.")
.required(true),
)
.arg(
Arg::new("new-file")
.help("Path to the file to inject.")
.required(true)
.value_parser(value_parser!(PathBuf)),
),
)
// .subcommand(
// Command::new("add")
// .about("Add a new file to the bundle")
// .arg(
// Arg::new("new-file")
// .help("Path to the file to inject.")
// .required(true)
// .value_parser(value_parser!(PathBuf)),
// )
// .arg(
// Arg::new("bundle")
// .help("Path to the bundle to inject the file into.")
// .required(true)
// .value_parser(value_parser!(PathBuf)),
// ),
// )
}
#[tracing::instrument(skip_all)]
#[tracing::instrument]
async fn compile_file(
path: impl AsRef<Path> + std::fmt::Debug,
name: impl Into<IdString64> + std::fmt::Debug,
file_type: BundleFileType,
) -> Result<BundleFile> {
let path = path.as_ref();
let file_data = fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read file '{}'", path.display()))?;
let sjson = String::from_utf8(file_data)
.wrap_err_with(|| format!("Invalid UTF8 data in '{}'", path.display()))?;
let root = path.parent().ok_or_eyre("File path has no parent")?;
match file_type {
BundleFileType::Texture => texture::compile(name.into(), sjson, root)
.await
.wrap_err_with(|| format!("Failed to compile file as texture: {}", path.display())),
_ => eyre::bail!(
"Compilation for type '{}' is not implemented, yet",
file_type
),
}
}
#[tracing::instrument(
skip_all,
fields(
bundle_path = tracing::field::Empty,
in_file_path = tracing::field::Empty,
output_path = tracing::field::Empty,
target_name = tracing::field::Empty,
file_type = tracing::field::Empty,
raw = tracing::field::Empty,
)
)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
let bundle_path = matches
let Some((op, sub_matches)) = matches.subcommand() else {
unreachable!("clap is configured to require a subcommand, and they're all handled above");
};
let bundle_path = sub_matches
.get_one::<PathBuf>("bundle")
.expect("required parameter not found");
let file_path = matches
.get_one::<PathBuf>("file")
let in_file_path = sub_matches
.get_one::<PathBuf>("new-file")
.expect("required parameter not found");
tracing::trace!(bundle_path = %bundle_path.display(), file_path = %file_path.display());
let patch_number = matches
.get_one::<u16>("patch")
.map(|num| format!("{:03}", num));
let mut bundle = {
let binary = fs::read(bundle_path).await?;
let name = Bundle::get_name_from_path(&ctx, bundle_path);
Bundle::from_binary(&ctx, name, binary).wrap_err("Failed to open bundle file")?
let output_path = matches
.get_one::<PathBuf>("output")
.cloned()
.unwrap_or_else(|| {
let mut output_path = bundle_path.clone();
if let Some(patch_number) = patch_number.as_ref() {
output_path.set_extension(format!("patch_{:03}", patch_number));
}
output_path
});
let target_name = if op == "replace" {
sub_matches
.get_one::<String>("bundle-file")
.map(|name| match u64::from_str_radix(name, 16) {
Ok(id) => IdString64::from(id),
Err(_) => IdString64::String(name.clone()),
})
.expect("argument is required")
} else {
let mut path = PathBuf::from(in_file_path);
path.set_extension("");
IdString64::from(path.to_slash_lossy().to_string())
};
if let Some(name) = matches.get_one::<String>("replace") {
let mut file = File::open(&file_path)
.await
.wrap_err_with(|| format!("Failed to open '{}'", file_path.display()))?;
if let Some(variant) = bundle
.files_mut()
.filter(|file| file.matches_name(name.clone()))
// TODO: Handle file variants
.find_map(|file| file.variants_mut().next())
{
let mut data = Vec::new();
file.read_to_end(&mut data)
.await
.wrap_err("Failed to read input file")?;
variant.set_data(data);
let file_type = if let Some(forced_type) = matches.get_one::<String>("type") {
BundleFileType::from_str(forced_type.as_str()).wrap_err("Unknown file type")?
} else {
let err = eyre::eyre!("No file '{}' in this bundle.", name)
.with_suggestion(|| {
in_file_path
.extension()
.and_then(|s| s.to_str())
.ok_or_eyre("File extension missing")
.and_then(BundleFileType::from_str)
.wrap_err("Unknown file type")
.with_suggestion(|| "Use '--type TYPE' to specify the file type")?
};
{
let span = tracing::Span::current();
if !span.is_disabled() {
span.record("bundle_path", bundle_path.display().to_string());
span.record("in_file_path", in_file_path.display().to_string());
span.record("output_path", output_path.display().to_string());
span.record("raw", sub_matches.get_flag("raw"));
span.record("target_name", target_name.display().to_string());
span.record("file_type", format!("{:?}", file_type));
}
}
let bundle_name = Bundle::get_name_from_path(&ctx, bundle_path);
let mut bundle = {
let binary = fs::read(bundle_path).await?;
Bundle::from_binary(&ctx, bundle_name.clone(), binary)
.wrap_err_with(|| format!("Failed to open bundle '{}'", bundle_path.display()))?
};
if op == "copy" {
unimplemented!("Implement copying a file from one bundle to the other.");
}
let output_bundle = match op {
"replace" => {
let Some(file) = bundle
.files_mut()
.find(|file| *file.base_name() == target_name)
else {
let err = eyre::eyre!(
"No file with name '{}' in bundle '{}'",
target_name.display(),
bundle_path.display()
);
return Err(err).with_suggestion(|| {
format!(
"Run '{} bundle list {}' to list the files in this bundle.",
"Run '{} bundle list \"{}\"' to list the files in this bundle.",
clap::crate_name!(),
bundle_path.display()
)
})
.with_suggestion(|| {
});
};
if sub_matches.get_flag("raw") {
let variant_index = sub_matches
.get_one::<u8>("variant")
.expect("argument with default missing");
let Some(variant) = file.variants_mut().nth(*variant_index as usize) else {
let err = eyre::eyre!(
"Variant index '{}' does not exist in '{}'",
variant_index,
target_name.display()
);
return Err(err).with_suggestion(|| {
format!(
"Use '{} bundle inject --add {} {} {}' to add it as a new file",
"See '{} bundle inject add --help' if you want to add it as a new file",
clap::crate_name!(),
name,
bundle_path.display(),
file_path.display()
)
});
};
return Err(err);
let data = tokio::fs::read(&in_file_path).await.wrap_err_with(|| {
format!("Failed to read file '{}'", in_file_path.display())
})?;
variant.set_data(data);
file.set_modded(true);
bundle
} else {
let mut bundle_file = compile_file(in_file_path, target_name.clone(), file_type)
.await
.wrap_err("Failed to compile")?;
bundle_file.set_modded(true);
if patch_number.is_some() {
let mut output_bundle = Bundle::new(bundle_name);
output_bundle.add_file(bundle_file);
output_bundle
} else {
*file = bundle_file;
dbg!(&file);
bundle
}
}
}
"add" => {
unimplemented!("Implement adding a new file to the bundle.");
}
_ => unreachable!("no other operations exist"),
};
let out_path = matches.get_one::<PathBuf>("output").unwrap_or(bundle_path);
let data = bundle
let data = output_bundle
.to_binary()
.wrap_err("Failed to write changed bundle to output")?;
fs::write(out_path, &data)
fs::write(&output_path, &data)
.await
.wrap_err("Failed to write data to output file")?;
.wrap_err_with(|| format!("Failed to write data to '{}'", output_path.display()))?;
tracing::info!("Modified bundle written to '{}'", output_path.display());
Ok(())
} else {
eyre::bail!("Currently, only the '--replace' operation is supported.");
}
}

View file

@ -1,4 +1,5 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
use cli_table::{print_stdout, WithTitle};
@ -156,6 +157,8 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
BufReader::new(Box::new(f))
};
let lookup = Arc::make_mut(&mut ctx.lookup);
let group = sdk::murmur::HashGroup::from(*group);
let mut added = 0;
@ -165,15 +168,15 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
let total = {
for line in lines.into_iter() {
let value = line?;
if ctx.lookup.find(&value, group).is_some() {
if lookup.find(&value, group).is_some() {
skipped += 1;
} else {
ctx.lookup.add(value, group);
lookup.add(value, group);
added += 1;
}
}
ctx.lookup.len()
lookup.len()
};
let out_path = matches
@ -190,7 +193,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
})
.with_section(|| out_path.display().to_string().header("Path:"))?;
ctx.lookup
lookup
.to_csv(f)
.await
.wrap_err("Failed to write dictionary to disk")?;

View file

@ -0,0 +1,21 @@
use clap::{ArgMatches, Command};
use color_eyre::Result;
mod texture_meta;
pub(crate) fn command_definition() -> Command {
Command::new("experiment")
.subcommand_required(true)
.about("A collection of utilities and experiments.")
.subcommand(texture_meta::command_definition())
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
match matches.subcommand() {
Some(("texture-meta", sub_matches)) => texture_meta::run(ctx, sub_matches).await,
_ => unreachable!(
"clap is configured to require a subcommand, and they're all handled above"
),
}
}

View file

@ -0,0 +1,121 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::Context;
use color_eyre::Result;
use futures_util::StreamExt;
use sdk::{Bundle, BundleFileType};
use tokio::fs;
use crate::cmd::util::resolve_bundle_paths;
pub(crate) fn command_definition() -> Command {
Command::new("texture-meta")
.about(
"Iterates over the provided bundles and lists certain meta data.
Primarily intended to help spot patterns between dependent data fields and values.",
)
.arg(
Arg::new("bundle")
.required(true)
.action(ArgAction::Append)
.value_parser(value_parser!(PathBuf))
.help(
"Path to the bundle(s) to read. If this points to a directory instead \
of a file, all files in that directory will be checked.",
),
)
// TODO: Maybe provide JSON and CSV
// TODO: Maybe allow toggling certain fields
}
#[tracing::instrument(skip(ctx))]
async fn handle_bundle(ctx: &sdk::Context, path: &PathBuf) -> Result<()> {
let bundle = {
let binary = fs::read(path).await?;
let name = Bundle::get_name_from_path(ctx, path);
Bundle::from_binary(ctx, name, binary)?
};
let bundle_dir = ctx
.game_dir
.as_deref()
.map(|dir| dir.join("bundle"))
.or_else(|| path.parent().map(|p| p.to_path_buf()))
.unwrap_or_default();
for f in bundle.files().iter() {
if f.file_type() != BundleFileType::Texture {
continue;
}
for (i, v) in f.variants().iter().enumerate() {
let data_file_name = v.data_file_name();
let data_file_length = if let Some(file_name) = data_file_name {
let path = bundle_dir.join(file_name);
match fs::metadata(&path).await {
Ok(meta) => meta.len(),
Err(err) => {
return Err(err).wrap_err_with(|| {
format!("Failed to open data file {}", path.display())
})
}
}
} else {
0
};
println!(
"{},{},{},{},{:b},{},{},{:?},{},{:#010b}",
bundle.name().display(),
f.name(false, None),
f.file_type().ext_name(),
i,
v.property(),
v.data().len(),
v.external(),
data_file_name,
data_file_length,
v.unknown_1(),
);
}
}
Ok(())
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
let bundles = matches
.get_many::<PathBuf>("bundle")
.unwrap_or_default()
.cloned();
let paths = resolve_bundle_paths(bundles);
let ctx = Arc::new(ctx);
println!(
"Bundle Name,File Name,File Type,Variant,Property,Bundle Data Length,External,Data File,Data File Length,Unknown 1"
);
paths
.for_each_concurrent(10, |p| async {
let ctx = ctx.clone();
async move {
if let Err(err) = handle_bundle(&ctx, &p)
.await
.wrap_err_with(|| format!("Failed to list contents of bundle {}", p.display()))
{
tracing::error!("Failed to handle bundle: {}", format!("{:#}", err));
}
}
.await;
})
.await;
Ok(())
}

View file

@ -12,6 +12,7 @@ use clap::value_parser;
use clap::{command, Arg};
use color_eyre::eyre;
use color_eyre::eyre::{Context, Result};
use sdk::murmur::Dictionary;
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::BufReader;
@ -21,6 +22,7 @@ mod cmd {
pub mod build;
pub mod bundle;
pub mod dictionary;
pub mod experiment;
pub mod migrate;
pub mod murmur;
pub mod new;
@ -36,10 +38,21 @@ struct GlobalConfig {
}
#[tokio::main]
#[tracing::instrument]
#[tracing::instrument(level = "error", fields(cmd_line = tracing::field::Empty))]
async fn main() -> Result<()> {
color_eyre::install()?;
{
let span = tracing::Span::current();
if !span.is_disabled() {
let cmdline: String = std::env::args_os().fold(String::new(), |mut s, arg| {
s.push_str(&arg.to_string_lossy());
s
});
span.record("cmd_line", cmdline);
}
}
let matches = command!()
.subcommand_required(true)
.arg(
@ -56,6 +69,7 @@ async fn main() -> Result<()> {
.subcommand(cmd::build::command_definition())
.subcommand(cmd::bundle::command_definition())
.subcommand(cmd::dictionary::command_definition())
.subcommand(cmd::experiment::command_definition())
.subcommand(cmd::migrate::command_definition())
.subcommand(cmd::murmur::command_definition())
.subcommand(cmd::new::command_definition())
@ -96,8 +110,9 @@ async fn main() -> Result<()> {
let r = BufReader::new(f);
let mut ctx = ctx.write().await;
if let Err(err) = ctx.lookup.from_csv(r).await {
tracing::error!("{:#}", err);
match Dictionary::from_csv(r).await {
Ok(lookup) => ctx.lookup = Arc::new(lookup),
Err(err) => tracing::error!("{:#}", err),
}
})
};
@ -133,6 +148,7 @@ async fn main() -> Result<()> {
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
Some(("experiment", sub_matches)) => cmd::experiment::run(ctx, sub_matches).await?,
Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,

View file

@ -52,6 +52,7 @@ impl From<OodleLZ_CheckCRC> for bindings::OodleLZ_CheckCRC {
#[tracing::instrument(skip(data))]
pub fn decompress<I>(
data: I,
out_size: usize,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
@ -59,7 +60,7 @@ where
I: AsRef<[u8]>,
{
let data = data.as_ref();
let mut out = vec![0; CHUNK_SIZE];
let mut out = vec![0; out_size];
let verbosity = if tracing::enabled!(tracing::Level::INFO) {
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Minimal

View file

@ -10,16 +10,20 @@ byteorder = { workspace = true }
color-eyre = { workspace = true }
csv-async = { workspace = true }
fastrand = { workspace = true }
flate2 = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
glob = { workspace = true }
luajit2-sys = { workspace = true }
nanorand = { workspace = true }
num-derive = { workspace = true }
num-traits = { workspace = true }
oodle = { workspace = true }
path-slash = { workspace = true }
pin-project-lite = { workspace = true }
serde = { workspace = true }
serde_sjson = { workspace = true }
strum = { workspace = true }
tokio = { workspace = true }
tokio-stream = { workspace = true }
tracing = { workspace = true }

View file

@ -42,12 +42,32 @@ impl<T: FromBinary> FromBinary for Vec<T> {
}
}
pub fn flags_from_bits<T: bitflags::Flags>(bits: T::Bits) -> T
where
<T as bitflags::Flags>::Bits: std::fmt::Binary,
{
if let Some(flags) = T::from_bits(bits) {
flags
} else {
let unknown = bits & !T::all().bits();
tracing::warn!(
"Unknown bits found for '{}': known = {:0b}, unknown = {:0b}",
std::any::type_name::<T>(),
T::all().bits(),
unknown
);
T::from_bits_truncate(bits)
}
}
pub mod sync {
use std::ffi::CStr;
use std::io::{self, Read, Seek, SeekFrom};
use std::io::{self, Read, Seek, SeekFrom, Write};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use color_eyre::eyre::WrapErr;
use color_eyre::eyre::{self, WrapErr};
use color_eyre::{Help, Report, Result, SectionExt};
macro_rules! make_read {
@ -123,15 +143,16 @@ pub mod sync {
};
}
pub trait ReadExt: ReadBytesExt + Seek {
pub trait ReadExt: Read + Seek {
fn read_u8(&mut self) -> io::Result<u8> {
ReadBytesExt::read_u8(self)
}
make_read!(read_u16, read_u16_le, u16);
make_read!(read_u32, read_u32_le, u32);
make_read!(read_u64, read_u64_le, u64);
make_skip!(skip_u8, read_u8, u8);
make_skip!(skip_u16, read_u16, u16);
make_skip!(skip_u32, read_u32, u32);
// Implementation based on https://en.wikipedia.org/wiki/LEB128
@ -181,9 +202,17 @@ pub mod sync {
res
}
}
fn read_bool(&mut self) -> Result<bool> {
match ReadExt::read_u8(self)? {
0 => Ok(false),
1 => Ok(true),
v => eyre::bail!("Invalid value for boolean '{}'", v),
}
}
}
pub trait WriteExt: WriteBytesExt + Seek {
pub trait WriteExt: Write + Seek {
fn write_u8(&mut self, val: u8) -> io::Result<()> {
WriteBytesExt::write_u8(self, val)
}
@ -191,6 +220,10 @@ pub mod sync {
make_write!(write_u32, write_u32_le, u32);
make_write!(write_u64, write_u64_le, u64);
fn write_bool(&mut self, val: bool) -> io::Result<()> {
WriteBytesExt::write_u8(self, if val { 1 } else { 0 })
}
fn write_padding(&mut self) -> io::Result<usize> {
let pos = self.stream_position()?;
let size = 16 - (pos % 16) as usize;
@ -207,8 +240,8 @@ pub mod sync {
}
}
impl<R: ReadBytesExt + Seek + ?Sized> ReadExt for R {}
impl<W: WriteBytesExt + Seek + ?Sized> WriteExt for W {}
impl<R: Read + Seek + ?Sized> ReadExt for R {}
impl<W: Write + Seek + ?Sized> WriteExt for W {}
pub(crate) fn _read_up_to<R>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize>
where

View file

@ -15,16 +15,18 @@ use super::filetype::BundleFileType;
#[derive(Debug)]
struct BundleFileHeader {
variant: u32,
unknown_1: u8,
external: bool,
size: usize,
unknown_1: u8,
len_data_file_name: usize,
}
#[derive(Clone)]
pub struct BundleFileVariant {
property: u32,
data: Vec<u8>,
data_file_name: Option<String>,
// Seems to be related to whether there is a data path.
external: bool,
unknown_1: u8,
}
@ -38,6 +40,7 @@ impl BundleFileVariant {
property: 0,
data: Vec::new(),
data_file_name: None,
external: false,
unknown_1: 0,
}
}
@ -62,21 +65,30 @@ impl BundleFileVariant {
self.data_file_name.as_ref()
}
pub fn external(&self) -> bool {
self.external
}
pub fn unknown_1(&self) -> u8 {
self.unknown_1
}
#[tracing::instrument(skip_all)]
fn read_header<R>(r: &mut R) -> Result<BundleFileHeader>
where
R: Read + Seek,
{
let variant = r.read_u32()?;
let unknown_1 = r.read_u8()?;
let external = r.read_bool()?;
let size = r.read_u32()? as usize;
r.skip_u8(1)?;
let unknown_1 = r.read_u8()?;
let len_data_file_name = r.read_u32()? as usize;
Ok(BundleFileHeader {
size,
unknown_1,
external,
variant,
unknown_1,
len_data_file_name,
})
}
@ -87,7 +99,7 @@ impl BundleFileVariant {
W: Write + Seek,
{
w.write_u32(self.property)?;
w.write_u8(self.unknown_1)?;
w.write_bool(self.external)?;
let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0);
@ -105,13 +117,36 @@ impl BundleFileVariant {
}
}
impl std::fmt::Debug for BundleFileVariant {
    /// Formats the variant, abbreviating `data` to a five-byte hex preview
    /// so large payloads don't flood log output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Decide on the payload representation up front, then emit all
        // fields in a single builder chain. `unknown_1` is intentionally
        // not printed, matching the established output format.
        let data_repr = if self.data.len() <= 5 {
            format!("{:x?}", &self.data)
        } else {
            format!("{:x?}.. ({} bytes)", &self.data[..5], &self.data.len())
        };

        f.debug_struct("BundleFileVariant")
            .field("property", &self.property)
            .field("data", &data_repr)
            .field("data_file_name", &self.data_file_name)
            .field("external", &self.external)
            .finish()
    }
}
bitflags! {
    /// Flag bits stored in a bundle file's property word.
    #[derive(Default, Clone, Copy, Debug)]
    pub struct Properties: u32 {
        // Presumably marks entries that carry a data section — TODO
        // confirm against the engine's bundle format.
        const DATA = 0b100;
        // A custom flag used by DTMT to signify a file altered by mods.
        const MODDED = 1 << 31;
    }
}
#[derive(Clone, Debug)]
pub struct BundleFile {
file_type: BundleFileType,
name: IdString64,
@ -133,6 +168,18 @@ impl BundleFile {
self.variants.push(variant)
}
/// Replaces this file's variants wholesale.
pub fn set_variants(&mut self, variants: Vec<BundleFileVariant>) {
    self.variants = variants;
}
/// Overwrites the file's property flags.
pub fn set_props(&mut self, props: Properties) {
    self.props = props;
}
/// Sets or clears the DTMT-specific `MODDED` flag on this file.
pub fn set_modded(&mut self, is_modded: bool) {
    self.props.set(Properties::MODDED, is_modded);
}
#[tracing::instrument(name = "File::read", skip(ctx, r))]
pub fn from_reader<R>(ctx: &crate::Context, r: &mut R, props: Properties) -> Result<Self>
where
@ -188,6 +235,7 @@ impl BundleFile {
let s = r
.read_string_len(header.len_data_file_name)
.wrap_err("Failed to read data file name")?;
Some(s)
} else {
None
@ -200,6 +248,7 @@ impl BundleFile {
property: header.variant,
data,
data_file_name,
external: header.external,
unknown_1: header.unknown_1,
};
@ -227,7 +276,7 @@ impl BundleFile {
for variant in self.variants.iter() {
w.write_u32(variant.property())?;
w.write_u8(variant.unknown_1)?;
w.write_bool(variant.external)?;
let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0);
@ -261,6 +310,9 @@ impl BundleFile {
) -> Result<Self> {
match file_type {
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
BundleFileType::Texture => texture::compile(name, sjson, root)
.await
.wrap_err("Failed to compile Texture file"),
BundleFileType::Unknown(_) => {
eyre::bail!("Unknown file type. Cannot compile from SJSON");
}
@ -299,14 +351,13 @@ impl BundleFile {
s
}
pub fn matches_name(&self, name: impl Into<IdString64>) -> bool {
let name = name.into();
if self.name == name {
pub fn matches_name(&self, name: &IdString64) -> bool {
if self.name == *name {
return true;
}
if let IdString64::String(name) = name {
self.name(false, None) == name || self.name(true, None) == name
self.name(false, None) == *name || self.name(true, None) == *name
} else {
false
}
@ -344,18 +395,16 @@ impl BundleFile {
Ok(files)
}
#[tracing::instrument(name = "File::decompiled", skip_all)]
#[tracing::instrument(
name = "File::decompiled",
skip_all,
fields(file = self.name(false, None), file_type = self.file_type().ext_name(), variants = self.variants.len())
)]
pub async fn decompiled(&self, ctx: &crate::Context) -> Result<Vec<UserFile>> {
let file_type = self.file_type();
if tracing::enabled!(tracing::Level::DEBUG) {
tracing::debug!(
name = self.name(true, None),
variants = self.variants.len(),
"Attempting to decompile"
);
}
// The `Strings` type handles all variants combined.
// For the other ones, each variant will be its own file.
if file_type == BundleFileType::Strings {
return strings::decompile(ctx, &self.variants);
}
@ -371,6 +420,7 @@ impl BundleFile {
let res = match file_type {
BundleFileType::Lua => lua::decompile(ctx, data).await,
BundleFileType::Package => package::decompile(ctx, name.clone(), data),
BundleFileType::Texture => texture::decompile(ctx, name.clone(), variant).await,
_ => {
tracing::debug!("Can't decompile, unknown file type");
Ok(vec![UserFile::with_name(data.to_vec(), name.clone())])

View file

@ -3,229 +3,144 @@ use serde::Serialize;
use crate::murmur::Murmur64;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
Animation,
AnimationCurves,
Apb,
BakedLighting,
Bik,
BlendSet,
Bones,
Chroma,
CommonPackage,
Config,
Crypto,
Data,
Entity,
Flow,
Font,
Ies,
Ini,
Input,
Ivf,
Keys,
Level,
Lua,
Material,
Mod,
MouseCursor,
NavData,
NetworkConfig,
OddleNet,
Package,
Particles,
PhysicsProperties,
RenderConfig,
RtPipeline,
Scene,
Shader,
ShaderLibrary,
ShaderLibraryGroup,
ShadingEnvionmentMapping,
ShadingEnvironment,
Slug,
SlugAlbum,
SoundEnvironment,
SpuJob,
StateMachine,
StaticPVS,
Strings,
SurfaceProperties,
Texture,
TimpaniBank,
TimpaniMaster,
Tome,
Ugg,
Unit,
Upb,
VectorField,
Wav,
WwiseBank,
WwiseDep,
WwiseEvent,
WwiseMetadata,
WwiseStream,
Xml,
macro_rules! make_enum {
(
$( $variant:ident, $hash:expr, $ext:expr $(, $decompiled:expr)? ; )+
) => {
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
$(
$variant,
)+
Unknown(Murmur64),
}
}
impl BundleFileType {
impl BundleFileType {
pub fn ext_name(&self) -> String {
match self {
BundleFileType::AnimationCurves => String::from("animation_curves"),
BundleFileType::Animation => String::from("animation"),
BundleFileType::Apb => String::from("apb"),
BundleFileType::BakedLighting => String::from("baked_lighting"),
BundleFileType::Bik => String::from("bik"),
BundleFileType::BlendSet => String::from("blend_set"),
BundleFileType::Bones => String::from("bones"),
BundleFileType::Chroma => String::from("chroma"),
BundleFileType::CommonPackage => String::from("common_package"),
BundleFileType::Config => String::from("config"),
BundleFileType::Crypto => String::from("crypto"),
BundleFileType::Data => String::from("data"),
BundleFileType::Entity => String::from("entity"),
BundleFileType::Flow => String::from("flow"),
BundleFileType::Font => String::from("font"),
BundleFileType::Ies => String::from("ies"),
BundleFileType::Ini => String::from("ini"),
BundleFileType::Input => String::from("input"),
BundleFileType::Ivf => String::from("ivf"),
BundleFileType::Keys => String::from("keys"),
BundleFileType::Level => String::from("level"),
BundleFileType::Lua => String::from("lua"),
BundleFileType::Material => String::from("material"),
BundleFileType::Mod => String::from("mod"),
BundleFileType::MouseCursor => String::from("mouse_cursor"),
BundleFileType::NavData => String::from("nav_data"),
BundleFileType::NetworkConfig => String::from("network_config"),
BundleFileType::OddleNet => String::from("oodle_net"),
BundleFileType::Package => String::from("package"),
BundleFileType::Particles => String::from("particles"),
BundleFileType::PhysicsProperties => String::from("physics_properties"),
BundleFileType::RenderConfig => String::from("render_config"),
BundleFileType::RtPipeline => String::from("rt_pipeline"),
BundleFileType::Scene => String::from("scene"),
BundleFileType::ShaderLibraryGroup => String::from("shader_library_group"),
BundleFileType::ShaderLibrary => String::from("shader_library"),
BundleFileType::Shader => String::from("shader"),
BundleFileType::ShadingEnvionmentMapping => String::from("shading_environment_mapping"),
BundleFileType::ShadingEnvironment => String::from("shading_environment"),
BundleFileType::SlugAlbum => String::from("slug_album"),
BundleFileType::Slug => String::from("slug"),
BundleFileType::SoundEnvironment => String::from("sound_environment"),
BundleFileType::SpuJob => String::from("spu_job"),
BundleFileType::StateMachine => String::from("state_machine"),
BundleFileType::StaticPVS => String::from("static_pvs"),
BundleFileType::Strings => String::from("strings"),
BundleFileType::SurfaceProperties => String::from("surface_properties"),
BundleFileType::Texture => String::from("texture"),
BundleFileType::TimpaniBank => String::from("timpani_bank"),
BundleFileType::TimpaniMaster => String::from("timpani_master"),
BundleFileType::Tome => String::from("tome"),
BundleFileType::Ugg => String::from("ugg"),
BundleFileType::Unit => String::from("unit"),
BundleFileType::Upb => String::from("upb"),
BundleFileType::VectorField => String::from("vector_field"),
BundleFileType::Wav => String::from("wav"),
BundleFileType::WwiseBank => String::from("wwise_bank"),
BundleFileType::WwiseDep => String::from("wwise_dep"),
BundleFileType::WwiseEvent => String::from("wwise_event"),
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
BundleFileType::WwiseStream => String::from("wwise_stream"),
BundleFileType::Xml => String::from("xml"),
BundleFileType::Unknown(s) => format!("{s:016X}"),
$(
Self::$variant => String::from($ext),
)+
Self::Unknown(s) => format!("{s:016X}"),
}
}
pub fn decompiled_ext_name(&self) -> String {
match self {
BundleFileType::Texture => String::from("dds"),
BundleFileType::WwiseBank => String::from("bnk"),
BundleFileType::WwiseStream => String::from("ogg"),
$(
$( Self::$variant => String::from($decompiled), )?
)+
_ => self.ext_name(),
}
}
}
pub fn hash(&self) -> Murmur64 {
Murmur64::from(*self)
impl std::str::FromStr for BundleFileType {
type Err = color_eyre::Report;
fn from_str(s: &str) -> Result<Self> {
match s {
$(
$ext => Ok(Self::$variant),
)+
s => eyre::bail!("Unknown type string '{}'", s),
}
}
}
impl From<u64> for BundleFileType {
fn from(h: u64) -> Self {
match h {
$(
$hash => Self::$variant,
)+
hash => Self::Unknown(hash.into()),
}
}
}
impl From<BundleFileType> for u64 {
fn from(t: BundleFileType) -> u64 {
match t {
$(
BundleFileType::$variant => $hash,
)+
BundleFileType::Unknown(hash) => hash.into(),
}
}
}
}
}
impl std::str::FromStr for BundleFileType {
type Err = color_eyre::Report;
// Table of all known bundle file types: enum variant, Murmur64 hash of
// the type name, extension string, and (optionally) the extension used
// for decompiled output files.
//
// NOTE: `ShadingEnvionmentMapping` carries its historical typo in the
// *variant name* only. The extension string must be
// "shading_environment_mapping" — the pre-macro implementations of
// `ext_name` and `FromStr` both used that spelling, so anything else
// would change decompiled file extensions and break parsing.
make_enum! {
    AnimationCurves, 0xdcfb9e18fff13984, "animation_curves";
    Animation, 0x931e336d7646cc26, "animation";
    Apb, 0x3eed05ba83af5090, "apb";
    BakedLighting, 0x7ffdb779b04e4ed1, "baked_lighting";
    Bik, 0xaa5965f03029fa18, "bik";
    BlendSet, 0xe301e8af94e3b5a3, "blend_set";
    Bones, 0x18dead01056b72e9, "bones";
    Chroma, 0xb7893adf7567506a, "chroma";
    CommonPackage, 0xfe9754bd19814a47, "common_package";
    Config, 0x82645835e6b73232, "config";
    Crypto, 0x69108ded1e3e634b, "crypto";
    Data, 0x8fd0d44d20650b68, "data";
    Entity, 0x9831ca893b0d087d, "entity";
    Flow, 0x92d3ee038eeb610d, "flow";
    Font, 0x9efe0a916aae7880, "font";
    Ies, 0x8f7d5a2c0f967655, "ies";
    Ini, 0xd526a27da14f1dc5, "ini";
    Input, 0x2bbcabe5074ade9e, "input";
    Ivf, 0xfa4a8e091a91201e, "ivf";
    Keys, 0xa62f9297dc969e85, "keys";
    Level, 0x2a690fd348fe9ac5, "level";
    Lua, 0xa14e8dfa2cd117e2, "lua";
    Material, 0xeac0b497876adedf, "material";
    Mod, 0x3fcdd69156a46417, "mod";
    MouseCursor, 0xb277b11fe4a61d37, "mouse_cursor";
    NavData, 0x169de9566953d264, "nav_data";
    NetworkConfig, 0x3b1fa9e8f6bac374, "network_config";
    OddleNet, 0xb0f2c12eb107f4d8, "oodle_net";
    Package, 0xad9c6d9ed1e5e77a, "package";
    Particles, 0xa8193123526fad64, "particles";
    PhysicsProperties, 0xbf21403a3ab0bbb1, "physics_properties";
    RenderConfig, 0x27862fe24795319c, "render_config";
    RtPipeline, 0x9ca183c2d0e76dee, "rt_pipeline";
    Scene, 0x9d0a795bfe818d19, "scene";
    Shader, 0xcce8d5b5f5ae333f, "shader";
    ShaderLibrary, 0xe5ee32a477239a93, "shader_library";
    ShaderLibraryGroup, 0x9e5c3cc74575aeb5, "shader_library_group";
    ShadingEnvionmentMapping, 0x250e0a11ac8e26f8, "shading_environment_mapping";
    ShadingEnvironment, 0xfe73c7dcff8a7ca5, "shading_environment";
    Slug, 0xa27b4d04a9ba6f9e, "slug";
    SlugAlbum, 0xe9fc9ea7042e5ec0, "slug_album";
    SoundEnvironment, 0xd8b27864a97ffdd7, "sound_environment";
    SpuJob, 0xf97af9983c05b950, "spu_job";
    StateMachine, 0xa486d4045106165c, "state_machine";
    StaticPVS, 0xe3f0baa17d620321, "static_pvs";
    Strings, 0x0d972bab10b40fd3, "strings";
    SurfaceProperties, 0xad2d3fa30d9ab394, "surface_properties";
    Texture, 0xcd4238c6a0c69e32, "texture", "dds";
    TimpaniBank, 0x99736be1fff739a4, "timpani_bank";
    TimpaniMaster, 0x00a3e6c59a2b9c6c, "timpani_master";
    Tome, 0x19c792357c99f49b, "tome";
    Ugg, 0x712d6e3dd1024c9c, "ugg";
    Unit, 0xe0a48d0be9a7453f, "unit";
    Upb, 0xa99510c6e86dd3c2, "upb";
    VectorField, 0xf7505933166d6755, "vector_field";
    Wav, 0x786f65c00a816b19, "wav";
    WwiseBank, 0x535a7bd3e650d799, "wwise_bank", "bnk";
    WwiseDep, 0xaf32095c82f2b070, "wwise_dep";
    WwiseEvent, 0xaabdd317b58dfc8a, "wwise_event";
    WwiseMetadata, 0xd50a8b7e1c82b110, "wwise_metadata";
    WwiseStream, 0x504b55235d21440e, "wwise_stream", "ogg";
    Xml, 0x76015845a6003765, "xml";
    Theme, 0x38bb9442048a7fbd, "theme";
    MissionThemes, 0x80f2de893657f83a, "mission_themes";
}
fn from_str(s: &str) -> Result<Self, Self::Err> {
let val = match s {
"animation_curves" => BundleFileType::AnimationCurves,
"animation" => BundleFileType::Animation,
"apb" => BundleFileType::Apb,
"baked_lighting" => BundleFileType::BakedLighting,
"bik" => BundleFileType::Bik,
"blend_set" => BundleFileType::BlendSet,
"bones" => BundleFileType::Bones,
"chroma" => BundleFileType::Chroma,
"common_package" => BundleFileType::CommonPackage,
"config" => BundleFileType::Config,
"crypto" => BundleFileType::Crypto,
"data" => BundleFileType::Data,
"entity" => BundleFileType::Entity,
"flow" => BundleFileType::Flow,
"font" => BundleFileType::Font,
"ies" => BundleFileType::Ies,
"ini" => BundleFileType::Ini,
"input" => BundleFileType::Input,
"ivf" => BundleFileType::Ivf,
"keys" => BundleFileType::Keys,
"level" => BundleFileType::Level,
"lua" => BundleFileType::Lua,
"material" => BundleFileType::Material,
"mod" => BundleFileType::Mod,
"mouse_cursor" => BundleFileType::MouseCursor,
"nav_data" => BundleFileType::NavData,
"network_config" => BundleFileType::NetworkConfig,
"oodle_net" => BundleFileType::OddleNet,
"package" => BundleFileType::Package,
"particles" => BundleFileType::Particles,
"physics_properties" => BundleFileType::PhysicsProperties,
"render_config" => BundleFileType::RenderConfig,
"rt_pipeline" => BundleFileType::RtPipeline,
"scene" => BundleFileType::Scene,
"shader_library_group" => BundleFileType::ShaderLibraryGroup,
"shader_library" => BundleFileType::ShaderLibrary,
"shader" => BundleFileType::Shader,
"shading_environment_mapping" => BundleFileType::ShadingEnvionmentMapping,
"shading_environment" => BundleFileType::ShadingEnvironment,
"slug_album" => BundleFileType::SlugAlbum,
"slug" => BundleFileType::Slug,
"sound_environment" => BundleFileType::SoundEnvironment,
"spu_job" => BundleFileType::SpuJob,
"state_machine" => BundleFileType::StateMachine,
"static_pvs" => BundleFileType::StaticPVS,
"strings" => BundleFileType::Strings,
"surface_properties" => BundleFileType::SurfaceProperties,
"texture" => BundleFileType::Texture,
"timpani_bank" => BundleFileType::TimpaniBank,
"timpani_master" => BundleFileType::TimpaniMaster,
"tome" => BundleFileType::Tome,
"ugg" => BundleFileType::Ugg,
"unit" => BundleFileType::Unit,
"upb" => BundleFileType::Upb,
"vector_field" => BundleFileType::VectorField,
"wav" => BundleFileType::Wav,
"wwise_bank" => BundleFileType::WwiseBank,
"wwise_dep" => BundleFileType::WwiseDep,
"wwise_event" => BundleFileType::WwiseEvent,
"wwise_metadata" => BundleFileType::WwiseMetadata,
"wwise_stream" => BundleFileType::WwiseStream,
"xml" => BundleFileType::Xml,
s => eyre::bail!("Unknown type string '{}'", s),
};
Ok(val)
impl BundleFileType {
pub fn hash(&self) -> Murmur64 {
Murmur64::from(*self)
}
}
@ -245,147 +160,6 @@ impl From<Murmur64> for BundleFileType {
}
}
impl From<u64> for BundleFileType {
fn from(hash: u64) -> BundleFileType {
match hash {
0x931e336d7646cc26 => BundleFileType::Animation,
0xdcfb9e18fff13984 => BundleFileType::AnimationCurves,
0x3eed05ba83af5090 => BundleFileType::Apb,
0x7ffdb779b04e4ed1 => BundleFileType::BakedLighting,
0xaa5965f03029fa18 => BundleFileType::Bik,
0xe301e8af94e3b5a3 => BundleFileType::BlendSet,
0x18dead01056b72e9 => BundleFileType::Bones,
0xb7893adf7567506a => BundleFileType::Chroma,
0xfe9754bd19814a47 => BundleFileType::CommonPackage,
0x82645835e6b73232 => BundleFileType::Config,
0x69108ded1e3e634b => BundleFileType::Crypto,
0x8fd0d44d20650b68 => BundleFileType::Data,
0x9831ca893b0d087d => BundleFileType::Entity,
0x92d3ee038eeb610d => BundleFileType::Flow,
0x9efe0a916aae7880 => BundleFileType::Font,
0x8f7d5a2c0f967655 => BundleFileType::Ies,
0xd526a27da14f1dc5 => BundleFileType::Ini,
0x2bbcabe5074ade9e => BundleFileType::Input,
0xfa4a8e091a91201e => BundleFileType::Ivf,
0xa62f9297dc969e85 => BundleFileType::Keys,
0x2a690fd348fe9ac5 => BundleFileType::Level,
0xa14e8dfa2cd117e2 => BundleFileType::Lua,
0xeac0b497876adedf => BundleFileType::Material,
0x3fcdd69156a46417 => BundleFileType::Mod,
0xb277b11fe4a61d37 => BundleFileType::MouseCursor,
0x169de9566953d264 => BundleFileType::NavData,
0x3b1fa9e8f6bac374 => BundleFileType::NetworkConfig,
0xb0f2c12eb107f4d8 => BundleFileType::OddleNet,
0xad9c6d9ed1e5e77a => BundleFileType::Package,
0xa8193123526fad64 => BundleFileType::Particles,
0xbf21403a3ab0bbb1 => BundleFileType::PhysicsProperties,
0x27862fe24795319c => BundleFileType::RenderConfig,
0x9ca183c2d0e76dee => BundleFileType::RtPipeline,
0x9d0a795bfe818d19 => BundleFileType::Scene,
0xcce8d5b5f5ae333f => BundleFileType::Shader,
0xe5ee32a477239a93 => BundleFileType::ShaderLibrary,
0x9e5c3cc74575aeb5 => BundleFileType::ShaderLibraryGroup,
0x250e0a11ac8e26f8 => BundleFileType::ShadingEnvionmentMapping,
0xfe73c7dcff8a7ca5 => BundleFileType::ShadingEnvironment,
0xa27b4d04a9ba6f9e => BundleFileType::Slug,
0xe9fc9ea7042e5ec0 => BundleFileType::SlugAlbum,
0xd8b27864a97ffdd7 => BundleFileType::SoundEnvironment,
0xf97af9983c05b950 => BundleFileType::SpuJob,
0xa486d4045106165c => BundleFileType::StateMachine,
0xe3f0baa17d620321 => BundleFileType::StaticPVS,
0x0d972bab10b40fd3 => BundleFileType::Strings,
0xad2d3fa30d9ab394 => BundleFileType::SurfaceProperties,
0xcd4238c6a0c69e32 => BundleFileType::Texture,
0x99736be1fff739a4 => BundleFileType::TimpaniBank,
0x00a3e6c59a2b9c6c => BundleFileType::TimpaniMaster,
0x19c792357c99f49b => BundleFileType::Tome,
0x712d6e3dd1024c9c => BundleFileType::Ugg,
0xe0a48d0be9a7453f => BundleFileType::Unit,
0xa99510c6e86dd3c2 => BundleFileType::Upb,
0xf7505933166d6755 => BundleFileType::VectorField,
0x786f65c00a816b19 => BundleFileType::Wav,
0x535a7bd3e650d799 => BundleFileType::WwiseBank,
0xaf32095c82f2b070 => BundleFileType::WwiseDep,
0xaabdd317b58dfc8a => BundleFileType::WwiseEvent,
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
0x504b55235d21440e => BundleFileType::WwiseStream,
0x76015845a6003765 => BundleFileType::Xml,
_ => BundleFileType::Unknown(Murmur64::from(hash)),
}
}
}
impl From<BundleFileType> for u64 {
fn from(t: BundleFileType) -> u64 {
match t {
BundleFileType::Animation => 0x931e336d7646cc26,
BundleFileType::AnimationCurves => 0xdcfb9e18fff13984,
BundleFileType::Apb => 0x3eed05ba83af5090,
BundleFileType::BakedLighting => 0x7ffdb779b04e4ed1,
BundleFileType::Bik => 0xaa5965f03029fa18,
BundleFileType::BlendSet => 0xe301e8af94e3b5a3,
BundleFileType::Bones => 0x18dead01056b72e9,
BundleFileType::Chroma => 0xb7893adf7567506a,
BundleFileType::CommonPackage => 0xfe9754bd19814a47,
BundleFileType::Config => 0x82645835e6b73232,
BundleFileType::Crypto => 0x69108ded1e3e634b,
BundleFileType::Data => 0x8fd0d44d20650b68,
BundleFileType::Entity => 0x9831ca893b0d087d,
BundleFileType::Flow => 0x92d3ee038eeb610d,
BundleFileType::Font => 0x9efe0a916aae7880,
BundleFileType::Ies => 0x8f7d5a2c0f967655,
BundleFileType::Ini => 0xd526a27da14f1dc5,
BundleFileType::Input => 0x2bbcabe5074ade9e,
BundleFileType::Ivf => 0xfa4a8e091a91201e,
BundleFileType::Keys => 0xa62f9297dc969e85,
BundleFileType::Level => 0x2a690fd348fe9ac5,
BundleFileType::Lua => 0xa14e8dfa2cd117e2,
BundleFileType::Material => 0xeac0b497876adedf,
BundleFileType::Mod => 0x3fcdd69156a46417,
BundleFileType::MouseCursor => 0xb277b11fe4a61d37,
BundleFileType::NavData => 0x169de9566953d264,
BundleFileType::NetworkConfig => 0x3b1fa9e8f6bac374,
BundleFileType::OddleNet => 0xb0f2c12eb107f4d8,
BundleFileType::Package => 0xad9c6d9ed1e5e77a,
BundleFileType::Particles => 0xa8193123526fad64,
BundleFileType::PhysicsProperties => 0xbf21403a3ab0bbb1,
BundleFileType::RenderConfig => 0x27862fe24795319c,
BundleFileType::RtPipeline => 0x9ca183c2d0e76dee,
BundleFileType::Scene => 0x9d0a795bfe818d19,
BundleFileType::Shader => 0xcce8d5b5f5ae333f,
BundleFileType::ShaderLibrary => 0xe5ee32a477239a93,
BundleFileType::ShaderLibraryGroup => 0x9e5c3cc74575aeb5,
BundleFileType::ShadingEnvionmentMapping => 0x250e0a11ac8e26f8,
BundleFileType::ShadingEnvironment => 0xfe73c7dcff8a7ca5,
BundleFileType::Slug => 0xa27b4d04a9ba6f9e,
BundleFileType::SlugAlbum => 0xe9fc9ea7042e5ec0,
BundleFileType::SoundEnvironment => 0xd8b27864a97ffdd7,
BundleFileType::SpuJob => 0xf97af9983c05b950,
BundleFileType::StateMachine => 0xa486d4045106165c,
BundleFileType::StaticPVS => 0xe3f0baa17d620321,
BundleFileType::Strings => 0x0d972bab10b40fd3,
BundleFileType::SurfaceProperties => 0xad2d3fa30d9ab394,
BundleFileType::Texture => 0xcd4238c6a0c69e32,
BundleFileType::TimpaniBank => 0x99736be1fff739a4,
BundleFileType::TimpaniMaster => 0x00a3e6c59a2b9c6c,
BundleFileType::Tome => 0x19c792357c99f49b,
BundleFileType::Ugg => 0x712d6e3dd1024c9c,
BundleFileType::Unit => 0xe0a48d0be9a7453f,
BundleFileType::Upb => 0xa99510c6e86dd3c2,
BundleFileType::VectorField => 0xf7505933166d6755,
BundleFileType::Wav => 0x786f65c00a816b19,
BundleFileType::WwiseBank => 0x535a7bd3e650d799,
BundleFileType::WwiseDep => 0xaf32095c82f2b070,
BundleFileType::WwiseEvent => 0xaabdd317b58dfc8a,
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
BundleFileType::WwiseStream => 0x504b55235d21440e,
BundleFileType::Xml => 0x76015845a6003765,
BundleFileType::Unknown(hash) => hash.into(),
}
}
}
impl From<BundleFileType> for Murmur64 {
fn from(t: BundleFileType) -> Murmur64 {
let hash: u64 = t.into();

View file

@ -7,14 +7,13 @@ use color_eyre::{Help, Report, SectionExt};
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE};
use crate::binary::sync::*;
use crate::bundle::file::Properties;
use crate::murmur::{HashGroup, IdString64, Murmur64};
pub(crate) mod database;
pub(crate) mod file;
pub(crate) mod filetype;
pub use file::{BundleFile, BundleFileVariant};
pub use file::{BundleFile, BundleFileVariant, Properties};
pub use filetype::BundleFileType;
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
@ -163,6 +162,7 @@ impl Bundle {
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle::decompress(
&compressed_buffer,
oodle::CHUNK_SIZE,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)
@ -360,6 +360,7 @@ where
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle::decompress(
&compressed_buffer,
oodle::CHUNK_SIZE,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)?;

View file

@ -1,8 +1,11 @@
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use std::{ffi::OsString, path::PathBuf};
use std::sync::Arc;
use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64};
use crate::murmur::{Dictionary, HashGroup, IdString32, IdString64, Murmur32, Murmur64};
#[derive(Clone)]
pub struct CmdLine {
cmd: OsString,
args: Vec<OsString>,
@ -52,7 +55,7 @@ impl From<&CmdLine> for Command {
}
pub struct Context {
pub lookup: Dictionary,
pub lookup: Arc<Dictionary>,
pub ljd: Option<CmdLine>,
pub revorb: Option<String>,
pub ww2ogg: Option<String>,
@ -62,7 +65,7 @@ pub struct Context {
impl Context {
pub fn new() -> Self {
Self {
lookup: Dictionary::new(),
lookup: Arc::new(Dictionary::new()),
ljd: None,
revorb: None,
ww2ogg: None,
@ -84,17 +87,17 @@ impl Context {
}
}
pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> String
pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> IdString32
where
M: Into<Murmur32>,
{
let hash = hash.into();
if let Some(s) = self.lookup.lookup_short(hash, group) {
tracing::debug!(%hash, string = s, "Murmur32 lookup successful");
s.to_owned()
s.to_string().into()
} else {
tracing::debug!(%hash, "Murmur32 lookup failed");
format!("{hash:08X}")
hash.into()
}
}
}

View file

@ -1,3 +1,4 @@
pub mod lua;
pub mod package;
pub mod strings;
pub mod texture;

View file

@ -5,7 +5,7 @@ use color_eyre::{Report, Result};
use crate::binary::sync::ReadExt;
use crate::bundle::file::{BundleFileVariant, UserFile};
use crate::murmur::HashGroup;
use crate::murmur::{HashGroup, IdString32};
#[derive(Copy, Clone, PartialEq, Eq, Hash, serde::Serialize)]
#[serde(untagged)]
@ -26,7 +26,7 @@ impl Language {
}
#[derive(serde::Serialize)]
pub struct Strings(HashMap<String, HashMap<Language, String>>);
pub struct Strings(HashMap<IdString32, HashMap<Language, String>>);
fn read_string<R>(r: R) -> Result<String>
where
@ -42,7 +42,7 @@ where
impl Strings {
#[tracing::instrument(skip_all, fields(languages = variants.len()))]
pub fn from_variants(ctx: &crate::Context, variants: &Vec<BundleFileVariant>) -> Result<Self> {
let mut map: HashMap<String, HashMap<Language, String>> = HashMap::new();
let mut map: HashMap<IdString32, HashMap<Language, String>> = HashMap::new();
for (i, variant) in variants.iter().enumerate() {
let _span = tracing::trace_span!("variant {}", i);

View file

@ -0,0 +1,741 @@
use std::io::{Cursor, Read, Seek, SeekFrom, Write as _};
use std::path::{Path, PathBuf};
use bitflags::bitflags;
use color_eyre::eyre::Context;
use color_eyre::{eyre, SectionExt};
use color_eyre::{Help, Result};
use flate2::read::ZlibDecoder;
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
use serde::{Deserialize, Serialize};
use tokio::fs;
use crate::binary::sync::{ReadExt, WriteExt};
use crate::bundle::file::UserFile;
use crate::murmur::{HashGroup, IdString32, IdString64};
use crate::{binary, BundleFile, BundleFileType, BundleFileVariant};
mod dds;
/// Top-level SJSON texture definition.
///
/// Only the platform-independent `common` section is modeled; see the
/// note below for why.
#[derive(Clone, Debug, Deserialize, Serialize)]
struct TextureDefinition {
    common: TextureDefinitionPlatform,
    // Stingray supports per-platform sections here, where you can create overrides with the same
    // values as in `common`. But since we only support PC, we don't need to implement
    // that.
}
/// Per-platform section of a texture definition.
#[derive(Clone, Debug, Deserialize, Serialize)]
struct TextureDefinitionPlatform {
    /// Source image settings.
    input: TextureDefinitionInput,
    /// Output settings for the compiled texture.
    output: TextureDefinitionOutput,
}
/// Input section of a texture definition: the source image file.
#[derive(Clone, Debug, Deserialize, Serialize)]
struct TextureDefinitionInput {
    filename: String,
}
/// Output section of a texture definition.
#[derive(Clone, Debug, Deserialize, Serialize)]
struct TextureDefinitionOutput {
    // Presumably the texture category name that `Texture` stores as an
    // `IdString32` hash — TODO confirm against the compile path.
    category: String,
}
bitflags! {
    /// Flag bits from the texture header.
    #[derive(Clone, Copy, Debug, Default)]
    struct TextureFlags: u32 {
        // Presumably set when mipmap data lives in a companion stream
        // file — TODO confirm.
        const STREAMABLE = 0b0000_0001;
        const UNKNOWN = 1 << 1;
        // Presumably marks sRGB color data — TODO confirm.
        const SRGB = 1 << 8;
    }
}
/// Offset/size entry for one mipmap level, as stored in the texture header.
#[derive(Copy, Clone, Debug, Default)]
struct TextureHeaderMipInfo {
    offset: usize,
    size: usize,
}
/// Header block that follows a texture's compressed main data.
#[derive(Clone, Default)]
struct TextureHeader {
    flags: TextureFlags,
    /// Count of mipmaps held in the stream file.
    n_streamable_mipmaps: usize,
    width: usize,
    height: usize,
    /// Offset/size pairs for up to 16 mipmap levels.
    mip_infos: [TextureHeaderMipInfo; 16],
    /// Size of the trailing meta section; a non-zero value implies a
    /// stream file must be present (checked in `Texture::from_binary`).
    meta_size: usize,
}
impl std::fmt::Debug for TextureHeader {
    /// Formats the header, rendering `mip_infos` as a compact
    /// `[offset/size, …]` string instead of a full struct dump.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Build "[o/s, o/s, ...]". The trailing ", " before the closing
        // ']' is deliberate: it matches the established output exactly.
        let mut mips = String::from("[");
        for info in self.mip_infos.iter() {
            mips.push_str(&format!("{}/{}, ", info.offset, info.size));
        }
        mips.push(']');

        f.debug_struct("TextureHeader")
            .field("flags", &self.flags)
            .field("n_streamable_mipmaps", &self.n_streamable_mipmaps)
            .field("width", &self.width)
            .field("height", &self.height)
            .field("mip_infos", &mips)
            .field("meta_size", &self.meta_size)
            .finish()
    }
}
impl TextureHeader {
    /// Reads a texture header from its binary representation.
    ///
    /// Field order matters: flags, streamable-mipmap count, dimensions,
    /// 16 fixed offset/size mip entries, then the meta section size.
    #[tracing::instrument(skip(r))]
    fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        let flags = r.read_u32().map(binary::flags_from_bits)?;
        let n_streamable_mipmaps = r.read_u32()? as usize;
        let width = r.read_u32()? as usize;
        let height = r.read_u32()? as usize;

        // The header always stores exactly 16 mip entries; unused ones
        // stay at their default (0/0).
        let mut mip_infos = [TextureHeaderMipInfo::default(); 16];

        for info in mip_infos.iter_mut() {
            info.offset = r.read_u32()? as usize;
            info.size = r.read_u32()? as usize;
        }

        let meta_size = r.read_u32()? as usize;

        Ok(Self {
            flags,
            n_streamable_mipmaps,
            width,
            height,
            mip_infos,
            meta_size,
        })
    }

    /// Writes the header in binary form, mirroring `from_binary`.
    ///
    /// Only the restricted case `flags == 0 && n_streamable_mipmaps == 0`
    /// is supported; anything else fails up front rather than producing
    /// data we can't round-trip.
    #[tracing::instrument(skip(w))]
    fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        eyre::ensure!(
            self.flags.is_empty() && self.n_streamable_mipmaps == 0,
            "Only textures are supported where `flags == 0` and `n_streamable_mipmaps == 0`."
        );

        w.write_u32(self.flags.bits())?;
        w.write_u32(self.n_streamable_mipmaps as u32)?;
        w.write_u32(self.width as u32)?;
        w.write_u32(self.height as u32)?;

        for info in self.mip_infos {
            w.write_u32(info.offset as u32)?;
            w.write_u32(info.size as u32)?;
        }

        // TODO: For now we write `0` here, until the meta section is figured out
        w.write_u32(0)?;

        Ok(())
    }
}
/// A parsed texture resource.
#[derive(Clone)]
struct Texture {
    header: TextureHeader,
    /// Main texture data, decompressed if the bundle stored it compressed.
    data: Vec<u8>,
    /// Decompressed contents of the companion stream file, if one exists.
    stream: Option<Vec<u8>>,
    /// Texture category, resolved from its hash in `from_binary`.
    category: IdString32,
}
impl std::fmt::Debug for Texture {
    /// Formats the texture, abbreviating `data` and `stream` to five-byte
    /// hex previews so large buffers don't flood the output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Shared preview logic for both byte buffers.
        fn preview(buf: &[u8]) -> String {
            if buf.len() <= 5 {
                format!("{:x?}", buf)
            } else {
                format!("{:x?}.. ({} bytes)", &buf[..5], buf.len())
            }
        }

        let stream_repr = match self.stream.as_ref() {
            Some(stream) => preview(stream),
            // Rendered as the quoted string "None", matching the
            // established output format.
            None => String::from("None"),
        };

        f.debug_struct("Texture")
            .field("header", &self.header)
            .field("data", &preview(&self.data))
            .field("stream", &stream_repr)
            .field("category", &self.category)
            .finish()
    }
}
impl Texture {
/// Decompresses the chunked stream data into one contiguous buffer.
///
/// `chunks` holds the cumulative end offsets of each compressed chunk in
/// the stream; a chunk's compressed size is the difference between
/// consecutive offsets. Every chunk must inflate to exactly `RAW_SIZE`
/// bytes, which is enforced below.
#[tracing::instrument(skip(data, chunks))]
fn decompress_stream_data(mut data: impl Read, chunks: impl AsRef<[usize]>) -> Result<Vec<u8>> {
    // Decompressed size of a single stream chunk.
    const RAW_SIZE: usize = 0x10000;
    let chunks = chunks.as_ref();
    // Largest cumulative offset bounds every chunk size, so one buffer
    // of this size can be reused for all reads.
    let max_size = chunks.iter().max().copied().unwrap_or(RAW_SIZE);
    let mut read_buf = vec![0; max_size];
    let mut stream_raw = Vec::with_capacity(chunks.iter().sum());
    let mut last = 0;

    for offset_next in chunks {
        // Compressed size of this chunk.
        let size = offset_next - last;
        let span = tracing::info_span!(
            "stream chunk",
            num_chunks = chunks.len(),
            chunk_size_comp = size,
            offset = last
        );
        let _enter = span.enter();

        let buf = &mut read_buf[0..size];
        data.read_exact(buf)
            .wrap_err("Failed to read chunk from stream file")?;

        let raw = oodle::decompress(buf, RAW_SIZE, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No)
            .wrap_err("Failed to decompress stream chunk")?;

        eyre::ensure!(
            raw.len() == RAW_SIZE,
            "Invalid chunk length after decompression"
        );

        stream_raw.extend_from_slice(&raw);

        last = *offset_next;
    }

    Ok(stream_raw)
}
/// Re-orders decompressed stream data into row-major mipmap layout.
///
/// The stream is split into fixed `CHUNK_SIZE` blocks; each chunk
/// contributes `row_size`-byte rows at a horizontal offset (`chunk_x`)
/// within a sliding `window` of `pitch * 64` bytes, which is flushed to
/// `out` whenever a new group of `bytes_per_block` chunks begins.
#[tracing::instrument(skip(data), fields(data_len = data.as_ref().len()))]
fn reorder_stream_mipmap(
    data: impl AsRef<[u8]>,
    bits_per_block: usize,
    bytes_per_block: usize,
    block_size: usize,
    pitch: usize,
) -> Result<Vec<u8>> {
    // Decompressed size of one stream chunk (matches RAW_SIZE in
    // `decompress_stream_data`).
    const CHUNK_SIZE: usize = 0x10000;
    let data = data.as_ref();
    let mut out = Vec::with_capacity(data.len());
    let mut window = vec![0u8; pitch * 64];
    let row_size = bits_per_block * block_size;

    // NOTE(review): `row_size` is not declared in the `instrument`
    // `fields(...)` list above; recording an undeclared field is a
    // no-op in `tracing`, so this value never appears on the span —
    // confirm, then either declare the field or drop this call.
    tracing::Span::current().record("row_size", row_size);

    eyre::ensure!(
        data.len() % CHUNK_SIZE == 0,
        "Stream data does not divide evenly into chunks"
    );

    for (i, chunk) in data.chunks_exact(CHUNK_SIZE).enumerate() {
        // Horizontal byte offset of this chunk within the window.
        let chunk_x = (i % bytes_per_block) * row_size;

        let span = tracing::trace_span!("chunk", i, chunk_x = chunk_x);
        let _guard = span.enter();

        // Flush the window once a full group of chunks has been copied in.
        if i > 0 && i % bytes_per_block == 0 {
            out.extend_from_slice(&window);
        }

        for (j, row) in chunk.chunks_exact(row_size).enumerate() {
            let start = chunk_x + j * pitch;
            let end = start + row_size;

            tracing::trace!("{i}/{j} at {}:{}", start, end);

            window[start..end].copy_from_slice(row);
        }
    }

    // NOTE(review): the window holding the final group of chunks is
    // never appended to `out` after the loop ends — verify whether the
    // last flush is intentionally omitted or missing.
    Ok(out)
}
/// Reads a texture from its binary bundle representation.
///
/// `r` provides the main bundle data; `stream_r`, when present, provides
/// the companion stream file whose chunks are decompressed via
/// `decompress_stream_data`. The main buffer may be stored raw (type 0,
/// returns early without a `TextureHeader`), Oodle-compressed (type 1),
/// or zlib-compressed (type 2).
#[tracing::instrument(
    "Texture::from_binary",
    skip(ctx, r, stream_r),
    fields(
        compression_type = tracing::field::Empty,
        compressed_size = tracing::field::Empty,
        uncompressed_size = tracing::field::Empty,
    )
)]
fn from_binary(
    ctx: &crate::Context,
    mut r: impl Read + Seek,
    mut stream_r: Option<impl Read>,
) -> Result<Self> {
    let compression_type = r.read_u32()?;
    let compressed_size = r.read_u32()? as usize;
    let uncompressed_size = r.read_u32()? as usize;

    // These fields are declared `Empty` in the `instrument` attribute
    // above and filled in once the values are known.
    {
        let span = tracing::Span::current();
        span.record("compression_type", compression_type);
        span.record("compressed_size", compressed_size);
        span.record("uncompressed_size", uncompressed_size);
    }

    let mut comp_buf = vec![0; compressed_size];
    r.read_exact(&mut comp_buf)?;

    let out_buf = match compression_type {
        // Uncompressed
        // This one never seems to contain the additional `TextureHeader` metadata,
        // so we return early in this branch.
        0 => {
            eyre::ensure!(
                compressed_size == 0 && uncompressed_size == 0,
                "Cannot handle texture with compression_type == 0, but buffer sizes > 0"
            );

            tracing::trace!("Found raw texture");

            // Determine total input length by seeking to the end and back.
            let pos = r.stream_position()?;
            let end = {
                r.seek(SeekFrom::End(0))?;
                let end = r.stream_position()?;
                r.seek(SeekFrom::Start(pos))?;
                end
            };

            // Reads until the last u32.
            let mut data = vec![0u8; (end - pos - 4) as usize];
            r.read_exact(&mut data)?;

            // The final u32 is the category hash.
            let category = r.read_u32().map(IdString32::from)?;

            return Ok(Self {
                header: TextureHeader::default(),
                data,
                stream: None,
                category,
            });
        }
        1 => oodle::decompress(
            comp_buf,
            uncompressed_size,
            OodleLZ_FuzzSafe::No,
            OodleLZ_CheckCRC::No,
        )?,
        2 => {
            let mut decoder = ZlibDecoder::new(comp_buf.as_slice());
            let mut buf = Vec::with_capacity(uncompressed_size);
            decoder.read_to_end(&mut buf)?;
            buf
        }
        _ => eyre::bail!(
            "Unknown compression type for texture '{}'",
            compression_type
        ),
    };

    eyre::ensure!(
        out_buf.len() == uncompressed_size,
        "Length of decompressed buffer did not match expected value. Expected {}, got {}",
        uncompressed_size,
        out_buf.len()
    );

    // No idea what this number is supposed to mean.
    // Even the game engine just skips this one.
    r.skip_u32(0x43)?;

    let header = TextureHeader::from_binary(&mut r)?;

    // A non-zero meta section implies streamable data, which requires
    // the companion stream file to be present.
    eyre::ensure!(
        header.meta_size == 0 || stream_r.is_some(),
        "Compression chunks and stream file don't match up. meta_size = {}, has_stream = {}",
        header.meta_size,
        stream_r.is_some()
    );

    let stream = if let Some(stream_r) = stream_r.as_mut() {
        // Number of compression chunks in the stream file
        let num_chunks = r.read_u32()? as usize;
        r.skip_u16(0)?;

        // The chunk count is stored twice; both copies must agree.
        {
            let num_chunks_1 = r.read_u16()? as usize;

            eyre::ensure!(
                num_chunks == num_chunks_1,
                "Chunk numbers don't match. first = {}, second = {}",
                num_chunks,
                num_chunks_1
            );
        }

        // Cumulative end offsets of each compressed chunk; consumed by
        // `decompress_stream_data`.
        let mut chunks = Vec::with_capacity(num_chunks);
        for _ in 0..num_chunks {
            chunks.push(r.read_u32()? as usize);
        }

        let stream_raw = Self::decompress_stream_data(stream_r, chunks)
            .wrap_err("Failed to decompress stream data")?;

        Some(stream_raw)
    } else {
        None
    };

    // Resolve the category hash to a name via the dictionary, if known.
    let category = ctx.lookup_hash_short(r.read_u32()?, HashGroup::TextureCategory);

    Ok(Self {
        category,
        header,
        data: out_buf,
        stream,
    })
}
/// Serializes the texture into its binary bundle representation.
///
/// The DDS payload is always written Oodle-compressed, mirroring the
/// layout parsed by `from_binary`.
#[tracing::instrument(skip(w))]
fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
    // Compression type `1` == Oodle.
    w.write_u32(1)?;

    let compressed = oodle::compress(&self.data).wrap_err("Failed to compress DDS data")?;

    // Compressed and uncompressed sizes, then the payload itself.
    w.write_u32(compressed.len() as u32)?;
    w.write_u32(self.data.len() as u32)?;
    w.write_all(&compressed)?;

    // Unknown field, which the engine seems to ignore.
    // All game files have the same value here, so we just mirror that.
    w.write_u32(0x43)?;

    self.header.to_binary(&mut w)?;
    w.write_u32(self.category.to_murmur32().into())?;

    Ok(())
}
/// Builds the SJSON `.texture` definition that corresponds to this
/// texture, pointing at `filename` as its input DDS.
#[tracing::instrument]
fn to_sjson(&self, filename: String) -> Result<String> {
    let category = self.category.display().to_string();
    let input = TextureDefinitionInput { filename };
    let output = TextureDefinitionOutput { category };

    let definition = TextureDefinition {
        common: TextureDefinitionPlatform { input, output },
    };

    serde_sjson::to_string(&definition).wrap_err("Failed to serialize texture definition")
}
/// Builds the extractable `.dds` file for this texture.
///
/// Rewrites the DDS headers to describe only the largest mipmap, then
/// appends either the reordered stream data (when a stream file exists)
/// or the remainder of the bundle-embedded DDS data.
#[tracing::instrument(fields(
    dds_header = tracing::field::Empty,
    dx10_header = tracing::field::Empty,
    image_format = tracing::field::Empty,
))]
fn create_dds_user_file(&self, name: String) -> Result<UserFile> {
    let mut data = Cursor::new(&self.data);
    let mut dds_header =
        dds::DDSHeader::from_binary(&mut data).wrap_err("Failed to read DDS header")?;

    {
        let span = tracing::Span::current();
        span.record("dds_header", format!("{:?}", dds_header));
    }

    // Without a FourCC there is no DX10 header to parse, so we cannot
    // interpret the pixel data any further and dump it as-is.
    if !dds_header.pixel_format.flags.contains(dds::DDPF::FOURCC) {
        tracing::debug!("Found DDS without FourCC. Dumping raw data");
        return Ok(UserFile::with_name(self.data.clone(), name));
    }

    // eyre::ensure!(
    //     dds_header.pixel_format.four_cc == dds::FourCC::DX10,
    //     "Only DX10 textures are currently supported. FourCC == {}",
    //     dds_header.pixel_format.four_cc,
    // );

    let dx10_header =
        dds::Dx10Header::from_binary(&mut data).wrap_err("Failed to read DX10 header")?;

    {
        let span = tracing::Span::current();
        span.record("dx10_header", format!("{:?}", dx10_header));
    }

    // match dx10_header.dxgi_format {
    //     DXGIFormat::BC1_UNORM
    //     | DXGIFormat::BC3_UNORM
    //     | DXGIFormat::BC4_UNORM
    //     | DXGIFormat::BC5_UNORM
    //     | DXGIFormat::BC6H_UF16
    //     | DXGIFormat::BC7_UNORM => {}
    //     _ => {
    //         eyre::bail!(
    //             "Unsupported DXGI format: {} (0x{:0X})",
    //             dx10_header.dxgi_format,
    //             dx10_header.dxgi_format.to_u32().unwrap_or_default()
    //         );
    //     }
    // }

    let stingray_image_format = dds::stripped_format_from_header(&dds_header, &dx10_header)?;

    {
        let span = tracing::Span::current();
        span.record("image_format", format!("{:?}", stingray_image_format));
    }

    // eyre::ensure!(
    //     stingray_image_format.image_type == ImageType::Image2D,
    //     "Unsupported image type: {}",
    //     stingray_image_format.image_type,
    // );

    // NOTE(review): the relationship between `pitch_or_linear_size`,
    // `block_size`, `bits_per_block` and `bytes_per_block` is inferred
    // from game assets. The `8 -> 128` / `16 -> 64` mapping looks
    // inverted for BC block sizes (a BC1 block is 8 bytes == 64 bits) —
    // confirm against the engine before relying on these names.
    let block_size = 4 * dds_header.pitch_or_linear_size / dds_header.width;
    let bits_per_block: usize = match block_size {
        8 => 128,
        16 => 64,
        block_size => eyre::bail!("Unsupported block size {}", block_size),
    };
    let pitch = self.header.width / 4 * block_size;
    let bytes_per_block = self.header.width / bits_per_block / 4;

    tracing::debug!(
        "block_size = {} | pitch = {} | bits_per_block = {} | bytes_per_block = {}",
        block_size,
        pitch,
        bits_per_block,
        bytes_per_block
    );

    let mut out_data = Cursor::new(Vec::with_capacity(self.data.len()));

    // Currently, we only extract the largest mipmap,
    // so we need to set the dimensions accordingly, and remove the
    // flag.
    dds_header.width = self.header.width;
    dds_header.height = self.header.height;
    dds_header.mipmap_count = 0;
    dds_header.flags &= !dds::DDSD::MIPMAPCOUNT;

    dds_header
        .to_binary(&mut out_data)
        .wrap_err("Failed to write DDS header")?;
    dx10_header
        .to_binary(&mut out_data)
        .wrap_err("Failed to write DX10 header")?;

    // If there is stream data, we build the mipmap data from it.
    // If not, we take whatever is left in the bundle file.
    if let Some(stream) = &self.stream {
        let data = Self::reorder_stream_mipmap(
            stream,
            bits_per_block,
            bytes_per_block,
            block_size,
            pitch,
        )
        .wrap_err("Failed to reorder stream chunks")?;

        out_data
            .write_all(&data)
            .wrap_err("Failed to write streamed mipmap data")?;
    } else {
        // `data.split()` yields (consumed, remaining); everything after
        // the headers we parsed above is the mipmap data.
        let (_, remaining) = data.split();
        out_data
            .write_all(remaining)
            .wrap_err("Failed to write texture data")?;
    };

    Ok(UserFile::with_name(out_data.into_inner(), name))
}
/// Turns this texture into the set of files presented to the user:
/// always the SJSON `.texture` definition and the `.dds` image; in debug
/// builds additionally the raw `.stream` and `.raw.dds` payloads.
#[tracing::instrument(skip(self))]
fn to_user_files(&self, name: String) -> Result<Vec<UserFile>> {
    let mut files = Vec::with_capacity(2);

    // The `.texture` SJSON definition.
    let sjson = self.to_sjson(name.clone())?;
    let sjson_name = PathBuf::from(&name)
        .with_extension("texture")
        .display()
        .to_string();
    files.push(UserFile::with_name(sjson.as_bytes().to_vec(), sjson_name));

    // For debugging purposes, also extract the raw files
    if cfg!(debug_assertions) {
        if let Some(stream) = &self.stream {
            let stream_name = PathBuf::from(&name)
                .with_extension("stream")
                .display()
                .to_string();
            files.push(UserFile::with_name(stream.clone(), stream_name));
        }

        let raw_name = PathBuf::from(&name)
            .with_extension("raw.dds")
            .display()
            .to_string();
        files.push(UserFile::with_name(self.data.clone(), raw_name));
    }

    match self
        .create_dds_user_file(name)
        .wrap_err("Failed to create DDS file")
    {
        Ok(dds) => files.push(dds),
        // In debug builds a failed DDS conversion is logged instead of
        // aborting, so the raw files above are still produced.
        Err(err) if cfg!(debug_assertions) => {
            tracing::error!(
                "{:?}",
                err.with_section(|| {
                    "Running in debug mode, continuing to produce raw files".header("Note:")
                })
            );
        }
        Err(err) => return Err(err),
    }

    Ok(files)
}
}
/// Decompiles an in-memory texture blob (plus optional stream data)
/// into the user-facing files.
#[tracing::instrument(skip(ctx, data, stream_data), fields(data_len = data.as_ref().len()))]
pub(crate) async fn decompile_data(
    ctx: &crate::Context,
    name: String,
    data: impl AsRef<[u8]>,
    stream_data: Option<impl AsRef<[u8]>>,
) -> Result<Vec<UserFile>> {
    let mut stream_r = stream_data.map(Cursor::new);
    let mut r = Cursor::new(data);

    Texture::from_binary(ctx, &mut r, stream_r.as_mut())?
        .to_user_files(name)
        .wrap_err("Failed to build user files")
}
#[tracing::instrument(skip(ctx))]
pub(crate) async fn decompile(
ctx: &crate::Context,
name: String,
variant: &BundleFileVariant,
) -> Result<Vec<UserFile>> {
let data_file = variant.data_file_name().map(|name| match &ctx.game_dir {
Some(dir) => dir.join("bundle").join(name),
None => PathBuf::from("bundle").join(name),
});
if variant.external() {
let Some(path) = data_file else {
eyre::bail!("File is marked external but has no data file name");
};
tracing::debug!(
"Decompiling texture from external file '{}'",
path.display()
);
let data = fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
.with_suggestion(|| {
"Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
})?;
decompile_data(ctx, name, data, None::<&[u8]>).await
} else {
tracing::debug!("Decompiling texture from bundle data");
let stream_data = match data_file {
Some(path) => {
let data = fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
.with_suggestion(|| {
"Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
})?;
Some(data)
}
None => None,
};
decompile_data(ctx, name, variant.data(), stream_data).await
}
}
/// Compiles a `.texture` SJSON definition and the DDS file it references
/// into a `BundleFile` ready to be inserted into a bundle.
///
/// # Errors
/// Fails when the SJSON cannot be parsed, the referenced DDS file cannot
/// be read, or the DDS magic bytes are invalid.
#[tracing::instrument(skip(sjson, name), fields(sjson_len = sjson.as_ref().len(), name = %name.display()))]
pub async fn compile(
    name: IdString64,
    sjson: impl AsRef<str>,
    root: impl AsRef<Path> + std::fmt::Debug,
) -> Result<BundleFile> {
    let definitions: TextureDefinition = serde_sjson::from_str(sjson.as_ref())
        .wrap_err("Failed to deserialize SJSON")
        .with_section(|| sjson.as_ref().to_string().header("SJSON:"))?;

    let dds = {
        let path = root.as_ref().join(definitions.common.input.filename);
        fs::read(&path)
            .await
            .wrap_err_with(|| format!("Failed to read DDS file '{}'", path.display()))?
    };

    let (width, height) = {
        let mut r = Cursor::new(&dds);

        let magic = r.read_u32()?;
        eyre::ensure!(
            magic == 0x20534444,
            "Invalid magic bytes for DDS. Expected 0x20534444, got {:08x}",
            magic
        );

        // After the magic, the DDS header continues with `dwSize` and
        // `dwFlags` (4 bytes each) before the dimensions, and it stores
        // height before width. The previous code seeked only 5 bytes and
        // read width first, so both values straddled field boundaries.
        r.seek(SeekFrom::Current(8))?;
        let height = r.read_u32()? as usize;
        let width = r.read_u32()? as usize;

        (width, height)
    };

    let mut w = Cursor::new(Vec::new());

    let texture = Texture {
        header: TextureHeader {
            // As long as we can't handle mipmaps, these two need be `0`
            flags: TextureFlags::empty(),
            n_streamable_mipmaps: 0,
            width,
            height,
            mip_infos: [TextureHeaderMipInfo::default(); 16],
            meta_size: 0,
        },
        data: dds,
        stream: None,
        category: IdString32::String(definitions.common.output.category),
    };
    texture.to_binary(&mut w)?;

    let mut variant = BundleFileVariant::new();
    variant.set_data(w.into_inner());

    let mut file = BundleFile::new(name, BundleFileType::Texture);
    file.add_variant(variant);

    Ok(file)
}

View file

@ -0,0 +1,529 @@
use std::io::SeekFrom;
use bitflags::bitflags;
use color_eyre::eyre::Context as _;
use color_eyre::eyre::{self, OptionExt as _};
use color_eyre::Result;
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::{FromPrimitive as _, ToPrimitive as _};
use crate::binary;
use crate::binary::sync::{ReadExt, WriteExt};
const MAGIC_DDS: u32 = 0x20534444;
bitflags! {
    /// `dwFlags` of the DDS header: indicates which header fields
    /// contain valid data.
    #[derive(Clone, Copy, Debug)]
    pub struct DDSD: u32 {
        /// Required
        const CAPS = 0x1;
        /// Required
        const HEIGHT = 0x2;
        /// Required
        const WIDTH = 0x4;
        /// Pitch for an uncompressed texture
        const PITCH = 0x8;
        /// Required
        const PIXELFORMAT = 0x1000;
        /// Required in a mipmapped texture
        const MIPMAPCOUNT = 0x20000;
        /// Pitch for a compressed texture
        const LINEARSIZE = 0x80000;
        /// Required in a depth texture
        const DEPTH = 0x800000;
    }

    /// `dwCaps` of the DDS header: the complexity of the stored surfaces.
    #[derive(Clone, Copy, Debug)]
    pub struct DDSCAPS: u32 {
        const COMPLEX = 0x8;
        const MIPMAP = 0x400000;
        const TEXTURE = 0x1000;
    }

    /// `dwCaps2` of the DDS header: additional surface detail
    /// (cubemap faces, volume texture).
    #[derive(Clone, Copy, Debug)]
    pub struct DDSCAPS2: u32 {
        const CUBEMAP = 0x200;
        const CUBEMAP_POSITIVEX = 0x400;
        const CUBEMAP_NEGATIVEX = 0x800;
        const CUBEMAP_POSITIVEY = 0x1000;
        const CUBEMAP_NEGATIVEY = 0x2000;
        const CUBEMAP_POSITIVEZ = 0x4000;
        const CUBEMAP_NEGATIVEZ = 0x8000;
        const VOLUME = 0x200000;
        /// Convenience mask covering all six cubemap faces.
        const CUBEMAP_ALLFACES = Self::CUBEMAP_POSITIVEX.bits()
            | Self::CUBEMAP_NEGATIVEX.bits()
            | Self::CUBEMAP_POSITIVEY.bits()
            | Self::CUBEMAP_NEGATIVEY.bits()
            | Self::CUBEMAP_POSITIVEZ.bits()
            | Self::CUBEMAP_NEGATIVEZ.bits();
    }

    /// `dwFlags` of the DDS pixel format: which pixel format fields
    /// contain valid data.
    #[derive(Clone, Copy, Debug)]
    pub struct DDPF: u32 {
        const ALPHAPIXELS = 0x1;
        const ALPHA = 0x2;
        const FOURCC = 0x4;
        const RGB = 0x40;
        const YUV = 0x200;
        const LUMINANCE = 0x20000;
    }

    /// `miscFlag` of the DX10 extension header.
    #[derive(Clone, Copy, Debug)]
    pub struct DdsResourceMiscFlags: u32 {
        const TEXTURECUBE = 0x4;
    }
}
/// Resource dimension values of the DX10 extension header
/// (`D3D10_RESOURCE_DIMENSION`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum D3D10ResourceDimension {
    Unknown = 0,
    Buffer = 1,
    Texture1D = 2,
    Texture2D = 3,
    Texture3D = 4,
}
/// Resource data formats (`DXGI_FORMAT`), including fully-typed and
/// typeless formats.
///
/// Discriminants mirror the official enumeration, so `FromPrimitive`/
/// `ToPrimitive` convert directly to and from on-disk values.
/// See https://learn.microsoft.com/en-us/windows/win32/api/dxgiformat/ne-dxgiformat-dxgi_format
#[allow(clippy::upper_case_acronyms)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, strum::Display, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum DXGIFormat {
    UNKNOWN = 0,
    R32G32B32A32_TYPELESS = 1,
    R32G32B32A32_FLOAT = 2,
    R32G32B32A32_UINT = 3,
    R32G32B32A32_SINT = 4,
    R32G32B32_TYPELESS = 5,
    R32G32B32_FLOAT = 6,
    R32G32B32_UINT = 7,
    R32G32B32_SINT = 8,
    R16G16B16A16_TYPELESS = 9,
    R16G16B16A16_FLOAT = 10,
    R16G16B16A16_UNORM = 11,
    R16G16B16A16_UINT = 12,
    R16G16B16A16_SNORM = 13,
    R16G16B16A16_SINT = 14,
    R32G32_TYPELESS = 15,
    R32G32_FLOAT = 16,
    R32G32_UINT = 17,
    R32G32_SINT = 18,
    R32G8X24_TYPELESS = 19,
    D32_FLOAT_S8X24_UINT = 20,
    R32_FLOAT_X8X24_TYPELESS = 21,
    X32_TYPELESS_G8X24_UINT = 22,
    R10G10B10A2_TYPELESS = 23,
    R10G10B10A2_UNORM = 24,
    R10G10B10A2_UINT = 25,
    R11G11B10_FLOAT = 26,
    R8G8B8A8_TYPELESS = 27,
    R8G8B8A8_UNORM = 28,
    R8G8B8A8_UNORM_SRGB = 29,
    R8G8B8A8_UINT = 30,
    R8G8B8A8_SNORM = 31,
    R8G8B8A8_SINT = 32,
    R16G16_TYPELESS = 33,
    R16G16_FLOAT = 34,
    R16G16_UNORM = 35,
    R16G16_UINT = 36,
    R16G16_SNORM = 37,
    R16G16_SINT = 38,
    R32_TYPELESS = 39,
    D32_FLOAT = 40,
    R32_FLOAT = 41,
    R32_UINT = 42,
    R32_SINT = 43,
    R24G8_TYPELESS = 44,
    D24_UNORM_S8_UINT = 45,
    R24_UNORM_X8_TYPELESS = 46,
    X24_TYPELESS_G8_UINT = 47,
    R8G8_TYPELESS = 48,
    R8G8_UNORM = 49,
    R8G8_UINT = 50,
    R8G8_SNORM = 51,
    R8G8_SINT = 52,
    R16_TYPELESS = 53,
    R16_FLOAT = 54,
    D16_UNORM = 55,
    R16_UNORM = 56,
    R16_UINT = 57,
    R16_SNORM = 58,
    R16_SINT = 59,
    R8_TYPELESS = 60,
    R8_UNORM = 61,
    R8_UINT = 62,
    R8_SNORM = 63,
    R8_SINT = 64,
    A8_UNORM = 65,
    R1_UNORM = 66,
    R9G9B9E5_SHAREDEXP = 67,
    R8G8_B8G8_UNORM = 68,
    G8R8_G8B8_UNORM = 69,
    BC1_TYPELESS = 70,
    BC1_UNORM = 71,
    BC1_UNORM_SRGB = 72,
    BC2_TYPELESS = 73,
    BC2_UNORM = 74,
    BC2_UNORM_SRGB = 75,
    BC3_TYPELESS = 76,
    BC3_UNORM = 77,
    BC3_UNORM_SRGB = 78,
    BC4_TYPELESS = 79,
    BC4_UNORM = 80,
    BC4_SNORM = 81,
    BC5_TYPELESS = 82,
    BC5_UNORM = 83,
    BC5_SNORM = 84,
    B5G6R5_UNORM = 85,
    B5G5R5A1_UNORM = 86,
    B8G8R8A8_UNORM = 87,
    B8G8R8X8_UNORM = 88,
    R10G10B10_XR_BIAS_A2_UNORM = 89,
    B8G8R8A8_TYPELESS = 90,
    B8G8R8A8_UNORM_SRGB = 91,
    B8G8R8X8_TYPELESS = 92,
    B8G8R8X8_UNORM_SRGB = 93,
    BC6H_TYPELESS = 94,
    BC6H_UF16 = 95,
    BC6H_SF16 = 96,
    BC7_TYPELESS = 97,
    BC7_UNORM = 98,
    BC7_UNORM_SRGB = 99,
    AYUV = 100,
    Y410 = 101,
    Y416 = 102,
    NV12 = 103,
    P010 = 104,
    P016 = 105,
    OPAQUE = 106,
    YUY2 = 107,
    Y210 = 108,
    Y216 = 109,
    NV11 = 110,
    AI44 = 111,
    IA44 = 112,
    P8 = 113,
    A8P8 = 114,
    B4G4R4A4_UNORM = 115,
    P208 = 130,
    V208 = 131,
    V408 = 132,
    // These two do not continue the sequence from `V408`: the official
    // enumeration assigns them 189 and 190. Leaving the values implicit
    // numbered them 133/134, which made `FromPrimitive` reject the real
    // on-disk values and mis-interpret 133/134.
    SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 189,
    SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 190,
}
/// The DX10 extension header that follows the DDS header when the pixel
/// format's FourCC is `DX10`.
#[derive(Clone, Copy, Debug)]
pub struct Dx10Header {
    /// Resource data formats, including fully-typed and typeless formats.
    /// See https://learn.microsoft.com/en-us/windows/win32/api/dxgiformat/ne-dxgiformat-dxgi_format
    pub dxgi_format: DXGIFormat,
    /// Dimensionality of the resource (buffer, 1D/2D/3D texture).
    pub resource_dimension: D3D10ResourceDimension,
    /// Additional resource flags, e.g. whether this is a cubemap.
    pub misc_flag: DdsResourceMiscFlags,
    /// Number of array slices.
    pub array_size: usize,
    /// Further flags; carried through verbatim, not interpreted here.
    pub misc_flags2: u32,
}
impl Dx10Header {
    /// Parses a DX10 extension header.
    ///
    /// Unrecognized format or dimension values are mapped to their
    /// `UNKNOWN`/`Unknown` variants rather than failing.
    #[tracing::instrument("Dx10Header::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        let dxgi_format = r
            .read_u32()
            .map(|val| DXGIFormat::from_u32(val).unwrap_or(DXGIFormat::UNKNOWN))?;
        let resource_dimension = r.read_u32().map(|val| {
            D3D10ResourceDimension::from_u32(val).unwrap_or(D3D10ResourceDimension::Unknown)
        })?;
        let misc_flag = r.read_u32().map(binary::flags_from_bits)?;
        let array_size = r.read_u32()? as usize;
        let misc_flags2 = r.read_u32()?;

        Ok(Self {
            dxgi_format,
            resource_dimension,
            misc_flag,
            array_size,
            misc_flags2,
        })
    }

    /// Serializes the DX10 extension header, mirroring `from_binary`.
    #[tracing::instrument("Dx10Header::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        w.write_u32(
            self.dxgi_format
                .to_u32()
                .ok_or_eyre("DXGIFormat should fit in a u32")?,
        )?;
        w.write_u32(
            self.resource_dimension
                .to_u32()
                // This error message used to say "DXGIFormat" — a
                // copy-paste slip from the write above.
                .ok_or_eyre("D3D10ResourceDimension should fit in a u32")?,
        )?;
        w.write_u32(self.misc_flag.bits())?;
        w.write_u32(self.array_size as u32)?;
        w.write_u32(self.misc_flags2)?;

        Ok(())
    }
}
/// Known FourCC codes of the DDS pixel format.
///
/// Values are the little-endian interpretation of the four ASCII bytes.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum FourCC {
    /// In-memory sentinel for "no FourCC present"; not a value that
    /// appears in files (see `DDSPixelFormat::from_binary`).
    Empty = u32::MAX,
    DXT1 = 0x31545844,
    // NOTE(review): 0x33545844 decodes to the ASCII bytes "DXT3", not
    // "DXT2" (which would be 0x32545844) — confirm which code the assets
    // actually use before renaming this variant.
    DXT2 = 0x33545844,
    DXT5 = 0x35545844,
    // NOTE(review): 0x31495441/0x32495441 decode to "ATI1"/"ATI2"; the
    // `AXI` spelling looks like a transcription slip. Renaming would be
    // an API change, so it is only flagged here.
    AXI1 = 0x31495441,
    AXI2 = 0x32495441,
    DX10 = 0x30315844,
    D3D_A16B16G16R16 = 0x24,
    D3D_R16F = 0x6F,
    D3D_G16R16F = 0x70,
    D3D_A16B16G16R16F = 0x71,
    D3D_R32F = 0x72,
    D3D_G32R32F = 0x73,
    D3D_A32B32G32R32F = 0x74,
}
/// The `DDS_PIXELFORMAT` structure embedded in the DDS header.
#[derive(Clone, Copy, Debug)]
pub struct DDSPixelFormat {
    /// Flags indicating which of the following fields hold valid data.
    pub flags: DDPF,
    /// Four-character code identifying a compressed format; only
    /// meaningful when `flags` contains `DDPF::FOURCC`.
    pub four_cc: FourCC,
    /// Bits per pixel for uncompressed formats.
    pub rgb_bit_count: u32,
    pub r_bit_mask: u32,
    pub g_bit_mask: u32,
    pub b_bit_mask: u32,
    pub a_bit_mask: u32,
}
impl DDSPixelFormat {
    /// Parses a `DDS_PIXELFORMAT` structure.
    ///
    /// When `DDPF::FOURCC` is not set, the FourCC slot must contain `0`
    /// and is represented in memory as `FourCC::Empty`.
    #[tracing::instrument("DDSPixelFormat::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        // Structure size field; always 32 for this struct.
        let size = r.read_u32()? as usize;
        eyre::ensure!(
            size == 32,
            "Invalid structure size. Got 0X{:0X}, expected 0x20",
            size
        );

        let flags: DDPF = r.read_u32().map(binary::flags_from_bits)?;
        let four_cc = if flags.contains(DDPF::FOURCC) {
            r.read_u32().and_then(|bytes| {
                FourCC::from_u32(bytes).ok_or_eyre(format!("Unknown FourCC value: {:08X}", bytes))
            })?
        } else {
            r.skip_u32(0)?;
            FourCC::Empty
        };
        let rgb_bit_count = r.read_u32()?;
        let r_bit_mask = r.read_u32()?;
        let g_bit_mask = r.read_u32()?;
        let b_bit_mask = r.read_u32()?;
        let a_bit_mask = r.read_u32()?;

        Ok(Self {
            flags,
            four_cc,
            rgb_bit_count,
            r_bit_mask,
            g_bit_mask,
            b_bit_mask,
            a_bit_mask,
        })
    }

    /// Serializes the structure, mirroring the layout read by
    /// `from_binary`.
    #[tracing::instrument("DDSPixelFormat::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        // Structure size
        w.write_u32(32)?;
        w.write_u32(self.flags.bits())?;

        // `FourCC::Empty` is our internal sentinel for "no FourCC"; the
        // on-disk representation (and what `from_binary` expects in that
        // slot) is `0`, not the sentinel's discriminant (`u32::MAX`).
        // Writing the discriminant here broke the read/write round-trip.
        let four_cc = if self.four_cc == FourCC::Empty {
            0
        } else {
            self.four_cc.to_u32().unwrap_or_default()
        };
        w.write_u32(four_cc)?;

        w.write_u32(self.rgb_bit_count)?;
        w.write_u32(self.r_bit_mask)?;
        w.write_u32(self.g_bit_mask)?;
        w.write_u32(self.b_bit_mask)?;
        w.write_u32(self.a_bit_mask)?;

        Ok(())
    }
}
/// The main DDS file header (the leading magic bytes are handled by
/// `from_binary`/`to_binary`).
#[derive(Clone, Copy, Debug)]
pub struct DDSHeader {
    /// Flags to indicate which members contain valid data.
    pub flags: DDSD,
    pub height: usize,
    pub width: usize,
    /// Pitch of an uncompressed texture or total top-level size of a
    /// compressed one, depending on `flags`.
    pub pitch_or_linear_size: usize,
    /// Depth of a volume texture.
    pub depth: usize,
    pub mipmap_count: usize,
    pub pixel_format: DDSPixelFormat,
    pub caps: DDSCAPS,
    pub caps_2: DDSCAPS2,
}
impl DDSHeader {
    /// Parses a DDS file header, including the leading magic bytes.
    #[tracing::instrument("DDSHeader::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        r.skip_u32(MAGIC_DDS).wrap_err("Invalid magic bytes")?;

        // Structure size field; always 124 for the DDS header.
        let size = r.read_u32()?;
        eyre::ensure!(
            size == 124,
            "Invalid structure size. Got 0x{:0X}, expected 0x7C",
            size
        );

        let flags = r.read_u32().map(binary::flags_from_bits)?;
        let height = r.read_u32()? as usize;
        let width = r.read_u32()? as usize;
        let pitch_or_linear_size = r.read_u32()? as usize;
        let depth = r.read_u32()? as usize;
        let mipmap_count = r.read_u32()? as usize;

        // Skip reserved bytes
        r.seek(SeekFrom::Current(11 * 4))?;

        let pixel_format = DDSPixelFormat::from_binary(&mut r)?;
        let caps = r.read_u32().map(binary::flags_from_bits)?;
        let caps_2 = r.read_u32().map(binary::flags_from_bits)?;

        // Skip unused and reserved bytes
        r.seek(SeekFrom::Current(3 * 4))?;

        Ok(Self {
            flags,
            height,
            width,
            pitch_or_linear_size,
            depth,
            mipmap_count,
            pixel_format,
            caps,
            caps_2,
        })
    }

    /// Serializes the header, mirroring the layout read by
    /// `from_binary`. Reserved regions are written as zeroes.
    #[tracing::instrument("DDSHeader::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        w.write_u32(MAGIC_DDS)?;
        // Structure size in bytes
        w.write_u32(124)?;
        w.write_u32(self.flags.bits())?;
        w.write_u32(self.height as u32)?;
        w.write_u32(self.width as u32)?;
        w.write_u32(self.pitch_or_linear_size as u32)?;
        w.write_u32(self.depth as u32)?;
        w.write_u32(self.mipmap_count as u32)?;
        // Reserved bytes, zeroed.
        w.write_all(&[0u8; 11 * 4])?;
        self.pixel_format.to_binary(&mut w)?;
        w.write_u32(self.caps.bits())?;
        w.write_u32(self.caps_2.bits())?;
        // Unused and reserved bytes, zeroed.
        w.write_all(&[0u8; 3 * 4])?;
        Ok(())
    }
}
/// Image type discriminants as used by the engine's image format
/// (see `stripped_format_from_header`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display)]
#[repr(u32)]
pub enum ImageType {
    Image2D = 0,
    Image3D = 1,
    ImageCube = 2,
    Unknown = 3,
    Image2dArray = 4,
    ImagecubeArray = 5,
}
/// A stripped version of `ImageType` that contains just the data needed
/// to read a DDS image stream.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub struct StrippedImageFormat {
    pub image_type: ImageType,
    pub width: usize,
    pub height: usize,
    /// Number of layers: array slices, cubemap faces, or volume depth.
    pub layers: usize,
    /// Number of mip levels.
    pub mip_levels: usize,
}
// This is a stripped down version of the logic that the engine implements to fill
// `stingray::ImageFormat`. With the `type` field we need to distinguish between `IMAGE3D`
// and everything else, and we need the various dimensions filled to calculate the chunks.
pub fn stripped_format_from_header(
    dds_header: &DDSHeader,
    dx10_header: &Dx10Header,
) -> Result<StrippedImageFormat> {
    // A mipmap count of zero still means one (the base) level.
    let mip_levels = dds_header.mipmap_count.max(1);

    // INFO: These next two sections are conditional in the engine code,
    // based on a lot of stuff in "fourcc" and other fields. But it might
    // actually be fine to just do it like this, as this seems universal
    // to DDS.
    // Will have to check how it plays out with actual assets.
    let (mut image_type, mut layers) = if dds_header.caps_2.contains(DDSCAPS2::CUBEMAP) {
        (ImageType::ImageCube, 6)
    } else if dds_header.caps_2.contains(DDSCAPS2::VOLUME) {
        (ImageType::Image3D, dds_header.depth)
    } else {
        (ImageType::Image2D, 1)
    };

    // The DX10 header, when it identifies the resource, takes precedence
    // over the legacy caps bits above.
    match dx10_header.resource_dimension {
        D3D10ResourceDimension::Texture2D => {
            if dx10_header
                .misc_flag
                .contains(DdsResourceMiscFlags::TEXTURECUBE)
            {
                image_type = ImageType::ImageCube;
                layers = if dx10_header.array_size > 1 {
                    dx10_header.array_size
                } else {
                    6
                };
            } else {
                image_type = ImageType::Image2D;
                layers = dx10_header.array_size;
            }
        }
        D3D10ResourceDimension::Texture3D => {
            image_type = ImageType::Image3D;
            layers = dds_header.depth;
        }
        _ => {}
    }

    // Promote to the array variants when the DX10 header declares an
    // array. 3D arrays have no engine representation.
    if dx10_header.array_size > 1 {
        image_type = match image_type {
            ImageType::Image2D => ImageType::Image2dArray,
            ImageType::ImageCube => ImageType::ImagecubeArray,
            ImageType::Image3D => {
                eyre::bail!("3D-Arrays are not a supported image format")
            }
            other => other,
        };
    }

    Ok(StrippedImageFormat {
        image_type,
        width: dds_header.width,
        height: dds_header.height,
        layers,
        mip_levels,
    })
}

View file

@ -1,3 +1,4 @@
#![feature(cursor_split)]
#![feature(test)]
mod binary;
@ -9,5 +10,5 @@ pub mod murmur;
pub use binary::{FromBinary, ToBinary};
pub use bundle::database::BundleDatabase;
pub use bundle::decompress;
pub use bundle::{Bundle, BundleFile, BundleFileType, BundleFileVariant};
pub use bundle::{Bundle, BundleFile, BundleFileType, BundleFileVariant, Properties};
pub use context::{CmdLine, Context};

View file

@ -12,12 +12,19 @@ pub enum HashGroup {
Filename,
Filetype,
Strings,
TextureCategory,
Other,
}
impl HashGroup {
pub fn all() -> [Self; 3] {
[Self::Filename, Self::Filetype, Self::Other]
pub fn all() -> [Self; 5] {
[
Self::Filename,
Self::Filetype,
Self::Strings,
Self::TextureCategory,
Self::Other,
]
}
}
@ -27,6 +34,7 @@ impl std::fmt::Display for HashGroup {
HashGroup::Filename => write!(f, "filename"),
HashGroup::Filetype => write!(f, "filetype"),
HashGroup::Strings => write!(f, "strings"),
HashGroup::TextureCategory => write!(f, "texture-category"),
HashGroup::Other => write!(f, "other"),
}
}
@ -48,6 +56,7 @@ struct Row {
group: HashGroup,
}
#[derive(Clone)]
pub struct Entry {
value: String,
long: Murmur64,
@ -73,6 +82,7 @@ impl Entry {
}
}
#[derive(Clone)]
pub struct Dictionary {
entries: Vec<Entry>,
}
@ -88,10 +98,12 @@ impl Dictionary {
Self { entries: vec![] }
}
pub async fn from_csv<R>(&mut self, r: R) -> Result<()>
pub async fn from_csv<R>(r: R) -> Result<Self>
where
R: AsyncRead + std::marker::Unpin + std::marker::Send,
{
let mut entries = vec![];
let r = AsyncDeserializer::from_reader(r);
let mut records = r.into_deserialize::<Row>();
@ -112,10 +124,10 @@ impl Dictionary {
group: record.group,
};
self.entries.push(entry);
entries.push(entry);
}
Ok(())
Ok(Self { entries })
}
pub async fn to_csv<W>(&self, w: W) -> Result<()>
@ -161,7 +173,7 @@ impl Dictionary {
self.entries.push(entry);
}
pub fn find(&mut self, value: &String, group: HashGroup) -> Option<&Entry> {
pub fn find(&self, value: &String, group: HashGroup) -> Option<&Entry> {
self.entries
.iter()
.find(|e| e.value == *value && e.group == group)