feat(sdk): Implement partial texture decompilation

This commit is contained in:
Lucas Schwiderski 2023-03-03 16:55:22 +01:00
parent 69300e87e6
commit ea7886b08f
Signed by: lucas
GPG key ID: AA12679AAA6DF4D8
14 changed files with 667 additions and 36 deletions

View file

@ -287,6 +287,34 @@ where
P1: AsRef<Path> + std::fmt::Debug,
P2: AsRef<Path> + std::fmt::Debug,
{
let ctx = if ctx.game_dir.is_some() {
tracing::debug!(
"Got game directory from config: {}",
ctx.game_dir.as_ref().unwrap().display()
);
ctx
} else {
let game_dir = path
.as_ref()
.parent()
.and_then(|parent| parent.parent())
.map(|p| p.to_path_buf());
tracing::info!(
"No game directory configured, guessing from bundle path: {:?}",
game_dir
);
Arc::new(sdk::Context {
game_dir,
lookup: Arc::clone(&ctx.lookup),
ljd: ctx.ljd.clone(),
revorb: ctx.revorb.clone(),
ww2ogg: ctx.ww2ogg.clone(),
})
};
let bundle = {
let data = fs::read(path.as_ref()).await?;
let name = Bundle::get_name_from_path(&ctx, path.as_ref());

View file

@ -1,4 +1,5 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
use cli_table::{print_stdout, WithTitle};
@ -156,6 +157,8 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
BufReader::new(Box::new(f))
};
let lookup = Arc::make_mut(&mut ctx.lookup);
let group = sdk::murmur::HashGroup::from(*group);
let mut added = 0;
@ -165,15 +168,15 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
let total = {
for line in lines.into_iter() {
let value = line?;
if ctx.lookup.find(&value, group).is_some() {
if lookup.find(&value, group).is_some() {
skipped += 1;
} else {
ctx.lookup.add(value, group);
lookup.add(value, group);
added += 1;
}
}
ctx.lookup.len()
lookup.len()
};
let out_path = matches
@ -190,7 +193,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
})
.with_section(|| out_path.display().to_string().header("Path:"))?;
ctx.lookup
lookup
.to_csv(f)
.await
.wrap_err("Failed to write dictionary to disk")?;

View file

@ -0,0 +1,21 @@
use clap::{ArgMatches, Command};
use color_eyre::Result;
mod texture_meta;
/// Builds the `experiment` subcommand tree.
///
/// Each experiment lives in its own submodule and contributes one subcommand;
/// clap is configured so that invoking `experiment` without one is an error.
pub(crate) fn command_definition() -> Command {
    let cmd = Command::new("experiment")
        .about("A collection of utilities and experiments.")
        .subcommand_required(true);
    cmd.subcommand(texture_meta::command_definition())
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
    // Pull out whichever experiment was selected on the command line.
    let Some((name, sub_matches)) = matches.subcommand() else {
        unreachable!("clap is configured to require a subcommand, and they're all handled above")
    };

    match name {
        "texture-meta" => texture_meta::run(ctx, sub_matches).await,
        _ => unreachable!(
            "clap is configured to require a subcommand, and they're all handled above"
        ),
    }
}

View file

@ -0,0 +1,117 @@
use std::path::PathBuf;
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::Context;
use color_eyre::Result;
use futures_util::StreamExt;
use sdk::Bundle;
use tokio::fs;
use crate::cmd::util::resolve_bundle_paths;
/// Builds the `texture-meta` subcommand definition.
///
/// Takes one or more bundle paths (directories are expanded to the files they
/// contain) and prints texture-related metadata fields as CSV, one row per
/// file variant.
pub(crate) fn command_definition() -> Command {
    Command::new("texture-meta")
        .about(
            // Fixed typo: "dependend" -> "dependent".
            "Iterates over the provided bundles and lists certain meta data.
Primarily intended to help spot patterns between dependent data fields and values.",
        )
        .arg(
            Arg::new("bundle")
                .required(true)
                .action(ArgAction::Append)
                .value_parser(value_parser!(PathBuf))
                .help(
                    "Path to the bundle(s) to read. If this points to a directory instead \
                    of a file, all files in that directory will be checked.",
                ),
        )
    // TODO: Maybe provide JSON and CSV
    // TODO: Maybe allow toggling certain fields
}
/// Reads a single bundle and prints one CSV row per file variant it contains.
///
/// Columns must stay in sync with the header printed by `run`.
#[tracing::instrument(skip(ctx))]
async fn handle_bundle(ctx: &sdk::Context, path: &PathBuf) -> Result<()> {
    // Parse the bundle from disk.
    let binary = fs::read(path).await?;
    let name = Bundle::get_name_from_path(ctx, path);
    let bundle = Bundle::from_binary(ctx, name, binary)?;

    // Directory where referenced data files are expected: `<game_dir>/bundle`
    // when a game directory is configured, otherwise the directory the bundle
    // itself was read from.
    let bundle_dir = match ctx.game_dir.as_deref() {
        Some(dir) => dir.join("bundle"),
        None => path.parent().map(|p| p.to_path_buf()).unwrap_or_default(),
    };

    for file in bundle.files().iter() {
        for (variant_index, variant) in file.variants().iter().enumerate() {
            let data_file_name = variant.data_file_name();

            // Size of the external data file, or 0 when the variant has none.
            let data_file_length = match data_file_name {
                Some(file_name) => {
                    let path = bundle_dir.join(file_name);
                    let meta = fs::metadata(&path).await.wrap_err_with(|| {
                        format!("Failed to open data file {}", path.display())
                    })?;
                    meta.len()
                }
                None => 0,
            };

            println!(
                "{},{},{},{},{:b},{},{},{:?},{},{:#010b}",
                bundle.name().display(),
                file.name(false, None),
                file.file_type().ext_name(),
                variant_index,
                variant.property(),
                variant.data().len(),
                variant.external(),
                data_file_name,
                data_file_length,
                variant.unknown_1(),
            );
        }
    }

    Ok(())
}
/// Entry point for the `texture-meta` subcommand.
///
/// Resolves all bundle paths from the command line, then processes up to ten
/// bundles concurrently, printing one CSV row per file variant. Per-bundle
/// failures are logged and do not abort the remaining bundles.
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
    let bundles = matches
        .get_many::<PathBuf>("bundle")
        .unwrap_or_default()
        .cloned();
    let paths = resolve_bundle_paths(bundles);
    let ctx = Arc::new(ctx);

    // CSV header; keep in sync with the `println!` in `handle_bundle`.
    println!(
        "Bundle Name,File Name,File Type,Variant,Property,Bundle Data Length,External,Data File,Data File Length,Unknown 1"
    );

    paths
        .for_each_concurrent(10, |p| {
            // Clone the Arc handle outside the future; the closure itself can
            // then return the future directly instead of wrapping it in a
            // redundant `async { async move { … }.await }` pair.
            let ctx = Arc::clone(&ctx);
            async move {
                if let Err(err) = handle_bundle(&ctx, &p)
                    .await
                    .wrap_err_with(|| format!("Failed to list contents of bundle {}", p.display()))
                {
                    tracing::error!("Failed to handle bundle: {}", format!("{:#}", err));
                }
            }
        })
        .await;

    Ok(())
}

View file

@ -12,6 +12,7 @@ use clap::value_parser;
use clap::{command, Arg};
use color_eyre::eyre;
use color_eyre::eyre::{Context, Result};
use sdk::murmur::Dictionary;
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::BufReader;
@ -21,6 +22,7 @@ mod cmd {
pub mod build;
pub mod bundle;
pub mod dictionary;
pub mod experiment;
pub mod migrate;
pub mod murmur;
pub mod new;
@ -56,6 +58,7 @@ async fn main() -> Result<()> {
.subcommand(cmd::build::command_definition())
.subcommand(cmd::bundle::command_definition())
.subcommand(cmd::dictionary::command_definition())
.subcommand(cmd::experiment::command_definition())
.subcommand(cmd::migrate::command_definition())
.subcommand(cmd::murmur::command_definition())
.subcommand(cmd::new::command_definition())
@ -96,8 +99,9 @@ async fn main() -> Result<()> {
let r = BufReader::new(f);
let mut ctx = ctx.write().await;
if let Err(err) = ctx.lookup.from_csv(r).await {
tracing::error!("{:#}", err);
match Dictionary::from_csv(r).await {
Ok(lookup) => ctx.lookup = Arc::new(lookup),
Err(err) => tracing::error!("{:#}", err),
}
})
};
@ -133,6 +137,7 @@ async fn main() -> Result<()> {
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
Some(("experiment", sub_matches)) => cmd::experiment::run(ctx, sub_matches).await?,
Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,

View file

@ -52,6 +52,7 @@ impl From<OodleLZ_CheckCRC> for bindings::OodleLZ_CheckCRC {
#[tracing::instrument(skip(data))]
pub fn decompress<I>(
data: I,
out_size: usize,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
@ -59,7 +60,7 @@ where
I: AsRef<[u8]>,
{
let data = data.as_ref();
let mut out = vec![0; CHUNK_SIZE];
let mut out = vec![0; out_size];
let verbosity = if tracing::enabled!(tracing::Level::INFO) {
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Minimal

View file

@ -44,10 +44,10 @@ impl<T: FromBinary> FromBinary for Vec<T> {
pub mod sync {
use std::ffi::CStr;
use std::io::{self, Read, Seek, SeekFrom};
use std::io::{self, Read, Seek, SeekFrom, Write};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use color_eyre::eyre::WrapErr;
use color_eyre::eyre::{self, WrapErr};
use color_eyre::{Help, Report, Result, SectionExt};
macro_rules! make_read {
@ -123,15 +123,17 @@ pub mod sync {
};
}
pub trait ReadExt: ReadBytesExt + Seek {
pub trait ReadExt: Read + Seek {
fn read_u8(&mut self) -> io::Result<u8> {
ReadBytesExt::read_u8(self)
}
make_read!(read_u16, read_u16_le, u16);
make_read!(read_u32, read_u32_le, u32);
make_read!(read_u64, read_u64_le, u64);
make_skip!(skip_u8, read_u8, u8);
make_skip!(skip_u16, read_u16, u16);
make_skip!(skip_u32, read_u32, u32);
// Implementation based on https://en.wikipedia.com/wiki/LEB128
@ -181,9 +183,17 @@ pub mod sync {
res
}
}
fn read_bool(&mut self) -> Result<bool> {
match ReadExt::read_u8(self)? {
0 => Ok(false),
1 => Ok(true),
v => eyre::bail!("Invalid value for boolean '{}'", v),
}
}
}
pub trait WriteExt: WriteBytesExt + Seek {
pub trait WriteExt: Write + Seek {
fn write_u8(&mut self, val: u8) -> io::Result<()> {
WriteBytesExt::write_u8(self, val)
}
@ -191,6 +201,10 @@ pub mod sync {
make_write!(write_u32, write_u32_le, u32);
make_write!(write_u64, write_u64_le, u64);
fn write_bool(&mut self, val: bool) -> io::Result<()> {
WriteBytesExt::write_u8(self, if val { 1 } else { 0 })
}
fn write_padding(&mut self) -> io::Result<usize> {
let pos = self.stream_position()?;
let size = 16 - (pos % 16) as usize;
@ -207,8 +221,8 @@ pub mod sync {
}
}
impl<R: ReadBytesExt + Seek + ?Sized> ReadExt for R {}
impl<W: WriteBytesExt + Seek + ?Sized> WriteExt for W {}
impl<R: Read + Seek + ?Sized> ReadExt for R {}
impl<W: Write + Seek + ?Sized> WriteExt for W {}
pub(crate) fn _read_up_to<R>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize>
where

View file

@ -15,8 +15,9 @@ use super::filetype::BundleFileType;
#[derive(Debug)]
struct BundleFileHeader {
variant: u32,
unknown_1: u8,
external: bool,
size: usize,
unknown_1: u8,
len_data_file_name: usize,
}
@ -24,7 +25,7 @@ pub struct BundleFileVariant {
property: u32,
data: Vec<u8>,
data_file_name: Option<String>,
// Seems to be related to whether there is a data path.
external: bool,
unknown_1: u8,
}
@ -38,6 +39,7 @@ impl BundleFileVariant {
property: 0,
data: Vec::new(),
data_file_name: None,
external: false,
unknown_1: 0,
}
}
@ -62,21 +64,30 @@ impl BundleFileVariant {
self.data_file_name.as_ref()
}
pub fn external(&self) -> bool {
self.external
}
pub fn unknown_1(&self) -> u8 {
self.unknown_1
}
#[tracing::instrument(skip_all)]
fn read_header<R>(r: &mut R) -> Result<BundleFileHeader>
where
R: Read + Seek,
{
let variant = r.read_u32()?;
let unknown_1 = r.read_u8()?;
let external = r.read_bool()?;
let size = r.read_u32()? as usize;
r.skip_u8(1)?;
let unknown_1 = r.read_u8()?;
let len_data_file_name = r.read_u32()? as usize;
Ok(BundleFileHeader {
size,
unknown_1,
external,
variant,
unknown_1,
len_data_file_name,
})
}
@ -87,7 +98,7 @@ impl BundleFileVariant {
W: Write + Seek,
{
w.write_u32(self.property)?;
w.write_u8(self.unknown_1)?;
w.write_bool(self.external)?;
let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0);
@ -105,6 +116,26 @@ impl BundleFileVariant {
}
}
impl std::fmt::Debug for BundleFileVariant {
    /// Custom `Debug` that truncates the data blob: blobs longer than five
    /// bytes are shown as their first five bytes plus a total length, so large
    /// variants don't flood logs.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let data_repr = if self.data.len() > 5 {
            format!("{:x?}.. ({} bytes)", &self.data[..5], &self.data.len())
        } else {
            format!("{:x?}", &self.data)
        };

        f.debug_struct("BundleFileVariant")
            .field("property", &self.property)
            .field("data", &data_repr)
            .field("data_file_name", &self.data_file_name)
            .field("external", &self.external)
            .finish()
    }
}
bitflags! {
#[derive(Default, Clone, Copy, Debug)]
pub struct Properties: u32 {
@ -188,6 +219,7 @@ impl BundleFile {
let s = r
.read_string_len(header.len_data_file_name)
.wrap_err("Failed to read data file name")?;
Some(s)
} else {
None
@ -200,6 +232,7 @@ impl BundleFile {
property: header.variant,
data,
data_file_name,
external: header.external,
unknown_1: header.unknown_1,
};
@ -227,7 +260,7 @@ impl BundleFile {
for variant in self.variants.iter() {
w.write_u32(variant.property())?;
w.write_u8(variant.unknown_1)?;
w.write_bool(variant.external)?;
let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0);
@ -261,6 +294,9 @@ impl BundleFile {
) -> Result<Self> {
match file_type {
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
BundleFileType::Texture => texture::compile(name, sjson, root)
.await
.wrap_err("Failed to compile Texture file"),
BundleFileType::Unknown(_) => {
eyre::bail!("Unknown file type. Cannot compile from SJSON");
}
@ -344,18 +380,16 @@ impl BundleFile {
Ok(files)
}
#[tracing::instrument(name = "File::decompiled", skip_all)]
#[tracing::instrument(
name = "File::decompiled",
skip_all,
fields(file = self.name(false, None), file_type = self.file_type().ext_name(), variants = self.variants.len())
)]
pub async fn decompiled(&self, ctx: &crate::Context) -> Result<Vec<UserFile>> {
let file_type = self.file_type();
if tracing::enabled!(tracing::Level::DEBUG) {
tracing::debug!(
name = self.name(true, None),
variants = self.variants.len(),
"Attempting to decompile"
);
}
// The `Strings` type handles all variants combined.
// For the other ones, each variant will be its own file.
if file_type == BundleFileType::Strings {
return strings::decompile(ctx, &self.variants);
}
@ -371,6 +405,7 @@ impl BundleFile {
let res = match file_type {
BundleFileType::Lua => lua::decompile(ctx, data).await,
BundleFileType::Package => package::decompile(ctx, name.clone(), data),
BundleFileType::Texture => texture::decompile(ctx, name.clone(), variant).await,
_ => {
tracing::debug!("Can't decompile, unknown file type");
Ok(vec![UserFile::with_name(data.to_vec(), name.clone())])

View file

@ -67,6 +67,8 @@ pub enum BundleFileType {
WwiseMetadata,
WwiseStream,
Xml,
Theme,
MissionThemes,
Unknown(Murmur64),
}
@ -136,6 +138,8 @@ impl BundleFileType {
BundleFileType::WwiseMetadata => String::from("wwise_metadata"),
BundleFileType::WwiseStream => String::from("wwise_stream"),
BundleFileType::Xml => String::from("xml"),
BundleFileType::Theme => String::from("theme"),
BundleFileType::MissionThemes => String::from("mission_themes"),
BundleFileType::Unknown(s) => format!("{s:016X}"),
}
@ -222,6 +226,8 @@ impl std::str::FromStr for BundleFileType {
"wwise_metadata" => BundleFileType::WwiseMetadata,
"wwise_stream" => BundleFileType::WwiseStream,
"xml" => BundleFileType::Xml,
"theme" => BundleFileType::Theme,
"mission_themes" => BundleFileType::MissionThemes,
s => eyre::bail!("Unknown type string '{}'", s),
};
@ -310,6 +316,8 @@ impl From<u64> for BundleFileType {
0xd50a8b7e1c82b110 => BundleFileType::WwiseMetadata,
0x504b55235d21440e => BundleFileType::WwiseStream,
0x76015845a6003765 => BundleFileType::Xml,
0x38BB9442048A7FBD => Self::Theme,
0x80F2DE893657F83A => Self::MissionThemes,
_ => BundleFileType::Unknown(Murmur64::from(hash)),
}
@ -381,6 +389,8 @@ impl From<BundleFileType> for u64 {
BundleFileType::WwiseMetadata => 0xd50a8b7e1c82b110,
BundleFileType::WwiseStream => 0x504b55235d21440e,
BundleFileType::Xml => 0x76015845a6003765,
BundleFileType::Theme => 0x38BB9442048A7FBD,
BundleFileType::MissionThemes => 0x80F2DE893657F83A,
BundleFileType::Unknown(hash) => hash.into(),
}

View file

@ -163,6 +163,7 @@ impl Bundle {
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle::decompress(
&compressed_buffer,
oodle::CHUNK_SIZE,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)
@ -360,6 +361,7 @@ where
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle::decompress(
&compressed_buffer,
oodle::CHUNK_SIZE,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)?;

View file

@ -1,8 +1,11 @@
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use std::{ffi::OsString, path::PathBuf};
use std::sync::Arc;
use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64};
#[derive(Clone)]
pub struct CmdLine {
cmd: OsString,
args: Vec<OsString>,
@ -52,7 +55,7 @@ impl From<&CmdLine> for Command {
}
pub struct Context {
pub lookup: Dictionary,
pub lookup: Arc<Dictionary>,
pub ljd: Option<CmdLine>,
pub revorb: Option<String>,
pub ww2ogg: Option<String>,
@ -62,7 +65,7 @@ pub struct Context {
impl Context {
pub fn new() -> Self {
Self {
lookup: Dictionary::new(),
lookup: Arc::new(Dictionary::new()),
ljd: None,
revorb: None,
ww2ogg: None,

View file

@ -1,3 +1,4 @@
pub mod lua;
pub mod package;
pub mod strings;
pub mod texture;

View file

@ -0,0 +1,387 @@
use std::io::{Cursor, Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use bitflags::bitflags;
use color_eyre::eyre::Context;
use color_eyre::{eyre, SectionExt};
use color_eyre::{Help, Result};
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
use serde::Deserialize;
use tokio::fs;
use crate::binary::sync::{ReadExt, WriteExt};
use crate::bundle::file::UserFile;
use crate::murmur::{IdString32, IdString64};
use crate::{BundleFile, BundleFileType, BundleFileVariant};
bitflags! {
    /// Flag bits stored in the texture header.
    ///
    /// Only bits observed in game data so far are named. Parsing fails on any
    /// unnamed bit (see `TextureHeader::from_binary`).
    #[derive(Clone, Copy, Debug)]
    struct TextureFlags: u32 {
        // presumably set when mipmap data lives in a separate `.stream`
        // file — TODO confirm against game data
        const STREAMABLE = 0b0000_0001;
        // Seen in data; meaning unknown.
        const UNKNOWN = 1 << 1;
        // assumes this marks sRGB color space — TODO confirm
        const SRGB = 1 << 8;
    }
}
/// Texture metadata header embedded in the texture's binary payload.
///
/// Read and written by `from_binary`/`to_binary`; all fields are u32 little
/// endian on disk.
#[derive(Clone, Debug)]
struct TextureHeader {
    flags: TextureFlags,
    // Related to mipmaps streamed from the `.stream` file; exact semantics
    // are still being reverse engineered (see `from_binary`).
    n_streamable_mipmaps: u32,
    width: u32,
    height: u32,
}
impl TextureHeader {
    /// Reads a texture header from `r`.
    ///
    /// On-disk layout (little endian): flags (u32), n_streamable_mipmaps (u32),
    /// width (u32), height (u32), followed by a 128-byte section that is not
    /// understood yet and is skipped.
    #[tracing::instrument(skip(r))]
    fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        // Reject any flag bit we haven't mapped yet so new data shapes are
        // noticed instead of silently misread.
        let flags = r.read_u32().and_then(|bits| {
            TextureFlags::from_bits(bits)
                .ok_or_else(|| eyre::eyre!("Unknown bits set in TextureFlags: {:032b}", bits))
        })?;
        let n_streamable_mipmaps = r.read_u32()?;
        let width = r.read_u32()?;
        let height = r.read_u32()?;
        // Don't quite know yet what this is, only that it is related to mipmaps.
        // The reference to "streamable mipmaps" comes from VT2, so far.
        // It might be related to the stream file, but all texture files have
        // it — TODO confirm.
        // The engine calculates some offset and then moves 68 bytes at that
        // offset to the beginning. Hence the split between `68` and `60` in
        // the length.
        r.seek(SeekFrom::Current(68 + 60))?;
        Ok(Self {
            flags,
            n_streamable_mipmaps,
            width,
            height,
        })
    }

    /// Writes the texture header to `w`.
    ///
    /// # Errors
    /// Fails unless `flags` is empty and `n_streamable_mipmaps == 0`; writing
    /// streamed textures is not supported yet.
    #[tracing::instrument(skip(w))]
    fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        eyre::ensure!(
            self.flags.is_empty() && self.n_streamable_mipmaps == 0,
            "Only textures are supported where `flags == 0` and `n_streamable_mipmaps == 0`."
        );

        w.write_u32(self.flags.bits())?;
        w.write_u32(self.n_streamable_mipmaps)?;
        w.write_u32(self.width)?;
        w.write_u32(self.height)?;

        // See `from_binary` about this unknown section.
        // NOTE(review): `from_binary` skips 68 + 60 = 128 bytes here, but this
        // writes 148 zero bytes — the two sides do not round-trip. Verify
        // which length is correct against real game files.
        let buf = [0; 148];
        w.write_all(&buf)?;

        Ok(())
    }
}
/// A parsed texture resource.
#[derive(Clone, Debug)]
struct Texture {
    header: TextureHeader,
    // Decompressed main data blob — presumably a DDS file, TODO confirm.
    data: Vec<u8>,
    // Concatenated decompressed chunks from the companion `.stream` file,
    // when one exists.
    stream: Option<Vec<u8>>,
    // The `output.category` from the texture definition; stored on disk as a
    // Murmur32 hash (see `to_binary`).
    category: IdString32,
}
impl Texture {
    /// Parses a texture from its in-bundle binary representation.
    ///
    /// `stream_r`, when given, reads the companion `.stream` file that holds
    /// Oodle-compressed chunks referenced by the chunk table in `r`.
    ///
    /// # Errors
    /// Fails on unknown compression types, decompression size mismatches, or
    /// when the chunk table requires a stream file that wasn't provided.
    #[tracing::instrument(skip(r, stream_r))]
    fn from_binary(mut r: impl Read + Seek, mut stream_r: Option<impl Read>) -> Result<Self> {
        // Looking at the executable in IDA, there is one other valid value: `2`.
        // If this ever comes up in the game data, I'll have to reverse engineer the
        // (de)compression algorithm through IDA.
        let compression_type = r.read_u32()?;
        eyre::ensure!(
            compression_type == 1,
            "Unknown compression type for texture '{}'",
            compression_type
        );

        let compressed_size = r.read_u32()? as usize;
        let uncompressed_size = r.read_u32()? as usize;

        // Decompress the main (non-streamed) data blob.
        let out_buf = {
            let mut comp_buf = vec![0; compressed_size];
            r.read_exact(&mut comp_buf)?;
            oodle::decompress(
                comp_buf,
                uncompressed_size,
                OodleLZ_FuzzSafe::No,
                OodleLZ_CheckCRC::No,
            )?
        };
        eyre::ensure!(
            out_buf.len() == uncompressed_size,
            "Length of decompressed buffer did not match expected value. Expected {}, got {}",
            uncompressed_size,
            out_buf.len()
        );

        // No idea what this number is supposed to mean.
        // Even the game engine just skips this one.
        r.skip_u32(0x43)?;

        let header = TextureHeader::from_binary(&mut r)?;

        // Presumably the size of the stream-chunk metadata that follows; a
        // non-zero value requires a stream file — TODO confirm exact meaning.
        let meta_size = r.read_u32()?;
        eyre::ensure!(
            meta_size == 0 || stream_r.is_some(),
            "Compression chunks and stream file don't match up. meta_size = {}, stream = {}",
            meta_size,
            stream_r.is_some()
        );

        let stream = if let Some(stream_r) = stream_r.as_mut() {
            // Number of compression chunks in the stream file
            let num_chunks = r.read_u32()?;
            r.skip_u16(0)?;
            // The chunk count is stored twice; treat a mismatch as corruption.
            {
                let num_chunks_1 = r.read_u16()? as u32;
                eyre::ensure!(
                    num_chunks == num_chunks_1,
                    "Chunk numbers don't match. first = {}, second = {}",
                    num_chunks,
                    num_chunks_1
                );
            }

            // Decompressed size of every stream chunk (64 KiB).
            const RAW_SIZE: usize = 0x10000;
            let mut stream_raw = Vec::new();
            let mut last = 0;

            // Chunk boundaries are stored as a running list of end offsets:
            // each chunk spans from the previous offset to its own.
            for i in 0..num_chunks {
                let offset_next = r.read_u32()? as usize;
                let size = offset_next - last;
                let span = tracing::info_span!(
                    "read stream chunk",
                    num_chunks,
                    i,
                    chunk_size = size,
                    offset = last
                );
                let _enter = span.enter();

                let mut buf = vec![0; size];
                stream_r
                    .read_exact(&mut buf)
                    .wrap_err("Failed to read chunk from stream file")?;

                let raw =
                    oodle::decompress(&buf, RAW_SIZE, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No)
                        .wrap_err("Failed to decompress stream chunk")?;
                stream_raw.extend_from_slice(&raw);

                last = offset_next;
            }

            Some(stream_raw)
        } else {
            None
        };

        let category = r.read_u32().map(IdString32::from)?;

        Ok(Self {
            category,
            header,
            data: out_buf,
            stream,
        })
    }

    /// Serializes the texture into its in-bundle binary representation.
    ///
    /// Only non-streamed textures are supported so far: the data is written as
    /// a single Oodle-compressed block and the metadata size as `0`.
    #[tracing::instrument(skip(w))]
    fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        // `1` is the only compression type we can read back (see `from_binary`).
        let compression_type = 1;
        w.write_u32(compression_type)?;

        let comp_buf = oodle::compress(&self.data).wrap_err("Failed to compress DDS data")?;
        w.write_u32(comp_buf.len() as u32)?;
        w.write_u32(self.data.len() as u32)?;
        w.write_all(&comp_buf)?;

        // Unknown field, which the engine seems to ignore.
        // All game files have the same value here, so we just mirror that.
        w.write_u32(0x43)?;

        self.header.to_binary(&mut w)?;

        // More data not fully figured out, yet.
        let meta_size = 0;
        w.write_u32(meta_size)?;

        w.write_u32(self.category.to_murmur32().into())?;

        Ok(())
    }

    /// Converts the texture into files to write to disk: the main data blob
    /// under `name`, plus `<name>.stream` when streamed data is present.
    #[tracing::instrument]
    fn to_user_files(&self, name: String) -> Vec<UserFile> {
        let mut files = Vec::with_capacity(2);

        // TODO: Don't clone.
        if let Some(stream) = &self.stream {
            let stream_name = PathBuf::from(&name).with_extension("stream");
            files.push(UserFile::with_name(
                stream.clone(),
                stream_name.display().to_string(),
            ));
        }

        files.push(UserFile::with_name(self.data.clone(), name));

        files
    }
}
/// Mirrors the SJSON texture definition file used as compile input.
#[derive(Clone, Debug, Deserialize)]
struct TextureDefinition {
    common: TextureDefinitionPlatform,
    // Stingray supports per-platform sections here, where you can create overrides with the same
    // values as in `common`. But since we only support PC, we don't need to implement
    // that.
}

/// Per-platform settings; only the `common` section is modeled.
#[derive(Clone, Debug, Deserialize)]
struct TextureDefinitionPlatform {
    input: TextureDefinitionInput,
    output: TextureDefinitionOutput,
}

#[derive(Clone, Debug, Deserialize)]
struct TextureDefinitionInput {
    // Path to the source image file, resolved relative to the project root
    // (see `compile`).
    filename: String,
}

#[derive(Clone, Debug, Deserialize)]
struct TextureDefinitionOutput {
    // Category name; hashed to Murmur32 when written to the bundle.
    category: String,
}
/// Parses texture binary data and turns it into user-facing files.
///
/// `stream_file_name`, when given, points at the companion `.stream` file on
/// disk, which is read fully before parsing.
#[tracing::instrument(skip(data), fields(buf_len = data.as_ref().len()))]
pub(crate) async fn decompile_data(
    name: String,
    data: impl AsRef<[u8]>,
    stream_file_name: Option<PathBuf>,
) -> Result<Vec<UserFile>> {
    // Load the stream file up front so the parser can read its chunks.
    let mut stream_r = match stream_file_name {
        Some(file_name) => {
            let stream_data = fs::read(&file_name).await.wrap_err_with(|| {
                format!("Failed to read stream file '{}'", file_name.display())
            })?;
            Some(Cursor::new(stream_data))
        }
        None => None,
    };

    let mut r = Cursor::new(data.as_ref());
    let texture = Texture::from_binary(&mut r, stream_r.as_mut())?;
    Ok(texture.to_user_files(name))
}
#[tracing::instrument(skip(ctx))]
pub(crate) async fn decompile(
ctx: &crate::Context,
name: String,
variant: &BundleFileVariant,
) -> Result<Vec<UserFile>> {
if !variant.external() {
tracing::debug!("Decompiling texture from bundle data");
let stream_file_name = variant.data_file_name().map(|name| match &ctx.game_dir {
Some(dir) => dir.join("bundle").join(name),
None => PathBuf::from("bundle").join(name),
});
return decompile_data(name, variant.data(), stream_file_name).await;
}
let Some(file_name) = variant.data_file_name() else {
eyre::bail!("Texture file has no data and no data file");
};
tracing::debug!("Decompiling texture from external file '{}'", file_name);
let path = match &ctx.game_dir {
Some(dir) => dir.join("bundle").join(file_name),
None => PathBuf::from("bundle").join(file_name),
};
tracing::trace!(path = %path.display());
let data = fs::read(&path)
.await
.wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
.with_suggestion(|| {
"Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
})?;
decompile_data(name, &data, None).await
}
/// Compiles a texture definition (SJSON) and its source DDS image into a
/// bundle file.
///
/// Only non-streamed textures without mipmaps are supported: flags and the
/// streamable-mipmap count are written as zero.
///
/// # Errors
/// Fails when the SJSON cannot be parsed, the referenced image cannot be
/// read, or the image is not a DDS file.
#[tracing::instrument(skip(sjson, name), fields(sjson_len = sjson.as_ref().len(), name = %name.display()))]
pub async fn compile(
    name: IdString64,
    sjson: impl AsRef<str>,
    root: impl AsRef<Path> + std::fmt::Debug,
) -> Result<BundleFile> {
    let definitions: TextureDefinition = serde_sjson::from_str(sjson.as_ref())
        .wrap_err("Failed to deserialize SJSON")
        .with_section(|| sjson.as_ref().to_string().header("SJSON:"))?;

    let dds = {
        let path = root.as_ref().join(definitions.common.input.filename);
        fs::read(&path)
            .await
            .wrap_err_with(|| format!("Failed to read DDS file '{}'", path.display()))?
    };

    // Extract the image dimensions from the DDS header.
    // Per the documented layout, the magic "DDS " (4 bytes) is followed by
    // DDS_HEADER: dwSize (u32), dwFlags (u32), dwHeight (u32), dwWidth (u32),
    // all little endian. The previous code seeked 5 bytes and read two u16s,
    // which matches no field boundary in that layout.
    let (width, height) = {
        let mut r = Cursor::new(&dds);

        let magic = r.read_u32()?;
        eyre::ensure!(
            magic == 0x20534444,
            "Invalid magic bytes for DDS. Expected 0x20534444, got {:08x}",
            magic
        );

        // Skip dwSize and dwFlags to reach the dimension fields.
        r.seek(SeekFrom::Current(8))?;
        let height = r.read_u32()?;
        let width = r.read_u32()?;

        (width, height)
    };

    let mut w = Cursor::new(Vec::new());

    let texture = Texture {
        header: TextureHeader {
            // As long as we can't handle mipmaps, these two need be `0`
            flags: TextureFlags::empty(),
            n_streamable_mipmaps: 0,
            width,
            height,
        },
        data: dds,
        stream: None,
        category: IdString32::String(definitions.common.output.category),
    };
    texture.to_binary(&mut w)?;

    let mut variant = BundleFileVariant::new();
    variant.set_data(w.into_inner());

    let mut file = BundleFile::new(name, BundleFileType::Texture);
    file.add_variant(variant);

    Ok(file)
}

View file

@ -48,6 +48,7 @@ struct Row {
group: HashGroup,
}
#[derive(Clone)]
pub struct Entry {
value: String,
long: Murmur64,
@ -73,6 +74,7 @@ impl Entry {
}
}
#[derive(Clone)]
pub struct Dictionary {
entries: Vec<Entry>,
}
@ -88,10 +90,12 @@ impl Dictionary {
Self { entries: vec![] }
}
pub async fn from_csv<R>(&mut self, r: R) -> Result<()>
pub async fn from_csv<R>(r: R) -> Result<Self>
where
R: AsyncRead + std::marker::Unpin + std::marker::Send,
{
let mut entries = vec![];
let r = AsyncDeserializer::from_reader(r);
let mut records = r.into_deserialize::<Row>();
@ -112,10 +116,10 @@ impl Dictionary {
group: record.group,
};
self.entries.push(entry);
entries.push(entry);
}
Ok(())
Ok(Self { entries })
}
pub async fn to_csv<W>(&self, w: W) -> Result<()>
@ -161,7 +165,7 @@ impl Dictionary {
self.entries.push(entry);
}
pub fn find(&mut self, value: &String, group: HashGroup) -> Option<&Entry> {
pub fn find(&self, value: &String, group: HashGroup) -> Option<&Entry> {
self.entries
.iter()
.find(|e| e.value == *value && e.group == group)