feat: Load the Oodle library directly

Since I have now found a way to obtain a version of the Oodle library
compiled for Linux, I don't need to bother with Wine anymore and can
instead load the library directly. This removes the need for the
extra utility completely.

I still have to load the library at runtime, though, as Rust only
supports static linking and I probably don't have a license to do
that with Oodle.
Lucas Schwiderski 2022-11-13 22:25:22 +01:00
parent 82e4bcb9ed
commit 18af90ec1f
Signed by: lucas
GPG key ID: AA12679AAA6DF4D8
16 changed files with 753 additions and 234 deletions
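
The core of the change, as a minimal standalone sketch: the Linux build of the Oodle shared
object is opened at runtime and its exports are resolved by name. This assumes only the
libloading crate added to Cargo.toml below; the library and symbol names are the ones the new
Oodle wrapper in src/oodle/mod.rs resolves, and the symbol is only looked up here, not called.

// Sketch only: load the Oodle shared object at runtime and resolve a symbol by name.
use std::ffi::{c_char, c_int};

use libloading::{Library, Symbol};

// Mirrors the `t_fp_OodleCore_Plugin_Printf` alias defined in src/oodle/types.rs.
#[allow(non_camel_case_types)]
type t_fp_OodleCore_Plugin_Printf =
    extern "C" fn(level: c_int, file: *const c_char, line: c_int, fmt: *const c_char);

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // No link-time dependency on Oodle; the .so is opened when the program runs.
    let lib = unsafe { Library::new("liboo2corelinux64.so")? };

    // Exported functions are looked up by their NUL-terminated names.
    let printf: Symbol<t_fp_OodleCore_Plugin_Printf> =
        unsafe { lib.get(b"OodleCore_Plugin_Printf_Verbose\0")? };

    println!("Resolved OodleCore_Plugin_Printf_Verbose at {:?}", *printf);
    Ok(())
}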


@@ -1 +1,3 @@
 /target
+liboo2corelinux64.so
+oo2core_8_win64.dll

.gitignore vendored

@@ -1,3 +1,5 @@
 /target
 /data
 .envrc
+liboo2corelinux64.so
+oo2core_8_win64.dll

Cargo.lock generated

@@ -187,6 +187,7 @@ dependencies = [
 "futures",
 "futures-util",
 "glob",
+"libloading",
 "nanorand",
 "pin-project-lite",
 "serde",
@@ -366,6 +367,16 @@ version = "0.2.137"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"

+[[package]]
+name = "libloading"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
+dependencies = [
+ "cfg-if",
+ "winapi",
+]
+
 [[package]]
 name = "log"
 version = "0.4.17"


@@ -10,6 +10,7 @@ csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
 futures = "0.3.25"
 futures-util = "0.3.24"
 glob = "0.3.0"
+libloading = "0.7.4"
 nanorand = "0.7.0"
 pin-project-lite = "0.2.9"
 serde = { version = "1.0.147", features = ["derive"] }


@@ -2,7 +2,7 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;

 use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
-use color_eyre::eyre::{self, Context, Result};
+use color_eyre::eyre::{self, Result};
 use color_eyre::{Help, SectionExt};
 use dtmt::decompress;


@@ -4,10 +4,18 @@ use clap::{Arg, ArgMatches, Command};
 use color_eyre::eyre::Result;
 use tokio::sync::RwLock;

+use dtmt::Oodle;
+
 mod decompress;
 mod extract;
 mod list;

+#[cfg(target_os = "windows")]
+const OODLE_LIB_NAME: &str = "oo2core_8_win64";
+#[cfg(target_os = "linux")]
+const OODLE_LIB_NAME: &str = "liboo2corelinux64.so";
+
 pub(crate) fn command_definition() -> Command {
     Command::new("bundle")
         .subcommand_required(true)
@@ -15,11 +23,12 @@ pub(crate) fn command_definition() -> Command {
         .arg(
             Arg::new("oodle")
                 .long("oodle")
-                .default_value("oodle-cli")
+                .default_value(OODLE_LIB_NAME)
                 .help(
-                    "Name of or path to the Oodle decompression helper. \
-                     The helper is a small executable that wraps the Oodle library \
-                     with a CLI.",
+                    "The oodle library to load. This may either be:\n\
+                     - A library name that will be searched for in the system's default paths.\n\
+                     - A file path relative to the current working directory.\n\
+                     - An absolute file path.",
                 ),
         )
         .subcommand(decompress::command_definition())
@@ -29,12 +38,10 @@ pub(crate) fn command_definition() -> Command {
 #[tracing::instrument(skip_all)]
 pub(crate) async fn run(ctx: Arc<RwLock<dtmt::Context>>, matches: &ArgMatches) -> Result<()> {
-    let oodle_bin = matches
-        .get_one::<String>("oodle")
-        .expect("no default value for 'oodle' parameter");
-
-    {
+    if let Some(name) = matches.get_one::<String>("oodle") {
+        let oodle = Oodle::new(name)?;
+
         let mut ctx = ctx.write().await;
-        ctx.oodle = Some(oodle_bin.clone());
+        ctx.oodle = Some(oodle);
     }

     match matches.subcommand() {


@@ -4,7 +4,6 @@ use std::sync::Arc;
 use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
 use color_eyre::eyre::{Context, Result};
 use color_eyre::{Help, SectionExt};
-use dtmt::lookup_hash;
 use dtmt::murmur::HashGroup;
 use tokio::fs::File;
 use tokio::io::{AsyncBufReadExt, BufReader};
@@ -71,8 +70,9 @@ pub(crate) async fn run(ctx: Arc<RwLock<dtmt::Context>>, matches: &ArgMatches) -
         .get_many::<HashGroup>("group")
         .unwrap_or_default();

+    let ctx = ctx.read().await;
+
     for group in groups {
-        let value = lookup_hash(ctx.clone(), *hash, *group).await;
+        let value = ctx.lookup_hash(*hash, *group);
         println!("{}", value);
     }


@@ -6,7 +6,7 @@ use std::sync::Arc;
 use clap::parser::ValueSource;
 use clap::value_parser;
-use clap::{command, Arg, ArgAction};
+use clap::{command, Arg};
 use color_eyre::eyre::{Context, Result};
 use color_eyre::{Help, SectionExt};
 use tokio::fs::File;
@@ -33,16 +33,6 @@ async fn main() -> Result<()> {
     let matches = command!()
         .subcommand_required(true)
-        .arg(
-            Arg::new("verbose")
-                .long("verbose")
-                .short('v')
-                .action(ArgAction::Count)
-                .help(
-                    "Increase verbosity of informational and debugging output. \
-                     May be specified multiple times.",
-                ),
-        )
         .arg(
             Arg::new("dictionary")
                 .help(


@@ -1,10 +1,14 @@
+use std::io::SeekFrom;
+
 use color_eyre::eyre::WrapErr;
 use color_eyre::{Help, Result, SectionExt};
-use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt};
+use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
+
+// TODO: Add versions for each write and read function that can work without `AsyncSeek`
 macro_rules! make_read {
     ($func:ident, $op:ident, $type:ty) => {
-        pub(crate) async fn $func<R>(mut r: R) -> Result<$type>
+        pub(crate) async fn $func<R>(r: &mut R) -> Result<$type>
         where
             R: AsyncRead + AsyncSeek + std::marker::Unpin,
         {
@@ -13,15 +17,42 @@ macro_rules! make_read {
                 .await
                 .wrap_err(concat!("failed to read ", stringify!($type)));

-            if res.is_err() {
-                let pos = r.stream_position().await;
-                if pos.is_ok() {
-                    res.with_section(|| {
-                        format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
-                    })
-                } else {
-                    res
-                }
+            if res.is_ok() {
+                return res;
+            }
+
+            let pos = r.stream_position().await;
+            if pos.is_ok() {
+                res.with_section(|| {
+                    format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
+                })
+            } else {
+                res
+            }
+        }
+    };
+}
+
+macro_rules! make_write {
+    ($func:ident, $op:ident, $type:ty) => {
+        pub(crate) async fn $func<W>(r: &mut W, val: $type) -> Result<()>
+        where
+            W: AsyncWrite + AsyncSeek + std::marker::Unpin,
+        {
+            let res = r
+                .$op(val)
+                .await
+                .wrap_err(concat!("failed to write ", stringify!($type)));
+
+            if res.is_ok() {
+                return res;
+            }
+
+            let pos = r.stream_position().await;
+            if pos.is_ok() {
+                res.with_section(|| {
+                    format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
+                })
             } else {
                 res
             }
@@ -31,11 +62,11 @@ macro_rules! make_read {
 macro_rules! make_skip {
     ($func:ident, $read:ident, $op:ident, $type:ty) => {
-        pub(crate) async fn $func<R>(mut r: R, cmp: $type) -> Result<()>
+        pub(crate) async fn $func<R>(r: &mut R, cmp: $type) -> Result<()>
         where
             R: AsyncRead + AsyncSeek + std::marker::Unpin,
         {
-            let val = $read(&mut r).await?;
+            let val = $read(r).await?;

             if val != cmp {
                 let pos = r.stream_position().await.unwrap_or(u64::MAX);
@@ -57,5 +88,65 @@ make_read!(read_u8, read_u8, u8);
 make_read!(read_u32, read_u32_le, u32);
 make_read!(read_u64, read_u64_le, u64);

+make_write!(write_u8, write_u8, u8);
+make_write!(write_u32, write_u32_le, u32);
+make_write!(write_u64, write_u64_le, u64);
+
 make_skip!(skip_u8, read_u8, read_u8, u8);
 make_skip!(skip_u32, read_u32, read_u32_le, u32);
+
+pub(crate) async fn skip_padding<S>(stream: &mut S) -> Result<()>
+where
+    S: AsyncSeek + std::marker::Unpin,
+{
+    let pos = stream.stream_position().await?;
+    let padding_size = 16 - (pos % 16);
+
+    if padding_size < 16 && padding_size > 0 {
+        tracing::trace!(pos, padding_size, "Skipping padding");
+        stream.seek(SeekFrom::Current(padding_size as i64)).await?;
+    } else {
+        tracing::trace!(pos, padding_size, "No padding to skip");
+    }
+
+    Ok(())
+}
+
+pub(crate) async fn read_up_to<R>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize>
+where
+    R: AsyncRead + AsyncSeek + std::marker::Unpin,
+{
+    let pos = r.stream_position().await?;
+
+    let err = {
+        match r.read_exact(buf).await {
+            Ok(_) => return Ok(buf.len()),
+            Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => {
+                r.seek(SeekFrom::Start(pos)).await?;
+                match r.read_to_end(buf).await {
+                    Ok(read) => return Ok(read),
+                    Err(err) => err,
+                }
+            }
+            Err(err) => err,
+        }
+    };
+
+    Err(err).with_section(|| format!("{pos:#X} ({pos})", pos = pos).header("Position: "))
+}
+
+pub(crate) async fn write_padding<W>(w: &mut W) -> Result<usize>
+where
+    W: AsyncWrite + AsyncSeek + std::marker::Unpin,
+{
+    let pos = w.stream_position().await?;
+    let size = 16 - (pos % 16) as usize;
+
+    if size > 0 && size < 16 {
+        let buf = vec![0; size];
+        w.write_all(&buf).await?;
+
+        Ok(size)
+    } else {
+        Ok(0)
+    }
+}


@@ -3,7 +3,7 @@ use std::sync::Arc;
 use color_eyre::{Help, Result, SectionExt};
 use futures::future::join_all;
-use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek};
+use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncWrite, AsyncWriteExt};
 use tokio::sync::RwLock;

 use crate::binary::*;
@@ -157,6 +157,10 @@ impl BundleFileType {
             _ => self.ext_name(),
         }
     }
+
+    pub fn hash(&self) -> u64 {
+        *Murmur64::from(*self).deref()
+    }
 }

 impl From<u64> for BundleFileType {
@@ -313,7 +317,7 @@ struct BundleFileHeader {
 impl BundleFileHeader {
     #[tracing::instrument(name = "FileHeader::read", skip_all)]
-    async fn read<R>(mut r: R) -> Result<Self>
+    async fn read<R>(r: &mut R) -> Result<Self>
     where
         R: AsyncRead + AsyncSeek + std::marker::Unpin,
     {
@@ -321,15 +325,15 @@ impl BundleFileHeader {
         // identifier between the different file entries.
         // Back in VT2 days, these different 'files' were used to separate
         // versions, e.g. different languages for the same `.strings` file.
-        skip_u32(&mut r, 0).await?;
-        skip_u32(&mut r, 0).await?;
-        skip_u32(&mut r, 0).await?;
-        let size_1 = read_u32(&mut r).await? as usize;
-        skip_u8(&mut r, 1).await?;
-        let size_2 = read_u32(&mut r).await? as usize;
+        skip_u32(r, 0).await?;
+        skip_u32(r, 0).await?;
+        skip_u32(r, 0).await?;
+        let size_1 = read_u32(r).await? as usize;
+        skip_u8(r, 1).await?;
+        let size_2 = read_u32(r).await? as usize;

         tracing::debug!(size_1, size_2);
@@ -368,15 +372,15 @@ pub struct BundleFile {
 impl BundleFile {
     #[tracing::instrument(name = "File::read", skip_all)]
-    pub async fn read<R>(ctx: Arc<RwLock<crate::Context>>, mut r: R) -> Result<Self>
+    pub async fn read<R>(ctx: Arc<RwLock<crate::Context>>, r: &mut R) -> Result<Self>
     where
         R: AsyncRead + AsyncSeek + std::marker::Unpin,
     {
-        let file_type = BundleFileType::from(read_u64(&mut r).await?);
-        let hash = Murmur64::from(read_u64(&mut r).await?);
+        let file_type = BundleFileType::from(read_u64(r).await?);
+        let hash = Murmur64::from(read_u64(r).await?);
         let name = lookup_hash(ctx, hash, HashGroup::Filename).await;

-        let header_count = read_u8(&mut r)
+        let header_count = read_u8(r)
             .await
             .with_section(|| format!("{}.{}", name, file_type.ext_name()).header("File:"))?;
         let header_count = header_count as usize;
@@ -384,7 +388,7 @@ impl BundleFile {
         let mut headers = Vec::with_capacity(header_count);
         for _ in 0..header_count {
-            let header = BundleFileHeader::read(&mut r)
+            let header = BundleFileHeader::read(r)
                 .await
                 .with_section(|| format!("{}.{}", name, file_type.ext_name()).header("File:"))?;
             headers.push(header);
@@ -409,6 +413,40 @@ impl BundleFile {
         })
     }

+    #[tracing::instrument(name = "File::write", skip_all)]
+    pub async fn write<W>(&self, _ctx: Arc<RwLock<crate::Context>>, w: &mut W) -> Result<()>
+    where
+        W: AsyncWrite + AsyncSeek + std::marker::Unpin,
+    {
+        write_u64(w, self.file_type.hash()).await?;
+        write_u64(w, *self.hash).await?;
+
+        let header_count = self.variants.len();
+        write_u8(w, header_count as u8).await?;
+
+        for variant in self.variants.iter() {
+            // TODO: Unknown what these are
+            write_u32(w, 0).await?;
+            write_u32(w, 0).await?;
+            write_u32(w, 0).await?;
+
+            write_u32(w, variant.data.len() as u32).await?;
+
+            // TODO: Unknown what this is
+            write_u8(w, 1).await?;
+
+            // TODO: The previous size value and this one are somehow connected,
+            // but so far it is unknown how
+            write_u32(w, 0).await?;
+        }
+
+        for variant in self.variants.iter() {
+            w.write_all(&variant.data).await?;
+        }
+
+        Ok(())
+    }
+
     pub fn base_name(&self) -> &String {
         &self.name
     }


@@ -3,22 +3,24 @@ use std::path::Path;
 use std::sync::Arc;

 use color_eyre::eyre::{self, Context, Result};
-use color_eyre::{Help, SectionExt};
+use color_eyre::{Help, Report, SectionExt};
 use tokio::fs;
-use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
+use tokio::io::{
+    AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt, BufReader,
+};
 use tokio::sync::RwLock;
 use tracing::Instrument;

 use crate::binary::*;
-use crate::context::lookup_hash;
 use crate::murmur::{HashGroup, Murmur64};
-use crate::oodle;
+use crate::oodle::types::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
+use crate::oodle::CHUNK_SIZE;

 pub(crate) mod file;

 use file::BundleFile;

-#[derive(Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 enum BundleFormat {
     Darktide,
 }
@@ -34,15 +36,23 @@ impl TryFrom<u32> for BundleFormat {
     }
 }

+impl From<BundleFormat> for u32 {
+    fn from(value: BundleFormat) -> Self {
+        match value {
+            BundleFormat::Darktide => 0xF0000007,
+        }
+    }
+}
+
 struct EntryHeader {
-    _name_hash: u64,
-    _extension_hash: u64,
-    _flags: u32,
+    name_hash: u64,
+    extension_hash: u64,
+    flags: u32,
 }

 impl EntryHeader {
     #[tracing::instrument(name = "FileMeta::read", skip_all)]
-    async fn read<R>(mut r: R) -> Result<Self>
+    async fn read<R>(r: &mut R) -> Result<Self>
     where
         R: AsyncRead + AsyncSeek + std::marker::Unpin,
     {
@@ -64,18 +74,32 @@ impl EntryHeader {
         }

         Ok(Self {
-            _name_hash: name_hash,
-            _extension_hash: extension_hash,
-            _flags: flags,
+            name_hash,
+            extension_hash,
+            flags,
         })
     }
+
+    #[tracing::instrument(name = "FileMeta::write", skip_all)]
+    async fn write<W>(&self, w: &mut W) -> Result<()>
+    where
+        W: AsyncWrite + AsyncSeek + std::marker::Unpin,
+    {
+        write_u64(w, self.extension_hash).await?;
+        write_u64(w, self.name_hash).await?;
+        write_u32(w, self.flags).await?;
+
+        Ok(())
+    }
 }

 pub struct Bundle {
-    _format: BundleFormat,
+    format: BundleFormat,
     _headers: Vec<EntryHeader>,
     files: Vec<BundleFile>,
     name: String,
+    unknown_1: u32,
+    unknown_header: [u8; 256],
 }

 impl Bundle {
@@ -84,20 +108,24 @@ impl Bundle {
     where
         P: AsRef<Path> + std::fmt::Debug,
     {
+        // We need to know the bundle name, so it's easier to be given the
+        // file path and open the File internally, than to be given a generic
+        // `AsyncRead` and the bundle name separately.
         let path = path.as_ref();
         let bundle_name = if let Some(name) = path.file_name() {
             let hash = Murmur64::try_from(name.to_string_lossy().as_ref())?;
-            lookup_hash(ctx.clone(), hash, HashGroup::Filename).await
+            ctx.read().await.lookup_hash(hash, HashGroup::Filename)
         } else {
-            return Err(eyre::eyre!("Invalid path to bundle file"))
-                .with_section(|| path.display().to_string().header("Path:"));
+            eyre::bail!("Invalid path to bundle file: {}", path.display());
         };

-        let mut r = fs::File::open(path)
+        let f = fs::File::open(path)
             .await
             .wrap_err("Failed to open bundle file")
             .with_section(|| path.display().to_string().header("Path"))?;

+        let mut r = BufReader::new(f);
+
         let format = read_u32(&mut r)
             .await
             .wrap_err("failed to read from file")
@@ -107,14 +135,19 @@ impl Bundle {
             return Err(eyre::eyre!("Unknown bundle format: {:?}", format));
         }

-        // Skip unknown 4 bytes
-        r.seek(SeekFrom::Current(4)).await?;
+        let unknown_1 = read_u32(&mut r).await?;
+        if unknown_1 != 0x3 {
+            tracing::warn!(
+                "Unexpected value for unknown header. Expected {:#08X}, got {:#08X}",
+                0x3,
+                unknown_1
+            );
+        }

         let num_entries = read_u32(&mut r).await? as usize;

-        // Skip unknown 256 bytes. I believe this data is somewhat related to packaging and the
-        // `.package` files
-        r.seek(SeekFrom::Current(256)).await?;
+        let mut unknown_header = [0; 256];
+        r.read_exact(&mut unknown_header).await?;

         let mut meta = Vec::with_capacity(num_entries);
         for _ in 0..num_entries {
@@ -122,33 +155,56 @@ impl Bundle {
         }

         let num_chunks = read_u32(&mut r).await? as usize;
         tracing::debug!(num_chunks);

         let mut chunk_sizes = Vec::with_capacity(num_chunks);
         for _ in 0..num_chunks {
             chunk_sizes.push(read_u32(&mut r).await? as usize);
         }

-        let unpacked_size = {
-            let size_1 = read_u32(&mut r).await? as usize;
-
-            // Skip unknown 4 bytes
-            r.seek(SeekFrom::Current(4)).await?;
-
-            // NOTE: Unknown why this sometimes needs a second value.
-            // Also unknown if there is a different part in the data that actually
-            // determines whether this second value exists.
-            if size_1 == 0x0 {
-                let size_2 = read_u32(&mut r).await? as usize;
-                // Skip unknown 4 bytes
-                r.seek(SeekFrom::Current(4)).await?;
-                size_2
-            } else {
-                size_1
-            }
-        };
-
-        let mut decompressed = Vec::new();
-        oodle::decompress(ctx.clone(), r, &mut decompressed, num_chunks).await?;
+        skip_padding(&mut r).await?;
+
+        let unpacked_size = read_u32(&mut r).await? as usize;
+        // Skip 4 unknown bytes
+        r.seek(SeekFrom::Current(4)).await?;
+
+        let mut decompressed = Vec::with_capacity(unpacked_size);
+
+        for (chunk_index, chunk_size) in chunk_sizes.into_iter().enumerate() {
+            let span = tracing::debug_span!("Decompressing chunk", chunk_index, chunk_size);
+
+            async {
+                let inner_chunk_size = read_u32(&mut r).await? as usize;
+
+                if inner_chunk_size != chunk_size {
+                    eyre::bail!(
+                        "Chunk sizes do not match. Expected {}, got {}",
+                        inner_chunk_size,
+                        chunk_size,
+                    );
+                }
+
+                skip_padding(&mut r).await?;
+
+                let mut compressed_buffer = vec![0u8; chunk_size];
+                r.read_exact(&mut compressed_buffer).await?;
+
+                // TODO: Optimize to not reallocate?
+                let ctx = ctx.read().await;
+                let oodle_lib = ctx.oodle.as_ref().unwrap();
+
+                let mut raw_buffer = oodle_lib.decompress(
+                    &compressed_buffer,
+                    OodleLZ_FuzzSafe::No,
+                    OodleLZ_CheckCRC::No,
+                )?;
+
+                decompressed.append(&mut raw_buffer);
+
+                Ok(())
+            }
+            .instrument(span)
+            .await?;
+        }

         if decompressed.len() < unpacked_size {
             return Err(eyre::eyre!(
@@ -158,9 +214,6 @@ impl Bundle {
             .with_section(|| unpacked_size.to_string().header("Expected:"));
         }

-        // Truncate to the actual data size
-        decompressed.resize(unpacked_size, 0);
-
         let mut r = Cursor::new(decompressed);
         let mut files = Vec::with_capacity(num_entries);
         for i in 0..num_entries {
@@ -173,12 +226,61 @@ impl Bundle {

         Ok(Self {
             name: bundle_name,
-            _format: format,
+            format,
             _headers: meta,
             files,
+            unknown_1,
+            unknown_header,
         })
     }

+    #[tracing::instrument(name = "Bundle::write", skip_all)]
+    pub async fn write<W>(&self, ctx: Arc<RwLock<crate::Context>>, w: &mut W) -> Result<()>
+    where
+        W: AsyncWrite + AsyncSeek + std::marker::Unpin,
+    {
+        write_u32(w, self.format.into()).await?;
+        write_u32(w, self.unknown_1).await?;
+        write_u32(w, self.files.len() as u32).await?;
+        w.write_all(&self.unknown_header).await?;
+
+        for meta in self._headers.iter() {
+            meta.write(w).await?;
+        }
+
+        let unpacked_data = {
+            let span = tracing::trace_span!("Write bundle files");
+
+            let buf = Vec::new();
+            let mut c = Cursor::new(buf);
+
+            async {
+                for file in self.files.iter() {
+                    file.write(ctx.clone(), &mut c).await?;
+                }
+
+                Ok::<(), Report>(())
+            }
+            .instrument(span)
+            .await?;
+
+            c.into_inner()
+        };
+
+        let chunks = unpacked_data.chunks(CHUNK_SIZE);
+
+        let ctx = ctx.read().await;
+        let oodle_lib = ctx.oodle.as_ref().unwrap();
+
+        for chunk in chunks {
+            let compressed = oodle_lib.compress(chunk)?;
+            write_u32(w, compressed.len() as u32).await?;
+            write_padding(w).await?;
+            w.write_all(&compressed).await?;
+        }
+
+        todo!("compress data and count chunks");
+    }
+
     pub fn name(&self) -> &String {
         &self.name
     }
@@ -191,7 +293,7 @@ impl Bundle {
 /// Returns a decompressed version of the bundle data.
 /// This is mainly useful for debugging purposes or
 /// to manullay inspect the raw data.
-#[tracing::instrument(skip(ctx, r, w))]
+#[tracing::instrument(skip_all)]
 pub async fn decompress<R, W>(ctx: Arc<RwLock<crate::Context>>, mut r: R, mut w: W) -> Result<()>
 where
     R: AsyncRead + AsyncSeek + std::marker::Unpin,
@@ -200,13 +302,14 @@ where
     let format = read_u32(&mut r).await.and_then(BundleFormat::try_from)?;

     if format != BundleFormat::Darktide {
-        return Err(eyre::eyre!("Unknown bundle format: {:?}", format));
+        eyre::bail!("Unknown bundle format: {:?}", format);
     }

     // Skip unknown 4 bytes
     r.seek(SeekFrom::Current(4)).await?;

     let num_entries = read_u32(&mut r).await? as i64;
+    tracing::debug!(num_entries);

     // Skip unknown 256 bytes
     r.seek(SeekFrom::Current(256)).await?;
@@ -214,31 +317,71 @@ where
     r.seek(SeekFrom::Current(num_entries * 20)).await?;

     let num_chunks = read_u32(&mut r).await? as usize;
+    tracing::debug!(num_chunks);

     // Skip chunk sizes
     r.seek(SeekFrom::Current(num_chunks as i64 * 4)).await?;

-    {
-        let size_1 = read_u32(&mut r).await?;
-        // Skip unknown 4 bytes
-        r.seek(SeekFrom::Current(4)).await?;
-        // NOTE: Unknown why there sometimes is a second value.
-        if size_1 == 0x0 {
-            // Skip unknown 4 bytes
-            r.seek(SeekFrom::Current(8)).await?;
-        }
-    }
+    skip_padding(&mut r).await?;
+
+    let mut unpacked_size = read_u32(&mut r).await? as usize;
+    tracing::debug!(unpacked_size);
+
+    // Skip unknown 4 bytes
+    r.seek(SeekFrom::Current(4)).await?;

     let chunks_start = r.stream_position().await?;
+    tracing::trace!(chunks_start);

+    // Pipe the header into the output
     {
-        // Pipe the header into the output
-        r.seek(SeekFrom::Start(0)).await?;
-        let mut buf = vec![0; chunks_start as usize];
-        r.read_exact(&mut buf).await?;
-        w.write_all(&buf).await?;
+        let span = tracing::debug_span!("Pipe file header", chunks_start);
+        async {
+            r.seek(SeekFrom::Start(0)).await?;
+
+            let mut buf = vec![0; chunks_start as usize];
+            r.read_exact(&mut buf).await?;
+            w.write_all(&buf).await?;
+
+            r.seek(SeekFrom::Start(chunks_start)).await
+        }
+        .instrument(span)
+        .await?;
     }

-    oodle::decompress(ctx, r, w, num_chunks).await
+    for chunk_index in 0..num_chunks {
+        let span = tracing::debug_span!("Decompressing chunk", chunk_index);
+        async {
+            let chunk_size = read_u32(&mut r).await? as usize;
+            tracing::trace!(chunk_size);
+
+            skip_padding(&mut r).await?;
+
+            let mut compressed_buffer = vec![0u8; chunk_size];
+            r.read_exact(&mut compressed_buffer).await?;
+
+            let ctx = ctx.read().await;
+            let oodle_lib = ctx.oodle.as_ref().unwrap();
+
+            // TODO: Optimize to not reallocate?
+            let mut raw_buffer = oodle_lib.decompress(
+                &compressed_buffer,
+                OodleLZ_FuzzSafe::No,
+                OodleLZ_CheckCRC::No,
+            )?;
+
+            if unpacked_size < CHUNK_SIZE {
+                raw_buffer.resize(unpacked_size, 0);
+            } else {
+                unpacked_size -= CHUNK_SIZE;
+            }
+
+            w.write_all(&raw_buffer).await?;
+
+            Ok::<(), color_eyre::Report>(())
+        }
+        .instrument(span)
+        .await?;
+    }
+
+    Ok(())
 }


@@ -3,10 +3,11 @@ use std::sync::Arc;
 use tokio::sync::RwLock;

 use crate::murmur::{Dictionary, HashGroup, Murmur32, Murmur64};
+use crate::oodle::Oodle;

 pub struct Context {
     pub lookup: Dictionary,
-    pub oodle: Option<String>,
+    pub oodle: Option<Oodle>,
     pub ljd: Option<String>,
     pub revorb: Option<String>,
     pub ww2ogg: Option<String>,
@@ -22,6 +23,34 @@ impl Context {
             ww2ogg: None,
         }
     }
+
+    pub fn lookup_hash<M>(&self, hash: M, group: HashGroup) -> String
+    where
+        M: Into<Murmur64>,
+    {
+        let hash = hash.into();
+        if let Some(s) = self.lookup.lookup(hash, group) {
+            tracing::debug!(%hash, string = s, "Murmur64 lookup successful");
+            s.to_owned()
+        } else {
+            tracing::debug!(%hash, "Murmur64 lookup failed");
+            format!("{:016X}", hash)
+        }
+    }
+
+    pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> String
+    where
+        M: Into<Murmur32>,
+    {
+        let hash = hash.into();
+        if let Some(s) = self.lookup.lookup_short(hash, group) {
+            tracing::debug!(%hash, string = s, "Murmur32 lookup successful");
+            s.to_owned()
+        } else {
+            tracing::debug!(%hash, "Murmur32 lookup failed");
+            format!("{:08X}", hash)
+        }
+    }
 }

 impl Default for Context {
@@ -43,17 +72,3 @@ where
         format!("{:016X}", hash)
     }
 }
-
-pub async fn lookup_hash_short<M>(ctx: Arc<RwLock<Context>>, hash: M, group: HashGroup) -> String
-where
-    M: Into<Murmur32>,
-{
-    let hash = hash.into();
-    if let Some(s) = ctx.read().await.lookup.lookup_short(hash, group) {
-        tracing::debug!(%hash, string = s, "Murmur32 lookup successful");
-        s.to_owned()
-    } else {
-        tracing::debug!(%hash, "Murmur32 lookup failed");
-        format!("{:08X}", hash)
-    }
-}


@@ -1,3 +1,5 @@
+#![feature(c_size_t)]
+
 mod binary;
 mod bundle;
 mod context;
@@ -7,6 +9,5 @@ mod oodle;
 pub use bundle::decompress;
 pub use bundle::Bundle;
-pub use context::lookup_hash;
-pub use context::lookup_hash_short;
 pub use context::Context;
+pub use oodle::Oodle;


@@ -1,106 +0,0 @@
use std::process::Stdio;
use std::sync::Arc;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Help, Result, SectionExt};
use nanorand::Rng;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, BufReader, BufWriter};
use tokio::process::Command;
use tokio::sync::RwLock;
use tokio::{fs, io};
use tracing::Instrument;
#[tracing::instrument(level = "debug", skip(ctx, r, w))]
pub(crate) async fn decompress<R, W>(
ctx: Arc<RwLock<crate::Context>>,
r: R,
w: W,
num_chunks: usize,
) -> Result<()>
where
R: AsyncRead + AsyncSeek + std::marker::Unpin,
W: AsyncWrite + std::marker::Unpin,
{
let mut r = BufReader::new(r);
let mut w = BufWriter::new(w);
let padding_start = r.stream_position().await?;
let mut rng = nanorand::WyRand::new();
let leaf = rng.generate::<u64>();
let tmp_dir = std::env::temp_dir().join(format!("dtmt-{}", leaf));
fs::create_dir(&tmp_dir).await?;
tracing::trace!(tmp_dir = %tmp_dir.display());
let in_path = tmp_dir.join("in.bin");
let out_path = tmp_dir.join("out.bin");
{
let mut in_file = File::create(&in_path).await?;
io::copy(&mut r, &mut in_file)
.await
.wrap_err("failed to write compressed data to file")
.with_section(|| in_path.display().to_string().header("Path"))?;
}
{
let _span = tracing::span!(tracing::Level::INFO, "Run decompression helper");
async {
let mut cmd = {
let ctx = ctx.read().await;
Command::new(ctx.oodle.as_ref().expect("`oodle` arg not passed through"))
};
let cmd = cmd
.args(["-v", "-v", "-v"])
.args(["--padding", &padding_start.to_string()])
.args(["--chunks", &num_chunks.to_string()])
.arg("decompress")
.arg(&in_path)
.arg(&out_path)
.stdin(Stdio::null());
tracing::debug!(?cmd, "Running Oodle decompression helper");
let res = cmd
.output()
.await
.wrap_err("failed to spawn the Oodle decompression helper")?;
tracing::trace!(
"Output of Oodle decompression helper:\n{}",
String::from_utf8_lossy(&res.stdout)
);
if !res.status.success() {
let stderr = String::from_utf8_lossy(&res.stderr);
let stdout = String::from_utf8_lossy(&res.stdout);
return Err(eyre::eyre!("failed to run Oodle decompression helper"))
.with_section(move || stdout.to_string().header("Logs:"))
.with_section(move || stderr.to_string().header("Stderr:"));
}
Ok(())
}
.instrument(_span)
.await
.with_section(|| tmp_dir.display().to_string().header("Temp Dir:"))?
}
{
let mut out_file = File::open(&out_path).await?;
io::copy(&mut out_file, &mut w)
.await
.wrap_err("failed to read decompressed file")
.with_section(|| out_path.display().to_string().header("Path"))?;
}
fs::remove_dir_all(tmp_dir)
.await
.wrap_err("failed to remove temporary directory")?;
Ok(())
}

src/oodle/mod.rs Normal file

@@ -0,0 +1,128 @@
use std::ffi::OsStr;
use std::ops::Deref;
use std::ptr;
use color_eyre::eyre;
use color_eyre::Result;
use libloading::{Library, Symbol};
pub mod types;
use types::*;
// Hardcoded chunk size of Bitsquid's bundle compression
pub const CHUNK_SIZE: usize = 512 * 1024;
pub struct Oodle {
lib: Library,
}
impl Oodle {
pub fn new<P>(lib: P) -> Result<Self>
where
P: AsRef<OsStr>,
{
let lib = unsafe { Library::new(lib)? };
unsafe {
let fun: Symbol<OodleCore_Plugins_SetPrintf> =
lib.get(b"OodleCore_Plugins_SetPrintf\0")?;
let printf: Symbol<t_fp_OodleCore_Plugin_Printf> =
lib.get(b"OodleCore_Plugin_Printf_Verbose\0")?;
fun(*printf.deref());
}
Ok(Self { lib })
}
#[tracing::instrument(name = "Oodle::decompress", skip(self, data))]
pub fn decompress<I>(
&self,
data: I,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let data = data.as_ref();
let mut out = vec![0; CHUNK_SIZE];
let verbosity = if tracing::enabled!(tracing::Level::INFO) {
OodleLZ_Verbosity::Minimal
} else if tracing::enabled!(tracing::Level::DEBUG) {
OodleLZ_Verbosity::Some
} else if tracing::enabled!(tracing::Level::TRACE) {
OodleLZ_Verbosity::Lots
} else {
OodleLZ_Verbosity::None
};
let ret = unsafe {
let decompress: Symbol<OodleLZ_Decompress> = self.lib.get(b"OodleLZ_Decompress\0")?;
decompress(
data.as_ptr() as *const _,
data.len(),
out.as_mut_ptr() as *mut _,
out.len(),
fuzz_safe,
check_crc,
verbosity,
ptr::null_mut(),
0,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
0,
OodleLZ_Decode_ThreadPhase::UNTHREADED,
)
};
tracing::debug!(uncompressed_size = ret, "Decompressed chunk");
if ret == 0 {
eyre::bail!("Failed to decompress chunk.");
}
Ok(out)
}
#[tracing::instrument(name = "Oodle::compress", skip(self, data))]
pub fn compress<I>(&self, data: I) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let raw = data.as_ref();
// TODO: Query oodle for buffer size
let mut out = vec![0u8; CHUNK_SIZE];
let compressor = OodleLZ_Compressor::Kraken;
let level = OodleLZ_CompressionLevel::Optimal2;
let ret = unsafe {
let compress: Symbol<OodleLZ_Compress> = self.lib.get(b"OodleLZ_Compress\0")?;
compress(
compressor,
raw.as_ptr() as *const _,
raw.len(),
out.as_mut_ptr() as *mut _,
level,
ptr::null_mut(),
0,
ptr::null_mut(),
ptr::null_mut(),
0,
)
};
tracing::debug!(compressed_size = ret, "Compressed chunk");
if ret == 0 {
eyre::bail!("Failed to compress chunk.");
}
Ok(out)
}
}

src/oodle/types.rs Normal file

@@ -0,0 +1,196 @@
#![allow(dead_code)]
use core::ffi::{c_char, c_int, c_size_t, c_ulonglong, c_void};
use clap::ValueEnum;
// Type definitions taken from Unreal Engine's `oodle2.h`
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_FuzzSafe {
No = 0,
Yes = 1,
}
impl From<bool> for OodleLZ_FuzzSafe {
fn from(value: bool) -> Self {
if value {
Self::Yes
} else {
Self::No
}
}
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_CheckCRC {
No = 0,
Yes = 1,
Force32 = 0x40000000,
}
impl From<bool> for OodleLZ_CheckCRC {
fn from(value: bool) -> Self {
if value {
Self::Yes
} else {
Self::No
}
}
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum OodleLZ_Verbosity {
None = 0,
Minimal = 1,
Some = 2,
Lots = 3,
#[clap(hide = true)]
Force32 = 0x40000000,
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum OodleLZ_Decode_ThreadPhase {
Phase1 = 1,
Phase2 = 2,
PhaseAll = 3,
}
impl OodleLZ_Decode_ThreadPhase {
pub const UNTHREADED: Self = OodleLZ_Decode_ThreadPhase::PhaseAll;
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum OodleLZ_Compressor {
#[clap(hide = true)]
Invalid = -1,
// None = memcpy, pass through uncompressed bytes
None = 3,
// NEW COMPRESSORS:
// Fast decompression and high compression ratios, amazing!
Kraken = 8,
// Leviathan = Kraken's big brother with higher compression, slightly slower decompression.
Leviathan = 13,
// Mermaid is between Kraken & Selkie - crazy fast, still decent compression.
Mermaid = 9,
// Selkie is a super-fast relative of Mermaid. For maximum decode speed.
Selkie = 11,
// Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra)
Hydra = 12,
BitKnit = 10,
// DEPRECATED but still supported
Lzb16 = 4,
Lzna = 7,
Lzh = 0,
Lzhlw = 1,
Lznib = 2,
Lzblw = 5,
Lza = 6,
Count = 14,
#[clap(hide = true)]
Force32 = 0x40000000,
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum OodleLZ_CompressionLevel {
// don't compress, just copy raw bytes
None = 0,
// super fast mode, lower compression ratio
SuperFast = 1,
// fastest LZ mode with still decent compression ratio
VeryFast = 2,
// fast - good for daily use
Fast = 3,
// standard medium speed LZ mode
Normal = 4,
// optimal parse level 1 (faster optimal encoder)
Optimal1 = 5,
// optimal parse level 2 (recommended baseline optimal encoder)
Optimal2 = 6,
// optimal parse level 3 (slower optimal encoder)
Optimal3 = 7,
// optimal parse level 4 (very slow optimal encoder)
Optimal4 = 8,
// optimal parse level 5 (don't care about encode speed, maximum compression)
Optimal5 = 9,
// faster than SuperFast, less compression
HyperFast1 = -1,
// faster than HyperFast1, less compression
HyperFast2 = -2,
// faster than HyperFast2, less compression
HyperFast3 = -3,
// fastest, less compression
HyperFast4 = -4,
#[clap(hide = true)]
Force32 = 0x40000000,
}
impl OodleLZ_CompressionLevel {
// alias hyperfast base level
pub const HYPERFAST: Self = OodleLZ_CompressionLevel::HyperFast1;
// alias optimal standard level
pub const OPTIMAL: Self = OodleLZ_CompressionLevel::Optimal2;
// maximum compression level
pub const MAX: Self = OodleLZ_CompressionLevel::Optimal5;
// fastest compression level
pub const MIN: Self = OodleLZ_CompressionLevel::HyperFast4;
pub const INVALID: Self = OodleLZ_CompressionLevel::Force32;
}
#[allow(non_camel_case_types)]
pub type t_fp_OodleCore_Plugin_Printf =
extern "C" fn(level: c_int, file: *const c_char, line: c_int, fmt: *const c_char);
#[allow(non_camel_case_types)]
pub type OodleLZ_Decompress = extern "C" fn(
compressed_buffer: *const c_void,
compressed_length: c_size_t,
raw_buffer: *mut c_void,
raw_length: c_size_t,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
verbosity: OodleLZ_Verbosity,
decBufBase: *mut c_void,
decBufSize: c_size_t,
callback: *const c_void,
callback_user_data: *const c_void,
decoder_memory: *mut c_void,
decoder_memory_size: c_size_t,
thread_phase: OodleLZ_Decode_ThreadPhase,
) -> c_ulonglong;
#[allow(non_camel_case_types)]
pub type OodleLZ_Compress = extern "C" fn(
compressor: OodleLZ_Compressor,
raw_buffer: *const c_void,
raw_len: c_size_t,
compressed_buffer: *mut c_void,
level: OodleLZ_CompressionLevel,
options: *const c_void,
dictionary_base: c_size_t,
lrm: *const c_void,
scratch_memory: *mut c_void,
scratch_size: c_size_t,
) -> c_ulonglong;
#[allow(non_camel_case_types)]
pub type OodleCore_Plugins_SetPrintf =
extern "C" fn(f: t_fp_OodleCore_Plugin_Printf) -> t_fp_OodleCore_Plugin_Printf;
#[allow(non_camel_case_types)]
pub type OodleCore_Plugin_Printf_Verbose = t_fp_OodleCore_Plugin_Printf;
#[allow(non_camel_case_types)]
pub type OodleCore_Plugin_Printf_Default = t_fp_OodleCore_Plugin_Printf;
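
Taken together, the new pieces wire up as follows: the --oodle flag resolves to a library name
or path, Oodle::new loads it through libloading, the instance is stored on Context, and the
bundle code calls decompress/compress on it per chunk. A condensed, crate-internal sketch of
that last step (module paths as introduced by this commit; not a standalone program, and error
handling is trimmed):

// Sketch of how the bundle code drives the wrapper, mirroring Bundle::open above.
use crate::oodle::types::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
use crate::oodle::Oodle;
use color_eyre::Result;

fn decompress_chunk(oodle: &Oodle, compressed: &[u8]) -> Result<Vec<u8>> {
    // Fuzz safety and CRC checks are disabled, as in Bundle::open.
    oodle.decompress(compressed, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No)
}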