Compare commits

..

No commits in common. "385e1f98e275c1f9728997fafabdc5539d8718e8" and "fddf8fcbbd6a5fee07ec309f9e0533af78202387" have entirely different histories.

9 changed files with 474 additions and 332 deletions

View file

@ -4,7 +4,8 @@ use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::Result;
use sdk::decompress;
use tokio::fs;
use tokio::fs::{self, File};
use tokio::io::BufReader;
pub(crate) fn command_definition() -> Command {
Command::new("decompress")
@ -39,11 +40,11 @@ where
P1: AsRef<Path> + std::fmt::Debug,
P2: AsRef<Path> + std::fmt::Debug,
{
let binary = fs::read(bundle).await?;
let data = decompress(ctx, binary)?;
fs::write(destination, &data).await?;
let in_file = File::open(bundle).await?;
let out_file = File::create(destination).await?;
Ok(())
// A `BufWriter` does not help here, as we're mostly just writing out chunks.
decompress(ctx, BufReader::new(in_file), out_file).await
}
#[tracing::instrument(skip_all)]

View file

@ -1,11 +1,10 @@
use std::path::{Path, PathBuf};
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::{Help, Report, SectionExt};
use futures::future::try_join_all;
use futures::StreamExt;
use futures::{StreamExt, TryFutureExt};
use glob::Pattern;
use sdk::{Bundle, BundleFile};
use tokio::fs;
@ -175,66 +174,58 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
}
}
let includes = Arc::new(includes);
let excludes = Arc::new(excludes);
let ctx = Arc::new(ctx);
let mut paths = Box::pin(resolve_bundle_paths(bundles));
resolve_bundle_paths(bundles)
.for_each_concurrent(10, |p| async {
let ctx = ctx.clone();
let includes = includes.clone();
let excludes = excludes.clone();
let options = ExtractOptions {
includes,
excludes,
decompile: should_decompile,
flatten: should_flatten,
dry_run: is_dry_run,
};
async move {
match extract_bundle(ctx, &p, &dest, options).await {
Ok(_) => {}
Err(err) => tracing::error!("{err:#}"),
}
}
// TODO: Find a way to do this with `for_each_concurrent`. The first attempt
// just kept head-butting into a "use of moved value" wall.
while let Some(path) = paths.next().await {
let res = Bundle::open(&ctx, &path)
.and_then(|bundle| {
extract_bundle(
&ctx,
bundle,
&dest,
ExtractOptions {
includes: &includes,
excludes: &excludes,
decompile: should_decompile,
flatten: should_flatten,
dry_run: is_dry_run,
},
)
})
.await
})
.await;
.wrap_err_with(|| format!("failed to extract from bundle '{}'", path.display()));
if let Err(err) = res {
tracing::error!("{:#}", err)
}
}
Ok(())
}
#[derive(Clone)]
struct ExtractOptions<'a> {
decompile: bool,
flatten: bool,
dry_run: bool,
includes: Arc<Vec<&'a Pattern>>,
excludes: Arc<Vec<&'a Pattern>>,
includes: &'a dyn AsRef<[&'a Pattern]>,
excludes: &'a dyn AsRef<[&'a Pattern]>,
}
#[tracing::instrument(
skip(ctx, options),
skip(ctx, bundle, options),
fields(decompile = options.decompile, flatten = options.flatten, dry_run = options.dry_run)
)]
async fn extract_bundle<P1, P2>(
ctx: Arc<sdk::Context>,
path: P1,
dest: P2,
async fn extract_bundle<P>(
ctx: &sdk::Context,
bundle: Bundle,
dest: P,
options: ExtractOptions<'_>,
) -> Result<()>
where
P1: AsRef<Path> + std::fmt::Debug,
P2: AsRef<Path> + std::fmt::Debug,
P: AsRef<Path> + std::fmt::Debug,
{
let bundle = {
let data = fs::read(path.as_ref()).await?;
let name = Bundle::get_name_from_path(&ctx, path.as_ref());
Bundle::from_binary(&ctx, name, data)?
};
let includes = options.includes.as_ref();
let excludes = options.excludes.as_ref();
let dest = dest.as_ref();
@ -284,7 +275,7 @@ where
for file in files {
let name = file.name(options.decompile, None);
let data = if options.decompile {
file.decompiled(&ctx).await
file.decompiled(ctx).await
} else {
file.raw()
};

View file

@ -4,7 +4,7 @@ use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::Help;
use sdk::Bundle;
use tokio::fs::{self, File};
use tokio::fs::File;
use tokio::io::AsyncReadExt;
pub(crate) fn command_definition() -> Command {
@ -52,11 +52,9 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
tracing::trace!(bundle_path = %bundle_path.display(), file_path = %file_path.display());
let mut bundle = {
let binary = fs::read(bundle_path).await?;
let name = Bundle::get_name_from_path(&ctx, bundle_path);
Bundle::from_binary(&ctx, name, binary).wrap_err("Failed to open bundle file")?
};
let mut bundle = Bundle::open(&ctx, bundle_path)
.await
.wrap_err("Failed to open bundle file")?;
if let Some(_name) = matches.get_one::<String>("replace") {
let mut file = File::open(&file_path)
@ -97,13 +95,13 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
}
let out_path = matches.get_one::<PathBuf>("output").unwrap_or(bundle_path);
let data = bundle
.to_binary(&ctx)
.wrap_err("failed to write changed bundle to output")?;
fs::write(out_path, &data)
let mut out_file = File::create(out_path)
.await
.wrap_err("failed to write data to output file")?;
.wrap_err_with(|| format!("failed to open output file {}", out_path.display()))?;
bundle
.write(&ctx, &mut out_file)
.await
.wrap_err("failed to write changed bundle to output")?;
Ok(())
} else {

View file

@ -1,12 +1,12 @@
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use std::sync::Arc;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::eyre::{self, Result};
use color_eyre::{Help, SectionExt};
use futures::StreamExt;
use sdk::Bundle;
use tokio::fs;
use tracing::Instrument;
use crate::cmd::util::resolve_bundle_paths;
@ -31,23 +31,12 @@ pub(crate) fn command_definition() -> Command {
)
}
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone)]
enum OutputFormat {
Text,
}
#[tracing::instrument(skip(ctx))]
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
where
P: AsRef<Path> + std::fmt::Debug,
{
let p = path.as_ref();
let bundle = {
let binary = fs::read(p).await?;
let name = Bundle::get_name_from_path(ctx, p);
Bundle::from_binary(ctx, name, binary)?
};
fn print_bundle_list(bundle: Bundle, fmt: OutputFormat) {
match fmt {
OutputFormat::Text => {
println!("Bundle: {}", bundle.name());
@ -71,8 +60,6 @@ where
}
}
}
Ok(())
}
#[tracing::instrument(skip_all)]
@ -94,16 +81,20 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
paths
.for_each_concurrent(10, |p| async {
let span = tracing::info_span!("list bundle");
let ctx = ctx.clone();
async move {
if let Err(err) = print_bundle_contents(&ctx, &p, fmt)
let span = tracing::info_span!("open bundle");
if let Err(err) = Bundle::open(&ctx, &p)
.instrument(span)
.await
.wrap_err_with(|| format!("failed to list contents of bundle {}", p.display()))
.map(|bundle| print_bundle_list(bundle, fmt))
{
tracing::error!("{err:?}");
tracing::error!("Failed to open bundle '{}': {:?}", p.display(), err);
}
}
.await;
.instrument(span)
.await
})
.await;

View file

@ -1,9 +1,164 @@
use std::io::SeekFrom;
use color_eyre::eyre::WrapErr;
use color_eyre::{Help, Result, SectionExt};
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
// TODO: Add versions for each write and read function that can work without `AsyncSeek`
// Generates an async little-endian read helper (`$func`) that delegates to
// the `AsyncReadExt` method `$op`, wraps any error with the name of the type
// being read, and — when possible — attaches the current stream position as
// an extra error section for easier debugging of malformed files.
macro_rules! make_read {
    ($func:ident, $op:ident, $type:ty) => {
        pub(crate) async fn $func<R>(r: &mut R) -> Result<$type>
        where
            R: AsyncRead + AsyncSeek + std::marker::Unpin,
        {
            let res = r
                .$op()
                .await
                .wrap_err(concat!("failed to read ", stringify!($type)));
            if res.is_ok() {
                return res;
            }
            // Only annotate the error with the position when querying it
            // succeeds; a failed `stream_position` must not mask `res`.
            let pos = r.stream_position().await;
            if pos.is_ok() {
                res.with_section(|| {
                    format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
                })
            } else {
                res
            }
        }
    };
}
// Generates an async little-endian write helper (`$func`) that delegates to
// the `AsyncWriteExt` method `$op`, wraps any error with the name of the type
// being written, and — when possible — attaches the current stream position
// as an extra error section.
macro_rules! make_write {
    ($func:ident, $op:ident, $type:ty) => {
        pub(crate) async fn $func<W>(w: &mut W, val: $type) -> Result<()>
        where
            W: AsyncWrite + AsyncSeek + std::marker::Unpin,
        {
            let res = w
                .$op(val)
                .await
                .wrap_err(concat!("failed to write ", stringify!($type)));
            if res.is_ok() {
                return res;
            }
            // Only annotate the error with the position when querying it
            // succeeds; a failed `stream_position` must not mask `res`.
            let pos = w.stream_position().await;
            if pos.is_ok() {
                res.with_section(|| {
                    format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
                })
            } else {
                res
            }
        }
    };
}
// Generates a helper (`$func`) that reads one `$type` via `$read` and
// compares it against an expected value `cmp`. A mismatch is only logged at
// debug level, never turned into an error — these are unknown/reserved
// fields in the format, so reading continues regardless.
macro_rules! make_skip {
    ($func:ident, $read:ident, $type:ty) => {
        pub(crate) async fn $func<R>(r: &mut R, cmp: $type) -> Result<()>
        where
            R: AsyncRead + AsyncSeek + std::marker::Unpin,
        {
            let val = $read(r).await?;
            if val != cmp {
                // Position is reported best-effort; `u64::MAX` marks
                // "position unavailable".
                let pos = r.stream_position().await.unwrap_or(u64::MAX);
                tracing::debug!(
                    pos,
                    expected = cmp,
                    actual = val,
                    "Unexpected value for skipped {}",
                    stringify!($type)
                );
            }
            Ok(())
        }
    };
}
// Concrete little-endian read/write/skip helpers for the integer widths the
// bundle format uses. `u8` has no endianness, hence the non-`_le` ops.
make_read!(read_u8, read_u8, u8);
make_read!(read_u32, read_u32_le, u32);
make_read!(read_u64, read_u64_le, u64);
make_write!(write_u8, write_u8, u8);
make_write!(write_u32, write_u32_le, u32);
make_write!(write_u64, write_u64_le, u64);
make_skip!(skip_u8, read_u8, u8);
make_skip!(skip_u32, read_u32, u32);
/// Advances `stream` to the next 16-byte boundary.
///
/// If the current position is already aligned, nothing is skipped.
pub(crate) async fn skip_padding<S>(stream: &mut S) -> Result<()>
where
    S: AsyncSeek + std::marker::Unpin,
{
    let pos = stream.stream_position().await?;
    // `16 - (pos % 16)` is always in `1..=16`; a value of 16 means `pos`
    // is already aligned and there is no padding to jump over.
    match 16 - (pos % 16) {
        padding_size @ 1..=15 => {
            tracing::trace!(pos, padding_size, "Skipping padding");
            stream.seek(SeekFrom::Current(padding_size as i64)).await?;
        }
        padding_size => {
            tracing::trace!(pos, padding_size, "No padding to skip");
        }
    }
    Ok(())
}
/// Reads up to `buf.len()` bytes from `r` into `buf`.
///
/// Tries a full `read_exact` first; when the reader hits EOF before the
/// buffer is filled, it rewinds to the starting position and reads whatever
/// is actually available, shrinking `buf` to exactly that data.
///
/// Returns the number of bytes read. Any other I/O error is reported with
/// the starting stream position attached as an error section.
pub(crate) async fn _read_up_to<R>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize>
where
    R: AsyncRead + AsyncSeek + std::marker::Unpin,
{
    let pos = r.stream_position().await?;
    let err = {
        match r.read_exact(buf).await {
            Ok(_) => return Ok(buf.len()),
            Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => {
                // A failed `read_exact` leaves `buf`'s contents unspecified,
                // so rewind and re-read what is actually available.
                r.seek(SeekFrom::Start(pos)).await?;
                // `read_to_end` *appends* to the buffer; clear the stale
                // contents first so `buf` ends up holding exactly the bytes
                // that were read, not stale data followed by them.
                buf.clear();
                match r.read_to_end(buf).await {
                    Ok(read) => return Ok(read),
                    Err(err) => err,
                }
            }
            Err(err) => err,
        }
    };
    Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
}
/// Writes zero bytes until `w` sits on a 16-byte boundary.
///
/// Returns the number of padding bytes written; 0 when the stream is
/// already aligned.
pub(crate) async fn write_padding<W>(w: &mut W) -> Result<usize>
where
    W: AsyncWrite + AsyncSeek + std::marker::Unpin,
{
    let pos = w.stream_position().await?;
    let size = 16 - (pos % 16) as usize;
    tracing::trace!(padding_size = size, "Writing padding");

    // A computed size of 16 means the position is already aligned.
    if !(1..16).contains(&size) {
        return Ok(0);
    }

    w.write_all(&vec![0; size]).await?;
    Ok(size)
}
pub mod sync {
use std::io::{self, Read, Seek, SeekFrom};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use color_eyre::eyre::WrapErr;
use color_eyre::{Help, Report, Result, SectionExt};
use color_eyre::{Help, Result, SectionExt};
macro_rules! make_read {
($func:ident, $read:ident, $type:ty) => {
@ -102,42 +257,9 @@ pub mod sync {
Ok(())
}
fn read_string_len(&mut self, len: usize) -> Result<String> {
let mut buf = vec![0; len];
let res = self
.read_exact(&mut buf)
.map_err(Report::new)
.and_then(|_| {
String::from_utf8(buf).map_err(|err| {
let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
let bytes = format!("{:?}", err.as_bytes());
Report::new(err)
.with_section(move || bytes.header("Bytes:"))
.with_section(move || ascii.header("ASCII:"))
})
});
if res.is_ok() {
return res;
}
let pos = self.stream_position();
if pos.is_ok() {
res.with_section(|| {
format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
})
} else {
res
}
}
}
pub trait WriteExt: WriteBytesExt + Seek {
fn write_u8(&mut self, val: u8) -> io::Result<()> {
WriteBytesExt::write_u8(self, val)
}
make_write!(write_u32, write_u32_le, u32);
make_write!(write_u64, write_u64_le, u64);

View file

@ -1,11 +1,11 @@
use std::io::{Cursor, Read, Seek, Write};
use std::io::Cursor;
use color_eyre::eyre::Context;
use color_eyre::{Help, Result, SectionExt};
use futures::future::join_all;
use serde::Serialize;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncWrite, AsyncWriteExt};
use crate::binary::sync::*;
use crate::binary::*;
use crate::filetype::*;
use crate::murmur::{HashGroup, Murmur64};
@ -326,16 +326,16 @@ struct BundleFileHeader {
}
impl BundleFileHeader {
#[tracing::instrument(name = "FileHeader::from_reader", skip_all)]
fn from_reader<R>(r: &mut R) -> Result<Self>
#[tracing::instrument(name = "FileHeader::read", skip_all)]
async fn read<R>(r: &mut R) -> Result<Self>
where
R: Read + Seek,
R: AsyncRead + AsyncSeek + std::marker::Unpin,
{
let variant = r.read_u32()?;
r.skip_u8(0)?;
let size = r.read_u32()? as usize;
r.skip_u8(1)?;
let len_data_file_name = r.read_u32()? as usize;
let variant = read_u32(r).await?;
skip_u8(r, 0).await?;
let size = read_u32(r).await? as usize;
skip_u8(r, 1).await?;
let len_data_file_name = read_u32(r).await? as usize;
Ok(Self {
size,
@ -343,20 +343,6 @@ impl BundleFileHeader {
len_data_file_name,
})
}
#[tracing::instrument(name = "FileHeader::to_writer", skip_all)]
fn to_writer<W>(&self, w: &mut W) -> Result<()>
where
W: Write + Seek,
{
w.write_u32(self.variant)?;
w.write_u8(0)?;
w.write_u32(self.size as u32)?;
w.write_u8(1)?;
w.write_u32(self.len_data_file_name as u32)?;
Ok(())
}
}
pub struct BundleFileVariant {
@ -393,37 +379,41 @@ pub struct BundleFile {
impl BundleFile {
#[tracing::instrument(name = "File::read", skip_all)]
pub fn from_reader<R>(ctx: &crate::Context, r: &mut R) -> Result<Self>
pub async fn read<R>(ctx: &crate::Context, r: &mut R) -> Result<Self>
where
R: Read + Seek,
R: AsyncRead + AsyncSeek + std::marker::Unpin,
{
let file_type = BundleFileType::from(r.read_u64()?);
let hash = Murmur64::from(r.read_u64()?);
let file_type = BundleFileType::from(read_u64(r).await?);
let hash = Murmur64::from(read_u64(r).await?);
let name = ctx.lookup_hash(hash, HashGroup::Filename);
tracing::trace!(name, ?file_type);
let header_count = read_u32(r)
.await
.with_section(|| format!("{}.{}", name, file_type.ext_name()).header("File:"))?;
let header_count = header_count as usize;
let header_count = r.read_u32()? as usize;
let mut headers = Vec::with_capacity(header_count);
r.skip_u32(0)?;
skip_u32(r, 0).await?;
for _ in 0..header_count {
let header = BundleFileHeader::from_reader(r)?;
let header = BundleFileHeader::read(r)
.await
.with_section(|| format!("{}.{}", name, file_type.ext_name()).header("File:"))?;
headers.push(header);
}
let mut variants = Vec::with_capacity(header_count);
for (i, header) in headers.into_iter().enumerate() {
let span = tracing::info_span!("Read file header {}", i, size = header.size);
let _enter = span.enter();
for header in headers.into_iter() {
let mut data = vec![0; header.size];
r.read_exact(&mut data)
.wrap_err_with(|| format!("failed to read header {i}"))?;
r.read_exact(&mut data).await?;
let data_file_name = r
.read_string_len(header.len_data_file_name)
.wrap_err("failed to read data file name")?;
let data_file_name = {
let mut buf = vec![0; header.len_data_file_name];
r.read_exact(&mut buf).await?;
String::from_utf8(buf)?
};
let variant = BundleFileVariant {
header,
@ -442,25 +432,38 @@ impl BundleFile {
})
}
#[tracing::instrument(name = "File::to_binary", skip_all)]
pub fn to_binary(&self) -> Result<Vec<u8>> {
let mut w = Cursor::new(Vec::new());
w.write_u64(*self.file_type.hash())?;
w.write_u64(*self.hash)?;
#[tracing::instrument(name = "File::write", skip_all)]
pub async fn write<W>(&self, w: &mut W) -> Result<()>
where
W: AsyncWrite + AsyncSeek + std::marker::Unpin,
{
write_u64(w, *self.file_type.hash()).await?;
write_u64(w, *self.hash).await?;
let header_count = self.variants.len();
w.write_u8(header_count as u8)?;
write_u32(w, header_count as u32).await?;
// TODO: Unknown what this is
write_u32(w, 0).await?;
for variant in self.variants.iter() {
variant.header.to_writer(&mut w)?;
// TODO: Unknown what these are
write_u32(w, variant.header.variant).await?;
// TODO: Unknown what this is
write_u8(w, 0).await?;
write_u32(w, variant.data.len() as u32).await?;
// TODO: Unknown what this is
write_u8(w, 1).await?;
// TODO: The previous size value and this one are somehow connected,
// but so far it is unknown how
write_u32(w, variant.data_file_name.len() as u32).await?;
}
for variant in self.variants.iter() {
w.write_all(&variant.data)?;
w.write_all(&variant.data).await?;
w.write_all(variant.data_file_name.as_bytes()).await?;
}
Ok(w.into_inner())
Ok(())
}
pub fn base_name(&self) -> &String {
@ -555,7 +558,10 @@ impl BundleFile {
let res = match file_type {
BundleFileType::Lua => lua::decompile(ctx, data).await,
BundleFileType::Package => package::decompile(ctx, data),
BundleFileType::Package => {
let mut c = Cursor::new(data);
package::decompile(ctx, &mut c).await
}
_ => {
tracing::debug!("Can't decompile, unknown file type");
Ok(vec![UserFile::with_name(data.to_vec(), name.clone())])

View file

@ -1,17 +1,22 @@
use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write};
use std::io::{Cursor, SeekFrom};
use std::path::Path;
use color_eyre::eyre::{self, Context, Result};
use color_eyre::{Help, Report, SectionExt};
use tokio::fs;
use tokio::io::{
AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt, BufReader,
};
use tracing::Instrument;
use crate::binary::sync::*;
use crate::binary::*;
use crate::murmur::{HashGroup, Murmur64};
use crate::oodle::types::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
use crate::oodle::CHUNK_SIZE;
pub(crate) mod file;
pub use file::{BundleFile, BundleFileType};
pub use file::BundleFile;
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
enum BundleFormat {
@ -47,14 +52,14 @@ struct EntryHeader {
}
impl EntryHeader {
#[tracing::instrument(name = "EntryHeader::from_reader", skip_all)]
fn from_reader<R>(r: &mut R) -> Result<Self>
#[tracing::instrument(name = "FileMeta::read", skip_all)]
async fn read<R>(r: &mut R) -> Result<Self>
where
R: Read + Seek,
R: AsyncRead + AsyncSeek + std::marker::Unpin,
{
let extension_hash = r.read_u64()?;
let name_hash = r.read_u64()?;
let flags = r.read_u32()?;
let extension_hash = read_u64(r).await?;
let name_hash = read_u64(r).await?;
let flags = read_u32(r).await?;
// NOTE: Known values so far:
// - 0x0: seems to be the default
@ -63,7 +68,9 @@ impl EntryHeader {
if flags != 0x0 {
tracing::debug!(
flags,
"Unexpected meta flags for file {name_hash:016X}.{extension_hash:016X}",
"Unexpected meta flags for file {:016X}.{:016X}",
name_hash,
extension_hash
);
}
@ -74,14 +81,15 @@ impl EntryHeader {
})
}
#[tracing::instrument(name = "EntryHeader::to_writer", skip_all)]
fn to_writer<W>(&self, w: &mut W) -> Result<()>
#[tracing::instrument(name = "FileMeta::write", skip_all)]
async fn write<W>(&self, w: &mut W) -> Result<()>
where
W: Write + Seek,
W: AsyncWrite + AsyncSeek + std::marker::Unpin,
{
w.write_u64(self.extension_hash)?;
w.write_u64(self.name_hash)?;
w.write_u32(self.flags)?;
write_u64(w, self.extension_hash).await?;
write_u64(w, self.name_hash).await?;
write_u32(w, self.flags).await?;
Ok(())
}
}
@ -96,33 +104,43 @@ pub struct Bundle {
}
impl Bundle {
pub fn get_name_from_path<P>(ctx: &crate::Context, path: P) -> String
#[tracing::instrument(name = "Bundle::open", skip(ctx))]
pub async fn open<P>(ctx: &crate::Context, path: P) -> Result<Self>
where
P: AsRef<Path>,
P: AsRef<Path> + std::fmt::Debug,
{
// We need to know the bundle name, so it's easier to be given the
// file path and open the File internally, than to be given a generic
// `AsyncRead` and the bundle name separately.
let path = path.as_ref();
path.file_name()
.and_then(|name| name.to_str())
.and_then(|name| Murmur64::try_from(name).ok())
.map(|hash| ctx.lookup_hash(hash, HashGroup::Filename))
.unwrap_or_else(|| path.display().to_string())
}
let bundle_name = if let Some(name) = path.file_name() {
match Murmur64::try_from(name.to_string_lossy().as_ref()) {
Ok(hash) => ctx.lookup_hash(hash, HashGroup::Filename),
Err(err) => {
tracing::debug!("failed to turn bundle name into hash: {}", err);
name.to_string_lossy().to_string()
}
}
} else {
eyre::bail!("Invalid path to bundle file: {}", path.display());
};
#[tracing::instrument(skip(ctx, binary), fields(len_binary = binary.as_ref().len()))]
pub fn from_binary<B>(ctx: &crate::Context, name: String, binary: B) -> Result<Self>
where
B: AsRef<[u8]>,
{
let bundle_name = name;
let mut r = BufReader::new(Cursor::new(binary));
let f = fs::File::open(path)
.await
.wrap_err_with(|| format!("failed to open bundle file {}", path.display()))?;
let format = r.read_u32().and_then(BundleFormat::try_from)?;
let mut r = BufReader::new(f);
let format = read_u32(&mut r)
.await
.wrap_err("failed to read from file")
.and_then(BundleFormat::try_from)?;
if !matches!(format, BundleFormat::F7 | BundleFormat::F8) {
return Err(eyre::eyre!("Unknown bundle format: {:?}", format));
}
let unknown_1 = r.read_u32()?;
let unknown_1 = read_u32(&mut r).await?;
if unknown_1 != 0x3 {
tracing::warn!(
"Unexpected value for unknown header. Expected {:#08X}, got {:#08X}",
@ -131,74 +149,80 @@ impl Bundle {
);
}
let num_entries = r.read_u32()? as usize;
let num_entries = read_u32(&mut r).await? as usize;
let mut unknown_header = [0; 256];
r.read_exact(&mut unknown_header)?;
r.read_exact(&mut unknown_header).await?;
let mut meta = Vec::with_capacity(num_entries);
for _ in 0..num_entries {
meta.push(EntryHeader::from_reader(&mut r)?);
meta.push(EntryHeader::read(&mut r).await?);
}
let num_chunks = r.read_u32()? as usize;
let num_chunks = read_u32(&mut r).await? as usize;
tracing::debug!(num_chunks);
let mut chunk_sizes = Vec::with_capacity(num_chunks);
for _ in 0..num_chunks {
chunk_sizes.push(r.read_u32()? as usize);
chunk_sizes.push(read_u32(&mut r).await? as usize);
}
r.skip_padding()?;
skip_padding(&mut r).await?;
let unpacked_size = r.read_u32()? as usize;
let unpacked_size = read_u32(&mut r).await? as usize;
// Skip 4 unknown bytes
r.skip_u32(0)?;
r.seek(SeekFrom::Current(4)).await?;
let mut decompressed = Vec::with_capacity(unpacked_size);
let mut unpacked_size_tracked = unpacked_size;
for (chunk_index, chunk_size) in chunk_sizes.into_iter().enumerate() {
let span = tracing::debug_span!("Decompressing chunk", chunk_index, chunk_size);
let _enter = span.enter();
let inner_chunk_size = r.read_u32()? as usize;
async {
let inner_chunk_size = read_u32(&mut r).await? as usize;
if inner_chunk_size != chunk_size {
eyre::bail!(
"Chunk sizes do not match. Expected {inner_chunk_size}, got {chunk_size}",
);
}
r.skip_padding()?;
let mut compressed_buffer = vec![0u8; chunk_size];
r.read_exact(&mut compressed_buffer)?;
if format >= BundleFormat::F8 && chunk_size == CHUNK_SIZE {
decompressed.append(&mut compressed_buffer);
} else {
// TODO: Optimize to not reallocate?
let oodle_lib = ctx.oodle.as_ref().unwrap();
let mut raw_buffer = oodle_lib
.decompress(
&compressed_buffer,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)
.wrap_err_with(|| format!("failed to decompress chunk {chunk_index}"))?;
if unpacked_size_tracked < CHUNK_SIZE {
raw_buffer.resize(unpacked_size_tracked, 0);
} else {
unpacked_size_tracked -= CHUNK_SIZE;
if inner_chunk_size != chunk_size {
eyre::bail!(
"Chunk sizes do not match. Expected {}, got {}",
inner_chunk_size,
chunk_size,
);
}
tracing::trace!(raw_size = raw_buffer.len());
skip_padding(&mut r).await?;
decompressed.append(&mut raw_buffer);
let mut compressed_buffer = vec![0u8; chunk_size];
r.read_exact(&mut compressed_buffer).await?;
if format >= BundleFormat::F8 && chunk_size == CHUNK_SIZE {
decompressed.append(&mut compressed_buffer);
} else {
// TODO: Optimize to not reallocate?
let oodle_lib = ctx.oodle.as_ref().unwrap();
let mut raw_buffer = oodle_lib
.decompress(
&compressed_buffer,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)
.wrap_err_with(|| format!("failed to decompress chunk {chunk_index}"))?;
if unpacked_size_tracked < CHUNK_SIZE {
raw_buffer.resize(unpacked_size_tracked, 0);
} else {
unpacked_size_tracked -= CHUNK_SIZE;
}
tracing::trace!(raw_size = raw_buffer.len());
decompressed.append(&mut raw_buffer);
}
Ok(())
}
.instrument(span)
.await?;
}
if decompressed.len() < unpacked_size {
@ -212,8 +236,8 @@ impl Bundle {
let mut r = Cursor::new(decompressed);
let mut files = Vec::with_capacity(num_entries);
for i in 0..num_entries {
let file = BundleFile::from_reader(ctx, &mut r)
.wrap_err_with(|| format!("failed to read file {i}"))?;
let span = tracing::trace_span!("", file_index = i);
let file = BundleFile::read(ctx, &mut r).instrument(span).await?;
files.push(file);
}
@ -227,48 +251,56 @@ impl Bundle {
})
}
#[tracing::instrument(skip_all)]
pub fn to_binary(&self, ctx: &crate::Context) -> Result<Vec<u8>> {
let mut w = Cursor::new(Vec::new());
w.write_u32(self.format.into())?;
w.write_u32(self.unknown_1)?;
w.write_u32(self.files.len() as u32)?;
w.write_all(&self.unknown_header)?;
#[tracing::instrument(name = "Bundle::write", skip_all)]
pub async fn write<W>(&self, ctx: &crate::Context, w: &mut W) -> Result<()>
where
W: AsyncWrite + AsyncSeek + std::marker::Unpin,
{
write_u32(w, self.format.into()).await?;
write_u32(w, self.unknown_1).await?;
write_u32(w, self.files.len() as u32).await?;
w.write_all(&self.unknown_header).await?;
for meta in self._headers.iter() {
meta.to_writer(&mut w)?;
meta.write(w).await?;
}
let unpacked_data = {
let span = tracing::trace_span!("Write bundle files");
let _enter = span.enter();
let buf = Vec::new();
let mut c = Cursor::new(buf);
tracing::trace!(num_files = self.files.len());
self.files
.iter()
.fold(Ok::<Vec<u8>, Report>(Vec::new()), |data, file| {
let mut data = data?;
data.append(&mut file.to_binary()?);
Ok(data)
})?
async {
for file in self.files.iter() {
file.write(&mut c).await?;
}
Ok::<(), Report>(())
}
.instrument(span)
.await?;
c.into_inner()
};
// Ceiling division (or division toward infinity) to calculate
// the number of chunks required to fit the unpacked data.
let num_chunks = (unpacked_data.len() + CHUNK_SIZE - 1) / CHUNK_SIZE;
tracing::trace!(num_chunks);
w.write_u32(num_chunks as u32)?;
write_u32(w, num_chunks as u32).await?;
let chunk_sizes_start = w.stream_position()?;
let chunk_sizes_start = w.stream_position().await?;
tracing::trace!(chunk_sizes_start);
w.seek(SeekFrom::Current(num_chunks as i64 * 4))?;
w.seek(SeekFrom::Current(num_chunks as i64 * 4)).await?;
w.write_padding()?;
write_padding(w).await?;
tracing::trace!(unpacked_size = unpacked_data.len());
w.write_u32(unpacked_data.len() as u32)?;
write_u32(w, unpacked_data.len() as u32).await?;
// NOTE: Unknown u32 that's always been 0 so far
w.write_u32(0)?;
write_u32(w, 0).await?;
let chunks = unpacked_data.chunks(CHUNK_SIZE);
@ -282,18 +314,18 @@ impl Bundle {
compressed_chunk_size = compressed.len()
);
chunk_sizes.push(compressed.len());
w.write_u32(compressed.len() as u32)?;
w.write_padding()?;
w.write_all(&compressed)?;
write_u32(w, compressed.len() as u32).await?;
write_padding(w).await?;
w.write_all(&compressed).await?;
}
w.seek(SeekFrom::Start(chunk_sizes_start))?;
w.seek(SeekFrom::Start(chunk_sizes_start)).await?;
for size in chunk_sizes {
w.write_u32(size as u32)?;
write_u32(w, size as u32).await?;
}
Ok(w.into_inner())
Ok(())
}
pub fn name(&self) -> &String {
@ -313,86 +345,93 @@ impl Bundle {
/// This is mainly useful for debugging purposes or
/// to manually inspect the raw data.
#[tracing::instrument(skip_all)]
pub fn decompress<B>(ctx: &crate::Context, binary: B) -> Result<Vec<u8>>
pub async fn decompress<R, W>(ctx: &crate::Context, mut r: R, mut w: W) -> Result<()>
where
B: AsRef<[u8]>,
R: AsyncRead + AsyncSeek + std::marker::Unpin,
W: AsyncWrite + std::marker::Unpin,
{
let mut r = BufReader::new(Cursor::new(binary.as_ref()));
let mut w = Cursor::new(Vec::new());
let format = r.read_u32().and_then(BundleFormat::try_from)?;
let format = read_u32(&mut r).await.and_then(BundleFormat::try_from)?;
if !matches!(format, BundleFormat::F7 | BundleFormat::F8) {
eyre::bail!("Unknown bundle format: {:?}", format);
}
// Skip unknown 4 bytes
r.seek(SeekFrom::Current(4))?;
r.seek(SeekFrom::Current(4)).await?;
let num_entries = r.read_u32()? as i64;
let num_entries = read_u32(&mut r).await? as i64;
tracing::debug!(num_entries);
// Skip unknown 256 bytes
r.seek(SeekFrom::Current(256))?;
r.seek(SeekFrom::Current(256)).await?;
// Skip file meta
r.seek(SeekFrom::Current(num_entries * 20))?;
r.seek(SeekFrom::Current(num_entries * 20)).await?;
let num_chunks = r.read_u32()? as usize;
let num_chunks = read_u32(&mut r).await? as usize;
tracing::debug!(num_chunks);
// Skip chunk sizes
r.seek(SeekFrom::Current(num_chunks as i64 * 4))?;
r.seek(SeekFrom::Current(num_chunks as i64 * 4)).await?;
r.skip_padding()?;
skip_padding(&mut r).await?;
let mut unpacked_size = r.read_u32()? as usize;
let mut unpacked_size = read_u32(&mut r).await? as usize;
tracing::debug!(unpacked_size);
// Skip unknown 4 bytes
r.seek(SeekFrom::Current(4))?;
r.seek(SeekFrom::Current(4)).await?;
let chunks_start = r.stream_position()?;
let chunks_start = r.stream_position().await?;
tracing::trace!(chunks_start);
// Pipe the header into the output
{
let span = tracing::debug_span!("Pipe file header", chunks_start);
let _enter = span.enter();
r.rewind()?;
async {
r.seek(SeekFrom::Start(0)).await?;
let mut buf = vec![0; chunks_start as usize];
r.read_exact(&mut buf)?;
w.write_all(&buf)?;
let mut buf = vec![0; chunks_start as usize];
r.read_exact(&mut buf).await?;
w.write_all(&buf).await?;
r.seek(SeekFrom::Start(chunks_start))?;
r.seek(SeekFrom::Start(chunks_start)).await
}
.instrument(span)
.await?;
}
for chunk_index in 0..num_chunks {
let span = tracing::debug_span!("Decompressing chunk", chunk_index);
let _enter = span.enter();
let chunk_size = r.read_u32()? as usize;
async {
let chunk_size = read_u32(&mut r).await? as usize;
tracing::trace!(chunk_size);
tracing::trace!(chunk_size);
r.skip_padding()?;
skip_padding(&mut r).await?;
let mut compressed_buffer = vec![0u8; chunk_size];
r.read_exact(&mut compressed_buffer)?;
let mut compressed_buffer = vec![0u8; chunk_size];
r.read_exact(&mut compressed_buffer).await?;
let oodle_lib = ctx.oodle.as_ref().unwrap();
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle_lib.decompress(
&compressed_buffer,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)?;
let oodle_lib = ctx.oodle.as_ref().unwrap();
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle_lib.decompress(
&compressed_buffer,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
)?;
if unpacked_size < CHUNK_SIZE {
raw_buffer.resize(unpacked_size, 0);
} else {
unpacked_size -= CHUNK_SIZE;
if unpacked_size < CHUNK_SIZE {
raw_buffer.resize(unpacked_size, 0);
} else {
unpacked_size -= CHUNK_SIZE;
}
w.write_all(&raw_buffer).await?;
Ok::<(), color_eyre::Report>(())
}
w.write_all(&raw_buffer)?;
.instrument(span)
.await?;
}
Ok(w.into_inner())
Ok(())
}

View file

@ -1,12 +1,12 @@
use std::collections::HashMap;
use std::io::Cursor;
use std::ops::{Deref, DerefMut};
use color_eyre::eyre::Context;
use color_eyre::Result;
use serde::Serialize;
use tokio::io::{AsyncRead, AsyncSeek};
use crate::binary::sync::ReadExt;
use crate::binary::*;
use crate::bundle::file::{BundleFileType, UserFile};
use crate::murmur::{HashGroup, Murmur64};
@ -34,23 +34,22 @@ impl Package {
}
#[tracing::instrument(skip_all)]
pub fn decompile<B>(ctx: &crate::Context, binary: B) -> Result<Vec<UserFile>>
pub async fn decompile<R>(ctx: &crate::Context, data: &mut R) -> Result<Vec<UserFile>>
where
B: AsRef<[u8]>,
R: AsyncRead + AsyncSeek + std::marker::Unpin,
{
let mut r = Cursor::new(binary.as_ref());
// TODO: Figure out what this is
let unknown = r.read_u32()?;
let unknown = read_u32(data).await?;
if unknown != 0x2b {
tracing::warn!("Unknown u32 header. Expected 0x2b, got: {unknown:#08X} ({unknown})");
}
let file_count = r.read_u32()? as usize;
let file_count = read_u32(data).await? as usize;
let mut package = Package::new();
for i in 0..file_count {
let t = BundleFileType::from(r.read_u64()?);
let hash = Murmur64::from(r.read_u64()?);
let t = BundleFileType::from(read_u64(data).await?);
let hash = Murmur64::from(read_u64(data).await?);
let name = ctx.lookup_hash(hash, HashGroup::Filename);
tracing::trace!(index = i, r"type" = ?t, %hash, name, "Package entry");

View file

@ -1,8 +1,7 @@
use std::fmt;
use std::num::ParseIntError;
use std::ops::Deref;
use color_eyre::eyre::Context;
use color_eyre::Report;
use serde::de::Visitor;
use serde::{Deserialize, Serialize};
use serde::{Deserializer, Serializer};
@ -55,12 +54,10 @@ impl From<u64> for Murmur64 {
}
impl TryFrom<&str> for Murmur64 {
type Error = Report;
type Error = ParseIntError;
fn try_from(value: &str) -> Result<Self, Self::Error> {
u64::from_str_radix(value, 16)
.map(Self)
.wrap_err_with(|| format!("failed to convert value to Murmur64: {value}"))
u64::from_str_radix(value, 16).map(Self)
}
}
@ -151,12 +148,10 @@ impl From<u32> for Murmur32 {
}
impl TryFrom<&str> for Murmur32 {
type Error = Report;
type Error = ParseIntError;
fn try_from(value: &str) -> Result<Self, Self::Error> {
u32::from_str_radix(value, 16)
.map(Self)
.wrap_err_with(|| format!("failed to convert value to Murmur32: {value}"))
u32::from_str_radix(value, 16).map(Self)
}
}