Compare commits

..

5 commits

Author SHA1 Message Date
f1f9a818cc
sdk: Allow any byte stream for hashing dictionary entries
All checks were successful
lint/clippy Checking for common mistakes and opportunities for code improvement
build/linux Build for the target platform: linux
build/msvc Build for the target platform: msvc
2024-07-19 09:48:25 +02:00
c997489e18
Add some doc comments 2024-07-19 09:48:23 +02:00
08219f05ba
sdk: Fix reading strings
Fatshark has a few weird string fields, where they provide a length
field, but then sometimes write a shorter, NUL-terminated string into
that same field and add padding up to the "advertised" length.
To properly read those strings, we can't rely on just the length field
anymore, but need to check for a NUL, too.
2024-07-19 09:48:21 +02:00
edad0d4493
Improve file listing output
Adds pretty printing for file size and always shows the bundle hash name
2024-07-19 09:48:20 +02:00
74a7aaa6e5
dtmt-shared: Write log lines to stderr
Ideally, I would prefer the usual split per logging level, but that
seems to be somewhat complex with `tracing_subscriber`, so this simply
switches everything over to stderr, so that some of the experiment
commands can write results to stdout.
2024-07-19 09:48:15 +02:00
6 changed files with 62 additions and 21 deletions

View file

@ -55,6 +55,7 @@ pub(crate) fn command_definition() -> Command {
) )
} }
/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
#[tracing::instrument] #[tracing::instrument]
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> { async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
let (path, mut file) = if let Some(path) = dir { let (path, mut file) = if let Some(path) = dir {
@ -102,6 +103,8 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
Ok(cfg) Ok(cfg)
} }
/// Iterate over the paths in the given `Package` and
/// compile each file by its file type.
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> { async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
let root = Arc::new(&cfg.dir); let root = Arc::new(&cfg.dir);
@ -148,6 +151,8 @@ async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<Bun
results.into_iter().collect() results.into_iter().collect()
} }
/// Read a `.package` file, collect the referenced files
/// and compile all of them into a bundle.
#[tracing::instrument] #[tracing::instrument]
async fn build_package( async fn build_package(
cfg: &ModConfig, cfg: &ModConfig,
@ -176,6 +181,8 @@ async fn build_package(
Ok(bundle) Ok(bundle)
} }
/// Cleans the path of internal parent (`../`) or self (`./`) components,
/// and ensures that it is relative.
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> { fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = path.as_ref(); let path = path.as_ref();

View file

@ -36,6 +36,18 @@ enum OutputFormat {
Text, Text,
} }
/// Formats a byte count as a short human-readable string.
///
/// Uses binary (1024-based) steps with truncating integer division,
/// so e.g. `2047` renders as `"1 kB"`.
fn format_byte_size(size: usize) -> String {
    // Largest scale first, so the first threshold that `size` reaches
    // picks the correct unit.
    let units = [
        (1024 * 1024 * 1024, "GB"),
        (1024 * 1024, "MB"),
        (1024, "kB"),
    ];

    units
        .iter()
        .find(|&&(scale, _)| size >= scale)
        .map(|&(scale, unit)| format!("{} {}", size / scale, unit))
        .unwrap_or_else(|| format!("{} Bytes", size))
}
#[tracing::instrument(skip(ctx))] #[tracing::instrument(skip(ctx))]
async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()> async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
where where
@ -50,7 +62,11 @@ where
match fmt { match fmt {
OutputFormat::Text => { OutputFormat::Text => {
println!("Bundle: {}", bundle.name().display()); println!(
"Bundle: {} ({:016x})",
bundle.name().display(),
bundle.name()
);
for f in bundle.files().iter() { for f in bundle.files().iter() {
if f.variants().len() != 1 { if f.variants().len() != 1 {
@ -63,9 +79,10 @@ where
let v = &f.variants()[0]; let v = &f.variants()[0];
println!( println!(
"\t{}.{}: {} bytes", "\t{}.{}: {} ({})",
f.base_name().display(), f.base_name().display(),
f.file_type().ext_name(), f.file_type().ext_name(),
format_byte_size(v.size()),
v.size() v.size()
); );
} }

View file

@ -84,7 +84,7 @@ pub fn create_tracing_subscriber() {
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap()); EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) { let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
let fmt_layer = fmt::layer().pretty(); let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
(Some(fmt_layer), None, None) (Some(fmt_layer), None, None)
} else { } else {
// Creates a layer that // Creates a layer that
@ -93,6 +93,7 @@ pub fn create_tracing_subscriber() {
// - does not print spans/targets // - does not print spans/targets
// - only prints time, not date // - only prints time, not date
let fmt_layer = fmt::layer() let fmt_layer = fmt::layer()
.with_writer(std::io::stderr)
.event_format(Formatter) .event_format(Formatter)
.fmt_fields(debug_fn(format_fields)); .fmt_fields(debug_fn(format_fields));

View file

@ -43,6 +43,7 @@ impl<T: FromBinary> FromBinary for Vec<T> {
} }
pub mod sync { pub mod sync {
use std::ffi::CStr;
use std::io::{self, Read, Seek, SeekFrom}; use std::io::{self, Read, Seek, SeekFrom};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@ -165,25 +166,13 @@ pub mod sync {
} }
fn read_string_len(&mut self, len: usize) -> Result<String> { fn read_string_len(&mut self, len: usize) -> Result<String> {
let mut buf = vec![0; len]; let pos = self.stream_position();
let res = self
.read_exact(&mut buf)
.map_err(Report::new)
.and_then(|_| {
String::from_utf8(buf).map_err(|err| {
let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
let bytes = format!("{:?}", err.as_bytes());
Report::new(err)
.with_section(move || bytes.header("Bytes:"))
.with_section(move || ascii.header("ASCII:"))
})
});
let res = read_string_len(self, len);
if res.is_ok() { if res.is_ok() {
return res; return res;
} }
let pos = self.stream_position();
if pos.is_ok() { if pos.is_ok() {
res.with_section(|| { res.with_section(|| {
format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ") format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
@ -243,4 +232,22 @@ pub mod sync {
Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: ")) Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
} }
/// Reads exactly `len` bytes and decodes them as a UTF-8 string.
///
/// Fatshark sometimes writes a shorter, NUL-terminated string into a
/// fixed-length field and pads it up to the advertised length, so a NUL
/// inside the buffer terminates the string early; without a NUL the full
/// buffer is decoded.
///
/// # Errors
/// Fails when the reader cannot supply `len` bytes or the data is not
/// valid UTF-8; decode errors carry the raw bytes and a lossy preview
/// as report sections.
fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
    let mut buf = vec![0; len];
    r.read_exact(&mut buf)
        .wrap_err_with(|| format!("Failed to read {} bytes", len))?;

    let res = match CStr::from_bytes_until_nul(&buf) {
        // NUL found: decode only the prefix before it. Route the error
        // through the shared context below instead of early-returning,
        // so both arms produce the same diagnostics.
        Ok(s) => s.to_str().map(str::to_string).map_err(Report::new),
        // No NUL: decode the whole buffer. Borrowing via
        // `std::str::from_utf8` avoids cloning `buf`, which stays
        // available for the error sections.
        Err(_) => std::str::from_utf8(&buf)
            .map(str::to_string)
            .map_err(Report::new),
    };

    res.wrap_err("Invalid binary for UTF8 string")
        .with_section(|| String::from_utf8_lossy(&buf).to_string().header("ASCII:"))
        .with_section(|| format!("{:x?}", buf).header("Bytes:"))
}
} }

View file

@ -14,6 +14,15 @@ use crate::bundle::file::UserFile;
use crate::bundle::filetype::BundleFileType; use crate::bundle::filetype::BundleFileType;
use crate::murmur::{HashGroup, IdString64, Murmur64}; use crate::murmur::{HashGroup, IdString64, Murmur64};
/// Resolves a relative path that might contain wildcards into a list of
/// paths that exist on disk and match that wildcard.
/// This is similar to globbing in Unix shells, but with far fewer features.
///
/// The only wildcard character allowed is `*`, and only at the end of the string,
/// where it matches all files recursively in that directory.
///
/// `t` is an optional extension name, that may be used to force a wildcard
/// path to only match that file type `t`.
#[tracing::instrument] #[tracing::instrument]
#[async_recursion] #[async_recursion]
async fn resolve_wildcard<P1, P2>( async fn resolve_wildcard<P1, P2>(

View file

@ -147,14 +147,14 @@ impl Dictionary {
Ok(()) Ok(())
} }
pub fn add(&mut self, value: String, group: HashGroup) { pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64)); let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED)); let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));
let entry = Entry { let entry = Entry {
long, long,
short, short,
value, value: String::from_utf8_lossy(value.as_ref()).to_string(),
group, group,
}; };