Various minor changes extracted from unfinished projects #182

Merged
lucas merged 5 commits from feat/various into master 2024-07-19 11:13:48 +02:00
6 changed files with 62 additions and 21 deletions

View file

@@ -55,6 +55,7 @@ pub(crate) fn command_definition() -> Command {
         )
 }
 
+/// Try to find a `dtmt.cfg` in the given directory, or traverse up through its parents.
 #[tracing::instrument]
 async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
     let (path, mut file) = if let Some(path) = dir {
@@ -102,6 +103,8 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
     Ok(cfg)
 }
 
+/// Iterate over the paths in the given `Package` and
+/// compile each file according to its file type.
 #[tracing::instrument(skip_all)]
 async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
     let root = Arc::new(&cfg.dir);
@@ -148,6 +151,8 @@ async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
     results.into_iter().collect()
 }
 
+/// Read a `.package` file, collect the referenced files,
+/// and compile all of them into a bundle.
 #[tracing::instrument]
 async fn build_package(
     cfg: &ModConfig,
@@ -176,6 +181,8 @@ async fn build_package(
     Ok(bundle)
 }
 
+/// Cleans the path of internal parent (`../`) or self (`./`) components,
+/// and ensures that it is relative.
 fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
     let path = path.as_ref();
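For readers unfamiliar with the pattern the new doc comment describes: upward config discovery walks from the starting directory through its ancestors until the config file is found. A minimal synchronous sketch of that idea (the function name here is illustrative; the real `find_project_config` is async and parses the file into a `ModConfig` — only the `dtmt.cfg` name is taken from the diff):

    use std::path::{Path, PathBuf};

    /// Return the first ancestor of `start` (including `start` itself)
    /// that contains a `dtmt.cfg` file.
    fn find_config_dir(start: &Path) -> Option<PathBuf> {
        start
            .ancestors()
            .find(|dir| dir.join("dtmt.cfg").is_file())
            .map(Path::to_path_buf)
    }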

View file

@@ -36,6 +36,18 @@ enum OutputFormat {
     Text,
 }
 
+fn format_byte_size(size: usize) -> String {
+    if size < 1024 {
+        format!("{} Bytes", size)
+    } else if size < 1024 * 1024 {
+        format!("{} kB", size / 1024)
+    } else if size < 1024 * 1024 * 1024 {
+        format!("{} MB", size / (1024 * 1024))
+    } else {
+        format!("{} GB", size / (1024 * 1024 * 1024))
+    }
+}
+
 #[tracing::instrument(skip(ctx))]
 async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
 where
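Worth noting: `format_byte_size` uses binary (1024-based) steps despite the decimal kB/MB/GB labels, and integer division truncates rather than rounds:

    assert_eq!(format_byte_size(512), "512 Bytes");
    assert_eq!(format_byte_size(2047), "1 kB"); // truncated, not rounded
    assert_eq!(format_byte_size(5 * 1024 * 1024), "5 MB");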
@@ -50,7 +62,11 @@ where
 
     match fmt {
         OutputFormat::Text => {
-            println!("Bundle: {}", bundle.name().display());
+            println!(
+                "Bundle: {} ({:016x})",
+                bundle.name().display(),
+                bundle.name()
+            );
 
             for f in bundle.files().iter() {
                 if f.variants().len() != 1 {
@@ -63,9 +79,10 @@ where
                 let v = &f.variants()[0];
                 println!(
-                    "\t{}.{}: {} bytes",
+                    "\t{}.{}: {} ({})",
                     f.base_name().display(),
                     f.file_type().ext_name(),
+                    format_byte_size(v.size()),
                     v.size()
                 );
             }
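The `{:016x}` specifier renders the bundle name's 64-bit hash as a zero-padded, 16-digit hex string (this assumes the name type implements `LowerHex`, which the format string implies). For example, with a plain `u64`:

    let hash: u64 = 0xA3F;
    assert_eq!(format!("{:016x}", hash), "0000000000000a3f");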

View file

@@ -84,7 +84,7 @@ pub fn create_tracing_subscriber() {
         EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
 
     let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
-        let fmt_layer = fmt::layer().pretty();
+        let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
         (Some(fmt_layer), None, None)
     } else {
         // Creates a layer that
@@ -93,6 +93,7 @@ pub fn create_tracing_subscriber() {
         //   - does not print spans/targets
         //   - only prints time, not date
         let fmt_layer = fmt::layer()
+            .with_writer(std::io::stderr)
            .event_format(Formatter)
            .fmt_fields(debug_fn(format_fields));
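Routing all log output to stderr keeps stdout reserved for the command's actual output, so shell pipelines don't have to filter out log lines. A standalone sketch of the same setup using only stock `tracing-subscriber` APIs (assuming the `env-filter` feature; the crate's custom `Formatter` is omitted):

    use tracing_subscriber::EnvFilter;

    fn main() {
        // Logs go to stderr; stdout stays clean for piped output.
        tracing_subscriber::fmt()
            .with_writer(std::io::stderr)
            .with_env_filter(
                EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
            )
            .init();

        tracing::info!("this line goes to stderr");
        println!("this line goes to stdout");
    }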

View file

@@ -43,6 +43,7 @@ impl<T: FromBinary> FromBinary for Vec<T> {
 }
 
 pub mod sync {
+    use std::ffi::CStr;
     use std::io::{self, Read, Seek, SeekFrom};
 
     use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@@ -165,25 +166,13 @@ pub mod sync {
     }
 
     fn read_string_len(&mut self, len: usize) -> Result<String> {
-        let mut buf = vec![0; len];
-        let res = self
-            .read_exact(&mut buf)
-            .map_err(Report::new)
-            .and_then(|_| {
-                String::from_utf8(buf).map_err(|err| {
-                    let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
-                    let bytes = format!("{:?}", err.as_bytes());
-                    Report::new(err)
-                        .with_section(move || bytes.header("Bytes:"))
-                        .with_section(move || ascii.header("ASCII:"))
-                })
-            });
-        let pos = self.stream_position();
+        let res = read_string_len(self, len);
 
         if res.is_ok() {
             return res;
         }
 
+        let pos = self.stream_position();
         if pos.is_ok() {
             res.with_section(|| {
                 format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
@@ -243,4 +232,22 @@ pub mod sync {
         Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
     }
 
+    fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
+        let mut buf = vec![0; len];
+        r.read_exact(&mut buf)
+            .wrap_err_with(|| format!("Failed to read {} bytes", len))?;
+
+        let res = match CStr::from_bytes_until_nul(&buf) {
+            Ok(s) => {
+                let s = s.to_str()?;
+                Ok(s.to_string())
+            }
+            Err(_) => String::from_utf8(buf.clone()).map_err(Report::new),
+        };
+
+        res.wrap_err("Invalid binary for UTF8 string")
+            .with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCII:"))
+            .with_section(|| format!("{:x?}", buf).header("Bytes:"))
+    }
 }
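The new free-standing helper accepts both NUL-terminated and exact-length strings: it first looks for a NUL byte and, failing that, decodes the entire buffer as UTF-8. A self-contained sketch of that decision using only `std` (the function name is illustrative; the real helper additionally attaches `color-eyre` report sections on failure):

    use std::ffi::CStr;
    use std::io::Read;

    fn read_fixed_string(mut r: impl Read, len: usize) -> Result<String, Box<dyn std::error::Error>> {
        let mut buf = vec![0u8; len];
        r.read_exact(&mut buf)?;

        match CStr::from_bytes_until_nul(&buf) {
            // Found a NUL: keep only the bytes before it.
            Ok(s) => Ok(s.to_str()?.to_string()),
            // No NUL: the whole buffer must be valid UTF-8.
            Err(_) => Ok(String::from_utf8(buf)?),
        }
    }

For example, `read_fixed_string(&b"hi\0\0"[..], 4)` yields `"hi"`, while `read_fixed_string(&b"hiya"[..], 4)` yields `"hiya"`.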

View file

@@ -14,6 +14,15 @@ use crate::bundle::file::UserFile;
 use crate::bundle::filetype::BundleFileType;
 use crate::murmur::{HashGroup, IdString64, Murmur64};
 
+/// Resolves a relative path that might contain wildcards into a list of
+/// paths that exist on disk and match that wildcard.
+/// This is similar to globbing in Unix shells, but with far fewer features.
+///
+/// The only wildcard character allowed is `*`, and only at the end of the string,
+/// where it matches all files recursively in that directory.
+///
+/// `t` is an optional extension name that may be used to force a wildcard
+/// path to match only files of that type.
 #[tracing::instrument]
 #[async_recursion]
 async fn resolve_wildcard<P1, P2>(
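A hypothetical synchronous sketch of the semantics the doc comment describes (the actual function is async and recursive and returns richer types; all names below are illustrative):

    use std::path::{Path, PathBuf};

    /// Expand `pattern` relative to `root`. Only a trailing `*` is supported;
    /// it matches all files under that directory, recursively.
    fn expand(root: &Path, pattern: &str, t: Option<&str>) -> Vec<PathBuf> {
        match pattern.strip_suffix('*') {
            Some(prefix) => walk_files(&root.join(prefix))
                .into_iter()
                // Mirror the optional extension filter `t`.
                .filter(|p| t.map_or(true, |ext| p.extension() == Some(std::ffi::OsStr::new(ext))))
                .collect(),
            // No wildcard: the path either exists as a file or it doesn't.
            None => {
                let p = root.join(pattern);
                if p.is_file() { vec![p] } else { Vec::new() }
            }
        }
    }

    /// Recursively collect every file below `dir`.
    fn walk_files(dir: &Path) -> Vec<PathBuf> {
        let mut out = Vec::new();
        if let Ok(entries) = std::fs::read_dir(dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.is_dir() {
                    out.extend(walk_files(&path));
                } else {
                    out.push(path);
                }
            }
        }
        out
    }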

View file

@@ -147,14 +147,14 @@ impl Dictionary {
         Ok(())
     }
 
-    pub fn add(&mut self, value: String, group: HashGroup) {
-        let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64));
-        let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED));
+    pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
+        let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
+        let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));
 
         let entry = Entry {
             long,
             short,
-            value,
+            value: String::from_utf8_lossy(value.as_ref()).to_string(),
             group,
         };
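With the looser `impl AsRef<[u8]>` bound, callers can pass `&str`, `String`, or raw bytes directly; the entry still stores a `String` internally via `from_utf8_lossy`. Hypothetical usage (assuming a `Default` constructor; the `HashGroup` variant name is made up for illustration):

    let mut dict = Dictionary::default();

    // All of these now compile; previously only an owned `String` was accepted.
    dict.add("units/beings/player", HashGroup::Filename);
    dict.add(String::from("wwise/events/weapon_fire"), HashGroup::Filename);
    dict.add(b"raw/bytes/also/work".as_slice(), HashGroup::Filename);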