Compare commits
No commits in common. "f1f9a818cc4dc006f2d8e8512564b4322d8ef1da" and "437e724d0715fde6a07efc546fa6c01448adabcd" have entirely different histories.
f1f9a818cc...437e724d07
6 changed files with 21 additions and 62 deletions
@@ -55,7 +55,6 @@ pub(crate) fn command_definition() -> Command {
         )
 }

-/// Try to find a `dtmt.cfg` in the given directory or traverse up the parents.
 #[tracing::instrument]
 async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
     let (path, mut file) = if let Some(path) = dir {
@@ -103,8 +102,6 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
     Ok(cfg)
 }

-/// Iterate over the paths in the given `Package` and
-/// compile each file by its file type.
 #[tracing::instrument(skip_all)]
 async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
     let root = Arc::new(&cfg.dir);
@@ -151,8 +148,6 @@ async fn compile_package_files(pkg: &Package, cfg: &ModConfig) -> Result<Vec<BundleFile>> {
     results.into_iter().collect()
 }

-/// Read a `.package` file, collect the referenced files
-/// and compile all of them into a bundle.
 #[tracing::instrument]
 async fn build_package(
     cfg: &ModConfig,
@@ -181,8 +176,6 @@ async fn build_package(
     Ok(bundle)
 }

-/// Cleans the path of internal parent (`../`) or self (`./`) components,
-/// and ensures that it is relative.
 fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
     let path = path.as_ref();

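The hunks above drop doc comments from four helpers, including the one that explained the `dtmt.cfg` lookup. As a reference for that documented behaviour, here is a minimal, hypothetical sketch (std-only, not taken from either commit) of checking a directory for `dtmt.cfg` and walking up through its parents:

```rust
use std::path::{Path, PathBuf};

/// Hypothetical illustration of the documented lookup: check `start` for a
/// `dtmt.cfg`, then try each parent directory until one is found.
fn find_config_upwards(start: &Path) -> Option<PathBuf> {
    let mut dir = Some(start);
    while let Some(d) = dir {
        let candidate = d.join("dtmt.cfg");
        if candidate.is_file() {
            return Some(candidate);
        }
        dir = d.parent();
    }
    None
}
```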
@@ -36,18 +36,6 @@ enum OutputFormat {
     Text,
 }

-fn format_byte_size(size: usize) -> String {
-    if size < 1024 {
-        format!("{} Bytes", size)
-    } else if size < 1024 * 1024 {
-        format!("{} kB", size / 1024)
-    } else if size < 1024 * 1024 * 1024 {
-        format!("{} MB", size / (1024 * 1024))
-    } else {
-        format!("{} GB", size / (1024 * 1024 * 1024))
-    }
-}
-
 #[tracing::instrument(skip(ctx))]
 async fn print_bundle_contents<P>(ctx: &sdk::Context, path: P, fmt: OutputFormat) -> Result<()>
 where
@@ -62,11 +50,7 @@ where

     match fmt {
         OutputFormat::Text => {
-            println!(
-                "Bundle: {} ({:016x})",
-                bundle.name().display(),
-                bundle.name()
-            );
+            println!("Bundle: {}", bundle.name().display());

             for f in bundle.files().iter() {
                 if f.variants().len() != 1 {
@@ -79,10 +63,9 @@ where

                 let v = &f.variants()[0];
                 println!(
-                    "\t{}.{}: {} ({})",
+                    "\t{}.{}: {} bytes",
                     f.base_name().display(),
                     f.file_type().ext_name(),
-                    format_byte_size(v.size()),
                     v.size()
                 );
             }
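On the 437e724d07 side, the bundle header no longer prints the `{:016x}` hash, and per-file sizes are shown as raw byte counts because the `format_byte_size` helper is gone. A small sketch of the difference in output, using a hypothetical file name and size (the helper body is copied from the removed hunk above):

```rust
fn main() {
    let size: usize = 2 * 1024 * 1024;
    // Old per-file line: human-readable size plus the raw value in parentheses,
    // e.g. "foo.lua: 2 MB (2097152)" (after a tab).
    println!("\tfoo.lua: {} ({})", format_byte_size(size), size);
    // New per-file line: only the raw byte count, e.g. "foo.lua: 2097152 bytes".
    println!("\tfoo.lua: {} bytes", size);
}

// The helper exactly as it appears in the removed hunk.
fn format_byte_size(size: usize) -> String {
    if size < 1024 {
        format!("{} Bytes", size)
    } else if size < 1024 * 1024 {
        format!("{} kB", size / 1024)
    } else if size < 1024 * 1024 * 1024 {
        format!("{} MB", size / (1024 * 1024))
    } else {
        format!("{} GB", size / (1024 * 1024 * 1024))
    }
}
```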
@@ -84,7 +84,7 @@ pub fn create_tracing_subscriber() {
         EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());

     let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
-        let fmt_layer = fmt::layer().pretty().with_writer(std::io::stderr);
+        let fmt_layer = fmt::layer().pretty();
         (Some(fmt_layer), None, None)
     } else {
         // Creates a layer that
@@ -93,7 +93,6 @@ pub fn create_tracing_subscriber() {
         // - does not print spans/targets
         // - only prints time, not date
         let fmt_layer = fmt::layer()
-            .with_writer(std::io::stderr)
             .event_format(Formatter)
             .fmt_fields(debug_fn(format_fields));

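Both hunks remove `.with_writer(std::io::stderr)`, so on the 437e724d07 side the fmt layers fall back to the default writer (stdout). A minimal sketch of the debug-build branch under that assumption; it assumes the `tracing-subscriber` crate with the `env-filter` feature, and the registry wiring here is illustrative rather than copied from the file:

```rust
use tracing_subscriber::{fmt, prelude::*, EnvFilter};

fn init_dev_logging() {
    let filter_layer =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
    // Previously: fmt::layer().pretty().with_writer(std::io::stderr)
    let fmt_layer = fmt::layer().pretty();
    tracing_subscriber::registry()
        .with(filter_layer)
        .with(fmt_layer)
        .init();
}
```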
@@ -43,7 +43,6 @@ impl<T: FromBinary> FromBinary for Vec<T> {
 }

 pub mod sync {
-    use std::ffi::CStr;
     use std::io::{self, Read, Seek, SeekFrom};

     use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@@ -166,13 +165,25 @@ pub mod sync {
     }

     fn read_string_len(&mut self, len: usize) -> Result<String> {
-        let pos = self.stream_position();
+        let mut buf = vec![0; len];
+        let res = self
+            .read_exact(&mut buf)
+            .map_err(Report::new)
+            .and_then(|_| {
+                String::from_utf8(buf).map_err(|err| {
+                    let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
+                    let bytes = format!("{:?}", err.as_bytes());
+                    Report::new(err)
+                        .with_section(move || bytes.header("Bytes:"))
+                        .with_section(move || ascii.header("ASCII:"))
+                })
+            });

-        let res = read_string_len(self, len);
         if res.is_ok() {
             return res;
         }

+        let pos = self.stream_position();
         if pos.is_ok() {
             res.with_section(|| {
                 format!("{pos:#X} ({pos})", pos = pos.unwrap()).header("Position: ")
@@ -232,22 +243,4 @@ pub mod sync {

         Err(err).with_section(|| format!("{pos:#X} ({pos})").header("Position: "))
     }
-
-    fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
-        let mut buf = vec![0; len];
-        r.read_exact(&mut buf)
-            .wrap_err_with(|| format!("Failed to read {} bytes", len))?;
-
-        let res = match CStr::from_bytes_until_nul(&buf) {
-            Ok(s) => {
-                let s = s.to_str()?;
-                Ok(s.to_string())
-            }
-            Err(_) => String::from_utf8(buf.clone()).map_err(Report::new),
-        };
-
-        res.wrap_err("Invalid binary for UTF8 string")
-            .with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCI:"))
-            .with_section(|| format!("{:x?}", buf).header("Bytes:"))
-    }
 }
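The rewritten `read_string_len` attaches its diagnostics as report sections (the raw bytes and a lossy ASCII rendering). A standalone sketch of that pattern, assuming the `color-eyre` crate provides the `with_section`/`header` calls seen in the hunk (the crate choice is an assumption; only the call pattern is taken from the diff):

```rust
use color_eyre::{eyre::Report, Section, SectionExt};

/// Sketch: convert a byte buffer to UTF-8, attaching diagnostic sections on failure.
fn parse_utf8(buf: Vec<u8>) -> Result<String, Report> {
    String::from_utf8(buf).map_err(|err| {
        let ascii = String::from_utf8_lossy(err.as_bytes()).to_string();
        let bytes = format!("{:?}", err.as_bytes());
        Report::new(err)
            .with_section(move || bytes.header("Bytes:"))
            .with_section(move || ascii.header("ASCII:"))
    })
}
```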
@@ -14,15 +14,6 @@ use crate::bundle::file::UserFile;
 use crate::bundle::filetype::BundleFileType;
 use crate::murmur::{HashGroup, IdString64, Murmur64};

-/// Resolves a relative path that might contain wildcards into a list of
-/// paths that exist on disk and match that wildcard.
-/// This is similar to globbing in Unix shells, but with much less features.
-///
-/// The only wilcard character allowed is `*`, and only at the end of the string,
-/// where it matches all files recursively in that directory.
-///
-/// `t` is an optional extension name, that may be used to force a wildcard
-/// path to only match that file type `t`.
 #[tracing::instrument]
 #[async_recursion]
 async fn resolve_wildcard<P1, P2>(
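The removed doc comment described a deliberately limited globbing scheme: only a trailing `*` is allowed, it matches recursively under that directory, and an optional extension `t` narrows the match. A hypothetical, std-only sketch of a predicate for that rule (illustrative only; the crate's actual `resolve_wildcard` is async and walks the filesystem):

```rust
use std::path::Path;

/// Hypothetical check: does `candidate` match `pattern` under the documented rules?
/// A trailing `*` matches anything below that directory; `t` optionally restricts
/// the match to files with that extension.
fn matches_wildcard(pattern: &str, candidate: &Path, t: Option<&str>) -> bool {
    match pattern.strip_suffix('*') {
        Some(prefix) => {
            candidate.starts_with(prefix)
                && t.map_or(true, |ext| candidate.extension().map_or(false, |e| e == ext))
        }
        // Without a wildcard, the path must match exactly.
        None => candidate == Path::new(pattern),
    }
}
```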
@@ -147,14 +147,14 @@ impl Dictionary {
         Ok(())
     }

-    pub fn add(&mut self, value: impl AsRef<[u8]>, group: HashGroup) {
-        let long = Murmur64::from(murmurhash64::hash(value.as_ref(), SEED as u64));
-        let short = Murmur32::from(murmurhash64::hash32(value.as_ref(), SEED));
+    pub fn add(&mut self, value: String, group: HashGroup) {
+        let long = Murmur64::from(murmurhash64::hash(value.as_bytes(), SEED as u64));
+        let short = Murmur32::from(murmurhash64::hash32(value.as_bytes(), SEED));

         let entry = Entry {
             long,
             short,
-            value: String::from_utf8_lossy(value.as_ref()).to_string(),
+            value,
             group,
         };

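Narrowing `Dictionary::add` from `impl AsRef<[u8]>` to `String` removes the lossy conversion when storing the value: only valid UTF-8 can be passed in, and it is stored unchanged. A std-only illustration of the difference (hypothetical helper functions, not the crate's types):

```rust
/// Old shape: arbitrary bytes were accepted and stored via a lossy conversion.
fn store_lossy(value: impl AsRef<[u8]>) -> String {
    String::from_utf8_lossy(value.as_ref()).to_string()
}

/// New shape: the caller hands over an owned `String`, which is stored as-is.
fn store_exact(value: String) -> String {
    value
}

fn main() {
    let bytes: &[u8] = b"foo\xFF"; // "foo" followed by an invalid UTF-8 byte
    assert_eq!(store_lossy(bytes), "foo\u{FFFD}"); // data altered by the lossy path
    assert_eq!(store_exact(String::from("foo")), "foo"); // exact round-trip
}
```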