feat: Implement building mod bundles
parent 56bcbd8648
commit f61fab4257
12 changed files with 322 additions and 177 deletions
@@ -5,6 +5,7 @@
 === Added

 - show status after adding dictionary entries
+- implement building mod bundles

 == [v0.2.0] - 2022-12-28
Cargo.lock (generated, 2 changes)
@@ -344,7 +344,6 @@ dependencies = [
  "tempfile",
  "tokio",
  "tokio-stream",
- "toml",
  "tracing",
  "tracing-error",
  "tracing-subscriber",
@@ -939,6 +938,7 @@ dependencies = [
  "byteorder",
  "color-eyre",
  "csv-async",
+ "fastrand",
  "futures",
  "futures-util",
  "glob",
@@ -22,7 +22,6 @@ tracing = { version = "0.1.37", features = ["async-await"] }
 tracing-error = "0.2.0"
 tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
 confy = "0.5.1"
-toml = "0.5.9"
 zip = "0.6.3"

 [dev-dependencies]
@@ -14,6 +14,8 @@ use tokio::io::AsyncReadExt;

+use crate::mods::archive::Archive;
+
 const PROJECT_CONFIG_NAME: &str = "dtmt.cfg";

 pub(crate) fn command_definition() -> Command {
     Command::new("build")
         .about("Build a project")
@@ -50,7 +52,7 @@ struct ProjectConfig {
 #[tracing::instrument]
 async fn find_project_config(dir: Option<PathBuf>) -> Result<ProjectConfig> {
     let (path, mut file) = if let Some(path) = dir {
-        let file = File::open(&path.join("dtmt.toml"))
+        let file = File::open(&path.join(PROJECT_CONFIG_NAME))
             .await
             .wrap_err_with(|| format!("failed to open file: {}", path.display()))
             .with_suggestion(|| {
@@ -63,9 +65,9 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ProjectConfig> {
     } else {
         let mut dir = std::env::current_dir()?;
         loop {
-            let path = dir.join("dtmt.toml");
+            let path = dir.join(PROJECT_CONFIG_NAME);
             match File::open(&path).await {
-                Ok(file) => break (path, file),
+                Ok(file) => break (dir, file),
                 Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                     if let Some(parent) = dir.parent() {
                         // TODO: Re-write with recursion to avoid allocating the `PathBuf`.
@@ -83,10 +85,10 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ProjectConfig> {
         }
     };

-    let mut buf = Vec::new();
-    file.read_to_end(&mut buf).await?;
+    let mut buf = String::new();
+    file.read_to_string(&mut buf).await?;

-    let mut cfg: ProjectConfig = toml::from_slice(&buf)?;
+    let mut cfg: ProjectConfig = serde_sjson::from_str(&buf)?;
    cfg.dir = path;
    Ok(cfg)
 }
@@ -113,7 +115,17 @@ where
         })
         .map(|(file_type, path, root)| async move {
             let sjson = fs::read_to_string(&path).await?;
-            BundleFile::from_sjson(file_type, sjson, root.as_ref()).await
+
+            let mut path = path.clone();
+            path.set_extension("");
+
+            BundleFile::from_sjson(
+                path.to_string_lossy().to_string(),
+                file_type,
+                sjson,
+                root.as_ref(),
+            )
+            .await
         });

     let results = futures::stream::iter(tasks)
@@ -188,6 +200,13 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
         .iter()
         .map(|path| (path, cfg.clone()))
         .map(|(path, cfg)| async move {
+            if path.extension().is_some() {
+                eyre::bail!(
+                    "Package name must be specified without file extension: {}",
+                    path.display()
+                );
+            }
+
             build_package(path, &cfg.dir).await.wrap_err_with(|| {
                 format!(
                     "failed to build package {} in {}",
@@ -181,9 +181,9 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()>

     resolve_bundle_paths(bundles)
         .for_each_concurrent(10, |p| async {
-            let ctx = ctx.clone();
             let includes = includes.clone();
             let excludes = excludes.clone();
+            let ctx = ctx.clone();

             let options = ExtractOptions {
                 includes,
@@ -196,7 +196,7 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
             async move {
                 match extract_bundle(ctx, &p, &dest, options).await {
                     Ok(_) => {}
-                    Err(err) => tracing::error!("{err:#}"),
+                    Err(err) => tracing::error!("{err:?}"),
                 }
             }
             .await
@@ -51,29 +51,28 @@ impl Archive {

         zip.add_directory(&self.name, Default::default())?;

+        let base_path = PathBuf::from(&self.name);
+
         {
-            let mut name = path.as_ref().join(&self.name);
+            let mut name = base_path.join(&self.name);
             name.set_extension("mod");
             zip.start_file(name.to_string_lossy(), Default::default())?;
             zip.write_all(mod_file)?;
         }

-        let path = PathBuf::from(&self.name);
         let mut file_map = HashMap::new();

         for bundle in self.bundles.iter() {
             let bundle_name = bundle.name().clone();
-            let bundle_path = PathBuf::from(&bundle_name);

             let map_entry: &mut HashSet<_> = file_map.entry(bundle_name).or_default();

             for file in bundle.files() {
-                let bundle_path = bundle_path.join(file.base_name());
-                map_entry.insert(bundle_path.to_string_lossy().to_string());
+                map_entry.insert(file.name(false, None));
             }

             let name = Murmur64::hash(bundle.name().as_bytes());
-            let path = path.join(name.to_string());
+            let path = base_path.join(name.to_string().to_ascii_lowercase());

             zip.start_file(path.to_string_lossy(), Default::default())?;

@@ -84,7 +83,7 @@ impl Archive {
         {
             let data = serde_sjson::to_string(&file_map)?;
             zip.start_file(
-                path.join("files.sjson").to_string_lossy(),
+                base_path.join("files.sjson").to_string_lossy(),
                 Default::default(),
             )?;
             zip.write_all(data.as_bytes())?;
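For orientation, a minimal sketch (not part of the crate) of the zip entry names that `Archive::write` appears to produce after this change. `murmur64_hex` is a hypothetical stand-in for the crate's `Murmur64::hash(name.as_bytes()).to_string().to_ascii_lowercase()`:

// Sketch only; entry names derived from the hunks above, values illustrative.
fn expected_zip_entries(mod_name: &str, bundle_names: &[&str]) -> Vec<String> {
    let mut entries = vec![
        // the `.mod` entry point file inside the top-level mod directory
        format!("{mod_name}/{mod_name}.mod"),
    ];
    for bundle in bundle_names {
        // one entry per bundle, keyed by the lower-cased hash of the bundle name
        entries.push(format!("{mod_name}/{}", murmur64_hex(bundle)));
    }
    // SJSON map of bundle name -> contained file names, written last
    entries.push(format!("{mod_name}/files.sjson"));
    entries
}

// Hypothetical helper; the real crate uses its own `Murmur64` type here.
fn murmur64_hex(name: &str) -> String {
    format!("{:016x}", name.len() as u64) // placeholder, NOT the real Murmur64 hash
}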
@@ -7,6 +7,7 @@ edition = "2021"
 byteorder = "1.4.3"
 color-eyre = "0.6.2"
 csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
+fastrand = "1.8.0"
 futures = "0.3.25"
 futures-util = "0.3.24"
 glob = "0.3.0"
@@ -409,9 +409,47 @@ struct BundleFileHeader {
     len_data_file_name: usize,
 }

-impl BundleFileHeader {
-    #[tracing::instrument(name = "FileHeader::from_reader", skip_all)]
-    fn from_reader<R>(r: &mut R) -> Result<Self>
+pub struct BundleFileVariant {
+    property: u32,
+    data: Vec<u8>,
+    data_file_name: Option<String>,
+}
+
+impl BundleFileVariant {
+    // We will need a parameter for `property` eventually, so the `Default` impl would need to go
+    // eventually anyway.
+    #[allow(clippy::new_without_default)]
+    pub fn new() -> Self {
+        Self {
+            // TODO: Hard coded for now, as long as we don't support bundle properties
+            property: 0,
+            data: Vec::new(),
+            data_file_name: None,
+        }
+    }
+
+    pub fn set_data(&mut self, data: Vec<u8>) {
+        self.data = data;
+    }
+
+    pub fn size(&self) -> usize {
+        self.data.len()
+    }
+
+    pub fn property(&self) -> u32 {
+        self.property
+    }
+
+    pub fn data(&self) -> &[u8] {
+        &self.data
+    }
+
+    pub fn data_file_name(&self) -> Option<&String> {
+        self.data_file_name.as_ref()
+    }
+
+    #[tracing::instrument(skip_all)]
+    fn read_header<R>(r: &mut R) -> Result<BundleFileHeader>
     where
         R: Read + Seek,
     {
@@ -421,61 +459,49 @@ impl BundleFileHeader {
         r.skip_u8(1)?;
         let len_data_file_name = r.read_u32()? as usize;

-        Ok(Self {
+        Ok(BundleFileHeader {
             size,
             variant,
             len_data_file_name,
         })
     }

-    #[tracing::instrument(name = "FileHeader::to_writer", skip_all)]
-    fn to_writer<W>(&self, w: &mut W) -> Result<()>
+    #[tracing::instrument(skip_all)]
+    fn write_header<W>(&self, w: &mut W) -> Result<()>
     where
         W: Write + Seek,
     {
-        w.write_u32(self.variant)?;
+        w.write_u32(self.property)?;
         w.write_u8(0)?;
-        w.write_u32(self.size as u32)?;
+        w.write_u32(self.data.len() as u32)?;
         w.write_u8(1)?;
-        w.write_u32(self.len_data_file_name as u32)?;
+
+        let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0);
+        w.write_u32(len_data_file_name as u32)?;

         Ok(())
     }
 }

-pub struct BundleFileVariant {
-    header: BundleFileHeader,
-    data: Vec<u8>,
-    data_file_name: String,
-}
-
-impl BundleFileVariant {
-    pub fn size(&self) -> usize {
-        self.header.size
-    }
-
-    pub fn kind(&self) -> u32 {
-        self.header.variant
-    }
-
-    pub fn data(&self) -> &[u8] {
-        &self.data
-    }
-
-    pub fn set_data(&mut self, data: Vec<u8>) {
-        self.header.size = data.len();
-        self.data = data;
-    }
-}
-
 pub struct BundleFile {
     file_type: BundleFileType,
-    hash: Murmur64,
     name: String,
     variants: Vec<BundleFileVariant>,
 }

 impl BundleFile {
+    pub fn new(name: String, file_type: BundleFileType) -> Self {
+        Self {
+            file_type,
+            name,
+            variants: Vec::new(),
+        }
+    }
+
+    pub fn add_variant(&mut self, variant: BundleFileVariant) {
+        self.variants.push(variant)
+    }
+
     #[tracing::instrument(
         name = "File::read",
         skip_all,
@@ -494,7 +520,7 @@ impl BundleFile {
         r.skip_u32(0)?;

         for _ in 0..header_count {
-            let header = BundleFileHeader::from_reader(r)?;
+            let header = BundleFileVariant::read_header(r)?;
             headers.push(header);
         }

@@ -507,12 +533,17 @@ impl BundleFile {
             r.read_exact(&mut data)
                 .wrap_err_with(|| format!("failed to read header {i}"))?;

-            let data_file_name = r
+            let data_file_name = if header.len_data_file_name > 0 {
+                let s = r
                     .read_string_len(header.len_data_file_name)
                     .wrap_err("failed to read data file name")?;
+                Some(s)
+            } else {
+                None
+            };

             let variant = BundleFileVariant {
-                header,
+                property: header.variant,
                 data,
                 data_file_name,
             };
@@ -523,7 +554,6 @@ impl BundleFile {
         Ok(Self {
             variants,
             file_type,
-            hash,
             name,
         })
     }
@@ -533,13 +563,20 @@ impl BundleFile {
         let mut w = Cursor::new(Vec::new());

         w.write_u64(*self.file_type.hash())?;
-        w.write_u64(*self.hash)?;
-        w.write_u32(self.variants.len() as u32)?;
+        w.write_u64(*Murmur64::hash(self.name.as_bytes()))?;
+
+        let header_count = self.variants.len();
+        w.write_u8(header_count as u8)?;
+        // TODO: Figure out what this is
+        w.write_u32(0x0)?;

         for variant in self.variants.iter() {
-            variant.header.to_writer(&mut w)?;
+            w.write_u32(variant.property())?;
+            w.write_u8(0)?;
+            w.write_u32(variant.size() as u32)?;
+            w.write_u8(1)?;
+
+            let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0);
+            w.write_u32(len_data_file_name as u32)?;
         }

         for variant in self.variants.iter() {
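As a reading aid, a minimal sketch of the per-file layout that `to_binary` writes after this change. It assumes the crate's `WriteExt` emits little-endian integers, which this diff does not confirm; field names are descriptive only, and the zeroed `u32` is still marked unknown in the code above:

use std::io::{self, Write};

// One (property, data_len, data_file_name_len) triple per variant, mirroring the loop above.
fn write_file_header(
    w: &mut impl Write,
    file_type_hash: u64,
    name_hash: u64,
    variants: &[(u32, u32, u32)],
) -> io::Result<()> {
    w.write_all(&file_type_hash.to_le_bytes())?;
    w.write_all(&name_hash.to_le_bytes())?;
    w.write_all(&[variants.len() as u8])?; // header count as a single byte
    w.write_all(&0u32.to_le_bytes())?;     // unknown field, written as 0
    for &(property, data_len, name_len) in variants {
        w.write_all(&property.to_le_bytes())?;
        w.write_all(&[0u8])?;
        w.write_all(&data_len.to_le_bytes())?;
        w.write_all(&[1u8])?;
        w.write_all(&name_len.to_le_bytes())?;
    }
    Ok(())
}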
@@ -549,13 +586,29 @@ impl BundleFile {
         Ok(w.into_inner())
     }

-    #[tracing::instrument(name = "File::from_sjson", skip(_sjson))]
-    pub async fn from_sjson<P, S>(_file_type: BundleFileType, _sjson: S, _root: P) -> Result<Self>
+    #[tracing::instrument(name = "File::from_sjson", skip(sjson))]
+    pub async fn from_sjson<P, S>(
+        name: String,
+        file_type: BundleFileType,
+        sjson: S,
+        root: P,
+    ) -> Result<Self>
     where
         P: AsRef<Path> + std::fmt::Debug,
         S: AsRef<str>,
     {
-        todo!();
+        match file_type {
+            BundleFileType::Lua => lua::compile(name, sjson).await,
+            BundleFileType::Unknown(_) => {
+                eyre::bail!("Unknown file type. Cannot compile from SJSON");
+            }
+            _ => {
+                eyre::bail!(
+                    "Compiling file type {} is not yet supported",
+                    file_type.ext_name()
+                )
+            }
+        }
     }

     pub fn base_name(&self) -> &String {
@@ -588,10 +641,6 @@ impl BundleFile {
         self.name == name || self.name(false, None) == name || self.name(true, None) == name
     }

-    pub fn hash(&self) -> Murmur64 {
-        self.hash
-    }
-
     pub fn file_type(&self) -> BundleFileType {
         self.file_type
     }
@@ -610,7 +659,7 @@ impl BundleFile {
             .iter()
             .map(|variant| {
                 let name = if self.variants.len() > 1 {
-                    self.name(false, Some(variant.header.variant))
+                    self.name(false, Some(variant.property()))
                 } else {
                     self.name(false, None)
                 };
@@ -643,7 +692,7 @@ impl BundleFile {
         let tasks = self.variants.iter().map(|variant| async move {
             let data = variant.data();
             let name = if self.variants.len() > 1 {
-                self.name(true, Some(variant.header.variant))
+                self.name(true, Some(variant.property()))
             } else {
                 self.name(true, None)
             };
@@ -118,9 +118,11 @@ impl Bundle {
     }

     pub fn add_file(&mut self, file: BundleFile) {
+        tracing::trace!("Adding file {}", file.name(false, None));
         let header = EntryHeader {
             extension_hash: file.file_type().into(),
             name_hash: Murmur64::hash(file.base_name().as_bytes()),
             // TODO: Hard coded until we know what this is
             flags: 0x0,
         };

@@ -1,10 +1,15 @@
-use std::io::Cursor;
+use std::io::{Cursor, Write};

-use color_eyre::Result;
+use color_eyre::{eyre::Context, Result};
+use tokio::{fs, process::Command};

-use crate::bundle::file::UserFile;
+use crate::{
+    binary::sync::WriteExt,
+    bundle::file::{BundleFileVariant, UserFile},
+    BundleFile, BundleFileType,
+};

-#[tracing::instrument(skip_all,fields(buf_len = data.as_ref().len()))]
+#[tracing::instrument(skip_all, fields(buf_len = data.as_ref().len()))]
 pub(crate) async fn decompile<T>(_ctx: &crate::Context, data: T) -> Result<Vec<UserFile>>
 where
     T: AsRef<[u8]>,
@@ -12,3 +17,75 @@ where
     let mut _r = Cursor::new(data.as_ref());
     todo!();
 }
+
+#[tracing::instrument(skip_all)]
+pub(crate) async fn compile<S>(name: String, code: S) -> Result<BundleFile>
+where
+    S: AsRef<str>,
+{
+    let in_file_path = {
+        let mut path = std::env::temp_dir();
+        let name: String = std::iter::repeat_with(fastrand::alphanumeric)
+            .take(10)
+            .collect();
+        path.push(name + "-dtmt.lua");
+
+        path
+    };
+
+    let out_file_path = {
+        let mut path = std::env::temp_dir();
+
+        let name: String = std::iter::repeat_with(fastrand::alphanumeric)
+            .take(10)
+            .collect();
+        path.push(name + "-dtmt.luab");
+
+        path
+    };
+
+    fs::write(&in_file_path, code.as_ref().as_bytes())
+        .await
+        .wrap_err_with(|| format!("failed to write file {}", in_file_path.display()))?;
+
+    // TODO: Make executable name configurable
+    Command::new("luajit")
+        .arg("-bg")
+        .arg("-F")
+        .arg(name.clone() + ".lua")
+        .arg("-o")
+        .arg("Windows")
+        .arg(&in_file_path)
+        .arg(&out_file_path)
+        .status()
+        .await
+        .wrap_err("failed to compile to LuaJIT byte code")?;
+
+    let mut data = Cursor::new(Vec::new());
+
+    let bytecode = {
+        let mut data = fs::read(&out_file_path)
+            .await
+            .wrap_err_with(|| format!("failed to read file {}", out_file_path.display()))?;
+
+        // Add Fatshark's custom magic bytes
+        data[1] = 0x46;
+        data[2] = 0x53;
+        data[3] = 0x82;
+
+        data
+    };
+
+    data.write_u32(bytecode.len() as u32)?;
+    // I believe this is supposed to be a uleb128, but it seems to be always 0x2 in binary.
+    data.write_u64(0x2)?;
+    data.write_all(&bytecode)?;
+
+    let mut file = BundleFile::new(name, BundleFileType::Lua);
+    let mut variant = BundleFileVariant::new();
+
+    variant.set_data(data.into_inner());
+    file.add_variant(variant);
+
+    Ok(file)
+}
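A hedged usage sketch for the new `compile` helper, mirroring how the `BundleFileType::Lua` arm of `BundleFile::from_sjson` calls it in the hunk further up. The module path `lua`, the mod script name, and the assumption that `luajit` is on `PATH` are all illustrative:

use color_eyre::Result;

async fn compile_example() -> Result<()> {
    let source = r#"print("hello from dtmt")"#;

    // Writes the source to a temp file, shells out to `luajit -bg`, patches in
    // the custom magic bytes, and wraps the byte code in a `BundleFile`.
    let file = lua::compile("scripts/mods/example/example".to_string(), source).await?;

    // `file` can then be handed to `Bundle::add_file` like any other asset.
    let _ = file;
    Ok(())
}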
@@ -6,19 +6,21 @@ use std::str::FromStr;

 use color_eyre::eyre::{self, Context};
 use color_eyre::Result;
+use tokio::fs;

 use crate::binary::sync::{ReadExt, WriteExt};
 use crate::bundle::file::{BundleFileType, UserFile};
 use crate::murmur::{HashGroup, Murmur64};

-#[tracing::instrument(skip(_ctx))]
-async fn resolve_wildcard<P>(
-    _ctx: &crate::Context,
-    wildcard: P,
+#[tracing::instrument]
+async fn resolve_wildcard<P1, P2>(
+    wildcard: P1,
+    root: P2,
     t: Option<BundleFileType>,
 ) -> Result<Vec<PathBuf>>
 where
-    P: AsRef<Path> + std::fmt::Debug,
+    P1: AsRef<Path> + std::fmt::Debug,
+    P2: AsRef<Path> + std::fmt::Debug,
 {
     let wildcard = wildcard.as_ref();

@@ -33,22 +35,48 @@ where
         let mut path = wildcard.to_path_buf();

         if let Some(t) = t {
-            path.push(t.ext_name());
+            path.set_extension(t.ext_name());
         }

         return Ok(vec![path]);
     }

-    // let parent = wildcard.parent().unwrap_or(&ctx.project_dir);
+    let path = root.as_ref().join(wildcard);
+    let parent = path
+        .parent()
+        .ok_or_else(|| eyre::eyre!("could not determine parent for wildcard"))?;

-    // let paths = Vec::new();
-    // let dir = fs::read_dir(parent).await?;
+    let mut paths = Vec::new();
+    let mut dir = fs::read_dir(&parent).await?;

-    // while let Some(file) = dir.next_entry().await? {
-    //     if let Some(ext) = file.file_name()
-    // }
-    todo!();
+    while let Some(entry) = dir.next_entry().await? {
+        let file_path = {
+            let path = entry.path();
+            let path = path.strip_prefix(root.as_ref())?;
+            path.to_path_buf()
+        };
+
+        // Skip file if there is a desired extension `t`, but the file's
+        // extension name doesn't match
+        if t.is_some() {
+            let ext = file_path
+                .extension()
+                .and_then(|ext| ext.to_str())
+                .and_then(|ext| BundleFileType::from_str(ext).ok());
+
+            if ext != t {
+                tracing::debug!(
+                    "Skipping wildcard result with invalid extension: {}",
+                    file_path.display(),
+                );
+                continue;
+            }
+        }
+
+        paths.push(file_path);
+    }
+
+    Ok(paths)
 }

 type PackageType = HashMap<BundleFileType, HashSet<PathBuf>>;
@@ -75,39 +103,11 @@ impl DerefMut for Package {
     }
 }

-#[tracing::instrument]
-async fn glob_stream<PB, P>(pattern: PB, root: P) -> Result<(PathBuf, tokio::fs::ReadDir)>
-where
-    PB: Into<PathBuf> + std::fmt::Debug,
-    P: AsRef<Path> + std::fmt::Debug,
-{
-    let pattern: PathBuf = pattern.into();
-    if pattern.is_absolute() {
-        eyre::bail!(
-            "Path in package definition must not be absolute. Got '{}'",
-            pattern.display()
-        )
-    }
-
-    let _is_pattern = pattern.ends_with("*");
-    let _dir = pattern.parent().unwrap_or(root.as_ref());
-    todo!();
-    // let stream = fs::read_dir(dir).await?;
-    // Ok((dir.to_path_buf(), stream))
-}
-
 impl Package {
     fn len(&self) -> usize {
         self.values().fold(0, |total, files| total + files.len())
     }

-    fn add_file<P>(&mut self, t: BundleFileType, path: P)
-    where
-        P: Into<PathBuf>,
-    {
-        self.entry(t).or_default().insert(path.into());
-    }
-
     #[tracing::instrument("Package::from_sjson", skip(sjson), fields(sjson_len = sjson.as_ref().len()))]
     pub async fn from_sjson<P, S>(sjson: S, name: String, root: P) -> Result<Self>
     where
@@ -119,67 +119,38 @@ impl Package {
         let mut inner: PackageType = Default::default();

         for (ty, patterns) in definition.iter() {
-            if ty == "*" {
+            let ext = if ty == "*" {
+                None
+            } else {
+                let t = BundleFileType::from_str(ty)
+                    .wrap_err("invalid file type in package definition")?;
+                Some(t)
+            };
+
             for pattern in patterns.iter() {
-                let (dir, mut stream) = glob_stream(pattern, root).await?;
-
-                while let Some(entry) = stream.next_entry().await? {
-                    let name = PathBuf::from(entry.file_name());
-                    let ext = if let Some(ext) = name.extension().and_then(|ext| ext.to_str()) {
-                        match BundleFileType::from_str(ext) {
-                            Ok(t) => t,
-                            Err(_) => {
-                                tracing::debug!(
-                                    "Skipping file with invalid extension: {}",
-                                    dir.join(name).display()
-                                );
-                                continue;
-                            }
-                        }
+                let paths = resolve_wildcard(pattern, root, ext).await?;
+                for path in paths {
+                    let ext = if let Some(ext) = path.extension().and_then(|ext| ext.to_str()) {
+                        ext
                     } else {
-                        tracing::debug!(
-                            "Skipping file without extension: {}",
-                            dir.join(name).display()
-                        );
+                        tracing::warn!("Skipping file without extension: {}", path.display());
                         continue;
                     };

+                    let t = if let Ok(t) = BundleFileType::from_str(ext) {
+                        t
+                    } else {
+                        tracing::warn!(
+                            "Skipping file with unknown extension '{}': {}",
+                            ext,
+                            path.display()
+                        );
+                        continue;
+                    };
+
-                    inner.entry(ext).or_default().insert(dir.join(name));
+                    inner.entry(t).or_default().insert(path);
                 }
             }
-            } else if let Ok(t) = BundleFileType::from_str(ty) {
-                for pattern in patterns.iter() {
-                    let (dir, mut stream) = glob_stream(pattern, root).await?;
-
-                    while let Some(entry) = stream.next_entry().await? {
-                        let name = PathBuf::from(entry.file_name());
-                        let ext = if let Some(ext) = name.extension().and_then(|ext| ext.to_str()) {
-                            match BundleFileType::from_str(ext) {
-                                Ok(t) => t,
-                                Err(_) => {
-                                    tracing::debug!(
-                                        "Skipping file with invalid extension: {}",
-                                        dir.join(name).display()
-                                    );
-                                    continue;
-                                }
-                            }
-                        } else {
-                            tracing::debug!(
-                                "Skipping file without extension: {}",
-                                dir.join(name).display()
-                            );
-                            continue;
-                        };
-
-                        if t == ext {
-                            inner.entry(ext).or_default().insert(dir.join(name));
-                        }
-                    }
-                }
-            } else {
-                eyre::bail!("Unknown file type '{}'", ty);
-            };
         }

         let pkg = Self {
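For context, a hedged example of feeding a package definition to the reworked `Package::from_sjson`. The SJSON shape (a file type, or `*`, mapped to a list of path patterns) is inferred from the loop above; the concrete keys and paths are illustrative only:

use std::path::Path;

use color_eyre::Result;

async fn load_example_package(root: &Path) -> Result<Package> {
    // Hypothetical definition; real projects list their own types and patterns.
    let definition = r#"
lua = [
    "scripts/mods/example/*"
]
"*" = [
    "textures/example_icon"
]
"#;

    Package::from_sjson(definition, String::from("example_package"), root).await
}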
@@ -277,8 +248,11 @@ where

 #[cfg(test)]
 mod test {
+    use std::path::PathBuf;

+    use crate::BundleFileType;

+    use super::resolve_wildcard;
     use super::Package;

     #[test]
@@ -334,4 +308,28 @@ mod test {
             Default::default()
         );
     }
+
+    #[tokio::test]
+    async fn absolute_wildcard() {
+        let path = PathBuf::from("/tmp/test");
+        let root = PathBuf::from("/tmp");
+
+        let res = resolve_wildcard(path, &root, None).await;
+        assert!(res.is_err());
+    }
+
+    #[tokio::test]
+    async fn wildcard_without_glob() {
+        let mut path = PathBuf::from("test");
+        let root = PathBuf::from("/tmp");
+
+        let paths = resolve_wildcard(&path, &root, None).await.unwrap();
+        assert_eq!(paths, vec![path.clone()]);
+
+        let paths = resolve_wildcard(&path, &root, Some(BundleFileType::Texture))
+            .await
+            .unwrap();
+        path.set_extension("texture");
+        assert_eq!(paths, vec![path]);
+    }
 }
@@ -62,7 +62,7 @@ impl Strings {

             map.entry(name)
                 .or_default()
-                .insert(Language::Unnamed(variant.kind()), s);
+                .insert(Language::Unnamed(variant.property()), s);
         }
     }
