Merge pull request 'Darktide Mod Manager' (#39) from feat/dtmm into master

Reviewed-on: #39
Lucas Schwiderski 2023-03-01 22:27:40 +01:00
commit 3ed47e236b
59 changed files with 7155 additions and 902 deletions

Cargo.lock (generated): file diff suppressed because it is too large.

README.adoc
@@ -10,23 +10,18 @@
 :tip-caption: :bulb:
 :warning-caption: :warning:
-A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_.
+A set of tools to use and develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_.
-== Quickstart
+== Darktide Mod Manager (DTMM)
-1. Download the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] for your platform.
-2. Place the binary for your system and `dictionary.csv` next to each other.
-3. Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`.
-4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference].
+DTMM is a GUI application to install and manage mods for the game.
-== Runtime dependencies
+image::docs/screenshots/dtmm.png[dtmm main view]
-The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd.
+Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmm[crates/dtmm] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki].
-A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environmental variable.
+== Darktide Mod Tools (DTMT)
-== Building
+DTMT is a CLI application providing various commands that aid in developing mods for the game.
-1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system.
-2. Download or clone this source code. Make sure to include the submodules in `lib/`.
-3. Run `cargo build`.
+Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmt[crates/dtmt] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki].

crates/dtmm/Cargo.toml (new file)
@@ -0,0 +1,25 @@
[package]
name = "dtmm"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
bitflags = "1.3.2"
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
color-eyre = "0.6.2"
confy = "0.5.1"
druid = { git = "https://github.com/linebender/druid.git", features = ["im", "serde"] }
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
futures = "0.3.25"
oodle = { path = "../../lib/oodle", version = "*" }
sdk = { path = "../../lib/sdk", version = "0.2.0" }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
serde = { version = "1.0.152", features = ["derive", "rc"] }
tokio = { version = "1.23.0", features = ["rt", "fs", "tracing", "sync"] }
tracing = "0.1.37"
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
zip = "0.6.4"
tokio-stream = { version = "0.1.12", features = ["fs"] }

crates/dtmm/README.adoc (new file)
@@ -0,0 +1,16 @@
= Darktide Mod Manager (DTMM)
:idprefix:
:idseparator:
:toc: macro
:toclevels: 1
:!toc-title:
:caution-caption: :fire:
:important-caption: :exclamation:
:note-caption: :paperclip:
:tip-caption: :bulb:
:warning-caption: :warning:
DTMM is a GUI application to install and manage mods for the game.
image::../../docs/screenshots/dtmm.png[dtmm main view]

crates/dtmm/assets/mod_main.lua (new file)
@@ -0,0 +1,192 @@
local _G = _G
local rawget = rawget
local rawset = rawset
local log = function(category, format, ...)
local Log = rawget(_G, "Log")
if Log then
Log.info(category, format, ...)
else
print(string.format("[%s] %s", category or "", string.format(format or "", ...)))
end
end
-- Patch `GameStateMachine.init` to add our own state for loading mods.
-- In the future, Fatshark might provide us with a dedicated way to do this.
local function patch_mod_loading_state()
local StateBootSubStateBase = require("scripts/game_states/boot/state_boot_sub_state_base")
-- A necessary override.
-- The original does not proxy `dt` to `_state_update`, but we need that.
StateBootSubStateBase.update = function (self, dt)
local done, error = self:_state_update(dt)
local params = self._params
if error then
return StateError, { error }
elseif done then
local next_index = params.sub_state_index + 1
params.sub_state_index = next_index
local next_state_data = params.states[next_index]
if next_state_data then
return next_state_data[1], self._params
else
self._parent:sub_states_done()
end
end
end
local StateBootLoadMods = class("StateBootLoadMods", "StateBootSubStateBase")
StateBootLoadMods.on_enter = function (self, parent, params)
log("StateBootLoadMods", "Entered")
StateBootLoadMods.super.on_enter(self, parent, params)
local state_params = self:_state_params()
local package_manager = state_params.package_manager
self._state = "load_package"
self._package_manager = package_manager
self._package_handles = {
["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadMods", nil),
["packages/dml"] = package_manager:load("packages/dml", "StateBootLoadMods", nil),
}
end
StateBootLoadMods._state_update = function (self, dt)
local state = self._state
local package_manager = self._package_manager
if state == "load_package" and package_manager:update() then
log("StateBootLoadMods", "Packages loaded, loading mods")
self._state = "load_mods"
local mod_loader = require("scripts/mods/dml/init")
self._mod_loader = mod_loader
local mod_data = require("scripts/mods/mod_data")
mod_loader:init(mod_data, self._parent:gui())
elseif state == "load_mods" and self._mod_loader:update(dt) then
log("StateBootLoadMods", "Mods loaded, exiting")
return true, false
end
return false, false
end
local GameStateMachine = require("scripts/foundation/utilities/game_state_machine")
local patched = false
local GameStateMachine_init = GameStateMachine.init
GameStateMachine.init = function(self, parent, start_state, params, ...)
if not patched then
log("mod_main", "Injecting mod loading state")
patched = true
-- Hardcoded position after `StateRequireScripts`.
-- We do want to wait until then, so that most of the game's core
-- systems are at least loaded and can be hooked, even if they aren't
-- running, yet.
local pos = 4
table.insert(params.states, pos, {
StateBootLoadMods,
{
package_manager = params.package_manager,
},
})
end
GameStateMachine_init(self, parent, start_state, params, ...)
end
log("mod_main", "Mod patching complete")
end
log("mod_main", "Initializing mods...")
local require_store = {}
Mods = {
-- Keep a backup of certain system libraries before
-- Fatshark's code scrubs them.
-- The loader can then decide to pass them on to mods, or ignore them
lua = setmetatable({}, {
io = io,
debug = debug,
ffi = ffi,
os = os,
load = load,
loadfile = loadfile,
loadstring = loadstring,
}),
require_store = require_store
}
local can_insert = function(filepath, new_result)
local store = require_store[filepath]
if not store or #store == 0 then
return true
end
if store[#store] ~= new_result then
return true
end
end
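-- Wrap `require` so that every module table the game loads is cached in
-- `require_store`. `Mods.hook.enable_by_file` is notified whenever a new copy
-- of a file's module is stored, so hooks can be applied to it.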
local original_require = require
require = function(filepath, ...)
local result = original_require(filepath, ...)
if result and type(result) == "table" then
if can_insert(filepath, result) then
require_store[filepath] = require_store[filepath] or {}
local store = require_store[filepath]
table.insert(store, result)
if Mods.hook then
Mods.hook.enable_by_file(filepath, #store)
end
end
end
return result
end
require("scripts/boot_init")
require("scripts/foundation/utilities/class")
-- The `__index` metamethod maps a proper identifier `CLASS.MyClassName` to the
-- stringified version of the key: `"MyClassName"`.
-- This allows using LuaCheck for the stringified class names in hook parameters.
_G.CLASS = setmetatable({}, {
__index = function(_, key)
return key
end
})
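-- Wrap `class` so that newly created classes are also exposed as globals and
-- registered in `CLASS`, keeping both lookup styles above working.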
local original_class = class
class = function(class_name, super_name, ...)
local result = original_class(class_name, super_name, ...)
if not rawget(_G, class_name) then
rawset(_G, class_name, result)
end
if not rawget(_G.CLASS, class_name) then
rawset(_G.CLASS, class_name, result)
end
return result
end
require("scripts/main")
log("mod_main", "'scripts/main' loaded")
-- Override `init` to run our injection
function init()
patch_mod_loading_state()
-- As requested by Fatshark
local StateRequireScripts = require("scripts/game_states/boot/state_require_scripts")
StateRequireScripts._get_is_modded = function() return true end
Main:init()
end

crates/dtmm/notes.adoc (new file)
@@ -0,0 +1,49 @@
= Notes
== Layout
- top bar:
- left aligned: a tab bar with "mods", "settings", "about"
- right aligned: a button to start the game
- in the future: center aligned a dropdown to select profiles, and button to edit them
- main view:
- left side: list view of mods
- right side: details pane and buttons
- always visible, first mod in list is selected by default
- buttons:
- add mod
- deploy mods
- remove selected mod
- enable/disable (text changes based on state)
== Mod list
- name
- description?
- image?
- click to get details pane?
== Managing mods
- for each mod in the list, have a checkbox
- need a button to remove mods
- need a button to add mods from downloaded files
- search
== Misc
- settings
- open mod storage
== Managing the game
- deploy mods
-
== Preparing the game
- click "Install mods" to prepare the game files with the enabled mods
== Playing the game
- if overlay file systems are used, the game has to be started through the mod manager

crates/dtmm/src/controller/app.rs (new file)
@@ -0,0 +1,213 @@
use std::collections::HashMap;
use std::io::{Cursor, ErrorKind, Read};
use std::path::Path;
use color_eyre::eyre::{self, Context};
use color_eyre::{Help, Result};
use druid::im::Vector;
use druid::FileInfo;
use dtmt_shared::ModConfig;
use serde::Deserialize;
use tokio::fs::{self, DirEntry};
use tokio::runtime::Runtime;
use tokio_stream::wrappers::ReadDirStream;
use tokio_stream::StreamExt;
use zip::ZipArchive;
use crate::state::{ModInfo, PackageInfo, State};
use crate::util::config::{ConfigSerialize, LoadOrderEntry};
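/// Imports a mod archive chosen in the file dialog. The ZIP must contain a
/// single top-level directory holding a `dtmt.cfg` and a `files.sjson` index;
/// the archive is extracted into the `mods/` folder of the data directory.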
#[tracing::instrument(skip(state))]
pub(crate) async fn import_mod(state: State, info: FileInfo) -> Result<ModInfo> {
let data = fs::read(&info.path)
.await
.wrap_err_with(|| format!("failed to read file {}", info.path.display()))?;
let data = Cursor::new(data);
let mut archive = ZipArchive::new(data).wrap_err("failed to open ZIP archive")?;
if tracing::enabled!(tracing::Level::DEBUG) {
let names = archive.file_names().fold(String::new(), |mut s, name| {
s.push('\n');
s.push_str(name);
s
});
tracing::debug!("Archive contents:{}", names);
}
let dir_name = {
let f = archive.by_index(0).wrap_err("archive is empty")?;
if !f.is_dir() {
let err = eyre::eyre!("archive does not have a top-level directory");
return Err(err).with_suggestion(|| "Use 'dtmt build' to create the mod archive.");
}
let name = f.name();
// The directory name is returned with a trailing slash, which we don't want
name[..(name.len().saturating_sub(1))].to_string()
};
tracing::info!("Importing mod {}", dir_name);
let mod_cfg: ModConfig = {
let mut f = archive
.by_name(&format!("{}/{}", dir_name, "dtmt.cfg"))
.wrap_err("failed to read mod config from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("failed to read mod config from archive")?;
let data = String::from_utf8(buf).wrap_err("mod config is not valid UTF-8")?;
serde_sjson::from_str(&data).wrap_err("failed to deserialize mod config")?
};
tracing::debug!(?mod_cfg);
let files: HashMap<String, Vec<String>> = {
let mut f = archive
.by_name(&format!("{}/{}", dir_name, "files.sjson"))
.wrap_err("failed to read file index from archive")?;
let mut buf = Vec::with_capacity(f.size() as usize);
f.read_to_end(&mut buf)
.wrap_err("failed to read file index from archive")?;
let data = String::from_utf8(buf).wrap_err("file index is not valid UTF-8")?;
serde_sjson::from_str(&data).wrap_err("failed to deserialize file index")?
};
tracing::trace!(?files);
let mod_dir = state.get_mod_dir();
tracing::trace!("Creating mods directory {}", mod_dir.display());
fs::create_dir_all(&mod_dir)
.await
.wrap_err_with(|| format!("failed to create data directory {}", mod_dir.display()))?;
tracing::trace!("Extracting mod archive to {}", mod_dir.display());
archive
.extract(&mod_dir)
.wrap_err_with(|| format!("failed to extract archive to {}", mod_dir.display()))?;
let packages = files
.into_iter()
.map(|(name, files)| PackageInfo::new(name, files.into_iter().collect()))
.collect();
let info = ModInfo::new(mod_cfg, packages);
Ok(info)
}
#[tracing::instrument(skip(state))]
pub(crate) async fn delete_mod(state: State, info: &ModInfo) -> Result<()> {
let mod_dir = state.get_mod_dir().join(&info.id);
fs::remove_dir_all(&mod_dir)
.await
.wrap_err_with(|| format!("failed to remove directory {}", mod_dir.display()))?;
Ok(())
}
#[tracing::instrument(skip(state))]
pub(crate) async fn save_settings(state: State) -> Result<()> {
let cfg = ConfigSerialize::from(&state);
tracing::info!("Saving settings to '{}'", state.config_path.display());
tracing::debug!(?cfg);
let data = serde_sjson::to_string(&cfg).wrap_err("failed to serialize config")?;
fs::write(state.config_path.as_ref(), &data)
.await
.wrap_err_with(|| {
format!(
"failed to write config to '{}'",
state.config_path.display()
)
})
}
async fn read_sjson_file<P, T>(path: P) -> Result<T>
where
T: for<'a> Deserialize<'a>,
P: AsRef<Path> + std::fmt::Debug,
{
let buf = fs::read(path).await.wrap_err("failed to read file")?;
let data = String::from_utf8(buf).wrap_err("invalid UTF8")?;
serde_sjson::from_str(&data).wrap_err("failed to deserialize")
}
#[tracing::instrument(skip_all,fields(
name = ?res.as_ref().map(|entry| entry.file_name())
))]
async fn read_mod_dir_entry(res: Result<DirEntry>) -> Result<ModInfo> {
let entry = res?;
let config_path = entry.path().join("dtmt.cfg");
let index_path = entry.path().join("files.sjson");
let cfg: ModConfig = read_sjson_file(&config_path)
.await
.wrap_err_with(|| format!("failed to read mod config '{}'", config_path.display()))?;
let files: HashMap<String, Vec<String>> = read_sjson_file(&index_path)
.await
.wrap_err_with(|| format!("failed to read file index '{}'", index_path.display()))?;
let packages = files
.into_iter()
.map(|(name, files)| PackageInfo::new(name, files.into_iter().collect()))
.collect();
let info = ModInfo::new(cfg, packages);
Ok(info)
}
#[tracing::instrument(skip(mod_order))]
pub(crate) fn load_mods<'a, P, S>(mod_dir: P, mod_order: S) -> Result<Vector<ModInfo>>
where
S: Iterator<Item = &'a LoadOrderEntry>,
P: AsRef<Path> + std::fmt::Debug,
{
let rt = Runtime::new()?;
rt.block_on(async move {
let mod_dir = mod_dir.as_ref();
let read_dir = match fs::read_dir(mod_dir).await {
Ok(read_dir) => read_dir,
Err(err) if err.kind() == ErrorKind::NotFound => {
return Ok(Vector::new());
}
Err(err) => {
return Err(err)
.wrap_err_with(|| format!("failed to open directory '{}'", mod_dir.display()));
}
};
let stream = ReadDirStream::new(read_dir)
.map(|res| res.wrap_err("failed to read dir entry"))
.then(read_mod_dir_entry);
tokio::pin!(stream);
let mut mods: HashMap<String, ModInfo> = HashMap::new();
while let Some(res) = stream.next().await {
let info = res?;
mods.insert(info.id.clone(), info);
}
let mods = mod_order
.filter_map(|entry| {
if let Some(mut info) = mods.remove(&entry.id) {
info.enabled = entry.enabled;
Some(info)
} else {
None
}
})
.collect();
Ok::<_, color_eyre::Report>(mods)
})
}

crates/dtmm/src/controller/game.rs (new file)
@@ -0,0 +1,575 @@
use std::ffi::CString;
use std::io::{Cursor, ErrorKind};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Help, Result};
use futures::stream;
use futures::StreamExt;
use sdk::filetype::lua;
use sdk::filetype::package::Package;
use sdk::murmur::Murmur64;
use sdk::{
Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary,
};
use tokio::fs;
use tokio::io::AsyncWriteExt;
use tracing::Instrument;
use crate::state::{PackageInfo, State};
const MOD_BUNDLE_NAME: &str = "packages/mods";
const BOOT_BUNDLE_NAME: &str = "packages/boot";
const DML_BUNDLE_NAME: &str = "packages/dml";
const BUNDLE_DATABASE_NAME: &str = "bundle_database.data";
const MOD_BOOT_SCRIPT: &str = "scripts/mod_main";
const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data";
const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini";
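/// Reads a game file that deployment will later overwrite. If a `.bak` copy
/// exists it is read instead; otherwise the original is first copied to the
/// backup path, so the returned bytes always reflect the unmodified file.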
#[tracing::instrument]
async fn read_file_with_backup<P>(path: P) -> Result<Vec<u8>>
where
P: AsRef<Path> + std::fmt::Debug,
{
let path = path.as_ref();
let backup_path = {
let mut p = PathBuf::from(path);
let ext = if let Some(ext) = p.extension() {
ext.to_string_lossy().to_string() + ".bak"
} else {
String::from("bak")
};
p.set_extension(ext);
p
};
let file_name = path
.file_name()
.map(|s| s.to_string_lossy().to_string())
.unwrap_or_else(|| String::from("file"));
let bin = match fs::read(&backup_path).await {
Ok(bin) => bin,
Err(err) if err.kind() == ErrorKind::NotFound => {
// TODO: This doesn't need to be awaited here, yet.
// I only need to make sure it has finished before writing the changed bundle.
tracing::debug!(
"Backup does not exist. Backing up original {} to '{}'",
file_name,
backup_path.display()
);
fs::copy(path, &backup_path).await.wrap_err_with(|| {
format!(
"failed to back up {} '{}' to '{}'",
file_name,
path.display(),
backup_path.display()
)
})?;
tracing::debug!("Reading {} from original '{}'", file_name, path.display());
fs::read(path).await.wrap_err_with(|| {
format!("failed to read {} file: {}", file_name, path.display())
})?
}
Err(err) => {
return Err(err).wrap_err_with(|| {
format!(
"failed to read {} from backup '{}'",
file_name,
backup_path.display()
)
});
}
};
Ok(bin)
}
#[tracing::instrument(skip_all)]
async fn patch_game_settings(state: Arc<State>) -> Result<()> {
let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH);
let settings = read_file_with_backup(&settings_path)
.await
.wrap_err("failed to read settings.ini")?;
let settings = String::from_utf8(settings).wrap_err("settings.ini is not valid UTF-8")?;
let mut f = fs::File::create(&settings_path)
.await
.wrap_err_with(|| format!("failed to open {}", settings_path.display()))?;
let Some(i) = settings.find("boot_script =") else {
eyre::bail!("couldn't find 'boot_script' field");
};
f.write_all(settings[0..i].as_bytes()).await?;
f.write_all(b"boot_script = \"scripts/mod_main\"").await?;
let Some(j) = settings[i..].find('\n') else {
eyre::bail!("couldn't find end of 'boot_script' field");
};
f.write_all(settings[(i + j)..].as_bytes()).await?;
Ok(())
}
#[tracing::instrument(skip_all, fields(package = info.name))]
fn make_package(info: &PackageInfo) -> Result<Package> {
let mut pkg = Package::new(info.name.clone(), PathBuf::new());
for f in &info.files {
let mut it = f.rsplit('.');
let file_type = it
.next()
.ok_or_else(|| eyre::eyre!("missing file extension"))
.and_then(BundleFileType::from_str)
.wrap_err("invalid file name in package info")?;
let name: String = it.collect();
pkg.add_file(file_type, name);
}
Ok(pkg)
}
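/// Renders the `scripts/mods/mod_data` Lua module as a string: a list with one
/// entry per enabled mod (DML itself is skipped), each carrying the mod's name,
/// id, a `run` callback that boots it via `new_mod` or `dofile`, and its
/// package names.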
fn build_mod_data_lua(state: Arc<State>) -> String {
let mut lua = String::from("return {\n");
// DMF is handled explicitly by the loading procedures, as it actually drives most of that
// and should therefore not show up in the load order.
for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) {
lua.push_str(" {\n name = \"");
lua.push_str(&mod_info.name);
lua.push_str("\",\n id = \"");
lua.push_str(&mod_info.id);
lua.push_str("\",\n run = function()\n");
let resources = &mod_info.resources;
if resources.data.is_some() || resources.localization.is_some() {
lua.push_str(" new_mod(\"");
lua.push_str(&mod_info.id);
lua.push_str("\", {\n mod_script = \"");
lua.push_str(&resources.init.to_string_lossy());
if let Some(data) = resources.data.as_ref() {
lua.push_str("\",\n mod_data = \"");
lua.push_str(&data.to_string_lossy());
}
if let Some(localization) = &resources.localization {
lua.push_str("\",\n mod_localization = \"");
lua.push_str(&localization.to_string_lossy());
}
lua.push_str("\",\n })\n");
} else {
lua.push_str(" return dofile(\"");
lua.push_str(&resources.init.to_string_lossy());
lua.push_str("\")\n");
}
lua.push_str(" end,\n packages = {\n");
for pkg_info in &mod_info.packages {
lua.push_str(" \"");
lua.push_str(&pkg_info.name);
lua.push_str("\",\n");
}
lua.push_str(" },\n },\n");
}
lua.push('}');
tracing::debug!("mod_data_lua:\n{}", lua);
lua
}
#[tracing::instrument(skip_all)]
async fn build_bundles(state: Arc<State>) -> Result<Vec<Bundle>> {
let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string());
let mut tasks = Vec::new();
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let mut bundles = Vec::new();
{
let span = tracing::debug_span!("Building mod data script");
let _enter = span.enter();
let lua = build_mod_data_lua(state.clone());
let lua = CString::new(lua).wrap_err("failed to build CString from mod data Lua string")?;
let file =
lua::compile(MOD_DATA_SCRIPT, &lua).wrap_err("failed to compile mod data Lua file")?;
mod_bundle.add_file(file);
}
for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) {
let span = tracing::trace_span!("building mod packages", name = mod_info.name);
let _enter = span.enter();
let mod_dir = state.get_mod_dir().join(&mod_info.id);
for pkg_info in &mod_info.packages {
let span = tracing::trace_span!("building package", name = pkg_info.name);
let _enter = span.enter();
let pkg = make_package(pkg_info).wrap_err("failed to make package")?;
let mut variant = BundleFileVariant::new();
let bin = pkg
.to_binary()
.wrap_err("failed to serialize package to binary")?;
variant.set_data(bin);
let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package);
file.add_variant(variant);
mod_bundle.add_file(file);
let bundle_name = Murmur64::hash(&pkg_info.name)
.to_string()
.to_ascii_lowercase();
let src = mod_dir.join(&bundle_name);
let dest = bundle_dir.join(&bundle_name);
let pkg_name = pkg_info.name.clone();
let mod_name = mod_info.name.clone();
// Explicitly drop the guard, so that we can move the span
// into the async operation
drop(_enter);
let ctx = state.ctx.clone();
let task = async move {
let bundle = {
let bin = fs::read(&src).await.wrap_err_with(|| {
format!("failed to read bundle file '{}'", src.display())
})?;
let name = Bundle::get_name_from_path(&ctx, &src);
Bundle::from_binary(&ctx, name, bin)
.wrap_err_with(|| format!("failed to parse bundle '{}'", src.display()))?
};
tracing::debug!(
src = %src.display(),
dest = %dest.display(),
"Copying bundle '{}' for mod '{}'",
pkg_name,
mod_name,
);
// We attempt to remove any previous file, so that the hard link can be created.
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
// may be possible despite an error here, or the error will be reported by it anyways.
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
// hashes, it's low enough for now, and the setup required to detect
// "game bundle vs mod bundle" is non-trivial.
let _ = fs::remove_file(&dest).await;
fs::copy(&src, &dest).await.wrap_err_with(|| {
format!(
"failed to copy bundle {pkg_name} for mod {mod_name}. src: {}, dest: {}",
src.display(),
dest.display()
)
})?;
Ok::<Bundle, color_eyre::Report>(bundle)
}
.instrument(span);
tasks.push(task);
}
}
tracing::debug!("Copying {} mod bundles", tasks.len());
let mut tasks = stream::iter(tasks).buffer_unordered(10);
while let Some(res) = tasks.next().await {
let bundle = res?;
bundles.push(bundle);
}
{
let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64()));
tracing::trace!("Writing mod bundle to '{}'", path.display());
fs::write(&path, mod_bundle.to_binary()?)
.await
.wrap_err_with(|| format!("failed to write bundle to '{}'", path.display()))?;
}
bundles.push(mod_bundle);
Ok(bundles)
}
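/// Rewrites the game's boot bundle (always starting from the backed-up
/// original): adds a package file listing all mod packages, copies DML's own
/// bundle into place, and injects the compiled `mod_main.lua` boot script.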
#[tracing::instrument(skip_all)]
async fn patch_boot_bundle(state: Arc<State>) -> Result<Vec<Bundle>> {
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())));
let mut bundles = Vec::with_capacity(2);
let mut boot_bundle = async {
let bin = read_file_with_backup(&bundle_path)
.await
.wrap_err("failed to read boot bundle")?;
Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin)
.wrap_err("failed to parse boot bundle")
}
.instrument(tracing::trace_span!("read boot bundle"))
.await
.wrap_err_with(|| format!("failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;
{
tracing::trace!("Adding mod package file to boot bundle");
let span = tracing::trace_span!("create mod package file");
let _enter = span.enter();
let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new());
for mod_info in &state.mods {
for pkg_info in &mod_info.packages {
pkg.add_file(BundleFileType::Package, &pkg_info.name);
}
}
pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT);
let mut variant = BundleFileVariant::new();
variant.set_data(pkg.to_binary()?);
let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package);
f.add_variant(variant);
boot_bundle.add_file(f);
}
{
tracing::trace!("Handling DML packages and bundle");
let span = tracing::trace_span!("handle DML");
let _enter = span.enter();
let mut variant = BundleFileVariant::new();
let mod_info = state
.mods
.iter()
.find(|m| m.id == "dml")
.ok_or_else(|| eyre::eyre!("DML not found in mod list"))?;
let pkg_info = mod_info
.packages
.get(0)
.ok_or_else(|| eyre::eyre!("invalid mod package for DML"))
.with_suggestion(|| "Re-download and import the newest version.".to_string())?;
let bundle_name = Murmur64::hash(&pkg_info.name)
.to_string()
.to_ascii_lowercase();
let src = state.get_mod_dir().join(&mod_info.id).join(&bundle_name);
{
let bin = fs::read(&src)
.await
.wrap_err_with(|| format!("failed to read bundle file '{}'", src.display()))?;
let name = Bundle::get_name_from_path(&state.ctx, &src);
let dml_bundle = Bundle::from_binary(&state.ctx, name, bin)
.wrap_err_with(|| format!("failed to parse bundle '{}'", src.display()))?;
bundles.push(dml_bundle);
};
{
let dest = bundle_dir.join(&bundle_name);
let pkg_name = pkg_info.name.clone();
let mod_name = mod_info.name.clone();
tracing::debug!(
"Copying bundle {} for mod {}: {} -> {}",
pkg_name,
mod_name,
src.display(),
dest.display()
);
// We attempt to remove any previous file, so that the hard link can be created.
// We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy
// may be possible despite an error here, or the error will be reported by it anyways.
// TODO: There is a chance that we delete an actual game bundle, but with 64bit
// hashes, it's low enough for now, and the setup required to detect
// "game bundle vs mod bundle" is non-trivial.
let _ = fs::remove_file(&dest).await;
fs::copy(&src, &dest).await.wrap_err_with(|| {
format!(
"failed to copy bundle {pkg_name} for mod {mod_name}. src: {}, dest: {}",
src.display(),
dest.display()
)
})?;
}
let pkg = make_package(pkg_info).wrap_err("failed to create package file for dml")?;
variant.set_data(pkg.to_binary()?);
let mut f = BundleFile::new(DML_BUNDLE_NAME.to_string(), BundleFileType::Package);
f.add_variant(variant);
boot_bundle.add_file(f);
}
{
let span = tracing::debug_span!("Importing mod main script");
let _enter = span.enter();
let lua = include_str!("../../assets/mod_main.lua");
let lua = CString::new(lua).wrap_err("failed to build CString from mod main Lua string")?;
let file =
lua::compile(MOD_BOOT_SCRIPT, &lua).wrap_err("failed to compile mod main Lua file")?;
boot_bundle.add_file(file);
}
async {
let bin = boot_bundle
.to_binary()
.wrap_err("failed to serialize boot bundle")?;
fs::write(&bundle_path, bin)
.await
.wrap_err_with(|| format!("failed to write main bundle: {}", bundle_path.display()))
}
.instrument(tracing::trace_span!("write boot bundle"))
.await?;
bundles.push(boot_bundle);
Ok(bundles)
}
#[tracing::instrument(skip_all, fields(bundles = bundles.len()))]
async fn patch_bundle_database(state: Arc<State>, bundles: Vec<Bundle>) -> Result<()> {
let bundle_dir = Arc::new(state.game_dir.join("bundle"));
let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME);
let mut db = {
let bin = read_file_with_backup(&database_path)
.await
.wrap_err("failed to read bundle database")?;
let mut r = Cursor::new(bin);
let db = BundleDatabase::from_binary(&mut r).wrap_err("failed to parse bundle database")?;
tracing::trace!("Finished parsing bundle database");
db
};
for bundle in bundles {
tracing::trace!("Adding '{}' to bundle database", bundle.name().display());
db.add_bundle(&bundle);
}
{
let bin = db
.to_binary()
.wrap_err("failed to serialize bundle database")?;
fs::write(&database_path, bin).await.wrap_err_with(|| {
format!(
"failed to write bundle database to '{}'",
database_path.display()
)
})?;
}
Ok(())
}
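/// The full deployment pipeline: requires 'Darktide Mod Loader' to be enabled
/// and first in the load order, then builds the mod bundles, patches the boot
/// bundle, the game settings and the bundle database.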
#[tracing::instrument(skip_all, fields(
game_dir = %state.game_dir.display(),
mods = state.mods.len()
))]
pub(crate) async fn deploy_mods(state: State) -> Result<()> {
let state = Arc::new(state);
{
let first = state.mods.get(0);
if first.is_none() || !(first.unwrap().id == "dml" && first.unwrap().enabled) {
// TODO: Add a suggestion where to get it, once that's published
eyre::bail!("'Darktide Mod Loader' needs to be installed, enabled and at the top of the load order");
}
}
tracing::info!(
"Deploying {} mods to {}",
state.mods.len(),
state.game_dir.join("bundle").display()
);
tracing::info!("Build mod bundles");
let mut bundles = build_bundles(state.clone())
.await
.wrap_err("failed to build mod bundles")?;
tracing::info!("Patch boot bundle");
let mut more_bundles = patch_boot_bundle(state.clone())
.await
.wrap_err("failed to patch boot bundle")?;
bundles.append(&mut more_bundles);
tracing::info!("Patch game settings");
patch_game_settings(state.clone())
.await
.wrap_err("failed to patch game settings")?;
tracing::info!("Patching bundle database");
patch_bundle_database(state.clone(), bundles)
.await
.wrap_err("failed to patch bundle database")?;
tracing::info!("Finished deploying mods");
Ok(())
}
#[tracing::instrument(skip(state))]
pub(crate) async fn reset_mod_deployment(state: State) -> Result<()> {
let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()));
let paths = [BUNDLE_DATABASE_NAME, &boot_bundle_path, SETTINGS_FILE_PATH];
let bundle_dir = state.game_dir.join("bundle");
tracing::info!("Resetting mod deployment in {}", bundle_dir.display());
for p in paths {
let path = bundle_dir.join(p);
let backup = bundle_dir.join(&format!("{}.bak", p));
let res = async {
tracing::debug!(
"Copying from backup: {} -> {}",
backup.display(),
path.display()
);
fs::copy(&backup, &path)
.await
.wrap_err_with(|| format!("failed to copy from '{}'", backup.display()))?;
tracing::debug!("Deleting backup: {}", backup.display());
fs::remove_file(&backup)
.await
.wrap_err_with(|| format!("failed to remove '{}'", backup.display()))
}
.await;
if let Err(err) = res {
tracing::error!(
"Failed to restore '{}' from backup. You may need to verify game files. Error: {:?}",
&p,
err
);
}
}
tracing::info!("Reset finished");
Ok(())
}

crates/dtmm/src/controller/worker.rs (new file)
@@ -0,0 +1,131 @@
use std::sync::Arc;
use color_eyre::Result;
use druid::{ExtEventSink, SingleUse, Target};
use tokio::runtime::Runtime;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::RwLock;
use crate::controller::app::*;
use crate::controller::game::*;
use crate::state::AsyncAction;
use crate::state::ACTION_FINISH_SAVE_SETTINGS;
use crate::state::{
ACTION_FINISH_ADD_MOD, ACTION_FINISH_DELETE_SELECTED_MOD, ACTION_FINISH_DEPLOY,
ACTION_FINISH_RESET_DEPLOYMENT, ACTION_LOG,
};
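/// Drains `AsyncAction`s queued by the UI thread and spawns a Tokio task for
/// each one, reporting completion back to druid through the shared
/// `ExtEventSink`.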
async fn handle_action(
event_sink: Arc<RwLock<ExtEventSink>>,
action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
) {
while let Some(action) = action_queue.write().await.recv().await {
let event_sink = event_sink.clone();
match action {
AsyncAction::DeployMods(state) => tokio::spawn(async move {
if let Err(err) = deploy_mods(state).await {
tracing::error!("Failed to deploy mods: {:?}", err);
}
event_sink
.write()
.await
.submit_command(ACTION_FINISH_DEPLOY, (), Target::Auto)
.expect("failed to send command");
}),
AsyncAction::AddMod((state, info)) => tokio::spawn(async move {
match import_mod(state, info).await {
Ok(mod_info) => {
event_sink
.write()
.await
.submit_command(
ACTION_FINISH_ADD_MOD,
SingleUse::new(mod_info),
Target::Auto,
)
.expect("failed to send command");
}
Err(err) => {
tracing::error!("Failed to import mod: {:?}", err);
}
}
}),
AsyncAction::DeleteMod((state, info)) => tokio::spawn(async move {
if let Err(err) = delete_mod(state, &info).await {
tracing::error!(
"Failed to delete mod files. \
You might want to clean up the data directory manually. \
Reason: {:?}",
err
);
}
event_sink
.write()
.await
.submit_command(
ACTION_FINISH_DELETE_SELECTED_MOD,
SingleUse::new(info),
Target::Auto,
)
.expect("failed to send command");
}),
AsyncAction::ResetDeployment(state) => tokio::spawn(async move {
if let Err(err) = reset_mod_deployment(state).await {
tracing::error!("Failed to reset mod deployment: {:?}", err);
}
event_sink
.write()
.await
.submit_command(ACTION_FINISH_RESET_DEPLOYMENT, (), Target::Auto)
.expect("failed to send command");
}),
AsyncAction::SaveSettings(state) => tokio::spawn(async move {
if let Err(err) = save_settings(state).await {
tracing::error!("Failed to save settings: {:?}", err);
}
event_sink
.write()
.await
.submit_command(ACTION_FINISH_SAVE_SETTINGS, (), Target::Auto)
.expect("failed to send command");
}),
};
}
}
async fn handle_log(
event_sink: Arc<RwLock<ExtEventSink>>,
log_queue: Arc<RwLock<UnboundedReceiver<String>>>,
) {
while let Some(line) = log_queue.write().await.recv().await {
let event_sink = event_sink.clone();
event_sink
.write()
.await
.submit_command(ACTION_LOG, SingleUse::new(line), Target::Auto)
.expect("failed to send command");
}
}
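/// Entry point for the background thread: runs a Tokio runtime that drives the
/// action and log handlers concurrently in an endless loop.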
pub(crate) fn work_thread(
event_sink: Arc<RwLock<ExtEventSink>>,
action_queue: Arc<RwLock<UnboundedReceiver<AsyncAction>>>,
log_queue: Arc<RwLock<UnboundedReceiver<String>>>,
) -> Result<()> {
let rt = Runtime::new()?;
rt.block_on(async {
loop {
tokio::select! {
_ = handle_action(event_sink.clone(), action_queue.clone()) => {},
_ = handle_log(event_sink.clone(), log_queue.clone()) => {},
}
}
});
Ok(())
}

crates/dtmm/src/main.rs (new file)
@@ -0,0 +1,91 @@
#![recursion_limit = "256"]
#![feature(let_chains)]
use std::path::PathBuf;
use std::sync::Arc;
use clap::command;
use clap::value_parser;
use clap::Arg;
use color_eyre::eyre::Context;
use color_eyre::{Report, Result};
use druid::AppLauncher;
use tokio::sync::RwLock;
use crate::controller::app::load_mods;
use crate::controller::worker::work_thread;
use crate::state::{Delegate, State};
mod controller {
pub mod app;
pub mod game;
pub mod worker;
}
mod state;
mod util {
pub mod config;
pub mod log;
}
mod ui;
#[tracing::instrument]
fn main() -> Result<()> {
color_eyre::install()?;
let default_config_path = util::config::get_default_config_path();
tracing::trace!(default_config_path = %default_config_path.display());
let matches = command!()
.arg(Arg::new("oodle").long("oodle").help(
"The oodle library to load. This may either be:\n\
- A library name that will be searched for in the system's default paths.\n\
- A file path relative to the current working directory.\n\
- An absolute file path.",
))
.arg(
Arg::new("config")
.long("config")
.short('c')
.help("Path to the config file")
.value_parser(value_parser!(PathBuf))
.default_value(default_config_path.to_string_lossy().to_string()),
)
.get_matches();
let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel();
util::log::create_tracing_subscriber(log_tx);
let config = util::config::read_config(&default_config_path, &matches)
.wrap_err("failed to read config file")?;
let initial_state = {
let mut state = State::new(
config.path,
config.game_dir.unwrap_or_default(),
config.data_dir.unwrap_or_default(),
);
state.mods = load_mods(state.get_mod_dir(), config.mod_order.iter())
.wrap_err("failed to load mods")?;
state
};
let (action_tx, action_rx) = tokio::sync::mpsc::unbounded_channel();
let delegate = Delegate::new(action_tx);
let launcher = AppLauncher::with_window(ui::window::main::new()).delegate(delegate);
let event_sink = launcher.get_external_handle();
std::thread::spawn(move || {
let event_sink = Arc::new(RwLock::new(event_sink));
let action_rx = Arc::new(RwLock::new(action_rx));
let log_rx = Arc::new(RwLock::new(log_rx));
loop {
if let Err(err) = work_thread(event_sink.clone(), action_rx.clone(), log_rx.clone()) {
tracing::error!("Work thread failed, restarting: {:?}", err);
}
}
});
launcher.launch(initial_state).map_err(Report::new)
}

crates/dtmm/src/state/data.rs (new file)
@@ -0,0 +1,152 @@
use std::{path::PathBuf, sync::Arc};
use druid::{im::Vector, Data, Lens};
use dtmt_shared::ModConfig;
use super::SelectedModLens;
#[derive(Copy, Clone, Data, Debug, PartialEq)]
pub(crate) enum View {
Mods,
Settings,
}
impl Default for View {
fn default() -> Self {
Self::Mods
}
}
#[derive(Clone, Data, Debug)]
pub struct PackageInfo {
pub name: String,
pub files: Vector<String>,
}
impl PackageInfo {
pub fn new(name: String, files: Vector<String>) -> Self {
Self { name, files }
}
}
#[derive(Clone, Debug)]
pub(crate) struct ModResourceInfo {
pub init: PathBuf,
pub data: Option<PathBuf>,
pub localization: Option<PathBuf>,
}
#[derive(Clone, Data, Debug, Lens)]
pub(crate) struct ModInfo {
pub id: String,
pub name: String,
pub description: Arc<String>,
pub enabled: bool,
#[lens(ignore)]
#[data(ignore)]
pub packages: Vector<PackageInfo>,
#[lens(ignore)]
#[data(ignore)]
pub resources: ModResourceInfo,
}
impl ModInfo {
pub fn new(cfg: ModConfig, packages: Vector<PackageInfo>) -> Self {
Self {
id: cfg.id,
name: cfg.name,
description: Arc::new(cfg.description),
enabled: false,
packages,
resources: ModResourceInfo {
init: cfg.resources.init,
data: cfg.resources.data,
localization: cfg.resources.localization,
},
}
}
}
impl PartialEq for ModInfo {
fn eq(&self, other: &Self) -> bool {
self.name.eq(&other.name)
}
}
#[derive(Clone, Data, Lens)]
pub(crate) struct State {
pub current_view: View,
pub mods: Vector<ModInfo>,
pub selected_mod_index: Option<usize>,
pub is_deployment_in_progress: bool,
pub is_reset_in_progress: bool,
pub is_save_in_progress: bool,
pub is_next_save_pending: bool,
pub game_dir: Arc<PathBuf>,
pub data_dir: Arc<PathBuf>,
pub log: Arc<String>,
#[lens(ignore)]
#[data(ignore)]
pub config_path: Arc<PathBuf>,
#[lens(ignore)]
#[data(ignore)]
pub ctx: Arc<sdk::Context>,
}
impl State {
#[allow(non_upper_case_globals)]
pub const selected_mod: SelectedModLens = SelectedModLens;
pub fn new(config_path: PathBuf, game_dir: PathBuf, data_dir: PathBuf) -> Self {
let ctx = sdk::Context::new();
Self {
ctx: Arc::new(ctx),
current_view: View::default(),
mods: Vector::new(),
selected_mod_index: None,
is_deployment_in_progress: false,
is_reset_in_progress: false,
is_save_in_progress: false,
is_next_save_pending: false,
config_path: Arc::new(config_path),
game_dir: Arc::new(game_dir),
data_dir: Arc::new(data_dir),
log: Arc::new(String::new()),
}
}
pub fn select_mod(&mut self, index: usize) {
self.selected_mod_index = Some(index);
}
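/// Inserts an imported mod, or replaces an existing entry with the same name
/// (see `PartialEq for ModInfo`), and selects it.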
pub fn add_mod(&mut self, info: ModInfo) {
if let Some(pos) = self.mods.index_of(&info) {
self.mods.set(pos, info);
self.selected_mod_index = Some(pos);
} else {
self.mods.push_back(info);
self.selected_mod_index = Some(self.mods.len() - 1);
}
}
pub fn can_move_mod_down(&self) -> bool {
self.selected_mod_index
.map(|i| i < (self.mods.len().saturating_sub(1)))
.unwrap_or(false)
}
pub fn can_move_mod_up(&self) -> bool {
self.selected_mod_index.map(|i| i > 0).unwrap_or(false)
}
pub(crate) fn get_mod_dir(&self) -> PathBuf {
self.data_dir.join("mods")
}
pub(crate) fn add_log_line(&mut self, line: String) {
let log = Arc::make_mut(&mut self.log);
log.push_str(&line);
}
}

crates/dtmm/src/state/delegate.rs (new file)
@@ -0,0 +1,237 @@
use druid::{
AppDelegate, Command, DelegateCtx, Env, FileInfo, Handled, Selector, SingleUse, Target,
};
use tokio::sync::mpsc::UnboundedSender;
use super::{ModInfo, State};
pub(crate) const ACTION_SELECT_MOD: Selector<usize> = Selector::new("dtmm.action.select-mod");
pub(crate) const ACTION_SELECTED_MOD_UP: Selector = Selector::new("dtmm.action.selected-mod-up");
pub(crate) const ACTION_SELECTED_MOD_DOWN: Selector =
Selector::new("dtmm.action.selected-mod-down");
pub(crate) const ACTION_START_DELETE_SELECTED_MOD: Selector<SingleUse<ModInfo>> =
Selector::new("dtmm.action.srart-delete-selected-mod");
pub(crate) const ACTION_FINISH_DELETE_SELECTED_MOD: Selector<SingleUse<ModInfo>> =
Selector::new("dtmm.action.finish-delete-selected-mod");
pub(crate) const ACTION_START_DEPLOY: Selector = Selector::new("dtmm.action.start-deploy");
pub(crate) const ACTION_FINISH_DEPLOY: Selector = Selector::new("dtmm.action.finish-deploy");
pub(crate) const ACTION_START_RESET_DEPLOYMENT: Selector =
Selector::new("dtmm.action.start-reset-deployment");
pub(crate) const ACTION_FINISH_RESET_DEPLOYMENT: Selector =
Selector::new("dtmm.action.finish-reset-deployment");
pub(crate) const ACTION_ADD_MOD: Selector<FileInfo> = Selector::new("dtmm.action.add-mod");
pub(crate) const ACTION_FINISH_ADD_MOD: Selector<SingleUse<ModInfo>> =
Selector::new("dtmm.action.finish-add-mod");
pub(crate) const ACTION_LOG: Selector<SingleUse<String>> = Selector::new("dtmm.action.log");
pub(crate) const ACTION_START_SAVE_SETTINGS: Selector =
Selector::new("dtmm.action.start-save-settings");
pub(crate) const ACTION_FINISH_SAVE_SETTINGS: Selector =
Selector::new("dtmm.action.finish-save-settings");
pub(crate) enum AsyncAction {
DeployMods(State),
ResetDeployment(State),
AddMod((State, FileInfo)),
DeleteMod((State, ModInfo)),
SaveSettings(State),
}
pub(crate) struct Delegate {
sender: UnboundedSender<AsyncAction>,
}
impl Delegate {
pub fn new(sender: UnboundedSender<AsyncAction>) -> Self {
Self { sender }
}
}
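/// Routes druid commands: quick state changes are applied directly to `State`,
/// while long-running work (deploy, reset, import, delete, save) is forwarded
/// to the worker thread as an `AsyncAction`.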
impl AppDelegate<State> for Delegate {
#[tracing::instrument(name = "Delegate", skip_all)]
fn command(
&mut self,
ctx: &mut DelegateCtx,
_target: Target,
cmd: &Command,
state: &mut State,
_env: &Env,
) -> Handled {
if cfg!(debug_assertions) && !cmd.is(ACTION_LOG) {
tracing::trace!(?cmd);
}
match cmd {
cmd if cmd.is(ACTION_START_DEPLOY) => {
if self
.sender
.send(AsyncAction::DeployMods(state.clone()))
.is_ok()
{
state.is_deployment_in_progress = true;
} else {
tracing::error!("Failed to queue action to deploy mods");
}
Handled::Yes
}
cmd if cmd.is(ACTION_FINISH_DEPLOY) => {
state.is_deployment_in_progress = false;
Handled::Yes
}
cmd if cmd.is(ACTION_START_RESET_DEPLOYMENT) => {
if self
.sender
.send(AsyncAction::ResetDeployment(state.clone()))
.is_ok()
{
state.is_reset_in_progress = true;
} else {
tracing::error!("Failed to queue action to reset mod deployment");
}
Handled::Yes
}
cmd if cmd.is(ACTION_FINISH_RESET_DEPLOYMENT) => {
state.is_reset_in_progress = false;
Handled::Yes
}
cmd if cmd.is(ACTION_SELECT_MOD) => {
let index = cmd
.get(ACTION_SELECT_MOD)
.expect("command type matched but didn't contain the expected value");
state.select_mod(*index);
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
Handled::Yes
}
cmd if cmd.is(ACTION_SELECTED_MOD_UP) => {
let Some(i) = state.selected_mod_index else {
return Handled::No;
};
let len = state.mods.len();
if len == 0 || i == 0 {
return Handled::No;
}
state.mods.swap(i, i - 1);
state.selected_mod_index = Some(i - 1);
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
Handled::Yes
}
cmd if cmd.is(ACTION_SELECTED_MOD_DOWN) => {
let Some(i) = state.selected_mod_index else {
return Handled::No;
};
let len = state.mods.len();
if len == 0 || i == usize::MAX || i >= len - 1 {
return Handled::No;
}
state.mods.swap(i, i + 1);
state.selected_mod_index = Some(i + 1);
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
Handled::Yes
}
cmd if cmd.is(ACTION_START_DELETE_SELECTED_MOD) => {
let info = cmd
.get(ACTION_START_DELETE_SELECTED_MOD)
.and_then(|info| info.take())
.expect("command type matched but didn't contain the expected value");
if self
.sender
.send(AsyncAction::DeleteMod((state.clone(), info)))
.is_err()
{
tracing::error!("Failed to queue action to deploy mods");
}
Handled::Yes
}
cmd if cmd.is(ACTION_FINISH_DELETE_SELECTED_MOD) => {
let info = cmd
.get(ACTION_FINISH_DELETE_SELECTED_MOD)
.and_then(|info| info.take())
.expect("command type matched but didn't contain the expected value");
let found = state.mods.iter().enumerate().find(|(_, i)| i.id == info.id);
let Some((index, _)) = found else {
return Handled::No;
};
state.mods.remove(index);
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
Handled::Yes
}
cmd if cmd.is(ACTION_ADD_MOD) => {
let info = cmd
.get(ACTION_ADD_MOD)
.expect("command type matched but didn't contain the expected value");
if self
.sender
.send(AsyncAction::AddMod((state.clone(), info.clone())))
.is_err()
{
tracing::error!("Failed to queue action to add mod");
}
Handled::Yes
}
cmd if cmd.is(ACTION_FINISH_ADD_MOD) => {
let info = cmd
.get(ACTION_FINISH_ADD_MOD)
.expect("command type matched but didn't contain the expected value");
if let Some(info) = info.take() {
state.add_mod(info);
// ctx.submit_command(ACTION_START_SAVE_SETTINGS);
}
Handled::Yes
}
cmd if cmd.is(ACTION_LOG) => {
let line = cmd
.get(ACTION_LOG)
.expect("command type matched but didn't contain the expected value");
if let Some(line) = line.take() {
state.add_log_line(line);
}
Handled::Yes
}
cmd if cmd.is(ACTION_START_SAVE_SETTINGS) => {
if state.is_save_in_progress {
state.is_next_save_pending = true;
} else if self
.sender
.send(AsyncAction::SaveSettings(state.clone()))
.is_ok()
{
state.is_save_in_progress = true;
} else {
tracing::error!("Failed to queue action to save settings");
}
Handled::Yes
}
cmd if cmd.is(ACTION_FINISH_SAVE_SETTINGS) => {
state.is_save_in_progress = false;
if state.is_next_save_pending {
state.is_next_save_pending = false;
ctx.submit_command(ACTION_START_SAVE_SETTINGS);
}
Handled::Yes
}
cmd => {
if cfg!(debug_assertions) {
tracing::warn!("Unknown command: {:?}", cmd);
}
Handled::No
}
}
}
}

crates/dtmm/src/state/lens.rs (new file)
@@ -0,0 +1,73 @@
use druid::im::Vector;
use druid::{Data, Lens};
use super::{ModInfo, State};
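/// Lens from the full `State` to the currently selected `ModInfo`. Writing back
/// `Some(info)` updates the entry in `State::mods`; writing `None` clears the
/// selection.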
pub(crate) struct SelectedModLens;
impl Lens<State, Option<ModInfo>> for SelectedModLens {
#[tracing::instrument(name = "SelectedModLens::with", skip_all)]
fn with<V, F: FnOnce(&Option<ModInfo>) -> V>(&self, data: &State, f: F) -> V {
let info = data
.selected_mod_index
.and_then(|i| data.mods.get(i).cloned());
f(&info)
}
#[tracing::instrument(name = "SelectedModLens::with_mut", skip_all)]
fn with_mut<V, F: FnOnce(&mut Option<ModInfo>) -> V>(&self, data: &mut State, f: F) -> V {
match data.selected_mod_index {
Some(i) => {
let mut info = data.mods.get_mut(i).cloned();
let ret = f(&mut info);
if let Some(info) = info {
// TODO: Figure out a way to check for equality and
// only update when needed
data.mods.set(i, info);
} else {
data.selected_mod_index = None;
}
ret
}
None => f(&mut None),
}
}
}
/// A Lens that maps an `im::Vector<T>` to `im::Vector<(usize, T)>`,
/// where each element in the destination vector includes its index in the
/// source vector.
pub(crate) struct IndexedVectorLens;
impl<T: Data> Lens<Vector<T>, Vector<(usize, T)>> for IndexedVectorLens {
#[tracing::instrument(name = "IndexedVectorLens::with", skip_all)]
fn with<V, F: FnOnce(&Vector<(usize, T)>) -> V>(&self, values: &Vector<T>, f: F) -> V {
let indexed = values
.iter()
.enumerate()
.map(|(i, val)| (i, val.clone()))
.collect();
f(&indexed)
}
#[tracing::instrument(name = "IndexedVectorLens::with_mut", skip_all)]
fn with_mut<V, F: FnOnce(&mut Vector<(usize, T)>) -> V>(
&self,
values: &mut Vector<T>,
f: F,
) -> V {
let mut indexed = values
.iter()
.enumerate()
.map(|(i, val)| (i, val.clone()))
.collect();
let ret = f(&mut indexed);
*values = indexed.into_iter().map(|(_i, val)| val).collect();
ret
}
}

@@ -0,0 +1,7 @@
mod data;
mod delegate;
mod lens;
pub(crate) use data::*;
pub(crate) use delegate::*;
pub(crate) use lens::*;

@@ -0,0 +1,5 @@
pub mod theme;
pub mod widget;
pub mod window {
pub mod main;
}

crates/dtmm/src/ui/theme.rs (new file)
@@ -0,0 +1,4 @@
use druid::{Color, Insets};
pub const TOP_BAR_BACKGROUND_COLOR: Color = Color::rgba8(255, 255, 255, 50);
pub const TOP_BAR_INSETS: Insets = Insets::uniform(5.0);

crates/dtmm/src/ui/widget/controller.rs (new file)
@@ -0,0 +1,82 @@
use druid::widget::{Button, Controller, Scroll};
use druid::{Data, Env, Event, EventCtx, Rect, UpdateCtx, Widget};
use crate::state::{State, ACTION_START_SAVE_SETTINGS};
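/// Keeps the wrapped `Button` permanently disabled by re-disabling it on every
/// event and update pass.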
pub struct DisabledButtonController;
impl<T: Data> Controller<T, Button<T>> for DisabledButtonController {
fn event(
&mut self,
child: &mut Button<T>,
ctx: &mut EventCtx,
event: &Event,
data: &mut T,
env: &Env,
) {
if !ctx.is_disabled() {
ctx.set_disabled(true);
ctx.request_paint();
}
child.event(ctx, event, data, env)
}
fn update(
&mut self,
child: &mut Button<T>,
ctx: &mut UpdateCtx,
old_data: &T,
data: &T,
env: &Env,
) {
if !ctx.is_disabled() {
ctx.set_disabled(true);
ctx.request_paint();
}
child.update(ctx, old_data, data, env)
}
}
pub struct AutoScrollController;
impl<T: Data, W: Widget<T>> Controller<T, Scroll<T, W>> for AutoScrollController {
fn update(
&mut self,
child: &mut Scroll<T, W>,
ctx: &mut UpdateCtx,
old_data: &T,
data: &T,
env: &Env,
) {
if !ctx.is_disabled() {
let size = child.child_size();
let end_region = Rect::new(size.width - 1., size.height - 1., size.width, size.height);
child.scroll_to(ctx, end_region);
}
child.update(ctx, old_data, data, env)
}
}
/// A controller that submits the command to save settings every time its widget's
/// data changes.
pub struct SaveSettingsController;
impl<W: Widget<State>> Controller<State, W> for SaveSettingsController {
fn update(
&mut self,
child: &mut W,
ctx: &mut UpdateCtx,
old_data: &State,
data: &State,
env: &Env,
) {
// Only filter for the values that actually go into the settings file.
if old_data.mods != data.mods
|| old_data.game_dir != data.game_dir
|| old_data.data_dir != data.data_dir
{
ctx.submit_command(ACTION_START_SAVE_SETTINGS);
}
child.update(ctx, old_data, data, env)
}
}

@@ -0,0 +1,63 @@
use std::f64::INFINITY;
use druid::widget::prelude::*;
use druid::{Point, WidgetPod};
pub struct FillContainer<T> {
child: WidgetPod<T, Box<dyn Widget<T>>>,
}
impl<T: Data> FillContainer<T> {
pub fn new(child: impl Widget<T> + 'static) -> Self {
Self {
child: WidgetPod::new(child).boxed(),
}
}
}
impl<T: Data> Widget<T> for FillContainer<T> {
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) {
self.child.event(ctx, event, data, env);
}
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
self.child.lifecycle(ctx, event, data, env)
}
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) {
self.child.update(ctx, data, env);
}
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
bc.debug_check("FillContainer");
let child_size = self.child.layout(ctx, bc, data, env);
let w = if bc.is_width_bounded() {
INFINITY
} else {
child_size.width
};
let h = if bc.is_height_bounded() {
INFINITY
} else {
child_size.height
};
let my_size = bc.constrain(Size::new(w, h));
self.child.set_origin(ctx, Point::new(0.0, 0.0));
tracing::trace!("Computed layout: size={}", my_size);
my_size
}
#[tracing::instrument(name = "FillContainer", level = "trace", skip_all)]
fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
self.child.paint(ctx, data, env);
}
}

@@ -0,0 +1,38 @@
use std::path::PathBuf;
use std::sync::Arc;
use druid::text::Formatter;
use druid::{Data, Widget};
pub mod controller;
pub trait ExtraWidgetExt<T: Data>: Widget<T> + Sized + 'static {}
impl<T: Data, W: Widget<T> + 'static> ExtraWidgetExt<T> for W {}
pub(crate) struct PathBufFormatter;
impl PathBufFormatter {
pub fn new() -> Self {
Self {}
}
}
impl Formatter<Arc<PathBuf>> for PathBufFormatter {
fn format(&self, value: &Arc<PathBuf>) -> String {
value.display().to_string()
}
fn validate_partial_input(
&self,
_input: &str,
_sel: &druid::text::Selection,
) -> druid::text::Validation {
druid::text::Validation::success()
}
fn value(&self, input: &str) -> Result<Arc<PathBuf>, druid::text::ValidationError> {
let p = PathBuf::from(input);
Ok(Arc::new(p))
}
}

@@ -0,0 +1,73 @@
use druid::widget::{Controller, Flex};
use druid::{Data, Widget};
pub struct TableSelect<T> {
widget: Flex<T>,
controller: TableSelectController<T>,
}
impl<T: Data> TableSelect<T> {
pub fn new(values: impl IntoIterator<Item = (impl Widget<T> + 'static)>) -> Self {
todo!();
}
}
impl<T: Data> Widget<T> for TableSelect<T> {
fn event(
&mut self,
ctx: &mut druid::EventCtx,
event: &druid::Event,
data: &mut T,
env: &druid::Env,
) {
todo!()
}
fn lifecycle(
&mut self,
ctx: &mut druid::LifeCycleCtx,
event: &druid::LifeCycle,
data: &T,
env: &druid::Env,
) {
todo!()
}
fn update(&mut self, ctx: &mut druid::UpdateCtx, old_data: &T, data: &T, env: &druid::Env) {
todo!()
}
fn layout(
&mut self,
ctx: &mut druid::LayoutCtx,
bc: &druid::BoxConstraints,
data: &T,
env: &druid::Env,
) -> druid::Size {
todo!()
}
fn paint(&mut self, ctx: &mut druid::PaintCtx, data: &T, env: &druid::Env) {
todo!()
}
}
struct TableSelectController<T> {
inner: T,
}
impl<T: Data> TableSelectController<T> {}
impl<T: Data> Controller<T, Flex<T>> for TableSelectController<T> {}
pub struct TableItem<T> {
inner: dyn Widget<T>,
}
impl<T: Data> TableItem<T> {
pub fn new(inner: impl Widget<T>) -> Self {
todo!();
}
}
impl<T: Data> Widget<T> for TableItem<T> {}

crates/dtmm/src/ui/window/main.rs (new file)
@@ -0,0 +1,316 @@
use druid::im::Vector;
use druid::lens;
use druid::widget::{
Button, Checkbox, CrossAxisAlignment, Flex, Label, LineBreaking, List, MainAxisAlignment,
Maybe, Scroll, SizedBox, Split, TextBox, ViewSwitcher,
};
use druid::{
Color, FileDialogOptions, FileSpec, FontDescriptor, FontFamily, Key, LensExt, SingleUse,
TextAlignment, Widget, WidgetExt, WindowDesc,
};
use crate::state::{
ModInfo, State, View, ACTION_ADD_MOD, ACTION_SELECTED_MOD_DOWN, ACTION_SELECTED_MOD_UP,
ACTION_SELECT_MOD, ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY,
ACTION_START_RESET_DEPLOYMENT,
};
use crate::ui::theme;
use crate::ui::widget::controller::{AutoScrollController, SaveSettingsController};
use crate::ui::widget::PathBufFormatter;
const TITLE: &str = "Darktide Mod Manager";
const WINDOW_SIZE: (f64, f64) = (1080., 720.);
const MOD_DETAILS_MIN_WIDTH: f64 = 325.;
const KEY_MOD_LIST_ITEM_BG_COLOR: Key<Color> = Key::new("dtmm.mod-list.item.background-color");
pub(crate) fn new() -> WindowDesc<State> {
WindowDesc::new(build_window())
.title(TITLE)
.window_size(WINDOW_SIZE)
}
fn build_top_bar() -> impl Widget<State> {
Flex::row()
.must_fill_main_axis(true)
.main_axis_alignment(MainAxisAlignment::SpaceBetween)
.with_child(
Flex::row()
.with_child(
Button::new("Mods")
.on_click(|_ctx, state: &mut State, _env| state.current_view = View::Mods),
)
.with_default_spacer()
.with_child(
Button::new("Settings").on_click(|_ctx, state: &mut State, _env| {
state.current_view = View::Settings;
}),
),
)
.with_child(
Flex::row()
.with_child(
Button::new("Deploy Mods")
.on_click(|ctx, _state: &mut State, _env| {
ctx.submit_command(ACTION_START_DEPLOY);
})
.disabled_if(|data, _| {
data.is_deployment_in_progress || data.is_reset_in_progress
}),
)
.with_default_spacer()
.with_child(
Button::new("Reset Game")
.on_click(|ctx, _state: &mut State, _env| {
ctx.submit_command(ACTION_START_RESET_DEPLOYMENT);
})
.disabled_if(|data, _| {
data.is_deployment_in_progress || data.is_reset_in_progress
}),
),
)
.padding(theme::TOP_BAR_INSETS)
.background(theme::TOP_BAR_BACKGROUND_COLOR)
// TODO: Add bottom border. Need a custom widget for that, as the built-in only provides
// uniform borders on all sides
}
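/// Builds the scrollable mod list. Each row is a checkbox plus the mod's name,
/// backed by an `(index, ModInfo, selected)` tuple so a click can submit
/// `ACTION_SELECT_MOD` with the row's index.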
fn build_mod_list() -> impl Widget<State> {
let list = List::new(|| {
let checkbox =
Checkbox::new("").lens(lens!((usize, ModInfo, bool), 1).then(ModInfo::enabled));
let name = Label::raw().lens(lens!((usize, ModInfo, bool), 1).then(ModInfo::name));
Flex::row()
.must_fill_main_axis(true)
.with_child(checkbox)
.with_child(name)
.padding((5.0, 4.0))
.background(KEY_MOD_LIST_ITEM_BG_COLOR)
.on_click(|ctx, (i, _, _), _env| ctx.submit_command(ACTION_SELECT_MOD.with(*i)))
.env_scope(|env, (i, _, selected)| {
if *selected {
env.set(KEY_MOD_LIST_ITEM_BG_COLOR, Color::NAVY);
} else if (i % 2) == 1 {
env.set(KEY_MOD_LIST_ITEM_BG_COLOR, Color::WHITE.with_alpha(0.05));
} else {
env.set(KEY_MOD_LIST_ITEM_BG_COLOR, Color::TRANSPARENT);
}
})
});
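    // This identity lens projects `State` into a vector of (index, ModInfo,
    // selected) tuples for the list above, and writes any modified `ModInfo`
    // back into `State::mods` on the way out.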
let scroll = Scroll::new(list).vertical().lens(lens::Identity.map(
|state: &State| {
state
.mods
.iter()
.enumerate()
.map(|(i, val)| (i, val.clone(), Some(i) == state.selected_mod_index))
.collect::<Vector<_>>()
},
|state, infos| {
infos.into_iter().for_each(|(i, info, _)| {
state.mods.set(i, info);
});
},
));
Flex::column()
.must_fill_main_axis(true)
.with_child(Flex::row())
.with_flex_child(scroll, 1.0)
}
fn build_mod_details_buttons() -> impl Widget<State> {
let button_move_up = Button::new("Move Up")
.on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_UP))
.disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_up());
let button_move_down = Button::new("Move Down")
.on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_DOWN))
.disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_down());
let button_toggle_mod = Maybe::new(
|| {
Button::dynamic(|enabled, _env| {
if *enabled {
"Disable Mod".into()
} else {
"Enable Mod".into()
}
})
.on_click(|_ctx, enabled: &mut bool, _env| {
*enabled = !(*enabled);
})
.lens(ModInfo::enabled)
},
// TODO: Gray out
|| Button::new("Enable Mod"),
)
.disabled_if(|info: &Option<ModInfo>, _env: &druid::Env| info.is_none())
.lens(State::selected_mod);
let button_add_mod = Button::new("Add Mod").on_click(|ctx, _state: &mut State, _env| {
let zip = FileSpec::new("Zip file", &["zip"]);
let opts = FileDialogOptions::new()
.allowed_types(vec![zip])
.default_type(zip)
.name_label("Mod Archive")
.title("Choose a mod to add")
.accept_command(ACTION_ADD_MOD);
ctx.submit_command(druid::commands::SHOW_OPEN_PANEL.with(opts))
});
let button_delete_mod = Button::new("Delete Mod")
.on_click(|ctx, data: &mut Option<ModInfo>, _env| {
if let Some(info) = data {
ctx.submit_command(
ACTION_START_DELETE_SELECTED_MOD.with(SingleUse::new(info.clone())),
);
}
})
.disabled_if(|info: &Option<ModInfo>, _env: &druid::Env| info.is_none())
.lens(State::selected_mod);
Flex::column()
.cross_axis_alignment(CrossAxisAlignment::Center)
.with_child(
Flex::row()
.main_axis_alignment(MainAxisAlignment::End)
.with_child(button_move_up)
.with_default_spacer()
.with_child(button_move_down),
)
.with_default_spacer()
.with_child(
Flex::row()
.main_axis_alignment(MainAxisAlignment::End)
.with_child(button_toggle_mod)
.with_default_spacer()
.with_child(button_add_mod)
.with_default_spacer()
.with_child(button_delete_mod),
)
.expand_width()
}
fn build_mod_details_info() -> impl Widget<State> {
Maybe::new(
|| {
let name = Label::raw()
.with_text_alignment(TextAlignment::Center)
.with_text_size(24.)
// Force the label to take up the entire details' pane width,
// so that we can center-align it.
.expand_width()
.lens(ModInfo::name);
let description = Label::raw()
.with_line_break_mode(LineBreaking::WordWrap)
.lens(ModInfo::description);
Flex::column()
.cross_axis_alignment(CrossAxisAlignment::Start)
.main_axis_alignment(MainAxisAlignment::Start)
.with_child(name)
.with_spacer(4.)
.with_child(description)
},
Flex::column,
)
.padding((4., 4.))
.lens(State::selected_mod)
}
fn build_mod_details() -> impl Widget<State> {
Flex::column()
.must_fill_main_axis(true)
.cross_axis_alignment(CrossAxisAlignment::Start)
.main_axis_alignment(MainAxisAlignment::SpaceBetween)
.with_flex_child(build_mod_details_info(), 1.0)
.with_child(build_mod_details_buttons().padding(4.))
}
fn build_view_mods() -> impl Widget<State> {
Split::columns(build_mod_list(), build_mod_details())
.split_point(0.75)
.min_size(0.0, MOD_DETAILS_MIN_WIDTH)
.solid_bar(true)
.bar_size(2.0)
.draggable(true)
}
fn build_view_settings() -> impl Widget<State> {
let data_dir_setting = Flex::row()
.must_fill_main_axis(true)
.main_axis_alignment(MainAxisAlignment::Start)
.with_child(Label::new("Data Directory:"))
.with_default_spacer()
.with_flex_child(
TextBox::new()
.with_formatter(PathBufFormatter::new())
.expand_width()
.lens(State::data_dir),
1.,
)
.expand_width();
let game_dir_setting = Flex::row()
.must_fill_main_axis(true)
.main_axis_alignment(MainAxisAlignment::Start)
.with_child(Label::new("Game Directory:"))
.with_default_spacer()
.with_flex_child(
TextBox::new()
.with_formatter(PathBufFormatter::new())
.expand_width()
.lens(State::game_dir),
1.,
)
.expand_width();
let content = Flex::column()
.must_fill_main_axis(true)
.cross_axis_alignment(CrossAxisAlignment::Start)
.with_child(data_dir_setting)
.with_default_spacer()
.with_child(game_dir_setting);
SizedBox::new(content)
.width(800.)
.expand_height()
.padding(5.)
}
fn build_main() -> impl Widget<State> {
ViewSwitcher::new(
|state: &State, _| state.current_view,
|selector, _, _| match selector {
View::Mods => Box::new(build_view_mods()),
View::Settings => Box::new(build_view_settings()),
},
)
}
fn build_log_view() -> impl Widget<State> {
let font = FontDescriptor::new(FontFamily::MONOSPACE);
let label = Label::raw()
.with_font(font)
.with_line_break_mode(LineBreaking::WordWrap)
.lens(State::log)
.padding(4.)
.scroll()
.vertical()
.controller(AutoScrollController);
SizedBox::new(label).expand_width().height(128.0)
}
fn build_window() -> impl Widget<State> {
// TODO: Add borders between the sections
Flex::column()
.must_fill_main_axis(true)
.with_child(build_top_bar())
.with_flex_child(build_main(), 1.0)
.with_child(build_log_view())
.controller(SaveSettingsController)
}

View file

@ -0,0 +1,161 @@
use std::io::ErrorKind;
use std::path::PathBuf;
use std::{fs, path::Path};
use clap::{parser::ValueSource, ArgMatches};
use color_eyre::{eyre::Context, Result};
use serde::{Deserialize, Serialize};
use crate::state::{ModInfo, State};
#[derive(Clone, Debug, Serialize)]
pub(crate) struct LoadOrderEntrySerialize<'a> {
pub id: &'a String,
pub enabled: bool,
}
impl<'a> From<&'a ModInfo> for LoadOrderEntrySerialize<'a> {
fn from(info: &'a ModInfo) -> Self {
Self {
id: &info.id,
enabled: info.enabled,
}
}
}
#[derive(Debug, Serialize)]
pub(crate) struct ConfigSerialize<'a> {
game_dir: &'a Path,
data_dir: &'a Path,
mod_order: Vec<LoadOrderEntrySerialize<'a>>,
}
impl<'a> From<&'a State> for ConfigSerialize<'a> {
fn from(state: &'a State) -> Self {
Self {
game_dir: &state.game_dir,
data_dir: &state.data_dir,
mod_order: state
.mods
.iter()
.map(LoadOrderEntrySerialize::from)
.collect(),
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct LoadOrderEntry {
pub id: String,
pub enabled: bool,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct Config {
#[serde(skip)]
pub path: PathBuf,
pub data_dir: Option<PathBuf>,
pub game_dir: Option<PathBuf>,
#[serde(default)]
pub mod_order: Vec<LoadOrderEntry>,
}
#[cfg(not(target_os = "windows"))]
pub fn get_default_config_path() -> PathBuf {
let config_dir = std::env::var("XDG_CONFIG_DIR").unwrap_or_else(|_| {
let home = std::env::var("HOME").unwrap_or_else(|_| {
let user = std::env::var("USER").expect("user env variable not set");
format!("/home/{user}")
});
format!("{home}/.config")
});
PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg")
}
#[cfg(target_os = "windows")]
pub fn get_default_config_path() -> PathBuf {
let config_dir = std::env::var("APPDATA").expect("appdata env var not set");
PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg")
}
#[cfg(not(target_os = "windows"))]
pub fn get_default_data_dir() -> PathBuf {
let data_dir = std::env::var("XDG_DATA_DIR").unwrap_or_else(|_| {
let home = std::env::var("HOME").unwrap_or_else(|_| {
let user = std::env::var("USER").expect("user env variable not set");
format!("/home/{user}")
});
format!("{home}/.local/share")
});
PathBuf::from(data_dir).join("dtmm")
}
#[cfg(target_os = "windows")]
pub fn get_default_data_dir() -> PathBuf {
let data_dir = std::env::var("APPDATA").expect("appdata env var not set");
PathBuf::from(data_dir).join("dtmm")
}
#[tracing::instrument(skip(matches),fields(path = ?matches.get_one::<PathBuf>("config")))]
pub(crate) fn read_config<P>(default: P, matches: &ArgMatches) -> Result<Config>
where
P: Into<PathBuf> + std::fmt::Debug,
{
let path = matches
.get_one::<PathBuf>("config")
.expect("argument missing despite default");
let default_path = default.into();
match fs::read(path) {
Ok(data) => {
let data = String::from_utf8(data).wrap_err_with(|| {
format!("config file {} contains invalid UTF-8", path.display())
})?;
let mut cfg: Config = serde_sjson::from_str(&data)
.wrap_err_with(|| format!("invalid config file {}", path.display()))?;
cfg.path = path.clone();
Ok(cfg)
}
Err(err) if err.kind() == ErrorKind::NotFound => {
if matches.value_source("config") != Some(ValueSource::DefaultValue) {
return Err(err)
.wrap_err_with(|| format!("failed to read config file {}", path.display()))?;
}
{
let parent = default_path
.parent()
.expect("a file path always has a parent directory");
fs::create_dir_all(parent).wrap_err_with(|| {
format!("failed to create directories {}", parent.display())
})?;
}
let config = Config {
path: default_path,
data_dir: Some(get_default_data_dir()),
game_dir: None,
mod_order: Vec::new(),
};
{
let data = serde_sjson::to_string(&config)
.wrap_err("failed to serialize default config value")?;
fs::write(&config.path, data).wrap_err_with(|| {
format!(
"failed to write default config to {}",
config.path.display()
)
})?;
}
Ok(config)
}
Err(err) => {
Err(err).wrap_err_with(|| format!("failed to read config file {}", path.display()))
}
}
}
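// For reference, a config file that deserializes into `Config` above might
// look like the following (SJSON; the paths and the mod id "my_mod" are
// made-up examples):
//
//     game_dir = "/path/to/steamapps/common/Warhammer 40,000 DARKTIDE"
//     data_dir = "/home/user/.local/share/dtmm"
//     mod_order = [
//         {
//             id = "my_mod"
//             enabled = true
//         }
//     ]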

View file

@ -0,0 +1,65 @@
use tokio::sync::mpsc::UnboundedSender;
use tracing_error::ErrorLayer;
use tracing_subscriber::filter::FilterFn;
use tracing_subscriber::fmt;
use tracing_subscriber::fmt::format::debug_fn;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::prelude::*;
use tracing_subscriber::EnvFilter;
pub struct ChannelWriter {
tx: UnboundedSender<String>,
}
impl ChannelWriter {
pub fn new(tx: UnboundedSender<String>) -> Self {
Self { tx }
}
}
impl std::io::Write for ChannelWriter {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
let tx = self.tx.clone();
let string = String::from_utf8_lossy(buf).to_string();
// The `send` errors when the receiving end has closed.
// But there's not much we can do at that point, so we just ignore it.
let _ = tx.send(string);
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
Ok(())
}
}
pub fn create_tracing_subscriber(tx: UnboundedSender<String>) {
let env_layer = if cfg!(debug_assertions) {
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"))
} else {
EnvFilter::new("error,dtmm=info")
};
let stdout_layer = if cfg!(debug_assertions) {
let layer = fmt::layer().pretty();
Some(layer)
} else {
None
};
let channel_layer = fmt::layer()
// TODO: Re-enable and implement a formatter for the Druid widget
.with_ansi(false)
.event_format(dtmt_shared::Formatter)
.fmt_fields(debug_fn(dtmt_shared::format_fields))
.with_writer(move || ChannelWriter::new(tx.clone()))
.with_filter(FilterFn::new(dtmt_shared::filter_fields));
tracing_subscriber::registry()
.with(env_layer)
.with(channel_layer)
.with(stdout_layer)
.with(ErrorLayer::new(fmt::format::Pretty::default()))
.init();
}
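// The receiving half of this channel is not part of this file. As a minimal
// sketch (illustrative only: `ACTION_LOG` is a hypothetical selector, not
// dtmm's actual command), a consumer could forward each log line to the UI
// like this:
const ACTION_LOG: druid::Selector<druid::SingleUse<String>> =
    druid::Selector::new("dtmm.action.log");
async fn forward_logs(
    mut rx: tokio::sync::mpsc::UnboundedReceiver<String>,
    sink: druid::ExtEventSink,
) {
    // Drain lines produced by `ChannelWriter` and hand them to the UI thread,
    // where a command handler can append them to the application state.
    while let Some(line) = rx.recv().await {
        let _ = sink.submit_command(ACTION_LOG, druid::SingleUse::new(line), druid::Target::Auto);
    }
}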

View file

@ -5,27 +5,30 @@ edition = "2021"
[dependencies]
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] }
cli-table = { version = "0.4.7", default-features = false, features = ["derive"] }
color-eyre = "0.6.2"
confy = "0.5.1"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
sdk = { path = "../../lib/sdk", version = "0.2.0" }
dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" }
futures = "0.3.25"
futures-util = "0.3.24"
glob = "0.3.0"
libloading = "0.7.4"
nanorand = "0.7.0"
oodle = { path = "../../lib/oodle", version = "*" }
pin-project-lite = "0.2.9"
serde = { version = "1.0.147", features = ["derive"] }
oodle-sys = { path = "../../lib/oodle-sys", version = "*" }
promptly = "0.3.1"
sdk = { path = "../../lib/sdk", version = "0.2.0" }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
serde = { version = "1.0.147", features = ["derive"] }
string_template = "0.2.1"
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tracing-error = "0.2.0"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
confy = "0.5.1"
tracing = { version = "0.1.37", features = ["async-await"] }
zip = "0.6.3"
string_template = "0.2.1"
promptly = "0.3.1"
path-clean = "1.0.1"
[dev-dependencies]
tempfile = "3.3.0"

32
crates/dtmt/README.adoc Normal file
View file

@ -0,0 +1,32 @@
= Darktide Mod Tools (DTMT)
:idprefix:
:idseparator:
:toc: macro
:toclevels: 1
:!toc-title:
:caution-caption: :fire:
:important-caption: :exclamation:
:note-caption: :paperclip:
:tip-caption: :bulb:
:warning-caption: :warning:
A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_.
== Quickstart
1. Head to the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] and download the `dtmt` binary for your platform.
2. Place the binary and `dictionary.csv` next to each other.
3. Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`.
4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference].
== Runtime dependencies
The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd.
A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environment variable.
== Building
1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system.
2. Download or clone this source code. Make sure to include the submodules in `lib/`.
3. Run `cargo build`.

View file

@ -4,11 +4,11 @@ use std::sync::Arc;
use clap::{value_parser, Arg, ArgMatches, Command};
use color_eyre::eyre::{self, Context, Result};
use color_eyre::{Help, Report};
use dtmt_shared::ModConfig;
use futures::future::try_join_all;
use futures::StreamExt;
use sdk::filetype::package::Package;
use sdk::{Bundle, BundleFile};
use serde::Deserialize;
use tokio::fs::{self, File};
use tokio::io::AsyncReadExt;
@ -36,16 +36,8 @@ pub(crate) fn command_definition() -> Command {
))
}
#[derive(Debug, Default, Deserialize)]
struct ProjectConfig {
#[serde(skip)]
dir: PathBuf,
name: String,
packages: Vec<PathBuf>,
}
#[tracing::instrument]
async fn find_project_config(dir: Option<PathBuf>) -> Result<ProjectConfig> {
async fn find_project_config(dir: Option<PathBuf>) -> Result<ModConfig> {
let (path, mut file) = if let Some(path) = dir {
let file = File::open(&path.join(PROJECT_CONFIG_NAME))
.await
@ -81,9 +73,12 @@ async fn find_project_config(dir: Option<PathBuf>) -> Result<ProjectConfig> {
};
let mut buf = String::new();
file.read_to_string(&mut buf).await?;
file.read_to_string(&mut buf)
.await
.wrap_err("invalid UTF-8")?;
let mut cfg: ProjectConfig = serde_sjson::from_str(&buf)?;
let mut cfg: ModConfig =
serde_sjson::from_str(&buf).wrap_err("failed to deserialize mod config")?;
cfg.dir = path;
Ok(cfg)
}
@ -169,19 +164,79 @@ where
.wrap_err("failed to build bundle")
}
#[tracing::instrument(skip_all)]
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
unsafe {
oodle_sys::init(matches.get_one::<String>("oodle"));
fn normalize_file_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = path.as_ref();
if path.is_absolute() || path.has_root() {
let err = eyre::eyre!("path is absolute: {}", path.display());
return Err(err).with_suggestion(|| "Specify a relative file path.".to_string());
}
let path = path_clean::clean(path);
if path.starts_with("..") {
eyre::bail!("path starts with a parent component: {}", path.display());
}
Ok(path)
}
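// Illustration (hypothetical inputs): `normalize_file_path` above turns
// "scripts/mods/foo/./init" into "scripts/mods/foo/init", while absolute
// paths or paths escaping the project directory (e.g. "../init") are
// rejected with an error.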
#[tracing::instrument(skip_all)]
pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
let cfg = {
let dir = matches.get_one::<PathBuf>("directory").cloned();
find_project_config(dir).await?
let mut cfg = find_project_config(dir).await?;
cfg.resources.init = normalize_file_path(cfg.resources.init)
.wrap_err("invalid config field 'resources.init'")
.with_suggestion(|| {
"Specify a file path relative to and child path of the \
directory where 'dtmt.cfg' is."
.to_string()
})
.with_suggestion(|| {
"Use 'dtmt new' in a separate directory to generate \
a valid mod template."
.to_string()
})?;
if let Some(path) = cfg.resources.data {
let path = normalize_file_path(path)
.wrap_err("invalid config field 'resources.data'")
.with_suggestion(|| {
"Specify a file path relative to and child path of the \
directory where 'dtmt.cfg' is."
.to_string()
})
.with_suggestion(|| {
"Use 'dtmt new' in a separate directory to generate \
a valid mod template."
.to_string()
})?;
cfg.resources.data = Some(path);
}
if let Some(path) = cfg.resources.localization {
let path = normalize_file_path(path)
.wrap_err("invalid config field 'resources.localization'")
.with_suggestion(|| {
"Specify a file path relative to and child path of the \
directory where 'dtmt.cfg' is."
.to_string()
})
.with_suggestion(|| {
"Use 'dtmt new' in a separate directory to generate \
a valid mod template."
.to_string()
})?;
cfg.resources.localization = Some(path);
}
cfg
};
let dest = {
let mut path = PathBuf::from(&cfg.name);
let mut path = PathBuf::from(&cfg.id);
path.set_extension("zip");
Arc::new(path)
};
@ -210,21 +265,24 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
})
});
let bundles = try_join_all(tasks).await?;
let bundles = try_join_all(tasks)
.await
.wrap_err("failed to build mod bundles")?;
let mod_file = {
let mut path = cfg.dir.join(&cfg.name);
path.set_extension("mod");
fs::read(path).await?
let config_file = {
let path = cfg.dir.join("dtmt.cfg");
fs::read(&path)
.await
.wrap_err_with(|| format!("failed to read mod config at {}", path.display()))?
};
{
let dest = dest.clone();
let name = cfg.name.clone();
let id = cfg.id.clone();
tokio::task::spawn_blocking(move || {
let mut archive = Archive::new(name);
let mut archive = Archive::new(id);
archive.add_mod_file(mod_file);
archive.add_config(config_file);
for bundle in bundles {
archive.add_bundle(bundle);

View file

@ -58,14 +58,14 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
Bundle::from_binary(&ctx, name, binary).wrap_err("Failed to open bundle file")?
};
if let Some(_name) = matches.get_one::<String>("replace") {
if let Some(name) = matches.get_one::<String>("replace") {
let mut file = File::open(&file_path)
.await
.wrap_err_with(|| format!("failed to open '{}'", file_path.display()))?;
if let Some(variant) = bundle
.files_mut()
.filter(|file| file.matches_name(_name))
.filter(|file| file.matches_name(name.clone()))
// TODO: Handle file variants
.find_map(|file| file.variants_mut().next())
{
@ -75,7 +75,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
.wrap_err("failed to read input file")?;
variant.set_data(data);
} else {
let err = eyre::eyre!("No file '{}' in this bundle.", _name)
let err = eyre::eyre!("No file '{}' in this bundle.", name)
.with_suggestion(|| {
format!(
"Run '{} bundle list {}' to list the files in this bundle.",
@ -87,7 +87,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
format!(
"Use '{} bundle inject --add {} {} {}' to add it as a new file",
clap::crate_name!(),
_name,
name,
bundle_path.display(),
file_path.display()
)

View file

@ -50,13 +50,13 @@ where
match fmt {
OutputFormat::Text => {
println!("Bundle: {}", bundle.name());
println!("Bundle: {}", bundle.name().display());
for f in bundle.files().iter() {
if f.variants().len() != 1 {
let err = eyre::eyre!("Expected exactly one version for this file.")
.with_section(|| f.variants().len().to_string().header("Bundle:"))
.with_section(|| bundle.name().clone().header("Bundle:"));
.with_section(|| bundle.name().display().header("Bundle:"));
tracing::error!("{:#}", err);
}
@ -64,7 +64,7 @@ where
let v = &f.variants()[0];
println!(
"\t{}.{}: {} bytes",
f.base_name(),
f.base_name().display(),
f.file_type().ext_name(),
v.size()
);

View file

@ -24,10 +24,6 @@ pub(crate) fn command_definition() -> Command {
#[tracing::instrument(skip_all)]
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
unsafe {
oodle_sys::init(matches.get_one::<String>("oodle"));
}
match matches.subcommand() {
Some(("decompress", sub_matches)) => decompress::run(ctx, sub_matches).await,
Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await,

View file

@ -1,8 +1,10 @@
use std::path::PathBuf;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
use cli_table::{print_stdout, WithTitle};
use color_eyre::eyre::{Context, Result};
use color_eyre::{Help, SectionExt};
use sdk::murmur::{IdString64, Murmur32, Murmur64};
use tokio::fs::File;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio_stream::wrappers::LinesStream;
@ -27,6 +29,40 @@ impl From<HashGroup> for sdk::murmur::HashGroup {
}
}
impl std::fmt::Display for HashGroup {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
HashGroup::Filename => write!(f, "filename"),
HashGroup::Filetype => write!(f, "filetype"),
HashGroup::Strings => write!(f, "strings"),
HashGroup::Other => write!(f, "other"),
}
}
}
#[derive(cli_table::Table)]
struct TableRow {
#[table(title = "Value")]
value: String,
#[table(title = "Murmur64")]
long: Murmur64,
#[table(title = "Murmur32")]
short: Murmur32,
#[table(title = "Group")]
group: sdk::murmur::HashGroup,
}
impl From<&sdk::murmur::Entry> for TableRow {
fn from(entry: &sdk::murmur::Entry) -> Self {
Self {
value: entry.value().clone(),
long: entry.long(),
short: entry.short(),
group: entry.group(),
}
}
}
pub(crate) fn command_definition() -> Command {
Command::new("dictionary")
.about("Manipulate a hash dictionary file.")
@ -43,7 +79,8 @@ pub(crate) fn command_definition() -> Command {
.short('g')
.long("group")
.action(ArgAction::Append)
.value_parser(value_parser!(HashGroup)),
.value_parser(value_parser!(HashGroup))
.default_values(["other", "filename", "filetype", "strings"]),
),
)
.subcommand(
@ -67,6 +104,7 @@ pub(crate) fn command_definition() -> Command {
.value_parser(value_parser!(PathBuf)),
),
)
.subcommand(Command::new("show").about("Show the contents of the dictionary"))
.subcommand(Command::new("save").about(
"Save back the currently loaded dictionary, with hashes pre-computed. \
Pre-computing hashes speeds up loading large dictionaries, as they would \
@ -78,17 +116,23 @@ pub(crate) fn command_definition() -> Command {
pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
match matches.subcommand() {
Some(("lookup", sub_matches)) => {
let hash = sub_matches
.get_one::<u64>("hash")
let hash = {
let s = sub_matches
.get_one::<String>("hash")
.expect("required argument not found");
u64::from_str_radix(s, 16)
.wrap_err("failed to parse argument as hexadecimal string")?
};
let groups = sub_matches
.get_many::<HashGroup>("group")
.unwrap_or_default();
for group in groups {
let value = ctx.lookup_hash(*hash, (*group).into());
println!("{value}");
if let IdString64::String(value) = ctx.lookup_hash(hash, (*group).into()) {
println!("{group}: {value}");
}
}
Ok(())
@ -176,6 +220,14 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<(
.await
.wrap_err("Failed to write dictionary to disk")
}
Some(("show", _)) => {
let lookup = &ctx.lookup;
let rows: Vec<_> = lookup.entries().iter().map(TableRow::from).collect();
print_stdout(rows.with_title())?;
Ok(())
}
_ => unreachable!(
"clap is configured to require a subcommand, and they're all handled above"
),

View file

@ -8,15 +8,22 @@ use futures::{StreamExt, TryStreamExt};
use string_template::Template;
use tokio::fs::{self, DirBuilder};
const TEMPLATES: [(&str, &str); 6] = [
const TEMPLATES: [(&str, &str); 5] = [
(
"dtmt.cfg",
r#"name = "{{name}}"
description = "An elaborate description of my cool game mod!"
r#"id = "{{id}}"
name = "{{name}}"
description = "This is my new mod '{{name}}'!"
version = "0.1.0"
resources = {
init = "scripts/mods/{{id}}/init"
data = "scripts/mods/{{id}}/data"
localization = "scripts/mods/{{id}}/localization"
}
packages = [
"packages/{{name}}"
"packages/{{id}}"
]
depends = [
@ -25,50 +32,35 @@ depends = [
"#,
),
(
"{{name}}.mod",
r#"return {
run = function()
fassert(rawget(_G, "new_mod"), "`{{title}}` encountered an error loading the Darktide Mod Framework.")
new_mod("{{name}}", {
mod_script = "scripts/mods/{{name}}/{{name}}",
mod_data = "scripts/mods/{{name}}/{{name}}_data",
mod_localization = "scripts/mods/{{name}}/{{name}}_localization",
})
end,
packages = {},
}"#,
),
(
"packages/{{name}}.package",
"packages/{{id}}.package",
r#"lua = [
"scripts/mods/{{name}}/*"
"scripts/mods/{{id}}/*"
]
"#,
),
(
"scripts/mods/{{name}}/{{name}}.lua",
r#"local mod = get_mod("{{name}}")
"scripts/mods/{{id}}/init.lua",
r#"local mod = get_mod("{{id}}")
-- Your mod code goes here.
-- https://vmf-docs.verminti.de
"#,
),
(
"scripts/mods/{{name}}/{{name}}_data.lua",
r#"local mod = get_mod("{{name}}")
"scripts/mods/{{id}}/data.lua",
r#"local mod = get_mod("{{id}}")
return {
name = "{{title}}",
name = "{{name}}",
description = mod:localize("mod_description"),
is_togglable = true,
}"#,
),
(
"scripts/mods/{{name}}/{{name}}_localization.lua",
"scripts/mods/{{id}}/localization.lua",
r#"return {
mod_description = {
en = "An elaborate description of my cool game mod!",
en = "This is my new mod '{{name}}'!",
},
}"#,
),
@ -78,8 +70,8 @@ pub(crate) fn command_definition() -> Command {
Command::new("new")
.about("Create a new project")
.arg(
Arg::new("title")
.long("title")
Arg::new("name")
.long("name")
.help("The display name of the new mod."),
)
.arg(Arg::new("root").help(
@ -107,14 +99,14 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
}
};
let title = if let Some(title) = matches.get_one::<String>("title") {
title.clone()
let name = if let Some(name) = matches.get_one::<String>("name") {
name.clone()
} else {
promptly::prompt("The mod display name")?
promptly::prompt("The display name")?
};
let name = {
let default = title
let id = {
let default = name
.chars()
.map(|c| {
if c.is_ascii_alphanumeric() {
@ -124,15 +116,14 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
}
})
.collect::<String>();
promptly::prompt_default("The mod identifier name", default)?
promptly::prompt_default("The unique mod ID", default)?
};
tracing::debug!(root = %root.display());
tracing::debug!(title, name);
tracing::debug!(root = %root.display(), name, id);
let mut data = HashMap::new();
data.insert("name", name.as_str());
data.insert("title", title.as_str());
data.insert("id", id.as_str());
let templates = TEMPLATES
.iter()
@ -168,7 +159,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
tracing::info!(
"Created {} files for mod '{}' in '{}'.",
TEMPLATES.len(),
title,
name,
root.display()
);

View file

@ -13,9 +13,6 @@ use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::BufReader;
use tokio::sync::RwLock;
use tracing_error::ErrorLayer;
use tracing_subscriber::prelude::*;
use tracing_subscriber::EnvFilter;
mod cmd {
pub mod build;
@ -62,19 +59,7 @@ async fn main() -> Result<()> {
// .subcommand(cmd::watch::command_definition())
.get_matches();
{
let fmt_layer = tracing_subscriber::fmt::layer().pretty();
let filter_layer =
EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?;
tracing_subscriber::registry()
.with(filter_layer)
.with(fmt_layer)
.with(ErrorLayer::new(
tracing_subscriber::fmt::format::Pretty::default(),
))
.init();
}
dtmt_shared::create_tracing_subscriber();
// TODO: Move this into a `Context::init` method?
let ctx = sdk::Context::new();

View file

@ -5,14 +5,14 @@ use std::path::{Path, PathBuf};
use color_eyre::eyre::{self, Context};
use color_eyre::Result;
use sdk::murmur::Murmur64;
use sdk::murmur::IdString64;
use sdk::Bundle;
use zip::ZipWriter;
pub struct Archive {
name: String,
bundles: Vec<Bundle>,
mod_file: Option<Vec<u8>>,
config_file: Option<Vec<u8>>,
}
impl Archive {
@ -20,7 +20,7 @@ impl Archive {
Self {
name,
bundles: Vec::new(),
mod_file: None,
config_file: None,
}
}
@ -28,18 +28,18 @@ impl Archive {
self.bundles.push(bundle)
}
pub fn add_mod_file(&mut self, content: Vec<u8>) {
self.mod_file = Some(content);
pub fn add_config(&mut self, content: Vec<u8>) {
self.config_file = Some(content);
}
pub fn write<P>(&self, path: P) -> Result<()>
where
P: AsRef<Path>,
{
let mod_file = self
.mod_file
let config_file = self
.config_file
.as_ref()
.ok_or_else(|| eyre::eyre!("Mod file is missing from mod archive"))?;
.ok_or_else(|| eyre::eyre!("Config file is missing in mod archive"))?;
let f = File::create(path.as_ref()).wrap_err_with(|| {
format!(
@ -54,16 +54,18 @@ impl Archive {
let base_path = PathBuf::from(&self.name);
{
let mut name = base_path.join(&self.name);
name.set_extension("mod");
let name = base_path.join("dtmt.cfg");
zip.start_file(name.to_string_lossy(), Default::default())?;
zip.write_all(mod_file)?;
zip.write_all(config_file)?;
}
let mut file_map = HashMap::new();
for bundle in self.bundles.iter() {
let bundle_name = bundle.name().clone();
let bundle_name = match bundle.name() {
IdString64::Hash(_) => eyre::bail!("bundle name must be known as string. got hash"),
IdString64::String(s) => s,
};
let map_entry: &mut HashSet<_> = file_map.entry(bundle_name).or_default();
@ -71,7 +73,7 @@ impl Archive {
map_entry.insert(file.name(false, None));
}
let name = Murmur64::hash(bundle.name().as_bytes());
let name = bundle.name().to_murmur64();
let path = base_path.join(name.to_string().to_ascii_lowercase());
zip.start_file(path.to_string_lossy(), Default::default())?;

BIN
docs/screenshots/dtmm.png Normal file

Binary file not shown.


View file

@ -0,0 +1,13 @@
[package]
name = "dtmt-shared"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = "1.0.152"
time = { version = "0.3.19", features = ["formatting", "local-offset", "macros"] }
tracing = "0.1.37"
tracing-error = "0.2.0"
tracing-subscriber = "0.3.16"

View file

@ -0,0 +1,13 @@
= dtmt-shared
:idprefix:
:idseparator:
:toc: macro
:toclevels: 1
:!toc-title:
:caution-caption: :fire:
:important-caption: :exclamation:
:note-caption: :paperclip:
:tip-caption: :bulb:
:warning-caption: :warning:
A set of types and functions shared between multiple crates within _Darktide Mod Tools_ that don't fit into the engine SDK.

View file

@ -0,0 +1,28 @@
mod log;
use std::path::PathBuf;
pub use log::*;
#[derive(Clone, Debug, Default, serde::Deserialize)]
pub struct ModConfigResources {
pub init: PathBuf,
#[serde(default)]
pub data: Option<PathBuf>,
#[serde(default)]
pub localization: Option<PathBuf>,
}
#[derive(Clone, Debug, Default, serde::Deserialize)]
pub struct ModConfig {
#[serde(skip)]
pub dir: std::path::PathBuf,
pub id: String,
pub name: String,
pub description: String,
pub version: String,
pub packages: Vec<std::path::PathBuf>,
pub resources: ModConfigResources,
#[serde(default)]
pub depends: Vec<String>,
}

View file

@ -0,0 +1,87 @@
use std::fmt::Result;
use time::format_description::FormatItem;
use time::macros::format_description;
use time::OffsetDateTime;
use tracing::field::Field;
use tracing::{Event, Metadata, Subscriber};
use tracing_error::ErrorLayer;
use tracing_subscriber::filter::FilterFn;
use tracing_subscriber::fmt::format::{debug_fn, Writer};
use tracing_subscriber::fmt::{self, FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::prelude::*;
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;
pub const TIME_FORMAT: &[FormatItem] = format_description!("[hour]:[minute]:[second]");
pub fn format_fields(w: &mut Writer<'_>, field: &Field, val: &dyn std::fmt::Debug) -> Result {
if field.name() == "message" {
write!(w, "{:?}", val)
} else {
Ok(())
}
}
pub fn filter_fields(metadata: &Metadata<'_>) -> bool {
metadata
.fields()
.iter()
.any(|field| field.name() == "message")
}
pub struct Formatter;
impl<S, N> FormatEvent<S, N> for Formatter
where
S: Subscriber + for<'a> LookupSpan<'a>,
N: for<'a> FormatFields<'a> + 'static,
{
fn format_event(
&self,
ctx: &FmtContext<'_, S, N>,
mut writer: Writer<'_>,
event: &Event<'_>,
) -> Result {
let meta = event.metadata();
let time = OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc());
let time = time.format(TIME_FORMAT).map_err(|_| std::fmt::Error)?;
write!(writer, "[{}] [{:>5}] ", time, meta.level())?;
ctx.field_format().format_fields(writer.by_ref(), event)?;
writeln!(writer)
}
}
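// With this formatter, an event renders as e.g. "[13:37:00] [ INFO] Deploying mods..."
// (local time, right-aligned level, then only the `message` field; the message
// text here is a made-up example).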
pub fn create_tracing_subscriber() {
let env_layer =
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap());
let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) {
let fmt_layer = fmt::layer().pretty();
(Some(fmt_layer), None, None)
} else {
// Creates a layer that
// - only prints events that contain a message
// - does not print fields
// - does not print spans/targets
// - only prints time, not date
let fmt_layer = fmt::layer()
.event_format(Formatter)
.fmt_fields(debug_fn(format_fields));
(None, Some(fmt_layer), Some(FilterFn::new(filter_fields)))
};
tracing_subscriber::registry()
.with(filter_layer)
.with(env_layer)
.with(dev_stdout_layer)
.with(prod_stdout_layer)
.with(ErrorLayer::new(fmt::format::Pretty::default()))
.init();
}

View file

@ -1,2 +0,0 @@
/target
/Cargo.lock

View file

@ -1,77 +0,0 @@
#![feature(c_size_t)]
#![feature(once_cell)]
use std::ffi::OsStr;
use std::sync::OnceLock;
mod library;
mod types;
pub use library::Library;
pub use library::CHUNK_SIZE;
pub use types::*;
#[derive(thiserror::Error, Debug)]
pub enum OodleError {
#[error("{0}")]
Oodle(String),
#[error(transparent)]
Library(#[from] libloading::Error),
}
type Result<T> = std::result::Result<T, OodleError>;
static LIB: OnceLock<Library> = OnceLock::new();
/// Initialize the global library handle that this module's
/// functions operate on.
///
/// # Safety
///
/// The safety concerns as described by [`libloading::Library::new`] apply.
pub unsafe fn init<P: AsRef<OsStr>>(name: Option<P>) {
let lib = match name {
Some(name) => Library::with_name(name),
None => Library::new(),
};
let lib = lib.expect("Failed to load library.");
if LIB.set(lib).is_err() {
panic!("Library was already initialized. Did you call `init` twice?");
}
}
fn get() -> Result<&'static Library> {
match LIB.get() {
Some(lib) => Ok(lib),
None => {
let err = OodleError::Oodle(String::from("Library has not been initialized, yet."));
Err(err)
}
}
}
pub fn decompress<I>(
data: I,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let lib = get()?;
lib.decompress(data, fuzz_safe, check_crc)
}
pub fn compress<I>(data: I) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let lib = get()?;
lib.compress(data)
}
pub fn get_decode_buffer_size(raw_size: usize, corruption_possible: bool) -> Result<usize> {
let lib = get()?;
lib.get_decode_buffer_size(raw_size, corruption_possible)
}

View file

@ -1,154 +0,0 @@
use std::{ffi::OsStr, ptr};
use libloading::Symbol;
use super::Result;
use crate::{types::*, OodleError};
// Hardcoded chunk size of Bitsquid's bundle compression
pub const CHUNK_SIZE: usize = 512 * 1024;
pub const COMPRESSOR: OodleLZ_Compressor = OodleLZ_Compressor::Kraken;
pub const LEVEL: OodleLZ_CompressionLevel = OodleLZ_CompressionLevel::Optimal2;
#[cfg(target_os = "windows")]
const OODLE_LIB_NAME: &str = "oo2core_8_win64";
#[cfg(target_os = "linux")]
const OODLE_LIB_NAME: &str = "liboo2corelinux64.so";
pub struct Library {
inner: libloading::Library,
}
impl Library {
/// Load the Oodle library by its default name.
///
/// The default name is platform-specific:
/// - Windows: `oo2core_8_win64`
/// - Linux: `liboo2corelinux64.so`
///
/// # Safety
///
/// The safety concerns as described by [`libloading::Library::new`] apply.
pub unsafe fn new() -> Result<Self> {
Self::with_name(OODLE_LIB_NAME)
}
/// Load the Oodle library by the given name or path.
///
/// See [`libloading::Library::new`] for how the `name` parameter is handled.
///
/// # Safety
///
/// The safety concerns as described by [`libloading::Library::new`] apply.
pub unsafe fn with_name<P: AsRef<OsStr>>(name: P) -> Result<Self> {
let inner = libloading::Library::new(name)?;
Ok(Self { inner })
}
#[tracing::instrument(skip(self, data))]
pub fn decompress<I>(
&self,
data: I,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let data = data.as_ref();
let mut out = vec![0; CHUNK_SIZE];
let verbosity = if tracing::enabled!(tracing::Level::INFO) {
OodleLZ_Verbosity::Minimal
} else if tracing::enabled!(tracing::Level::DEBUG) {
OodleLZ_Verbosity::Some
} else if tracing::enabled!(tracing::Level::TRACE) {
OodleLZ_Verbosity::Lots
} else {
OodleLZ_Verbosity::None
};
let ret = unsafe {
let decompress: Symbol<OodleLZ_Decompress> = self.inner.get(b"OodleLZ_Decompress\0")?;
decompress(
data.as_ptr() as *const _,
data.len(),
out.as_mut_ptr() as *mut _,
out.len(),
fuzz_safe,
check_crc,
verbosity,
ptr::null_mut(),
0,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
0,
OodleLZ_Decode_ThreadPhase::UNTHREADED,
)
};
if ret == 0 {
let err = OodleError::Oodle(String::from("Decompression failed."));
return Err(err);
}
Ok(out)
}
#[tracing::instrument(name = "Oodle::compress", skip(self, data))]
pub fn compress<I>(&self, data: I) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let mut raw = Vec::from(data.as_ref());
raw.resize(CHUNK_SIZE, 0);
// TODO: Query oodle for buffer size
let mut out = vec![0u8; CHUNK_SIZE];
let ret = unsafe {
let compress: Symbol<OodleLZ_Compress> = self.inner.get(b"OodleLZ_Compress\0")?;
compress(
COMPRESSOR,
raw.as_ptr() as *const _,
raw.len(),
out.as_mut_ptr() as *mut _,
LEVEL,
ptr::null_mut(),
0,
ptr::null_mut(),
ptr::null_mut(),
0,
)
};
tracing::debug!(compressed_size = ret, "Compressed chunk");
if ret == 0 {
let err = OodleError::Oodle(String::from("Compression failed."));
return Err(err);
}
out.resize(ret as usize, 0);
Ok(out)
}
pub fn get_decode_buffer_size(
&self,
raw_size: usize,
corruption_possible: bool,
) -> Result<usize> {
unsafe {
let f: Symbol<OodleLZ_GetDecodeBufferSize> =
self.inner.get(b"OodleLZ_GetDecodeBufferSize\0")?;
let size = f(COMPRESSOR, raw_size, corruption_possible);
Ok(size)
}
}
}

View file

@ -1,197 +0,0 @@
#![allow(dead_code)]
use core::ffi::{c_char, c_int, c_size_t, c_ulonglong, c_void};
// Type definitions taken from Unreal Engine's `oodle2.h`
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_FuzzSafe {
No = 0,
Yes = 1,
}
impl From<bool> for OodleLZ_FuzzSafe {
fn from(value: bool) -> Self {
if value {
Self::Yes
} else {
Self::No
}
}
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_CheckCRC {
No = 0,
Yes = 1,
Force32 = 0x40000000,
}
impl From<bool> for OodleLZ_CheckCRC {
fn from(value: bool) -> Self {
if value {
Self::Yes
} else {
Self::No
}
}
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_Verbosity {
None = 0,
Minimal = 1,
Some = 2,
Lots = 3,
Force32 = 0x40000000,
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_Decode_ThreadPhase {
Phase1 = 1,
Phase2 = 2,
PhaseAll = 3,
}
impl OodleLZ_Decode_ThreadPhase {
pub const UNTHREADED: Self = OodleLZ_Decode_ThreadPhase::PhaseAll;
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_Compressor {
Invalid = -1,
// None = memcpy, pass through uncompressed bytes
None = 3,
// NEW COMPRESSORS:
// Fast decompression and high compression ratios, amazing!
Kraken = 8,
// Leviathan = Kraken's big brother with higher compression, slightly slower decompression.
Leviathan = 13,
// Mermaid is between Kraken & Selkie - crazy fast, still decent compression.
Mermaid = 9,
// Selkie is a super-fast relative of Mermaid. For maximum decode speed.
Selkie = 11,
// Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra)
Hydra = 12,
BitKnit = 10,
// DEPRECATED but still supported
Lzb16 = 4,
Lzna = 7,
Lzh = 0,
Lzhlw = 1,
Lznib = 2,
Lzblw = 5,
Lza = 6,
Count = 14,
Force32 = 0x40000000,
}
#[repr(C)]
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
pub enum OodleLZ_CompressionLevel {
// don't compress, just copy raw bytes
None = 0,
// super fast mode, lower compression ratio
SuperFast = 1,
// fastest LZ mode with still decent compression ratio
VeryFast = 2,
// fast - good for daily use
Fast = 3,
// standard medium speed LZ mode
Normal = 4,
// optimal parse level 1 (faster optimal encoder)
Optimal1 = 5,
// optimal parse level 2 (recommended baseline optimal encoder)
Optimal2 = 6,
// optimal parse level 3 (slower optimal encoder)
Optimal3 = 7,
// optimal parse level 4 (very slow optimal encoder)
Optimal4 = 8,
// optimal parse level 5 (don't care about encode speed, maximum compression)
Optimal5 = 9,
// faster than SuperFast, less compression
HyperFast1 = -1,
// faster than HyperFast1, less compression
HyperFast2 = -2,
// faster than HyperFast2, less compression
HyperFast3 = -3,
// fastest, less compression
HyperFast4 = -4,
Force32 = 0x40000000,
}
impl OodleLZ_CompressionLevel {
// alias hyperfast base level
pub const HYPERFAST: Self = OodleLZ_CompressionLevel::HyperFast1;
// alias optimal standard level
pub const OPTIMAL: Self = OodleLZ_CompressionLevel::Optimal2;
// maximum compression level
pub const MAX: Self = OodleLZ_CompressionLevel::Optimal5;
// fastest compression level
pub const MIN: Self = OodleLZ_CompressionLevel::HyperFast4;
pub const INVALID: Self = OodleLZ_CompressionLevel::Force32;
}
#[allow(non_camel_case_types)]
pub type t_fp_OodleCore_Plugin_Printf =
extern "C" fn(level: c_int, file: *const c_char, line: c_int, fmt: *const c_char);
#[allow(non_camel_case_types)]
pub type OodleLZ_Decompress = extern "C" fn(
compressed_buffer: *const c_void,
compressed_length: c_size_t,
raw_buffer: *mut c_void,
raw_length: c_size_t,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
verbosity: OodleLZ_Verbosity,
decBufBase: *mut c_void,
decBufSize: c_size_t,
callback: *const c_void,
callback_user_data: *const c_void,
decoder_memory: *mut c_void,
decoder_memory_size: c_size_t,
thread_phase: OodleLZ_Decode_ThreadPhase,
) -> c_ulonglong;
#[allow(non_camel_case_types)]
pub type OodleLZ_Compress = extern "C" fn(
compressor: OodleLZ_Compressor,
raw_buffer: *const c_void,
raw_len: c_size_t,
compressed_buffer: *mut c_void,
level: OodleLZ_CompressionLevel,
options: *const c_void,
dictionary_base: c_size_t,
lrm: *const c_void,
scratch_memory: *mut c_void,
scratch_size: c_size_t,
) -> c_ulonglong;
#[allow(non_camel_case_types)]
pub type OodleLZ_GetDecodeBufferSize = extern "C" fn(
compressor: OodleLZ_Compressor,
raw_size: c_size_t,
corruption_possible: bool,
) -> c_size_t;
#[allow(non_camel_case_types)]
pub type OodleCore_Plugins_SetPrintf =
extern "C" fn(f: t_fp_OodleCore_Plugin_Printf) -> t_fp_OodleCore_Plugin_Printf;
#[allow(non_camel_case_types)]
pub type OodleCore_Plugin_Printf_Verbose = t_fp_OodleCore_Plugin_Printf;
#[allow(non_camel_case_types)]
pub type OodleCore_Plugin_Printf_Default = t_fp_OodleCore_Plugin_Printf;

View file

@ -1,11 +1,13 @@
[package]
name = "oodle-sys"
name = "oodle"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
libloading = "0.7.4"
thiserror = "1.0.38"
color-eyre = "0.6.2"
tracing = "0.1.37"
[build-dependencies]
bindgen = "0.64.0"

44
lib/oodle/build.rs Normal file
View file

@ -0,0 +1,44 @@
extern crate bindgen;
use std::env;
use std::path::PathBuf;
fn main() {
// Tell cargo to look for shared libraries in the specified directory
// println!("cargo:rustc-link-search=/path/to/lib");
    // Tell cargo to tell rustc to link the Oodle core
    // shared library.
if cfg!(target_os = "windows") {
println!("cargo:rustc-link-lib=oo2core_8_win64");
} else {
println!("cargo:rustc-link-lib=oo2corelinux64");
}
// Tell cargo to invalidate the built crate whenever the wrapper changes
println!("cargo:rerun-if-changed=oodle2.h");
// The bindgen::Builder is the main entry point
// to bindgen, and lets you build up options for
// the resulting bindings.
let bindings = bindgen::Builder::default()
// The input header we would like to generate
// bindings for.
.header("oodle2base.h")
.header("oodle2.h")
.blocklist_file("stdint.h")
.blocklist_file("stdlib.h")
// Tell cargo to invalidate the built crate whenever any of the
// included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.
.expect("Unable to generate bindings");
// Write the bindings to the $OUT_DIR/bindings.rs file.
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}

1643
lib/oodle/oodle2.h Normal file

File diff suppressed because it is too large Load diff

167
lib/oodle/oodle2base.h Normal file
View file

@ -0,0 +1,167 @@
//===================================================
// Oodle2 Base header
// (C) Copyright 1994-2021 Epic Games Tools LLC
//===================================================
#ifndef __OODLE2BASE_H_INCLUDED__
#define __OODLE2BASE_H_INCLUDED__
#ifndef OODLE2BASE_PUBLIC_HEADER
#define OODLE2BASE_PUBLIC_HEADER 1
#endif
#ifdef _MSC_VER
#pragma pack(push, Oodle, 8)
#pragma warning(push)
#pragma warning(disable : 4127) // conditional is constant
#endif
#ifndef OODLE_BASE_TYPES_H
#define OODLE_BASE_TYPES_H
#include <stdint.h>
#define OOCOPYRIGHT "Copyright (C) 1994-2021, Epic Games Tools LLC"
// Typedefs
typedef int8_t OO_S8;
typedef uint8_t OO_U8;
typedef int16_t OO_S16;
typedef uint16_t OO_U16;
typedef int32_t OO_S32;
typedef uint32_t OO_U32;
typedef int64_t OO_S64;
typedef uint64_t OO_U64;
typedef float OO_F32;
typedef double OO_F64;
typedef intptr_t OO_SINTa;
typedef uintptr_t OO_UINTa;
typedef int32_t OO_BOOL;
// Struct packing handling and inlining
#if defined(__GNUC__) || defined(__clang__)
#define OOSTRUCT struct __attribute__((__packed__))
#define OOINLINEFUNC inline
#elif defined(_MSC_VER)
// on VC++, we use pragmas for the struct packing
#define OOSTRUCT struct
#define OOINLINEFUNC __inline
#endif
// Linkage stuff
#if defined(_WIN32)
#define OOLINK __stdcall
#define OOEXPLINK __stdcall
#else
#define OOLINK
#define OOEXPLINK
#endif
// C++ name demangaling
#ifdef __cplusplus
#define OODEFFUNC extern "C"
#define OODEFSTART extern "C" {
#define OODEFEND }
#define OODEFAULT( val ) =val
#else
#define OODEFFUNC
#define OODEFSTART
#define OODEFEND
#define OODEFAULT( val )
#endif
// ========================================================
// Exported function declarations
#define OOEXPFUNC OODEFFUNC
//===========================================================================
// OO_STRING_JOIN joins strings in the preprocessor and works with LINESTRING
#define OO_STRING_JOIN(arg1, arg2) OO_STRING_JOIN_DELAY(arg1, arg2)
#define OO_STRING_JOIN_DELAY(arg1, arg2) OO_STRING_JOIN_IMMEDIATE(arg1, arg2)
#define OO_STRING_JOIN_IMMEDIATE(arg1, arg2) arg1 ## arg2
//===========================================================================
// OO_NUMBERNAME is a macro to make a name unique, so that you can use it to declare
// variable names and they won't conflict with each other
// using __LINE__ is broken in MSVC with /ZI , but __COUNTER__ is an MSVC extension that works
#ifdef _MSC_VER
#define OO_NUMBERNAME(name) OO_STRING_JOIN(name,__COUNTER__)
#else
#define OO_NUMBERNAME(name) OO_STRING_JOIN(name,__LINE__)
#endif
//===================================================================
// simple compiler assert
// this happens at declaration time, so if it's inside a function in a C file, drop {} around it
#ifndef OO_COMPILER_ASSERT
#if defined(__clang__)
#define OO_COMPILER_ASSERT_UNUSED __attribute__((unused)) // hides warnings when compiler_asserts are in a local scope
#else
#define OO_COMPILER_ASSERT_UNUSED
#endif
#define OO_COMPILER_ASSERT(exp) typedef char OO_NUMBERNAME(_dummy_array) [ (exp) ? 1 : -1 ] OO_COMPILER_ASSERT_UNUSED
#endif
#endif
// Oodle2 base header
#ifndef OODLE2_PUBLIC_CORE_DEFINES
#define OODLE2_PUBLIC_CORE_DEFINES 1
#define OOFUNC1 OOEXPFUNC
#define OOFUNC2 OOEXPLINK
#define OOFUNCSTART
#define OODLE_CALLBACK OOLINK
// Check build flags
#if defined(OODLE_BUILDING_LIB) || defined(OODLE_BUILDING_DLL)
#error Should not see OODLE_BUILDING set for users of oodle.h
#endif
#ifndef NULL
#define NULL (0)
#endif
// OODLE_MALLOC_MINIMUM_ALIGNMENT is 8 in 32-bit, 16 in 64-bit
#define OODLE_MALLOC_MINIMUM_ALIGNMENT ((OO_SINTa)(2*sizeof(void *)))
typedef void (OODLE_CALLBACK t_OodleFPVoidVoid)(void);
/* void-void callback func pointer
takes void, returns void
*/
typedef void (OODLE_CALLBACK t_OodleFPVoidVoidStar)(void *);
/* void-void-star callback func pointer
takes void pointer, returns void
*/
#define OODLE_JOB_MAX_DEPENDENCIES (4) /* Maximum number of dependencies Oodle will ever pass to a RunJob callback
*/
#define OODLE_JOB_NULL_HANDLE (0) /* Value 0 of Jobify handles is reserved to mean none
* Wait(OODLE_JOB_NULL_HANDLE) is a nop
* if RunJob returns OODLE_JOB_NULL_HANDLE it means the job
* was run synchronously and no wait is required
*/
#define t_fp_Oodle_Job t_OodleFPVoidVoidStar /* Job function pointer for Plugin Jobify system
takes void pointer returns void
*/
#endif // OODLE2_PUBLIC_CORE_DEFINES
#ifdef _MSC_VER
#pragma warning(pop)
#pragma pack(pop, Oodle)
#endif
#endif // __OODLE2BASE_H_INCLUDED__

145
lib/oodle/src/lib.rs Normal file
View file

@ -0,0 +1,145 @@
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
use std::ptr;
use color_eyre::{eyre, Result};
#[allow(dead_code)]
mod bindings {
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}
// Hardcoded chunk size of Bitsquid's bundle compression
pub const CHUNK_SIZE: usize = 512 * 1024;
pub const COMPRESSOR: bindings::OodleLZ_Compressor =
bindings::OodleLZ_Compressor_OodleLZ_Compressor_Kraken;
pub const LEVEL: bindings::OodleLZ_CompressionLevel =
bindings::OodleLZ_CompressionLevel_OodleLZ_CompressionLevel_Optimal2;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum OodleLZ_FuzzSafe {
Yes,
No,
}
impl From<OodleLZ_FuzzSafe> for bindings::OodleLZ_FuzzSafe {
fn from(value: OodleLZ_FuzzSafe) -> Self {
match value {
OodleLZ_FuzzSafe::Yes => bindings::OodleLZ_FuzzSafe_OodleLZ_FuzzSafe_Yes,
OodleLZ_FuzzSafe::No => bindings::OodleLZ_FuzzSafe_OodleLZ_FuzzSafe_No,
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum OodleLZ_CheckCRC {
Yes,
No,
}
impl From<OodleLZ_CheckCRC> for bindings::OodleLZ_CheckCRC {
fn from(value: OodleLZ_CheckCRC) -> Self {
match value {
OodleLZ_CheckCRC::Yes => bindings::OodleLZ_CheckCRC_OodleLZ_CheckCRC_Yes,
OodleLZ_CheckCRC::No => bindings::OodleLZ_CheckCRC_OodleLZ_CheckCRC_No,
}
}
}
#[tracing::instrument(skip(data))]
pub fn decompress<I>(
data: I,
fuzz_safe: OodleLZ_FuzzSafe,
check_crc: OodleLZ_CheckCRC,
) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let data = data.as_ref();
let mut out = vec![0; CHUNK_SIZE];
let verbosity = if tracing::enabled!(tracing::Level::INFO) {
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Minimal
} else if tracing::enabled!(tracing::Level::DEBUG) {
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Some
} else if tracing::enabled!(tracing::Level::TRACE) {
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Lots
} else {
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_None
};
let ret = unsafe {
bindings::OodleLZ_Decompress(
data.as_ptr() as *const _,
data.len() as isize,
out.as_mut_ptr() as *mut _,
out.len() as isize,
fuzz_safe.into(),
check_crc.into(),
verbosity,
ptr::null_mut(),
0,
None,
ptr::null_mut(),
ptr::null_mut(),
0,
bindings::OodleLZ_Decode_ThreadPhase_OodleLZ_Decode_Unthreaded,
)
};
if ret == 0 {
eyre::bail!("Decompression failed");
}
Ok(out)
}
#[tracing::instrument(skip(data))]
pub fn compress<I>(data: I) -> Result<Vec<u8>>
where
I: AsRef<[u8]>,
{
let mut raw = Vec::from(data.as_ref());
raw.resize(CHUNK_SIZE, 0);
// TODO: Query oodle for buffer size
let mut out = vec![0u8; CHUNK_SIZE];
let ret = unsafe {
bindings::OodleLZ_Compress(
COMPRESSOR,
raw.as_ptr() as *const _,
raw.len() as isize,
out.as_mut_ptr() as *mut _,
LEVEL,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
0,
)
};
tracing::debug!(compressed_size = ret, "Compressed chunk");
if ret == 0 {
eyre::bail!("Compression failed");
}
out.resize(ret as usize, 0);
Ok(out)
}
pub fn get_decode_buffer_size(raw_size: usize, corruption_possible: bool) -> Result<usize> {
let size = unsafe {
bindings::OodleLZ_GetDecodeBufferSize(
COMPRESSOR,
raw_size as isize,
if corruption_possible { 1 } else { 0 },
)
};
Ok(size as usize)
}
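// As a usage sketch (assumes the Oodle runtime library is linkable as set up
// in build.rs, and input no larger than a single chunk), a round trip through
// the two functions above could look like this; `roundtrip` is a hypothetical
// helper, not part of the crate:
fn roundtrip(raw: &[u8]) -> color_eyre::Result<Vec<u8>> {
    let compressed = compress(raw)?;
    let decompressed = decompress(&compressed, OodleLZ_FuzzSafe::Yes, OodleLZ_CheckCRC::No)?;
    // `decompress` always returns a full CHUNK_SIZE buffer; trim back to the
    // original length to recover the input.
    Ok(decompressed[..raw.len()].to_vec())
}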

View file

@ -4,6 +4,7 @@ version = "0.2.0"
edition = "2021"
[dependencies]
bitflags = "1.3.2"
byteorder = "1.4.3"
color-eyre = "0.6.2"
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
@ -16,9 +17,10 @@ nanorand = "0.7.0"
pin-project-lite = "0.2.9"
serde = { version = "1.0.147", features = ["derive"] }
serde_sjson = { path = "../../lib/serde_sjson", version = "*" }
oodle-sys = { path = "../../lib/oodle-sys", version = "*" }
oodle = { path = "../../lib/oodle", version = "*" }
tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] }
tracing = { version = "0.1.37", features = ["async-await"] }
tracing-error = "0.2.0"
luajit2-sys = "0.0.2"
async-recursion = "1.0.2"

View file

@ -1,3 +1,47 @@
use std::io::{Cursor, Read, Seek, Write};
use color_eyre::Result;
use self::sync::{ReadExt, WriteExt};
pub trait FromBinary: Sized {
fn from_binary<R: Read + Seek>(r: &mut R) -> Result<Self>;
}
pub trait ToBinary {
fn to_binary(&self) -> Result<Vec<u8>>;
}
impl<T: ToBinary> ToBinary for Vec<T> {
fn to_binary(&self) -> Result<Vec<u8>> {
// TODO: Allocations for the vector could be optimized by first
// serializing one value, then calculating the size from that.
let mut bin = Cursor::new(Vec::new());
bin.write_u32(self.len() as u32)?;
for val in self.iter() {
let buf = val.to_binary()?;
bin.write_all(&buf)?;
}
Ok(bin.into_inner())
}
}
impl<T: FromBinary> FromBinary for Vec<T> {
fn from_binary<R: Read + Seek>(r: &mut R) -> Result<Self> {
let size = r.read_u32()? as usize;
let mut list = Vec::with_capacity(size);
for _ in 0..size {
list.push(T::from_binary(r)?);
}
Ok(list)
}
}
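// To illustrate how these traits compose with the blanket `Vec<T>` impls
// above, a hypothetical type (not part of the SDK) might implement them with
// the `read_u64`/`write_u64` helpers from the `sync` module:
struct Entry {
    id: u64,
}
impl ToBinary for Entry {
    fn to_binary(&self) -> Result<Vec<u8>> {
        let mut bin = Cursor::new(Vec::new());
        bin.write_u64(self.id)?;
        Ok(bin.into_inner())
    }
}
impl FromBinary for Entry {
    fn from_binary<R: Read + Seek>(r: &mut R) -> Result<Self> {
        Ok(Entry { id: r.read_u64()? })
    }
}
// With these in place, `Vec::<Entry>::to_binary` and `Vec::<Entry>::from_binary`
// work via the blanket impls above.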
pub mod sync {
use std::io::{self, Read, Seek, SeekFrom};

View file

@ -0,0 +1,252 @@
use std::collections::HashMap;
use std::io::Cursor;
use std::io::Read;
use std::io::Seek;
use std::io::Write;
use color_eyre::eyre;
use color_eyre::Result;
use crate::binary::sync::*;
use crate::binary::FromBinary;
use crate::binary::ToBinary;
use crate::murmur::Murmur64;
use crate::Bundle;
use super::file::BundleFileType;
const DATABASE_VERSION: u32 = 0x6;
const FILE_VERSION: u32 = 0x4;
pub struct BundleFile {
name: String,
stream: String,
platform_specific: bool,
file_time: u64,
}
pub struct FileName {
extension: BundleFileType,
name: Murmur64,
}
pub struct BundleDatabase {
stored_files: HashMap<Murmur64, Vec<BundleFile>>,
resource_hashes: HashMap<Murmur64, u64>,
bundle_contents: HashMap<Murmur64, Vec<FileName>>,
}
impl BundleDatabase {
pub fn add_bundle(&mut self, bundle: &Bundle) {
let hash = bundle.name().to_murmur64();
let name = hash.to_string();
let stream = format!("{}.stream", &name);
tracing::trace!(
"Adding bundle '{} ({:?} | {:016X})' to database. Hash exists: {}",
bundle.name().display(),
bundle.name(),
hash,
self.stored_files.contains_key(&hash)
);
{
let entry = self.stored_files.entry(hash).or_default();
let existing = entry.iter().position(|f| f.name == name);
let file = BundleFile {
name,
stream,
file_time: 0,
platform_specific: false,
};
entry.push(file);
if let Some(pos) = existing {
tracing::debug!("Found bundle '{}' at {}. Replacing.", hash.to_string(), pos);
entry.swap_remove(pos);
}
}
for f in bundle.files() {
let file_name = FileName {
extension: f.file_type(),
name: f.base_name().to_murmur64(),
};
// TODO: Compute actual resource hash
self.resource_hashes.insert(hash, 0);
self.bundle_contents
.entry(hash)
.or_default()
.push(file_name);
}
}
}
impl FromBinary for BundleDatabase {
#[tracing::instrument(name = "BundleDatabase::from_binary", skip_all)]
fn from_binary<R: Read + Seek>(r: &mut R) -> Result<Self> {
{
let format = r.read_u32()?;
eyre::ensure!(
format == DATABASE_VERSION,
"invalid file format, expected {:#X}, got {:#X}",
DATABASE_VERSION,
format
);
}
let num_entries = r.read_u32()? as usize;
let mut stored_files = HashMap::with_capacity(num_entries);
for _ in 0..num_entries {
let hash = Murmur64::from(r.read_u64()?);
let num_files = r.read_u32()? as usize;
let mut files = Vec::with_capacity(num_files);
for _ in 0..num_files {
{
let version = r.read_u32()?;
eyre::ensure!(
version == FILE_VERSION,
"invalid file version, expected {:#X}, got {:#X}",
FILE_VERSION,
version
);
}
let len_name = r.read_u32()? as usize;
let mut buf = vec![0; len_name];
r.read_exact(&mut buf)?;
let name = String::from_utf8(buf)?;
let len_stream = r.read_u32()? as usize;
let mut buf = vec![0; len_stream];
r.read_exact(&mut buf)?;
let stream = String::from_utf8(buf)?;
let platform_specific = r.read_u8()? != 0;
// TODO: Unknown what this is. In VT2's SDK, it's simply ignored,
// and always written as `0`, but in DT, it seems to be used.
let mut buffer = [0; 20];
r.read_exact(&mut buffer)?;
if cfg!(debug_assertions) && buffer.iter().any(|b| *b != 0) {
tracing::warn!("Unknown value in 20-byte buffer: {:?}", buffer);
}
let file_time = r.read_u64()?;
let file = BundleFile {
name,
stream,
platform_specific,
file_time,
};
files.push(file);
}
stored_files.insert(hash, files);
}
let num_hashes = r.read_u32()? as usize;
let mut resource_hashes = HashMap::with_capacity(num_hashes);
for _ in 0..num_hashes {
let name = Murmur64::from(r.read_u64()?);
let hash = r.read_u64()?;
resource_hashes.insert(name, hash);
}
let num_contents = r.read_u32()? as usize;
let mut bundle_contents = HashMap::with_capacity(num_contents);
for _ in 0..num_contents {
let hash = Murmur64::from(r.read_u64()?);
let num_files = r.read_u32()? as usize;
let mut files = Vec::with_capacity(num_files);
for _ in 0..num_files {
let extension = BundleFileType::from(r.read_u64()?);
let name = Murmur64::from(r.read_u64()?);
files.push(FileName { extension, name });
}
bundle_contents.insert(hash, files);
}
Ok(Self {
stored_files,
resource_hashes,
bundle_contents,
})
}
}
impl ToBinary for BundleDatabase {
#[tracing::instrument(name = "BundleDatabase::to_binary", skip_all)]
fn to_binary(&self) -> Result<Vec<u8>> {
let mut binary = Vec::new();
{
let mut w = Cursor::new(&mut binary);
w.write_u32(DATABASE_VERSION)?;
w.write_u32(self.stored_files.len() as u32)?;
for (hash, files) in self.stored_files.iter() {
w.write_u64((*hash).into())?;
w.write_u32(files.len() as u32)?;
for f in files.iter() {
w.write_u32(FILE_VERSION)?;
w.write_u32(f.name.len() as u32)?;
w.write_all(f.name.as_bytes())?;
w.write_u32(f.stream.len() as u32)?;
w.write_all(f.stream.as_bytes())?;
w.write_u8(if f.platform_specific { 1 } else { 0 })?;
// TODO: Don't know what goes here
let buffer = [0; 20];
w.write_all(&buffer)?;
w.write_u64(f.file_time)?;
}
}
w.write_u32(self.resource_hashes.len() as u32)?;
for (name, hash) in self.resource_hashes.iter() {
w.write_u64((*name).into())?;
w.write_u64(*hash)?;
}
w.write_u32(self.bundle_contents.len() as u32)?;
for (hash, contents) in self.bundle_contents.iter() {
w.write_u64((*hash).into())?;
w.write_u32(contents.len() as u32)?;
for FileName { extension, name } in contents.iter() {
w.write_u64((*extension).into())?;
w.write_u64((*name).into())?;
}
}
}
Ok(binary)
}
}
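
A hedged usage sketch for the database handling above: parse an existing database file via the `FromBinary` impl, register a bundle with `add_bundle`, and serialize it back with `ToBinary`. The file path and surrounding I/O are illustrative assumptions; the trait and method calls mirror the code in this hunk.

[source,rust]
----
// Hedged sketch: the path is an assumption, the API calls are taken from the
// impls above (`FromBinary::from_binary`, `BundleDatabase::add_bundle`,
// `ToBinary::to_binary`).
use std::fs;
use std::io::Cursor;

use color_eyre::Result;
use sdk::{Bundle, BundleDatabase, FromBinary, ToBinary};

fn register_bundle(database_path: &str, bundle: &Bundle) -> Result<()> {
    // Parse the binary database into its in-memory form.
    let binary = fs::read(database_path)?;
    let mut db = BundleDatabase::from_binary(&mut Cursor::new(binary))?;

    // Record the bundle and the files it contains.
    db.add_bundle(bundle);

    // Serialize everything back into the on-disk format.
    fs::write(database_path, db.to_binary()?)?;
    Ok(())
}
----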

View file

@ -1,6 +1,8 @@
use std::ffi::CString;
use std::io::{Cursor, Read, Seek, Write};
use std::path::Path;
use bitflags::bitflags;
use color_eyre::eyre::Context;
use color_eyre::{eyre, Result};
use futures::future::join_all;
@ -8,9 +10,7 @@ use serde::Serialize;
use crate::binary::sync::*;
use crate::filetype::*;
use crate::murmur::{HashGroup, Murmur64};
use super::EntryHeader;
use crate::murmur::{HashGroup, IdString64, Murmur64};
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub enum BundleFileType {
@ -397,7 +397,8 @@ impl From<BundleFileType> for u64 {
}
impl From<BundleFileType> for Murmur64 {
fn from(t: BundleFileType) -> Murmur64 {
t.into()
let hash: u64 = t.into();
Murmur64::from(hash)
}
}
@ -410,6 +411,7 @@ impl std::fmt::Display for BundleFileType {
#[derive(Debug)]
struct BundleFileHeader {
variant: u32,
unknown_1: u8,
size: usize,
len_data_file_name: usize,
}
@ -418,6 +420,8 @@ pub struct BundleFileVariant {
property: u32,
data: Vec<u8>,
data_file_name: Option<String>,
// Seems to be related to whether there is a data path.
unknown_1: u8,
}
impl BundleFileVariant {
@ -430,6 +434,7 @@ impl BundleFileVariant {
property: 0,
data: Vec::new(),
data_file_name: None,
unknown_1: 0,
}
}
@ -459,47 +464,64 @@ impl BundleFileVariant {
R: Read + Seek,
{
let variant = r.read_u32()?;
r.skip_u8(0)?;
let unknown_1 = r.read_u8()?;
let size = r.read_u32()? as usize;
r.skip_u8(1)?;
let len_data_file_name = r.read_u32()? as usize;
Ok(BundleFileHeader {
size,
unknown_1,
variant,
len_data_file_name,
})
}
#[tracing::instrument(skip_all)]
fn write_header<W>(&self, w: &mut W) -> Result<()>
fn write_header<W>(&self, w: &mut W, props: Properties) -> Result<()>
where
W: Write + Seek,
{
w.write_u32(self.property)?;
w.write_u8(0)?;
w.write_u32(self.data.len() as u32)?;
w.write_u8(1)?;
w.write_u8(self.unknown_1)?;
let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0);
if props.contains(Properties::DATA) {
w.write_u32(len_data_file_name as u32)?;
w.write_u8(1)?;
w.write_u32(0)?;
} else {
w.write_u32(self.data.len() as u32)?;
w.write_u8(1)?;
w.write_u32(len_data_file_name as u32)?;
}
Ok(())
}
}
bitflags! {
#[derive(Default)]
pub struct Properties: u32 {
const DATA = 0b100;
}
}
pub struct BundleFile {
file_type: BundleFileType,
name: String,
name: IdString64,
variants: Vec<BundleFileVariant>,
props: Properties,
}
impl BundleFile {
pub fn new(name: String, file_type: BundleFileType) -> Self {
Self {
file_type,
name,
name: name.into(),
variants: Vec::new(),
props: Properties::empty(),
}
}
@ -507,12 +529,8 @@ impl BundleFile {
self.variants.push(variant)
}
#[tracing::instrument(
name = "File::read",
skip_all,
fields(name = %meta.name_hash, ext = %meta.extension_hash, flags = meta.flags)
)]
pub fn from_reader<R>(ctx: &crate::Context, r: &mut R, meta: &EntryHeader) -> Result<Self>
#[tracing::instrument(name = "File::read", skip(ctx, r))]
pub fn from_reader<R>(ctx: &crate::Context, r: &mut R, props: Properties) -> Result<Self>
where
R: Read + Seek,
{
@ -521,22 +539,46 @@ impl BundleFile {
let name = ctx.lookup_hash(hash, HashGroup::Filename);
let header_count = r.read_u32()? as usize;
tracing::trace!(header_count);
let mut headers = Vec::with_capacity(header_count);
r.skip_u32(0)?;
for _ in 0..header_count {
let header = BundleFileVariant::read_header(r)?;
for i in 0..header_count {
let span = tracing::debug_span!("Read file header", i);
let _enter = span.enter();
let header = BundleFileVariant::read_header(r)
.wrap_err_with(|| format!("failed to read header {i}"))?;
// TODO: Figure out how `header.unknown_1` correlates to `Properties::DATA`
// if props.contains(Properties::DATA) {
// tracing::debug!("props: {props:?} | unknown_1: {}", header.unknown_1)
// }
headers.push(header);
}
let mut variants = Vec::with_capacity(header_count);
for (i, header) in headers.into_iter().enumerate() {
let span = tracing::info_span!("Read file header {}", i, size = header.size);
let span = tracing::debug_span!(
"Read file data {}",
i,
size = header.size,
len_data_file_name = header.len_data_file_name
);
let _enter = span.enter();
let (data, data_file_name) = if props.contains(Properties::DATA) {
let data = vec![];
let s = r
.read_string_len(header.size)
.wrap_err("failed to read data file name")?;
(data, Some(s))
} else {
let mut data = vec![0; header.size];
r.read_exact(&mut data)
.wrap_err_with(|| format!("failed to read header {i}"))?;
.wrap_err_with(|| format!("failed to read file {i}"))?;
let data_file_name = if header.len_data_file_name > 0 {
let s = r
@ -547,10 +589,14 @@ impl BundleFile {
None
};
(data, data_file_name)
};
let variant = BundleFileVariant {
property: header.variant,
data,
data_file_name,
unknown_1: header.unknown_1,
};
variants.push(variant);
@ -560,6 +606,7 @@ impl BundleFile {
variants,
file_type,
name,
props,
})
}
@ -568,7 +615,7 @@ impl BundleFile {
let mut w = Cursor::new(Vec::new());
w.write_u64(self.file_type.hash().into())?;
w.write_u64(Murmur64::hash(self.name.as_bytes()).into())?;
w.write_u64(self.name.to_murmur64().into())?;
w.write_u32(self.variants.len() as u32)?;
// TODO: Figure out what this is
@ -576,16 +623,26 @@ impl BundleFile {
for variant in self.variants.iter() {
w.write_u32(variant.property())?;
w.write_u8(0)?;
w.write_u32(variant.size() as u32)?;
w.write_u8(1)?;
w.write_u8(variant.unknown_1)?;
let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0);
if self.props.contains(Properties::DATA) {
w.write_u32(len_data_file_name as u32)?;
w.write_u8(1)?;
w.write_u32(0)?;
} else {
w.write_u32(variant.size() as u32)?;
w.write_u8(1)?;
w.write_u32(len_data_file_name as u32)?;
}
}
for variant in self.variants.iter() {
w.write_all(&variant.data)?;
if let Some(s) = &variant.data_file_name {
w.write_all(s.as_bytes())?;
}
}
Ok(w.into_inner())
@ -603,7 +660,11 @@ impl BundleFile {
S: AsRef<str>,
{
match file_type {
BundleFileType::Lua => lua::compile(name, sjson).await,
BundleFileType::Lua => {
let sjson =
CString::new(sjson.as_ref()).wrap_err("failed to build CString from SJSON")?;
lua::compile(name, sjson)
}
BundleFileType::Unknown(_) => {
eyre::bail!("Unknown file type. Cannot compile from SJSON");
}
@ -616,12 +677,16 @@ impl BundleFile {
}
}
pub fn base_name(&self) -> &String {
pub fn props(&self) -> Properties {
self.props
}
pub fn base_name(&self) -> &IdString64 {
&self.name
}
pub fn name(&self, decompiled: bool, variant: Option<u32>) -> String {
let mut s = self.name.clone();
let mut s = self.name.display().to_string();
s.push('.');
if let Some(variant) = variant {
@ -640,10 +705,18 @@ impl BundleFile {
pub fn matches_name<S>(&self, name: S) -> bool
where
S: AsRef<str>,
S: Into<IdString64>,
{
let name = name.as_ref();
self.name == name || self.name(false, None) == name || self.name(true, None) == name
let name = name.into();
if self.name == name {
return true;
}
if let IdString64::String(name) = name {
self.name(false, None) == name || self.name(true, None) == name
} else {
false
}
}
pub fn file_type(&self) -> BundleFileType {
@ -727,6 +800,12 @@ impl BundleFile {
}
}
impl PartialEq for BundleFile {
fn eq(&self, other: &Self) -> bool {
self.name == other.name && self.file_type == other.file_type
}
}
pub struct UserFile {
// TODO: Might be able to avoid some allocations with a Cow here
data: Vec<u8>,

View file

@ -1,16 +1,19 @@
use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write};
use std::mem::size_of;
use std::path::Path;
use color_eyre::eyre::{self, Context, Result};
use color_eyre::{Help, Report, SectionExt};
use oodle_sys::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE};
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE};
use crate::binary::sync::*;
use crate::murmur::{HashGroup, Murmur64};
use crate::bundle::file::Properties;
use crate::murmur::{HashGroup, IdString64, Murmur64};
pub(crate) mod database;
pub(crate) mod file;
pub use file::{BundleFile, BundleFileType};
pub use file::{BundleFile, BundleFileType, BundleFileVariant};
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
enum BundleFormat {
@ -39,72 +42,24 @@ impl From<BundleFormat> for u32 {
}
}
pub struct EntryHeader {
name_hash: Murmur64,
extension_hash: Murmur64,
flags: u32,
}
impl EntryHeader {
#[tracing::instrument(name = "EntryHeader::from_reader", skip_all)]
fn from_reader<R>(r: &mut R) -> Result<Self>
where
R: Read + Seek,
{
let extension_hash = Murmur64::from(r.read_u64()?);
let name_hash = Murmur64::from(r.read_u64()?);
let flags = r.read_u32()?;
// NOTE: Known values so far:
// - 0x0: seems to be the default
// - 0x4: seems to be used for files that point to something in `data/`
// seems to correspond to a change in value in the header's 'unknown_3'
if flags != 0x0 {
tracing::debug!(
flags,
"Unexpected meta flags for file {name_hash:016X}.{extension_hash:016X}",
);
}
Ok(Self {
name_hash,
extension_hash,
flags,
})
}
#[tracing::instrument(name = "EntryHeader::to_writer", skip_all)]
fn to_writer<W>(&self, w: &mut W) -> Result<()>
where
W: Write + Seek,
{
w.write_u64(self.extension_hash.into())?;
w.write_u64(self.name_hash.into())?;
w.write_u32(self.flags)?;
Ok(())
}
}
pub struct Bundle {
format: BundleFormat,
properties: [Murmur64; 32],
headers: Vec<EntryHeader>,
files: Vec<BundleFile>,
name: String,
name: IdString64,
}
impl Bundle {
pub fn new(name: String) -> Self {
pub fn new<S: Into<IdString64>>(name: S) -> Self {
Self {
name,
name: name.into(),
format: BundleFormat::F8,
properties: [0.into(); 32],
headers: Vec::new(),
files: Vec::new(),
}
}
pub fn get_name_from_path<P>(ctx: &crate::Context, path: P) -> String
pub fn get_name_from_path<P>(ctx: &crate::Context, path: P) -> IdString64
where
P: AsRef<Path>,
{
@ -113,28 +68,31 @@ impl Bundle {
.and_then(|name| name.to_str())
.and_then(|name| Murmur64::try_from(name).ok())
.map(|hash| ctx.lookup_hash(hash, HashGroup::Filename))
.unwrap_or_else(|| path.display().to_string())
.unwrap_or_else(|| path.display().to_string().into())
}
pub fn add_file(&mut self, file: BundleFile) {
tracing::trace!("Adding file {}", file.name(false, None));
let header = EntryHeader {
extension_hash: file.file_type().into(),
name_hash: Murmur64::hash(file.base_name().as_bytes()),
// TODO: Hard coded until we know what this is
flags: 0x0,
};
let existing_index = self
.files
.iter()
.enumerate()
.find(|(_, f)| **f == file)
.map(|val| val.0);
self.files.push(file);
self.headers.push(header);
if let Some(i) = existing_index {
self.files.swap_remove(i);
}
}
#[tracing::instrument(skip(ctx, binary), fields(len_binary = binary.as_ref().len()))]
pub fn from_binary<B>(ctx: &crate::Context, name: String, binary: B) -> Result<Self>
pub fn from_binary<B, S>(ctx: &crate::Context, name: S, binary: B) -> Result<Self>
where
B: AsRef<[u8]>,
S: Into<IdString64> + std::fmt::Debug,
{
let bundle_name = name;
let mut r = BufReader::new(Cursor::new(binary));
let format = r.read_u32().and_then(BundleFormat::try_from)?;
@ -153,9 +111,13 @@ impl Bundle {
*prop = Murmur64::from(r.read_u64()?);
}
let mut headers = Vec::with_capacity(num_entries);
let mut file_props = Vec::with_capacity(num_entries);
for _ in 0..num_entries {
headers.push(EntryHeader::from_reader(&mut r)?);
// Skip two u64 that contain the extension hash and file name hash.
// We don't need them here, since we're reading the whole bundle into memory
// anyway.
r.seek(SeekFrom::Current((2 * size_of::<u64>()) as i64))?;
file_props.push(Properties::from_bits_truncate(r.read_u32()?));
}
let num_chunks = r.read_u32()? as usize;
@ -197,7 +159,7 @@ impl Bundle {
decompressed.append(&mut compressed_buffer);
} else {
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle_sys::decompress(
let mut raw_buffer = oodle::decompress(
&compressed_buffer,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,
@ -210,8 +172,6 @@ impl Bundle {
unpacked_size_tracked -= CHUNK_SIZE;
}
tracing::trace!(raw_size = raw_buffer.len());
decompressed.append(&mut raw_buffer);
}
}
@ -226,17 +186,19 @@ impl Bundle {
let mut r = Cursor::new(decompressed);
let mut files = Vec::with_capacity(num_entries);
for i in 0..num_entries {
let meta = headers.get(i).unwrap();
let file = BundleFile::from_reader(ctx, &mut r, meta)
tracing::trace!(num_files = num_entries);
for (i, props) in file_props.iter().enumerate() {
let span = tracing::debug_span!("Read file {}", i);
let _enter = span.enter();
let file = BundleFile::from_reader(ctx, &mut r, *props)
.wrap_err_with(|| format!("failed to read file {i}"))?;
files.push(file);
}
Ok(Self {
name: bundle_name,
name: name.into(),
format,
headers,
files,
properties,
})
@ -254,8 +216,10 @@ impl Bundle {
w.write_u64((*prop).into())?;
}
for meta in self.headers.iter() {
meta.to_writer(&mut w)?;
for file in self.files.iter() {
w.write_u64(file.file_type().into())?;
w.write_u64(file.base_name().to_murmur64().into())?;
w.write_u32(file.props().bits())?;
}
let unpacked_data = {
@ -293,7 +257,7 @@ impl Bundle {
let mut chunk_sizes = Vec::with_capacity(num_chunks);
for chunk in chunks {
let compressed = oodle_sys::compress(chunk)?;
let compressed = oodle::compress(chunk)?;
tracing::trace!(
raw_chunk_size = chunk.len(),
compressed_chunk_size = compressed.len()
@ -313,7 +277,7 @@ impl Bundle {
Ok(w.into_inner())
}
pub fn name(&self) -> &String {
pub fn name(&self) -> &IdString64 {
&self.name
}
@ -395,7 +359,7 @@ where
r.read_exact(&mut compressed_buffer)?;
// TODO: Optimize to not reallocate?
let mut raw_buffer = oodle_sys::decompress(
let mut raw_buffer = oodle::decompress(
&compressed_buffer,
OodleLZ_FuzzSafe::No,
OodleLZ_CheckCRC::No,

View file

@ -1,6 +1,6 @@
use std::path::PathBuf;
use crate::murmur::{Dictionary, HashGroup, Murmur32, Murmur64};
use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64};
pub struct Context {
pub lookup: Dictionary,
@ -21,17 +21,17 @@ impl Context {
}
}
pub fn lookup_hash<M>(&self, hash: M, group: HashGroup) -> String
pub fn lookup_hash<M>(&self, hash: M, group: HashGroup) -> IdString64
where
M: Into<Murmur64>,
{
let hash = hash.into();
if let Some(s) = self.lookup.lookup(hash, group) {
tracing::debug!(%hash, string = s, "Murmur64 lookup successful");
s.to_owned()
s.to_string().into()
} else {
tracing::debug!(%hash, "Murmur64 lookup failed");
format!("{hash:016X}")
hash.into()
}
}

View file

@ -1,13 +1,16 @@
use std::io::{Cursor, Write};
use std::ffi::CStr;
use std::ffi::CString;
use std::io::Cursor;
use std::io::Write;
use color_eyre::{eyre::Context, Result};
use tokio::{fs, process::Command};
use color_eyre::eyre;
use color_eyre::eyre::Context;
use color_eyre::Result;
use luajit2_sys as lua;
use crate::{
binary::sync::WriteExt,
bundle::file::{BundleFileVariant, UserFile},
BundleFile, BundleFileType,
};
use crate::binary::sync::WriteExt;
use crate::bundle::file::{BundleFileVariant, UserFile};
use crate::{BundleFile, BundleFileType};
#[tracing::instrument(skip_all, fields(buf_len = data.as_ref().len()))]
pub(crate) async fn decompile<T>(_ctx: &crate::Context, data: T) -> Result<Vec<UserFile>>
@ -19,67 +22,85 @@ where
}
#[tracing::instrument(skip_all)]
pub(crate) async fn compile<S>(name: String, code: S) -> Result<BundleFile>
pub fn compile<S, C>(name: S, code: C) -> Result<BundleFile>
where
S: AsRef<str>,
S: Into<String>,
C: AsRef<CStr>,
{
let in_file_path = {
let mut path = std::env::temp_dir();
let name: String = std::iter::repeat_with(fastrand::alphanumeric)
.take(10)
.collect();
path.push(name + "-dtmt.lua");
let name = name.into();
let code = code.as_ref();
path
};
let bytecode = unsafe {
let state = lua::luaL_newstate();
lua::luaL_openlibs(state);
let out_file_path = {
let mut path = std::env::temp_dir();
lua::lua_pushstring(state, code.as_ptr() as _);
lua::lua_setglobal(state, b"code\0".as_ptr() as _);
let name: String = std::iter::repeat_with(fastrand::alphanumeric)
.take(10)
.collect();
path.push(name + "-dtmt.luab");
let name = CString::new(name.as_bytes())
.wrap_err_with(|| format!("cannot convert name into CString: {}", name))?;
lua::lua_pushstring(state, name.as_ptr() as _);
lua::lua_setglobal(state, b"name\0".as_ptr() as _);
path
};
let run = b"return string.dump(loadstring(code, \"@\" .. name), false)\0";
match lua::luaL_loadstring(state, run.as_ptr() as _) as u32 {
lua::LUA_OK => {}
lua::LUA_ERRSYNTAX => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
fs::write(&in_file_path, code.as_ref().as_bytes())
.await
.wrap_err_with(|| format!("failed to write file {}", in_file_path.display()))?;
lua::lua_close(state);
// TODO: Make executable name configurable
Command::new("luajit")
.arg("-bg")
.arg("-F")
.arg(name.clone() + ".lua")
.arg("-o")
.arg("Windows")
.arg(&in_file_path)
.arg(&out_file_path)
.status()
.await
.wrap_err("failed to compile to LuaJIT byte code")?;
eyre::bail!("Invalid syntax: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode")
}
_ => unreachable!(),
}
let mut data = Cursor::new(Vec::new());
match lua::lua_pcall(state, 0, 1, 0) as u32 {
lua::LUA_OK => {
// The binary data is pretty much guaranteed to contain NUL bytes,
// so we can't rely on `lua_tostring` and `CStr` here. Instead we have to
// explicitly query the string length and build our vector from that.
// However, on the bright side, we don't have to go through any string types anymore,
// and can instead treat it as raw bytes immediately.
let mut len = 0;
let data = lua::lua_tolstring(state, -1, &mut len) as *const u8;
let data = std::slice::from_raw_parts(data, len).to_vec();
let bytecode = {
let mut data = fs::read(&out_file_path)
.await
.wrap_err_with(|| format!("failed to read file {}", out_file_path.display()))?;
// Add Fatshark's custom magic bytes
data[1] = 0x46;
data[2] = 0x53;
data[3] = 0x82;
lua::lua_close(state);
data
}
lua::LUA_ERRRUN => {
let err = lua::lua_tostring(state, -1);
let err = CStr::from_ptr(err).to_string_lossy().to_string();
lua::lua_close(state);
eyre::bail!("Failed to compile LuaJIT bytecode: {}", err);
}
lua::LUA_ERRMEM => {
lua::lua_close(state);
eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode")
}
// We don't use an error handler function, so this should be unreachable
lua::LUA_ERRERR => unreachable!(),
_ => unreachable!(),
}
};
let mut data = Cursor::new(Vec::with_capacity(bytecode.len() + 12));
data.write_u32(bytecode.len() as u32)?;
// I believe this is supposed to be a uleb128, but it seems to be always 0x2 in binary.
data.write_u64(0x2)?;
data.write_all(&bytecode)?;
// TODO: Figure out what these two values are
data.write_u32(0x2)?;
data.write_u32(0x0)?;
// Use Fatshark's custom magic bytes
data.write_all(&[0x1b, 0x46, 0x53, 0x82])?;
data.write_all(&bytecode[4..])?;
let mut file = BundleFile::new(name, BundleFileType::Lua);
let mut variant = BundleFileVariant::new();

View file

@ -97,6 +97,7 @@ pub struct Package {
_name: String,
_root: PathBuf,
inner: PackageType,
flags: u8,
}
impl Deref for Package {
@ -114,6 +115,15 @@ impl DerefMut for Package {
}
impl Package {
pub fn new(name: String, root: PathBuf) -> Self {
Self {
_name: name,
_root: root,
inner: Default::default(),
flags: 1,
}
}
fn len(&self) -> usize {
self.values().fold(0, |total, files| total + files.len())
}
@ -171,6 +181,7 @@ impl Package {
inner,
_name: name,
_root: root.to_path_buf(),
flags: 1,
};
Ok(pkg)
@ -211,13 +222,25 @@ impl Package {
let t = BundleFileType::from(r.read_u64()?);
let hash = Murmur64::from(r.read_u64()?);
let path = ctx.lookup_hash(hash, HashGroup::Filename);
inner.entry(t).or_default().insert(PathBuf::from(path));
inner
.entry(t)
.or_default()
.insert(PathBuf::from(path.display().to_string()));
}
let flags = r.read_u8()?;
if cfg!(debug_assertions) && flags != 1 {
tracing::warn!("Unexpected value for package flags: {:0x}", flags);
} else if (flags & 0xFE) >= 2 {
tracing::warn!("Resource Package has common packages. Ignoring.");
}
let pkg = Self {
inner,
_name: name,
_root: PathBuf::new(),
flags,
};
Ok(pkg)
@ -240,6 +263,8 @@ impl Package {
}
}
w.write_u8(self.flags)?;
Ok(w.into_inner())
}
}

View file

@ -4,6 +4,8 @@ mod context;
pub mod filetype;
pub mod murmur;
pub use binary::{FromBinary, ToBinary};
pub use bundle::database::BundleDatabase;
pub use bundle::decompress;
pub use bundle::{Bundle, BundleFile, BundleFileType};
pub use bundle::{Bundle, BundleFile, BundleFileType, BundleFileVariant};
pub use context::Context;

View file

@ -55,6 +55,24 @@ pub struct Entry {
group: HashGroup,
}
impl Entry {
pub fn value(&self) -> &String {
&self.value
}
pub fn long(&self) -> Murmur64 {
self.long
}
pub fn short(&self) -> Murmur32 {
self.short
}
pub fn group(&self) -> HashGroup {
self.group
}
}
pub struct Dictionary {
entries: Vec<Entry>,
}
@ -172,4 +190,8 @@ impl Dictionary {
pub fn is_empty(&self) -> bool {
self.entries.is_empty()
}
pub fn entries(&self) -> &Vec<Entry> {
&self.entries
}
}

View file

@ -13,8 +13,7 @@ mod murmurhash64;
pub const SEED: u32 = 0;
pub use dictionary::Dictionary;
pub use dictionary::HashGroup;
pub use dictionary::{Dictionary, Entry, HashGroup};
pub use murmurhash64::hash;
pub use murmurhash64::hash32;
pub use murmurhash64::hash_inverse as inverse;
@ -67,6 +66,12 @@ impl fmt::UpperHex for Murmur64 {
}
}
impl fmt::LowerHex for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::LowerHex::fmt(&self.0, f)
}
}
impl fmt::Display for Murmur64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::UpperHex::fmt(&self.0, f)
@ -237,3 +242,148 @@ impl<'de> Deserialize<'de> for Murmur32 {
deserializer.deserialize_any(Self(0))
}
}
// This type encodes the fact that, when reading in a bundle, we don't always have a dictionary
// entry for every hash in there. So we do want to have the real string available when needed,
// but at the same time retain the original hash information for when we don't.
// This is especially important when we want to write the read bundle back out, as the hashes
// need to stay the same.
// The previous system of always turning hashes into strings worked well for the purpose of
// displaying hashes, but would have made it very hard to turn a stringified hash back into
// an actual hash.
#[derive(Clone, Debug, Eq)]
pub enum IdString64 {
Hash(Murmur64),
String(String),
}
impl IdString64 {
pub fn to_murmur64(&self) -> Murmur64 {
match self {
Self::Hash(hash) => *hash,
Self::String(s) => Murmur64::hash(s.as_bytes()),
}
}
pub fn display(&self) -> IdString64Display {
let s = match self {
IdString64::Hash(hash) => hash.to_string(),
IdString64::String(s) => s.clone(),
};
IdString64Display(s)
}
pub fn is_string(&self) -> bool {
match self {
IdString64::Hash(_) => false,
IdString64::String(_) => true,
}
}
pub fn is_hash(&self) -> bool {
match self {
IdString64::Hash(_) => true,
IdString64::String(_) => false,
}
}
}
impl<S: Into<String>> From<S> for IdString64 {
fn from(value: S) -> Self {
Self::String(value.into())
}
}
impl From<Murmur64> for IdString64 {
fn from(value: Murmur64) -> Self {
Self::Hash(value)
}
}
impl From<IdString64> for Murmur64 {
fn from(value: IdString64) -> Self {
value.to_murmur64()
}
}
impl PartialEq for IdString64 {
fn eq(&self, other: &Self) -> bool {
self.to_murmur64() == other.to_murmur64()
}
}
impl std::hash::Hash for IdString64 {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
state.write_u64(self.to_murmur64().into());
}
}
impl serde::Serialize for IdString64 {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_u64(self.to_murmur64().into())
}
}
struct IdString64Visitor;
impl<'de> serde::de::Visitor<'de> for IdString64Visitor {
type Value = IdString64;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("an u64 or a string")
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::Hash(value.into()))
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::String(v.to_string()))
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(IdString64::String(v))
}
}
impl<'de> serde::Deserialize<'de> for IdString64 {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_u64(IdString64Visitor)
}
}
pub struct IdString64Display(String);
impl std::fmt::Display for IdString64Display {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::fmt::UpperHex for IdString64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::UpperHex::fmt(&self.to_murmur64(), f)
}
}
impl std::fmt::LowerHex for IdString64 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
std::fmt::LowerHex::fmt(&self.to_murmur64(), f)
}
}
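
To illustrate the behaviour described in the comment above, a hedged sketch (the example name `packages/boot` is made up): an `IdString64` built from a known string and one built from the raw hash compare as equal, since equality and hashing both go through `to_murmur64()`, but only the named variant can display the original string.

[source,rust]
----
// Hedged sketch; relies only on the IdString64 API defined in this hunk.
use sdk::murmur::{IdString64, Murmur64};

fn demo() {
    let named = IdString64::from("packages/boot");
    let hashed = IdString64::from(Murmur64::hash(b"packages/boot"));

    // Both resolve to the same 64-bit hash, so they are considered equal...
    assert_eq!(named, hashed);
    assert_eq!(named.to_murmur64(), hashed.to_murmur64());

    // ...but only the named variant still knows the original string.
    assert!(named.is_string());
    assert!(hashed.is_hash());
    println!("{} / {:016X}", named.display(), hashed);
}
----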

@ -1 +1 @@
Subproject commit a6ef5a914e15f22d3ebcc475969b65182475139f
Subproject commit e94218d8f52a51529c83af33a99cc17f66caae2e