diff --git a/Cargo.lock b/Cargo.lock index b409111..52c1f05 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -38,6 +38,24 @@ dependencies = [ "memchr", ] +[[package]] +name = "anyhow" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" + +[[package]] +name = "associative-cache" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46016233fc1bb55c23b856fe556b7db6ccd05119a0a392e04f0b3b7c79058f16" + [[package]] name = "async-recursion" version = "1.0.2" @@ -49,6 +67,30 @@ dependencies = [ "syn", ] +[[package]] +name = "atk" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39991bc421ddf72f70159011b323ff49b0f783cc676a7287c59453da2e2531cf" +dependencies = [ + "atk-sys", + "bitflags", + "glib", + "libc", +] + +[[package]] +name = "atk-sys" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ad703eb64dc058024f0e57ccfa069e15a413b98dbd50a1a950e743b7f11148" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + [[package]] name = "autocfg" version = "1.1.0" @@ -72,9 +114,31 @@ dependencies = [ [[package]] name = "base64ct" -version = "1.5.3" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bindgen" +version = "0.64.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn", + "which", +] [[package]] name = "bitflags" @@ -82,6 +146,21 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "block" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" + [[package]] name = "block-buffer" version = "0.10.3" @@ -93,9 +172,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45ea9b00a7b3f2988e9a65ad3917e62123c38dba709b666506207be96d1790b" +checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1" dependencies = [ "memchr", "once_cell", @@ -103,6 +182,12 @@ dependencies = [ "serde", ] +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + [[package]] name = "bytecount" version = "0.6.3" @@ -117,9 +202,9 @@ checksum = 
"14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" [[package]] name = "bzip2" @@ -143,14 +228,57 @@ dependencies = [ ] [[package]] -name = "cc" -version = "1.0.78" +name = "cairo-rs" +version = "0.16.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "f3125b15ec28b84c238f6f476c6034016a5f6cc0221cb514ca46c532139fc97d" +dependencies = [ + "bitflags", + "cairo-sys-rs", + "glib", + "libc", + "once_cell", + "thiserror", +] + +[[package]] +name = "cairo-sys-rs" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c48f4af05fabdcfa9658178e1326efa061853f040ce7d72e33af6885196f421" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" dependencies = [ "jobserver", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-expr" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0357a6402b295ca3a86bc148e84df46c02e41f41fef186bda662557ef6328aa" +dependencies = [ + "smallvec", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -167,10 +295,21 @@ dependencies = [ ] [[package]] -name = "clap" -version = "4.0.32" +name = "clang-sys" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7db700bc935f9e43e88d00b0850dae18a63773cfbec6d8e070fccf7fef89a39" +checksum = "77ed9a53e5d4d9c573ae844bfac6872b159cb1d1585a83b29e7a64b7eef7332a" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f3061d6db6d8fcbbd4b05e057f2acace52e64e96b498c08c2d7a4e65addd340" dependencies = [ "bitflags", "clap_derive", @@ -185,9 +324,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.0.21" +version = "4.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014" +checksum = "34d122164198950ba84a918270a3bb3f7ededd25e15f7451673d986f55bd2667" dependencies = [ "heck", "proc-macro-error", @@ -198,24 +337,77 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.3.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8" +checksum = "350b9cf31731f9957399229e9b2adc51eeabdfbe9d71d9a0552275fd12710d09" dependencies = [ "os_str_bytes", ] [[package]] -name = "clipboard-win" -version = "4.4.2" +name = "cli-table" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4ab1b92798304eedc095b53942963240037c0516452cb11aeba709d420b2219" +checksum = "adfbb116d9e2c4be7011360d0c0bee565712c11e969c9609b25b619366dc379d" +dependencies = [ + 
"cli-table-derive", + "termcolor", + "unicode-width", +] + +[[package]] +name = "cli-table-derive" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af3bfb9da627b0a6c467624fb7963921433774ed435493b5c08a3053e829ad4" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clipboard-win" +version = "4.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7191c27c2357d9b7ef96baac1773290d4ca63b24205b82a3fd8a0637afcf0362" dependencies = [ "error-code", "str-buf", "winapi", ] +[[package]] +name = "cocoa" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f425db7937052c684daec3bd6375c8abe2d146dca4b8b143d6db777c39138f3a" +dependencies = [ + "bitflags", + "block", + "cocoa-foundation", + "core-foundation", + "core-graphics", + "foreign-types", + "libc", + "objc", +] + +[[package]] +name = "cocoa-foundation" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ade49b65d560ca58c403a479bb396592b155c0185eada742ee323d1d68d6318" +dependencies = [ + "bitflags", + "block", + "core-foundation", + "core-graphics-types", + "foreign-types", + "libc", + "objc", +] + [[package]] name = "color-eyre" version = "0.6.2" @@ -255,12 +447,75 @@ dependencies = [ "toml", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "constant_time_eq" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "core-graphics" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb" +dependencies = [ + "bitflags", + "core-foundation", + "core-graphics-types", + "foreign-types", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a68b68b3446082644c91ac778bf50cd4104bfb002b5a6a7c44cca5a2c70788b" +dependencies = [ + "bitflags", + "core-foundation", + "foreign-types", + "libc", +] + +[[package]] +name = "core-text" +version = "19.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d74ada66e07c1cefa18f8abfba765b486f250de2e4a999e5727fc0dd4b4a25" +dependencies = [ + "core-foundation", + "core-graphics", + "foreign-types", + "libc", +] + [[package]] name = "cpufeatures" version = "0.2.5" @@ -376,20 +631,121 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf95dc3f046b9da4f2d51833c0d3547d8564ef6910f5c1ed130306a75b92886" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + 
+[[package]] +name = "druid" +version = "0.8.2" +source = "git+https://github.com/linebender/druid.git#5fa4ce51ed3d74640388de6385f135c50d346c8d" +dependencies = [ + "console_error_panic_hook", + "druid-derive", + "druid-shell", + "fluent-bundle", + "fluent-langneg", + "fluent-syntax", + "fnv", + "im", + "instant", + "tracing", + "tracing-subscriber", + "tracing-wasm", + "unic-langid", + "unicode-segmentation", + "xi-unicode", +] + +[[package]] +name = "druid-derive" +version = "0.5.0" +source = "git+https://github.com/linebender/druid.git#5fa4ce51ed3d74640388de6385f135c50d346c8d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "druid-shell" +version = "0.8.0" +source = "git+https://github.com/linebender/druid.git#5fa4ce51ed3d74640388de6385f135c50d346c8d" +dependencies = [ + "anyhow", + "bitflags", + "block", + "cairo-rs", + "cfg-if", + "cocoa", + "core-graphics", + "foreign-types", + "gdk-sys", + "glib-sys", + "gtk", + "gtk-sys", + "instant", + "js-sys", + "keyboard-types", + "objc", + "once_cell", + "piet-common", + "scopeguard", + "time", + "tracing", + "wasm-bindgen", + "web-sys", + "winapi", + "wio", +] + +[[package]] +name = "dtmm" +version = "0.1.0" +dependencies = [ + "bitflags", + "clap", + "color-eyre", + "confy", + "druid", + "dtmt-shared", + "futures", + "oodle", + "sdk", + "serde", + "serde_sjson", + "tokio", + "tokio-stream", + "tracing", + "tracing-error", + "tracing-subscriber", + "zip", +] + [[package]] name = "dtmt" version = "0.2.0" dependencies = [ "clap", + "cli-table", "color-eyre", "confy", "csv-async", + "dtmt-shared", "futures", "futures-util", "glob", "libloading", "nanorand", - "oodle-sys", + "oodle", + "path-clean", "pin-project-lite", "promptly", "sdk", @@ -405,6 +761,35 @@ dependencies = [ "zip", ] +[[package]] +name = "dtmt-shared" +version = "0.1.0" +dependencies = [ + "serde", + "time", + "tracing", + "tracing-error", + "tracing-subscriber", +] + +[[package]] +name = "dwrote" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439a1c2ba5611ad3ed731280541d36d2e9c4ac5e7fb818a27b604bdc5a6aa65b" +dependencies = [ + "lazy_static", + "libc", + "winapi", + "wio", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" + [[package]] name = "endian-type" version = "0.1.2" @@ -454,22 +839,32 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" dependencies = [ "instant", ] [[package]] name = "fd-lock" -version = "3.0.8" +version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb21c69b9fea5e15dbc1049e4b77145dd0ba1c84019c488102de0dc4ea4b0a27" +checksum = "8ef1a30ae415c3a691a4f41afddc2dbcd6d70baf338368d85ebc1e8ed92cedb9" dependencies = [ "cfg-if", "rustix", - "windows-sys", + "windows-sys 0.45.0", +] + +[[package]] +name = "field-offset" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e1c54951450cbd39f3dbcf1005ac413b49487dabf18a720ad2383eccfeffb92" +dependencies = [ + "memoffset", + "rustc_version", ] [[package]] @@ -483,10 +878,71 @@ dependencies = [ ] [[package]] -name = "futures" -version = "0.3.25" +name 
= "fluent-bundle" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" +checksum = "e242c601dec9711505f6d5bbff5bedd4b61b2469f2e8bb8e57ee7c9747a87ffd" +dependencies = [ + "fluent-langneg", + "fluent-syntax", + "intl-memoizer", + "intl_pluralrules", + "rustc-hash", + "self_cell", + "smallvec", + "unic-langid", +] + +[[package]] +name = "fluent-langneg" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c4ad0989667548f06ccd0e306ed56b61bd4d35458d54df5ec7587c0e8ed5e94" +dependencies = [ + "unic-langid", +] + +[[package]] +name = "fluent-syntax" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0abed97648395c902868fee9026de96483933faa54ea3b40d652f7dfe61ca78" +dependencies = [ + "thiserror", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "futures" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84" dependencies = [ "futures-channel", "futures-core", @@ -499,9 +955,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" +checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5" dependencies = [ "futures-core", "futures-sink", @@ -509,15 +965,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" +checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608" [[package]] name = "futures-executor" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" +checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e" dependencies = [ "futures-core", "futures-task", @@ -526,15 +982,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" +checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531" [[package]] name = "futures-macro" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" +checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" dependencies = [ "proc-macro2", "quote", @@ -543,21 +999,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" +checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364" [[package]] name = "futures-task" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" +checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366" [[package]] name = "futures-util" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" +checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1" dependencies = [ "futures-channel", "futures-core", @@ -571,6 +1027,65 @@ dependencies = [ "slab", ] +[[package]] +name = "gdk" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa9cb33da481c6c040404a11f8212d193889e9b435db2c14fd86987f630d3ce1" +dependencies = [ + "bitflags", + "cairo-rs", + "gdk-pixbuf", + "gdk-sys", + "gio", + "glib", + "libc", + "pango", +] + +[[package]] +name = "gdk-pixbuf" +version = "0.16.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3578c60dee9d029ad86593ed88cb40f35c1b83360e12498d055022385dd9a05" +dependencies = [ + "bitflags", + "gdk-pixbuf-sys", + "gio", + "glib", + "libc", +] + +[[package]] +name = "gdk-pixbuf-sys" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3092cf797a5f1210479ea38070d9ae8a5b8e9f8f1be9f32f4643c529c7d70016" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gdk-sys" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76354f97a913e55b984759a997b693aa7dc71068c9e98bcce51aa167a0a5c5a" +dependencies = [ + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "pkg-config", + "system-deps", +] + [[package]] name = "generic-array" version = "0.14.6" @@ -594,9 +1109,89 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" +checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" + +[[package]] +name = "gio" +version = "0.16.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a1c84b4534a290a29160ef5c6eff2a9c95833111472e824fc5cb78b513dd092" +dependencies = [ + "bitflags", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "gio-sys", + "glib", + "libc", + "once_cell", + "pin-project-lite", + "smallvec", + "thiserror", +] + +[[package]] +name = "gio-sys" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9b693b8e39d042a95547fc258a7b07349b1f0b48f4b2fa3108ba3c51c0b5229" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", + "winapi", 
+] + +[[package]] +name = "glib" +version = "0.16.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd4df61a866ed7259d6189b8bcb1464989a77f1d85d25d002279bbe9dd38b2f" +dependencies = [ + "bitflags", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "once_cell", + "smallvec", + "thiserror", +] + +[[package]] +name = "glib-macros" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e084807350b01348b6d9dbabb724d1a0bb987f47a2c85de200e98e12e30733bf" +dependencies = [ + "anyhow", + "heck", + "proc-macro-crate", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "glib-sys" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61a4f46316d06bfa33a7ac22df6f0524c8be58e3db2d9ca99ccb1f357b62a65" +dependencies = [ + "libc", + "system-deps", +] [[package]] name = "glob" @@ -605,10 +1200,82 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] -name = "heck" -version = "0.4.0" +name = "gobject-sys" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "3520bb9c07ae2a12c7f2fbb24d4efc11231c8146a86956413fb1a79bb760a0f1" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gtk" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4d3507d43908c866c805f74c9dd593c0ce7ba5c38e576e41846639cdcd4bee6" +dependencies = [ + "atk", + "bitflags", + "cairo-rs", + "field-offset", + "futures-channel", + "gdk", + "gdk-pixbuf", + "gio", + "glib", + "gtk-sys", + "gtk3-macros", + "libc", + "once_cell", + "pango", + "pkg-config", +] + +[[package]] +name = "gtk-sys" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b5f8946685d5fe44497007786600c2f368ff6b1e61a16251c89f72a97520a3" +dependencies = [ + "atk-sys", + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "gtk3-macros" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cfd6557b1018b773e43c8de9d0d13581d6b36190d0501916cbec4731db5ccff" +dependencies = [ + "anyhow", + "proc-macro-crate", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" @@ -619,6 +1286,12 @@ dependencies = [ "libc", ] +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + [[package]] name = "hmac" version = "0.12.1" @@ -628,12 +1301,37 @@ dependencies = [ "digest", ] +[[package]] +name = "im" +version = "15.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core", + "rand_xoshiro", + "serde", + "sized-chunks", + "typenum", + "version_check", +] + [[package]] name = "indenter" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" +[[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg", + "hashbrown", +] + [[package]] name = "instant" version = "0.1.12" @@ -641,28 +1339,50 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "intl-memoizer" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c310433e4a310918d6ed9243542a6b83ec1183df95dff8f23f87bb88a264a66f" +dependencies = [ + "type-map", + "unic-langid", +] + +[[package]] +name = "intl_pluralrules" +version = "7.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "078ea7b7c29a2b4df841a7f6ac8775ff6074020c6776d48491ce2268e068f972" +dependencies = [ + "unic-langid", ] [[package]] name = "io-lifetimes" -version = "1.0.3" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] name = "is-terminal" -version = "0.4.2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dfb6c8100ccc63462345b67d1bbc3679177c75ee4bf59bf29c8b1d110b8189" +checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.1", "io-lifetimes", "rustix", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -680,12 +1400,46 @@ dependencies = [ "libc", ] +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "keyboard-types" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7668b7cff6a51fe61cdde64cd27c8a220786f399501b57ebe36f7d8112fd68" +dependencies = [ + "bitflags", +] + +[[package]] +name = "kurbo" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e119590a03caff1f7a582e8ee8c2164ddcc975791701188132fd1d1b518d3871" +dependencies = [ + "arrayvec", + "serde", +] + [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" version = "0.2.139" @@ -717,6 +1471,26 @@ dependencies = [ "cfg-if", ] +[[package]] +name 
= "luajit2-sys" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33bb7acccd5a0224645ba06eba391af5f7194ff1762c2545860b43afcfd41af2" +dependencies = [ + "cc", + "fs_extra", + "libc", +] + +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + [[package]] name = "matchers" version = "0.1.0" @@ -726,6 +1500,12 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "memchr" version = "2.5.0" @@ -758,14 +1538,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" dependencies = [ "libc", "log", "wasi", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -798,9 +1578,9 @@ dependencies = [ [[package]] name = "nom" -version = "7.1.2" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5507769c4919c998e69e49c839d9dc6e693ede4cc4290d6ad8b41d4f09c548c" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", @@ -808,9 +1588,9 @@ dependencies = [ [[package]] name = "nom_locate" -version = "4.0.0" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37794436ca3029a3089e0b95d42da1f0b565ad271e4d3bb4bad0c7bb70b10605" +checksum = "b1e299bf5ea7b212e811e71174c5d1a5d065c4c0ad0c8691ecb1f97e3e66025e" dependencies = [ "bytecount", "memchr", @@ -833,31 +1613,49 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" dependencies = [ - "hermit-abi", + "hermit-abi 0.2.6", "libc", ] [[package]] -name = "object" -version = "0.30.1" +name = "num_threads" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d864c91689fdc196779b98dba0aceac6118594c2df6ee5d943eb6a8df4d107a" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +dependencies = [ + "libc", +] + +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", +] + +[[package]] +name = "object" +version = "0.30.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] -name = "oodle-sys" +name = "oodle" version = "0.1.0" dependencies = [ - "libloading", - "thiserror", + "bindgen", + "color-eyre", "tracing", ] @@ -885,6 +1683,59 @@ version = "3.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" +[[package]] +name = "pango" +version = "0.16.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdff66b271861037b89d028656184059e03b0b6ccb36003820be19f7200b1e94" +dependencies = [ + "bitflags", + "gio", + "glib", + "libc", + "once_cell", + "pango-sys", +] + +[[package]] +name = "pango-sys" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e134909a9a293e04d2cc31928aa95679c5e4df954d0b85483159bd20d8f047f" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "pangocairo" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ad2ec87789371b551fd2367c10aa37060412ffd3e60abd99491b21b93a3f9b" +dependencies = [ + "bitflags", + "cairo-rs", + "glib", + "libc", + "pango", + "pangocairo-sys", +] + +[[package]] +name = "pangocairo-sys" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "848d2df9b7f1a8c7a19d994de443bcbe5d4382610ccb8e64247f932be74fcf76" +dependencies = [ + "cairo-sys-rs", + "glib-sys", + "libc", + "pango-sys", + "system-deps", +] + [[package]] name = "password-hash" version = "0.4.2" @@ -896,6 +1747,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "path-clean" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17359afc20d7ab31fdb42bb844c8b3bb1dabd7dcf7e68428492da7f16966fcef" + [[package]] name = "pbkdf2" version = "0.11.0" @@ -908,6 +1765,108 @@ dependencies = [ "sha2", ] +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pest" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660" +dependencies = [ + "thiserror", + "ucd-trie", +] + +[[package]] +name = "piet" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e381186490a3e2017a506d62b759ea8eaf4be14666b13ed53973e8ae193451b1" +dependencies = [ + "kurbo", + "unic-bidi", +] + +[[package]] +name = "piet-cairo" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12dc0b38ac300c79deb9bfc8c7f91a08f2b080338648f7202981094b22321bb9" +dependencies = [ + "cairo-rs", + "pango", + "pangocairo", + "piet", + "unicode-segmentation", + "xi-unicode", +] + +[[package]] +name = "piet-common" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd8497cc0bcfecb1e14e027428c5e3eaf9af6e14761176e1212006d8bdba387" +dependencies = [ + "cairo-rs", + "cairo-sys-rs", + "cfg-if", + "core-graphics", + "piet", + "piet-cairo", + "piet-coregraphics", + "piet-direct2d", + "piet-web", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "piet-coregraphics" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a819b41d2ddb1d8abf3e45e49422f866cba281b4abb5e2fb948bba06e2c3d3f7" +dependencies = [ + "associative-cache", + "core-foundation", + "core-foundation-sys", + "core-graphics", + "core-text", + "foreign-types", + "piet", +] + +[[package]] +name = "piet-direct2d" +version = "0.6.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd00e91df4f987be40eb13042afe6ee9e54468466bdb7486390b40d4fef0993e" +dependencies = [ + "associative-cache", + "dwrote", + "piet", + "utf16_lit", + "winapi", + "wio", +] + +[[package]] +name = "piet-web" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a560232a94e535979923d49062d1c6d5407b3804bcd0d0b4cb9e25a9b41db1e" +dependencies = [ + "js-sys", + "piet", + "unicode-segmentation", + "wasm-bindgen", + "web-sys", + "xi-unicode", +] + [[package]] name = "pin-project-lite" version = "0.2.9" @@ -926,6 +1885,16 @@ version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -952,9 +1921,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.49" +version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" dependencies = [ "unicode-ident", ] @@ -993,6 +1962,15 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -1015,9 +1993,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" +checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" dependencies = [ "aho-corasick", "memchr", @@ -1039,15 +2017,6 @@ version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - [[package]] name = "rustc-demangle" version = "0.1.21" @@ -1055,17 +2024,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" [[package]] -name = "rustix" -version = "0.36.6" +name = "rustc-hash" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4feacf7db682c6c329c4ede12649cd36ecab0f3be5b7d74e6a20304725db4549" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version 
= "0.36.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" dependencies = [ "bitflags", "errno", "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -1109,6 +2093,7 @@ name = "sdk" version = "0.2.0" dependencies = [ "async-recursion", + "bitflags", "byteorder", "color-eyre", "csv-async", @@ -1117,8 +2102,9 @@ dependencies = [ "futures-util", "glob", "libloading", + "luajit2-sys", "nanorand", - "oodle-sys", + "oodle", "pin-project-lite", "serde", "serde_sjson", @@ -1128,6 +2114,30 @@ dependencies = [ "tracing-error", ] +[[package]] +name = "self_cell" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ef965a420fe14fdac7dd018862966a4c14094f900e1650bbc71ddd7d580c8af" + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + [[package]] name = "serde" version = "1.0.152" @@ -1150,7 +2160,7 @@ dependencies = [ [[package]] name = "serde_sjson" -version = "0.2.3" +version = "0.2.4" dependencies = [ "nom", "nom_locate", @@ -1189,19 +2199,35 @@ dependencies = [ ] [[package]] -name = "signal-hook-registry" -version = "1.4.0" +name = "shlex" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" dependencies = [ "libc", ] [[package]] -name = "slab" -version = "0.4.7" +name = "sized-chunks" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + +[[package]] +name = "slab" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" dependencies = [ "autocfg", ] @@ -1241,9 +2267,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -1251,24 +2277,36 @@ dependencies = [ ] [[package]] -name = "tempfile" -version = "3.3.0" +name = "system-deps" +version = "6.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "2955b1fe31e1fa2fbd1976b71cc69a606d7d4da16f6de3333d0c92d51419aeff" +dependencies = [ + "cfg-expr", + "heck", + 
"pkg-config", + "toml", + "version-compare", +] + +[[package]] +name = "tempfile" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" dependencies = [ "cfg-if", "fastrand", - "libc", "redox_syscall", - "remove_dir_all", - "winapi", + "rustix", + "windows-sys 0.42.0", ] [[package]] name = "termcolor" -version = "1.1.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" dependencies = [ "winapi-util", ] @@ -1295,20 +2333,23 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.4" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ + "cfg-if", "once_cell", ] [[package]] name = "time" -version = "0.3.17" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" dependencies = [ "itoa", + "libc", + "num_threads", "serde", "time-core", "time-macros", @@ -1322,18 +2363,27 @@ checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36" dependencies = [ "time-core", ] [[package]] -name = "tokio" -version = "1.24.1" +name = "tinystr" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d9f76183f91ecfb55e1d7d5602bd1d979e38a3a522fe900241cf195624d67ae" +checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef" +dependencies = [ + "displaydoc", +] + +[[package]] +name = "tokio" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" dependencies = [ "autocfg", "bytes", @@ -1345,7 +2395,7 @@ dependencies = [ "signal-hook-registry", "tokio-macros", "tracing", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1361,9 +2411,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" dependencies = [ "futures-core", "pin-project-lite", @@ -1372,13 +2422,30 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.10" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" dependencies = [ "serde", ] +[[package]] +name = "toml_datetime" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" + +[[package]] +name = "toml_edit" +version = "0.19.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a1eb0622d28f4b9c90adc4ea4b2b46b47663fde9ac5fafcb14a1369d5508825" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + [[package]] name = "tracing" version = "0.1.37" @@ -1451,12 +2518,107 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "tracing-wasm" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4575c663a174420fa2d78f4108ff68f65bf2fbb7dd89f33749b6e826b3626e07" +dependencies = [ + "tracing", + "tracing-subscriber", + "wasm-bindgen", +] + +[[package]] +name = "type-map" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d3364c5e96cb2ad1603037ab253ddd34d7fb72a58bdddf4b7350760fc69a46" +dependencies = [ + "rustc-hash", +] + [[package]] name = "typenum" version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "unic-bidi" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1356b759fb6a82050666f11dce4b6fe3571781f1449f3ef78074e408d468ec09" +dependencies = [ + "matches", + "unic-ucd-bidi", +] + +[[package]] +name = "unic-char-property" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" +dependencies = [ + "unic-char-range", +] + +[[package]] +name = "unic-char-range" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" + +[[package]] +name = "unic-common" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" + +[[package]] +name = "unic-langid" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "398f9ad7239db44fd0f80fe068d12ff22d78354080332a5077dc6f52f14dcf2f" +dependencies = [ + "unic-langid-impl", +] + +[[package]] +name = "unic-langid-impl" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e35bfd2f2b8796545b55d7d3fd3e89a0613f68a0d1c8bc28cb7ff96b411a35ff" +dependencies = [ + "tinystr", +] + +[[package]] +name = "unic-ucd-bidi" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1d568b51222484e1f8209ce48caa6b430bf352962b877d592c29ab31fb53d8c" +dependencies = [ + "unic-char-property", + "unic-char-range", + "unic-ucd-version", +] + +[[package]] +name = "unic-ucd-version" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" +dependencies = [ + "unic-common", +] + [[package]] name = "unicase" version = "2.6.0" @@ -1474,9 +2636,9 @@ checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" @@ -1484,6 +2646,12 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +[[package]] +name = "utf16_lit" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14706d2a800ee8ff38c1d3edb873cd616971ea59eb7c0d046bb44ef59b06a1ae" + [[package]] name = "utf8parse" version = "0.2.0" @@ -1496,6 +2664,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +[[package]] +name = "version-compare" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579a42fc0b8e0c63b76519a339be31bed574929511fa53c1a3acae26eb258f29" + [[package]] name = "version_check" version = "0.9.4" @@ -1508,6 +2682,81 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + [[package]] name = "winapi" version = "0.3.9" @@ -1555,52 +2804,100 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.0" +name = "windows-sys" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" [[package]] name = "windows_aarch64_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" [[package]] name = "windows_i686_gnu" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" [[package]] name = "windows_i686_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" [[package]] name = "windows_x86_64_gnu" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" [[package]] name = "windows_x86_64_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" + +[[package]] +name = "winnow" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf09497b8f8b5ac5d3bb4d05c0a99be20f26fd3d5f2db7b0716e946d5103658" +dependencies = [ + "memchr", +] + +[[package]] +name = "wio" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d129932f4644ac2396cb456385cbf9e63b5b30c6e8dc4820bdca4eb082037a5" +dependencies = [ + "winapi", +] + +[[package]] +name = "xi-unicode" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a67300977d3dc3f8034dae89778f502b6ba20b269527b3223ba59c0cf393bb8a" [[package]] name = "zip" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080" +checksum = 
"0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef" dependencies = [ "aes", "byteorder", @@ -1637,10 +2934,11 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "2.0.4+zstd.1.5.2" +version = "2.0.7+zstd.1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa202f2ef00074143e219d15b62ffc317d17cc33909feac471c044087cad7b0" +checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5" dependencies = [ "cc", "libc", + "pkg-config", ] diff --git a/README.adoc b/README.adoc index bc322fd..34e0ef0 100644 --- a/README.adoc +++ b/README.adoc @@ -10,23 +10,18 @@ :tip-caption: :bulb: :warning-caption: :warning: -A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_. +A set of tools to use and develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_. -== Quickstart +== Darktide Mod Manager (DTMM) -1. Download the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] for your platform. -2. Place the binary for your system and `dictionary.csv` next to each other. -3. Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`. -4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference]. +DTMM is a GUI application to install and manage mods for the game. -== Runtime dependencies +image::docs/screenshots/dtmm.png[dtmm main view] -The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd. +Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmm[crates/dtmm] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki]. -A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environmental variable. +== Darktide Mod Tools (DTMT) -== Building +DTMT is a CLI application providing various commands that aid in developing mods for the game. -1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system. -2. Download or clone this source code. Make sure to include the submodules in `lib/`. -3. Run `cargo build`. +Head to https://git.sclu1034.dev/bitsquid_dt/dtmt/src/branch/master/crates/dtmt[crates/dtmt] for more information or check the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki[Wiki]. 
diff --git a/crates/dtmm/Cargo.toml b/crates/dtmm/Cargo.toml new file mode 100644 index 0000000..3971ee3 --- /dev/null +++ b/crates/dtmm/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "dtmm" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +bitflags = "1.3.2" +clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] } +color-eyre = "0.6.2" +confy = "0.5.1" +druid = { git = "https://github.com/linebender/druid.git", features = ["im", "serde"] } +dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" } +futures = "0.3.25" +oodle = { path = "../../lib/oodle", version = "*" } +sdk = { path = "../../lib/sdk", version = "0.2.0" } +serde_sjson = { path = "../../lib/serde_sjson", version = "*" } +serde = { version = "1.0.152", features = ["derive", "rc"] } +tokio = { version = "1.23.0", features = ["rt", "fs", "tracing", "sync"] } +tracing = "0.1.37" +tracing-error = "0.2.0" +tracing-subscriber = { version = "0.3.16", features = ["env-filter"] } +zip = "0.6.4" +tokio-stream = { version = "0.1.12", features = ["fs"] } diff --git a/crates/dtmm/README.adoc b/crates/dtmm/README.adoc new file mode 100644 index 0000000..45130f1 --- /dev/null +++ b/crates/dtmm/README.adoc @@ -0,0 +1,16 @@ += Darktide Mod Manager (DTMM) +:idprefix: +:idseparator: +:toc: macro +:toclevels: 1 +:!toc-title: +:caution-caption: :fire: +:important-caption: :exclamation: +:note-caption: :paperclip: +:tip-caption: :bulb: +:warning-caption: :warning: + +DTMM is a GUI application to install and manage mods for the game. + +image::../../docs/screenshots/dtmm.png[dtmm main view] + diff --git a/crates/dtmm/assets/mod_main.lua b/crates/dtmm/assets/mod_main.lua new file mode 100644 index 0000000..715397f --- /dev/null +++ b/crates/dtmm/assets/mod_main.lua @@ -0,0 +1,192 @@ +local _G = _G +local rawget = rawget +local rawset = rawset + +local log = function(category, format, ...) + local Log = rawget(_G, "Log") + if Log then + Log.info(category, format, ...) + else + print(string.format("[%s] %s", category or "", string.format(format or "", ...))) + end +end + +-- Patch `GameStateMachine.init` to add our own state for loading mods. +-- In the future, Fatshark might provide us with a dedicated way to do this. +local function patch_mod_loading_state() + local StateBootSubStateBase = require("scripts/game_states/boot/state_boot_sub_state_base") + + -- A necessary override. + -- The original does not proxy `dt` to `_state_update`, but we need that.
+ StateBootSubStateBase.update = function (self, dt) + local done, error = self:_state_update(dt) + local params = self._params + + if error then + return StateError, { error } + elseif done then + local next_index = params.sub_state_index + 1 + params.sub_state_index = next_index + local next_state_data = params.states[next_index] + + if next_state_data then + return next_state_data[1], self._params + else + self._parent:sub_states_done() + end + end + end + + local StateBootLoadMods = class("StateBootLoadMods", "StateBootSubStateBase") + + StateBootLoadMods.on_enter = function (self, parent, params) + log("StateBootLoadMods", "Entered") + StateBootLoadMods.super.on_enter(self, parent, params) + + local state_params = self:_state_params() + local package_manager = state_params.package_manager + + self._state = "load_package" + self._package_manager = package_manager + self._package_handles = { + ["packages/mods"] = package_manager:load("packages/mods", "StateBootLoadMods", nil), + ["packages/dml"] = package_manager:load("packages/dml", "StateBootLoadMods", nil), + } + end + + StateBootLoadMods._state_update = function (self, dt) + local state = self._state + local package_manager = self._package_manager + + if state == "load_package" and package_manager:update() then + log("StateBootLoadMods", "Packages loaded, loading mods") + self._state = "load_mods" + local mod_loader = require("scripts/mods/dml/init") + self._mod_loader = mod_loader + + local mod_data = require("scripts/mods/mod_data") + mod_loader:init(mod_data, self._parent:gui()) + elseif state == "load_mods" and self._mod_loader:update(dt) then + log("StateBootLoadMods", "Mods loaded, exiting") + return true, false + end + + return false, false + end + + local GameStateMachine = require("scripts/foundation/utilities/game_state_machine") + + local patched = false + + local GameStateMachine_init = GameStateMachine.init + GameStateMachine.init = function(self, parent, start_state, params, ...) + if not patched then + log("mod_main", "Injecting mod loading state") + patched = true + + -- Hardcoded position after `StateRequireScripts`. + -- We do want to wait until then, so that most of the game's core + -- systems are at least loaded and can be hooked, even if they aren't + -- running, yet. + local pos = 4 + table.insert(params.states, pos, { + StateBootLoadMods, + { + package_manager = params.package_manager, + }, + }) + end + + GameStateMachine_init(self, parent, start_state, params, ...) + end + + log("mod_main", "Mod patching complete") +end + +log("mod_main", "Initializing mods...") + +local require_store = {} + +Mods = { + -- Keep a backup of certain system libraries before + -- Fatshark's code scrubs them. + -- The loader can then decide to pass them on to mods, or ignore them + lua = setmetatable({}, { + io = io, + debug = debug, + ffi = ffi, + os = os, + load = load, + loadfile = loadfile, + loadstring = loadstring, + }), + require_store = require_store +} + +local can_insert = function(filepath, new_result) + local store = require_store[filepath] + if not store or #store then + return true + end + + if store[#store] ~= new_result then + return true + end +end + +local original_require = require +require = function(filepath, ...) + local result = original_require(filepath, ...) 
+ if result and type(result) == "table" then + if can_insert(filepath, result) then + require_store[filepath] = require_store[filepath] or {} + local store = require_store[filepath] + + table.insert(store, result) + + if Mods.hook then + Mods.hook.enable_by_file(filepath, #store) + end + end + end + + return result +end + +require("scripts/boot_init") +require("scripts/foundation/utilities/class") + +-- The `__index` metamethod maps a proper identifier `CLASS.MyClassName` to the +-- stringified version of the key: `"MyClassName"`. +-- This allows using LuaCheck for the stringified class names in hook parameters. +_G.CLASS = setmetatable({}, { + __index = function(_, key) + return key + end +}) + +local original_class = class +class = function(class_name, super_name, ...) + local result = original_class(class_name, super_name, ...) + if not rawget(_G, class_name) then + rawset(_G, class_name, result) + end + if not rawget(_G.CLASS, class_name) then + rawset(_G.CLASS, class_name, result) + end + return result +end + +require("scripts/main") +log("mod_main", "'scripts/main' loaded") + +-- Override `init` to run our injection +function init() + patch_mod_loading_state() + + -- As requested by Fatshark + local StateRequireScripts = require("scripts/game_states/boot/state_require_scripts") + StateRequireScripts._get_is_modded = function() return true end + + Main:init() +end diff --git a/crates/dtmm/notes.adoc b/crates/dtmm/notes.adoc new file mode 100644 index 0000000..bbf29e7 --- /dev/null +++ b/crates/dtmm/notes.adoc @@ -0,0 +1,49 @@ += Notes + +== Layout + +- top bar: + - left aligned: a tab bar with "mods", "settings", "about" + - right aligned: a button to start the game + - in the future: center aligned a dropdown to select profiles, and button to edit them +- main view: + - left side: list view of mods + - right side: details pane and buttons + - always visible, first mod in list is selected by default + - buttons: + - add mod + - deploy mods + - remove selected mod + - enable/disable (text changes based on state) + +== Mod list + +- name +- description? +- image? +- click to get details pane? 
+ +== Managing mods + +- for each mod in the list, have a checkbox +- need a button to remove mods +- need a button to add mods from downloaded files +- search + +== Misc + +- settings +- open mod storage + +== Managing the game + +- deploy mods +- + +== Preparing the game + +- click "Install mods" to prepare the game files with the enabled mods + +== Playing the game + +- if overlay file systems are used, the game has to be started through the mod manager diff --git a/crates/dtmm/src/controller/app.rs b/crates/dtmm/src/controller/app.rs new file mode 100644 index 0000000..d8cb619 --- /dev/null +++ b/crates/dtmm/src/controller/app.rs @@ -0,0 +1,213 @@ +use std::collections::HashMap; +use std::io::{Cursor, ErrorKind, Read}; +use std::path::Path; + +use color_eyre::eyre::{self, Context}; +use color_eyre::{Help, Result}; +use druid::im::Vector; +use druid::FileInfo; +use dtmt_shared::ModConfig; +use serde::Deserialize; +use tokio::fs::{self, DirEntry}; +use tokio::runtime::Runtime; +use tokio_stream::wrappers::ReadDirStream; +use tokio_stream::StreamExt; +use zip::ZipArchive; + +use crate::state::{ModInfo, PackageInfo, State}; +use crate::util::config::{ConfigSerialize, LoadOrderEntry}; + +#[tracing::instrument(skip(state))] +pub(crate) async fn import_mod(state: State, info: FileInfo) -> Result { + let data = fs::read(&info.path) + .await + .wrap_err_with(|| format!("failed to read file {}", info.path.display()))?; + let data = Cursor::new(data); + + let mut archive = ZipArchive::new(data).wrap_err("failed to open ZIP archive")?; + + if tracing::enabled!(tracing::Level::DEBUG) { + let names = archive.file_names().fold(String::new(), |mut s, name| { + s.push('\n'); + s.push_str(name); + s + }); + tracing::debug!("Archive contents:{}", names); + } + + let dir_name = { + let f = archive.by_index(0).wrap_err("archive is empty")?; + + if !f.is_dir() { + let err = eyre::eyre!("archive does not have a top-level directory"); + return Err(err).with_suggestion(|| "Use 'dtmt build' to create the mod archive."); + } + + let name = f.name(); + // The directory name is returned with a trailing slash, which we don't want + name[..(name.len().saturating_sub(1))].to_string() + }; + + tracing::info!("Importing mod {}", dir_name); + + let mod_cfg: ModConfig = { + let mut f = archive + .by_name(&format!("{}/{}", dir_name, "dtmt.cfg")) + .wrap_err("failed to read mod config from archive")?; + let mut buf = Vec::with_capacity(f.size() as usize); + f.read_to_end(&mut buf) + .wrap_err("failed to read mod config from archive")?; + + let data = String::from_utf8(buf).wrap_err("mod config is not valid UTF-8")?; + + serde_sjson::from_str(&data).wrap_err("failed to deserialize mod config")? + }; + + tracing::debug!(?mod_cfg); + + let files: HashMap> = { + let mut f = archive + .by_name(&format!("{}/{}", dir_name, "files.sjson")) + .wrap_err("failed to read file index from archive")?; + let mut buf = Vec::with_capacity(f.size() as usize); + f.read_to_end(&mut buf) + .wrap_err("failed to read file index from archive")?; + + let data = String::from_utf8(buf).wrap_err("file index is not valid UTF-8")?; + + serde_sjson::from_str(&data).wrap_err("failed to deserialize file index")? 
+ }; + + tracing::trace!(?files); + + let mod_dir = state.get_mod_dir(); + + tracing::trace!("Creating mods directory {}", mod_dir.display()); + fs::create_dir_all(&mod_dir) + .await + .wrap_err_with(|| format!("failed to create data directory {}", mod_dir.display()))?; + + tracing::trace!("Extracting mod archive to {}", mod_dir.display()); + archive + .extract(&mod_dir) + .wrap_err_with(|| format!("failed to extract archive to {}", mod_dir.display()))?; + + let packages = files + .into_iter() + .map(|(name, files)| PackageInfo::new(name, files.into_iter().collect())) + .collect(); + let info = ModInfo::new(mod_cfg, packages); + + Ok(info) +} + +#[tracing::instrument(skip(state))] +pub(crate) async fn delete_mod(state: State, info: &ModInfo) -> Result<()> { + let mod_dir = state.get_mod_dir().join(&info.id); + fs::remove_dir_all(&mod_dir) + .await + .wrap_err_with(|| format!("failed to remove directory {}", mod_dir.display()))?; + + Ok(()) +} + +#[tracing::instrument(skip(state))] +pub(crate) async fn save_settings(state: State) -> Result<()> { + let cfg = ConfigSerialize::from(&state); + + tracing::info!("Saving settings to '{}'", state.config_path.display()); + tracing::debug!(?cfg); + + let data = serde_sjson::to_string(&cfg).wrap_err("failed to serialize config")?; + + fs::write(state.config_path.as_ref(), &data) + .await + .wrap_err_with(|| { + format!( + "failed to write config to '{}'", + state.config_path.display() + ) + }) +} + +async fn read_sjson_file(path: P) -> Result +where + T: for<'a> Deserialize<'a>, + P: AsRef + std::fmt::Debug, +{ + let buf = fs::read(path).await.wrap_err("failed to read file")?; + let data = String::from_utf8(buf).wrap_err("invalid UTF8")?; + serde_sjson::from_str(&data).wrap_err("failed to deserialize") +} + +#[tracing::instrument(skip_all,fields( + name = ?res.as_ref().map(|entry| entry.file_name()) +))] +async fn read_mod_dir_entry(res: Result) -> Result { + let entry = res?; + let config_path = entry.path().join("dtmt.cfg"); + let index_path = entry.path().join("files.sjson"); + + let cfg: ModConfig = read_sjson_file(&config_path) + .await + .wrap_err_with(|| format!("failed to read mod config '{}'", config_path.display()))?; + + let files: HashMap> = read_sjson_file(&index_path) + .await + .wrap_err_with(|| format!("failed to read file index '{}'", index_path.display()))?; + + let packages = files + .into_iter() + .map(|(name, files)| PackageInfo::new(name, files.into_iter().collect())) + .collect(); + let info = ModInfo::new(cfg, packages); + Ok(info) +} + +#[tracing::instrument(skip(mod_order))] +pub(crate) fn load_mods<'a, P, S>(mod_dir: P, mod_order: S) -> Result> +where + S: Iterator, + P: AsRef + std::fmt::Debug, +{ + let rt = Runtime::new()?; + + rt.block_on(async move { + let mod_dir = mod_dir.as_ref(); + let read_dir = match fs::read_dir(mod_dir).await { + Ok(read_dir) => read_dir, + Err(err) if err.kind() == ErrorKind::NotFound => { + return Ok(Vector::new()); + } + Err(err) => { + return Err(err) + .wrap_err_with(|| format!("failed to open directory '{}'", mod_dir.display())); + } + }; + + let stream = ReadDirStream::new(read_dir) + .map(|res| res.wrap_err("failed to read dir entry")) + .then(read_mod_dir_entry); + tokio::pin!(stream); + + let mut mods: HashMap = HashMap::new(); + + while let Some(res) = stream.next().await { + let info = res?; + mods.insert(info.id.clone(), info); + } + + let mods = mod_order + .filter_map(|entry| { + if let Some(mut info) = mods.remove(&entry.id) { + info.enabled = entry.enabled; + Some(info) + } else 
{ + None + } + }) + .collect(); + + Ok::<_, color_eyre::Report>(mods) + }) +} diff --git a/crates/dtmm/src/controller/game.rs b/crates/dtmm/src/controller/game.rs new file mode 100644 index 0000000..508e84a --- /dev/null +++ b/crates/dtmm/src/controller/game.rs @@ -0,0 +1,575 @@ +use std::ffi::CString; +use std::io::{Cursor, ErrorKind}; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::sync::Arc; + +use color_eyre::eyre::Context; +use color_eyre::{eyre, Help, Result}; +use futures::stream; +use futures::StreamExt; +use sdk::filetype::lua; +use sdk::filetype::package::Package; +use sdk::murmur::Murmur64; +use sdk::{ + Bundle, BundleDatabase, BundleFile, BundleFileType, BundleFileVariant, FromBinary, ToBinary, +}; +use tokio::fs; +use tokio::io::AsyncWriteExt; +use tracing::Instrument; + +use crate::state::{PackageInfo, State}; + +const MOD_BUNDLE_NAME: &str = "packages/mods"; +const BOOT_BUNDLE_NAME: &str = "packages/boot"; +const DML_BUNDLE_NAME: &str = "packages/dml"; +const BUNDLE_DATABASE_NAME: &str = "bundle_database.data"; +const MOD_BOOT_SCRIPT: &str = "scripts/mod_main"; +const MOD_DATA_SCRIPT: &str = "scripts/mods/mod_data"; +const SETTINGS_FILE_PATH: &str = "application_settings/settings_common.ini"; + +#[tracing::instrument] +async fn read_file_with_backup

(path: P) -> Result> +where + P: AsRef + std::fmt::Debug, +{ + let path = path.as_ref(); + let backup_path = { + let mut p = PathBuf::from(path); + let ext = if let Some(ext) = p.extension() { + ext.to_string_lossy().to_string() + ".bak" + } else { + String::from("bak") + }; + p.set_extension(ext); + p + }; + + let file_name = path + .file_name() + .map(|s| s.to_string_lossy().to_string()) + .unwrap_or_else(|| String::from("file")); + + let bin = match fs::read(&backup_path).await { + Ok(bin) => bin, + Err(err) if err.kind() == ErrorKind::NotFound => { + // TODO: This doesn't need to be awaited here, yet. + // I only need to make sure it has finished before writing the changed bundle. + tracing::debug!( + "Backup does not exist. Backing up original {} to '{}'", + file_name, + backup_path.display() + ); + fs::copy(path, &backup_path).await.wrap_err_with(|| { + format!( + "failed to back up {} '{}' to '{}'", + file_name, + path.display(), + backup_path.display() + ) + })?; + + tracing::debug!("Reading {} from original '{}'", file_name, path.display()); + fs::read(path).await.wrap_err_with(|| { + format!("failed to read {} file: {}", file_name, path.display()) + })? + } + Err(err) => { + return Err(err).wrap_err_with(|| { + format!( + "failed to read {} from backup '{}'", + file_name, + backup_path.display() + ) + }); + } + }; + Ok(bin) +} + +#[tracing::instrument(skip_all)] +async fn patch_game_settings(state: Arc) -> Result<()> { + let settings_path = state.game_dir.join("bundle").join(SETTINGS_FILE_PATH); + + let settings = read_file_with_backup(&settings_path) + .await + .wrap_err("failed to read settings.ini")?; + let settings = String::from_utf8(settings).wrap_err("settings.ini is not valid UTF-8")?; + + let mut f = fs::File::create(&settings_path) + .await + .wrap_err_with(|| format!("failed to open {}", settings_path.display()))?; + + let Some(i) = settings.find("boot_script =") else { + eyre::bail!("couldn't find 'boot_script' field"); + }; + + f.write_all(settings[0..i].as_bytes()).await?; + f.write_all(b"boot_script = \"scripts/mod_main\"").await?; + + let Some(j) = settings[i..].find('\n') else { + eyre::bail!("couldn't find end of 'boot_script' field"); + }; + + f.write_all(settings[(i + j)..].as_bytes()).await?; + + Ok(()) +} + +#[tracing::instrument(skip_all, fields(package = info.name))] +fn make_package(info: &PackageInfo) -> Result { + let mut pkg = Package::new(info.name.clone(), PathBuf::new()); + + for f in &info.files { + let mut it = f.rsplit('.'); + let file_type = it + .next() + .ok_or_else(|| eyre::eyre!("missing file extension")) + .and_then(BundleFileType::from_str) + .wrap_err("invalid file name in package info")?; + let name: String = it.collect(); + pkg.add_file(file_type, name); + } + + Ok(pkg) +} + +fn build_mod_data_lua(state: Arc) -> String { + let mut lua = String::from("return {\n"); + + // DMF is handled explicitely by the loading procedures, as it actually drives most of that + // and should therefore not show up in the load order. 
+ for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) { + lua.push_str(" {\n name = \""); + lua.push_str(&mod_info.name); + + lua.push_str("\",\n id = \""); + lua.push_str(&mod_info.id); + + lua.push_str("\",\n run = function()\n"); + + let resources = &mod_info.resources; + if resources.data.is_some() || resources.localization.is_some() { + lua.push_str(" new_mod(\""); + lua.push_str(&mod_info.id); + lua.push_str("\", {\n mod_script = \""); + lua.push_str(&resources.init.to_string_lossy()); + + if let Some(data) = resources.data.as_ref() { + lua.push_str("\",\n mod_data = \""); + lua.push_str(&data.to_string_lossy()); + } + + if let Some(localization) = &resources.localization { + lua.push_str("\",\n mod_localization = \""); + lua.push_str(&localization.to_string_lossy()); + } + + lua.push_str("\",\n })\n"); + } else { + lua.push_str(" return dofile(\""); + lua.push_str(&resources.init.to_string_lossy()); + lua.push_str("\")\n"); + } + + lua.push_str(" end,\n packages = {\n"); + + for pkg_info in &mod_info.packages { + lua.push_str(" \""); + lua.push_str(&pkg_info.name); + lua.push_str("\",\n"); + } + + lua.push_str(" },\n },\n"); + } + + lua.push('}'); + + tracing::debug!("mod_data_lua:\n{}", lua); + + lua +} + +#[tracing::instrument(skip_all)] +async fn build_bundles(state: Arc) -> Result> { + let mut mod_bundle = Bundle::new(MOD_BUNDLE_NAME.to_string()); + let mut tasks = Vec::new(); + + let bundle_dir = Arc::new(state.game_dir.join("bundle")); + + let mut bundles = Vec::new(); + + { + let span = tracing::debug_span!("Building mod data script"); + let _enter = span.enter(); + + let lua = build_mod_data_lua(state.clone()); + let lua = CString::new(lua).wrap_err("failed to build CString from mod data Lua string")?; + let file = + lua::compile(MOD_DATA_SCRIPT, &lua).wrap_err("failed to compile mod data Lua file")?; + + mod_bundle.add_file(file); + } + + for mod_info in state.mods.iter().filter(|m| m.id != "dml" && m.enabled) { + let span = tracing::trace_span!("building mod packages", name = mod_info.name); + let _enter = span.enter(); + + let mod_dir = state.get_mod_dir().join(&mod_info.id); + for pkg_info in &mod_info.packages { + let span = tracing::trace_span!("building package", name = pkg_info.name); + let _enter = span.enter(); + + let pkg = make_package(pkg_info).wrap_err("failed to make package")?; + let mut variant = BundleFileVariant::new(); + let bin = pkg + .to_binary() + .wrap_err("failed to serialize package to binary")?; + variant.set_data(bin); + let mut file = BundleFile::new(pkg_info.name.clone(), BundleFileType::Package); + file.add_variant(variant); + + mod_bundle.add_file(file); + + let bundle_name = Murmur64::hash(&pkg_info.name) + .to_string() + .to_ascii_lowercase(); + let src = mod_dir.join(&bundle_name); + let dest = bundle_dir.join(&bundle_name); + let pkg_name = pkg_info.name.clone(); + let mod_name = mod_info.name.clone(); + + // Explicitely drop the guard, so that we can move the span + // into the async operation + drop(_enter); + + let ctx = state.ctx.clone(); + + let task = async move { + let bundle = { + let bin = fs::read(&src).await.wrap_err_with(|| { + format!("failed to read bundle file '{}'", src.display()) + })?; + let name = Bundle::get_name_from_path(&ctx, &src); + Bundle::from_binary(&ctx, name, bin) + .wrap_err_with(|| format!("failed to parse bundle '{}'", src.display()))? 
+ }; + + tracing::debug!( + src = %src.display(), + dest = %dest.display(), + "Copying bundle '{}' for mod '{}'", + pkg_name, + mod_name, + ); + // We attempt to remove any previous file, so that the hard link can be created. + // We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy + // may be possible despite an error here, or the error will be reported by it anyways. + // TODO: There is a chance that we delete an actual game bundle, but with 64bit + // hashes, it's low enough for now, and the setup required to detect + // "game bundle vs mod bundle" is non-trivial. + let _ = fs::remove_file(&dest).await; + fs::copy(&src, &dest).await.wrap_err_with(|| { + format!( + "failed to copy bundle {pkg_name} for mod {mod_name}. src: {}, dest: {}", + src.display(), + dest.display() + ) + })?; + + Ok::(bundle) + } + .instrument(span); + + tasks.push(task); + } + } + + tracing::debug!("Copying {} mod bundles", tasks.len()); + + let mut tasks = stream::iter(tasks).buffer_unordered(10); + + while let Some(res) = tasks.next().await { + let bundle = res?; + bundles.push(bundle); + } + + { + let path = bundle_dir.join(format!("{:x}", mod_bundle.name().to_murmur64())); + tracing::trace!("Writing mod bundle to '{}'", path.display()); + fs::write(&path, mod_bundle.to_binary()?) + .await + .wrap_err_with(|| format!("failed to write bundle to '{}'", path.display()))?; + } + + bundles.push(mod_bundle); + + Ok(bundles) +} + +#[tracing::instrument(skip_all)] +async fn patch_boot_bundle(state: Arc) -> Result> { + let bundle_dir = Arc::new(state.game_dir.join("bundle")); + let bundle_path = bundle_dir.join(format!("{:x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes()))); + + let mut bundles = Vec::with_capacity(2); + + let mut boot_bundle = async { + let bin = read_file_with_backup(&bundle_path) + .await + .wrap_err("failed to read boot bundle")?; + + Bundle::from_binary(&state.ctx, BOOT_BUNDLE_NAME.to_string(), bin) + .wrap_err("failed to parse boot bundle") + } + .instrument(tracing::trace_span!("read boot bundle")) + .await + .wrap_err_with(|| format!("failed to read bundle '{}'", BOOT_BUNDLE_NAME))?; + + { + tracing::trace!("Adding mod package file to boot bundle"); + let span = tracing::trace_span!("create mod package file"); + let _enter = span.enter(); + + let mut pkg = Package::new(MOD_BUNDLE_NAME.to_string(), PathBuf::new()); + + for mod_info in &state.mods { + for pkg_info in &mod_info.packages { + pkg.add_file(BundleFileType::Package, &pkg_info.name); + } + } + + pkg.add_file(BundleFileType::Lua, MOD_DATA_SCRIPT); + + let mut variant = BundleFileVariant::new(); + variant.set_data(pkg.to_binary()?); + let mut f = BundleFile::new(MOD_BUNDLE_NAME.to_string(), BundleFileType::Package); + f.add_variant(variant); + + boot_bundle.add_file(f); + } + + { + tracing::trace!("Handling DML packages and bundle"); + let span = tracing::trace_span!("handle DML"); + let _enter = span.enter(); + + let mut variant = BundleFileVariant::new(); + + let mod_info = state + .mods + .iter() + .find(|m| m.id == "dml") + .ok_or_else(|| eyre::eyre!("DML not found in mod list"))?; + let pkg_info = mod_info + .packages + .get(0) + .ok_or_else(|| eyre::eyre!("invalid mod package for DML")) + .with_suggestion(|| "Re-download and import the newest version.".to_string())?; + let bundle_name = Murmur64::hash(&pkg_info.name) + .to_string() + .to_ascii_lowercase(); + let src = state.get_mod_dir().join(&mod_info.id).join(&bundle_name); + + { + let bin = fs::read(&src) + .await + .wrap_err_with(|| format!("failed to 
read bundle file '{}'", src.display()))?; + let name = Bundle::get_name_from_path(&state.ctx, &src); + + let dml_bundle = Bundle::from_binary(&state.ctx, name, bin) + .wrap_err_with(|| format!("failed to parse bundle '{}'", src.display()))?; + + bundles.push(dml_bundle); + }; + + { + let dest = bundle_dir.join(&bundle_name); + let pkg_name = pkg_info.name.clone(); + let mod_name = mod_info.name.clone(); + + tracing::debug!( + "Copying bundle {} for mod {}: {} -> {}", + pkg_name, + mod_name, + src.display(), + dest.display() + ); + // We attempt to remove any previous file, so that the hard link can be created. + // We can reasonably ignore errors here, as a 'NotFound' is actually fine, the copy + // may be possible despite an error here, or the error will be reported by it anyways. + // TODO: There is a chance that we delete an actual game bundle, but with 64bit + // hashes, it's low enough for now, and the setup required to detect + // "game bundle vs mod bundle" is non-trivial. + let _ = fs::remove_file(&dest).await; + fs::copy(&src, &dest).await.wrap_err_with(|| { + format!( + "failed to copy bundle {pkg_name} for mod {mod_name}. src: {}, dest: {}", + src.display(), + dest.display() + ) + })?; + } + + let pkg = make_package(pkg_info).wrap_err("failed to create package file for dml")?; + variant.set_data(pkg.to_binary()?); + + let mut f = BundleFile::new(DML_BUNDLE_NAME.to_string(), BundleFileType::Package); + f.add_variant(variant); + + boot_bundle.add_file(f); + } + + { + let span = tracing::debug_span!("Importing mod main script"); + let _enter = span.enter(); + + let lua = include_str!("../../assets/mod_main.lua"); + let lua = CString::new(lua).wrap_err("failed to build CString from mod main Lua string")?; + let file = + lua::compile(MOD_BOOT_SCRIPT, &lua).wrap_err("failed to compile mod main Lua file")?; + + boot_bundle.add_file(file); + } + + async { + let bin = boot_bundle + .to_binary() + .wrap_err("failed to serialize boot bundle")?; + fs::write(&bundle_path, bin) + .await + .wrap_err_with(|| format!("failed to write main bundle: {}", bundle_path.display())) + } + .instrument(tracing::trace_span!("write boot bundle")) + .await?; + + bundles.push(boot_bundle); + + Ok(bundles) +} + +#[tracing::instrument(skip_all, fields(bundles = bundles.len()))] +async fn patch_bundle_database(state: Arc, bundles: Vec) -> Result<()> { + let bundle_dir = Arc::new(state.game_dir.join("bundle")); + let database_path = bundle_dir.join(BUNDLE_DATABASE_NAME); + + let mut db = { + let bin = read_file_with_backup(&database_path) + .await + .wrap_err("failed to read bundle database")?; + let mut r = Cursor::new(bin); + let db = BundleDatabase::from_binary(&mut r).wrap_err("failed to parse bundle database")?; + tracing::trace!("Finished parsing bundle database"); + db + }; + + for bundle in bundles { + tracing::trace!("Adding '{}' to bundle database", bundle.name().display()); + db.add_bundle(&bundle); + } + + { + let bin = db + .to_binary() + .wrap_err("failed to serialize bundle database")?; + fs::write(&database_path, bin).await.wrap_err_with(|| { + format!( + "failed to write bundle database to '{}'", + database_path.display() + ) + })?; + } + + Ok(()) +} + +#[tracing::instrument(skip_all, fields( + game_dir = %state.game_dir.display(), + mods = state.mods.len() +))] +pub(crate) async fn deploy_mods(state: State) -> Result<()> { + let state = Arc::new(state); + + { + let first = state.mods.get(0); + if first.is_none() || !(first.unwrap().id == "dml" && first.unwrap().enabled) { + // TODO: Add a 
suggestion where to get it, once that's published + eyre::bail!("'Darktide Mod Loader' needs to be installed, enabled and at the top of the load order"); + } + } + + tracing::info!( + "Deploying {} mods to {}", + state.mods.len(), + state.game_dir.join("bundle").display() + ); + + tracing::info!("Build mod bundles"); + let mut bundles = build_bundles(state.clone()) + .await + .wrap_err("failed to build mod bundles")?; + + tracing::info!("Patch boot bundle"); + let mut more_bundles = patch_boot_bundle(state.clone()) + .await + .wrap_err("failed to patch boot bundle")?; + bundles.append(&mut more_bundles); + + tracing::info!("Patch game settings"); + patch_game_settings(state.clone()) + .await + .wrap_err("failed to patch game settings")?; + + tracing::info!("Patching bundle database"); + patch_bundle_database(state.clone(), bundles) + .await + .wrap_err("failed to patch bundle database")?; + + tracing::info!("Finished deploying mods"); + Ok(()) +} + +#[tracing::instrument(skip(state))] +pub(crate) async fn reset_mod_deployment(state: State) -> Result<()> { + let boot_bundle_path = format!("{:016x}", Murmur64::hash(BOOT_BUNDLE_NAME.as_bytes())); + let paths = [BUNDLE_DATABASE_NAME, &boot_bundle_path, SETTINGS_FILE_PATH]; + let bundle_dir = state.game_dir.join("bundle"); + + tracing::info!("Resetting mod deployment in {}", bundle_dir.display()); + + for p in paths { + let path = bundle_dir.join(p); + let backup = bundle_dir.join(&format!("{}.bak", p)); + + let res = async { + tracing::debug!( + "Copying from backup: {} -> {}", + backup.display(), + path.display() + ); + + fs::copy(&backup, &path) + .await + .wrap_err_with(|| format!("failed to copy from '{}'", backup.display()))?; + + tracing::debug!("Deleting backup: {}", backup.display()); + + fs::remove_file(&backup) + .await + .wrap_err_with(|| format!("failed to remove '{}'", backup.display())) + } + .await; + + if let Err(err) = res { + tracing::error!( + "Failed to restore '{}' from backup. You may need to verify game files. 
Error: {:?}", + &p, + err + ); + } + } + + tracing::info!("Reset finished"); + + Ok(()) +} diff --git a/crates/dtmm/src/controller/worker.rs b/crates/dtmm/src/controller/worker.rs new file mode 100644 index 0000000..80abf53 --- /dev/null +++ b/crates/dtmm/src/controller/worker.rs @@ -0,0 +1,131 @@ +use std::sync::Arc; + +use color_eyre::Result; +use druid::{ExtEventSink, SingleUse, Target}; +use tokio::runtime::Runtime; +use tokio::sync::mpsc::UnboundedReceiver; +use tokio::sync::RwLock; + +use crate::controller::app::*; +use crate::controller::game::*; +use crate::state::AsyncAction; +use crate::state::ACTION_FINISH_SAVE_SETTINGS; +use crate::state::{ + ACTION_FINISH_ADD_MOD, ACTION_FINISH_DELETE_SELECTED_MOD, ACTION_FINISH_DEPLOY, + ACTION_FINISH_RESET_DEPLOYMENT, ACTION_LOG, +}; + +async fn handle_action( + event_sink: Arc>, + action_queue: Arc>>, +) { + while let Some(action) = action_queue.write().await.recv().await { + let event_sink = event_sink.clone(); + match action { + AsyncAction::DeployMods(state) => tokio::spawn(async move { + if let Err(err) = deploy_mods(state).await { + tracing::error!("Failed to deploy mods: {:?}", err); + } + + event_sink + .write() + .await + .submit_command(ACTION_FINISH_DEPLOY, (), Target::Auto) + .expect("failed to send command"); + }), + AsyncAction::AddMod((state, info)) => tokio::spawn(async move { + match import_mod(state, info).await { + Ok(mod_info) => { + event_sink + .write() + .await + .submit_command( + ACTION_FINISH_ADD_MOD, + SingleUse::new(mod_info), + Target::Auto, + ) + .expect("failed to send command"); + } + Err(err) => { + tracing::error!("Failed to import mod: {:?}", err); + } + } + }), + AsyncAction::DeleteMod((state, info)) => tokio::spawn(async move { + if let Err(err) = delete_mod(state, &info).await { + tracing::error!( + "Failed to delete mod files. \ + You might want to clean up the data directory manually. \ + Reason: {:?}", + err + ); + } + + event_sink + .write() + .await + .submit_command( + ACTION_FINISH_DELETE_SELECTED_MOD, + SingleUse::new(info), + Target::Auto, + ) + .expect("failed to send command"); + }), + AsyncAction::ResetDeployment(state) => tokio::spawn(async move { + if let Err(err) = reset_mod_deployment(state).await { + tracing::error!("Failed to reset mod deployment: {:?}", err); + } + + event_sink + .write() + .await + .submit_command(ACTION_FINISH_RESET_DEPLOYMENT, (), Target::Auto) + .expect("failed to send command"); + }), + AsyncAction::SaveSettings(state) => tokio::spawn(async move { + if let Err(err) = save_settings(state).await { + tracing::error!("Failed to save settings: {:?}", err); + } + + event_sink + .write() + .await + .submit_command(ACTION_FINISH_SAVE_SETTINGS, (), Target::Auto) + .expect("failed to send command"); + }), + }; + } +} + +async fn handle_log( + event_sink: Arc>, + log_queue: Arc>>, +) { + while let Some(line) = log_queue.write().await.recv().await { + let event_sink = event_sink.clone(); + event_sink + .write() + .await + .submit_command(ACTION_LOG, SingleUse::new(line), Target::Auto) + .expect("failed to send command"); + } +} + +pub(crate) fn work_thread( + event_sink: Arc>, + action_queue: Arc>>, + log_queue: Arc>>, +) -> Result<()> { + let rt = Runtime::new()?; + + rt.block_on(async { + loop { + tokio::select! 
{ + _ = handle_action(event_sink.clone(), action_queue.clone()) => {}, + _ = handle_log(event_sink.clone(), log_queue.clone()) => {}, + } + } + }); + + Ok(()) +} diff --git a/crates/dtmm/src/main.rs b/crates/dtmm/src/main.rs new file mode 100644 index 0000000..9ce6192 --- /dev/null +++ b/crates/dtmm/src/main.rs @@ -0,0 +1,91 @@ +#![recursion_limit = "256"] +#![feature(let_chains)] + +use std::path::PathBuf; +use std::sync::Arc; + +use clap::command; +use clap::value_parser; +use clap::Arg; +use color_eyre::eyre::Context; +use color_eyre::{Report, Result}; +use druid::AppLauncher; +use tokio::sync::RwLock; + +use crate::controller::app::load_mods; +use crate::controller::worker::work_thread; +use crate::state::{Delegate, State}; + +mod controller { + pub mod app; + pub mod game; + pub mod worker; +} +mod state; +mod util { + pub mod config; + pub mod log; +} +mod ui; + +#[tracing::instrument] +fn main() -> Result<()> { + color_eyre::install()?; + + let default_config_path = util::config::get_default_config_path(); + + tracing::trace!(default_config_path = %default_config_path.display()); + + let matches = command!() + .arg(Arg::new("oodle").long("oodle").help( + "The oodle library to load. This may either be:\n\ + - A library name that will be searched for in the system's default paths.\n\ + - A file path relative to the current working directory.\n\ + - An absolute file path.", + )) + .arg( + Arg::new("config") + .long("config") + .short('c') + .help("Path to the config file") + .value_parser(value_parser!(PathBuf)) + .default_value(default_config_path.to_string_lossy().to_string()), + ) + .get_matches(); + + let (log_tx, log_rx) = tokio::sync::mpsc::unbounded_channel(); + util::log::create_tracing_subscriber(log_tx); + + let config = util::config::read_config(&default_config_path, &matches) + .wrap_err("failed to read config file")?; + + let initial_state = { + let mut state = State::new( + config.path, + config.game_dir.unwrap_or_default(), + config.data_dir.unwrap_or_default(), + ); + state.mods = load_mods(state.get_mod_dir(), config.mod_order.iter()) + .wrap_err("failed to load mods")?; + state + }; + + let (action_tx, action_rx) = tokio::sync::mpsc::unbounded_channel(); + let delegate = Delegate::new(action_tx); + + let launcher = AppLauncher::with_window(ui::window::main::new()).delegate(delegate); + + let event_sink = launcher.get_external_handle(); + std::thread::spawn(move || { + let event_sink = Arc::new(RwLock::new(event_sink)); + let action_rx = Arc::new(RwLock::new(action_rx)); + let log_rx = Arc::new(RwLock::new(log_rx)); + loop { + if let Err(err) = work_thread(event_sink.clone(), action_rx.clone(), log_rx.clone()) { + tracing::error!("Work thread failed, restarting: {:?}", err); + } + } + }); + + launcher.launch(initial_state).map_err(Report::new) +} diff --git a/crates/dtmm/src/state/data.rs b/crates/dtmm/src/state/data.rs new file mode 100644 index 0000000..c8dc3aa --- /dev/null +++ b/crates/dtmm/src/state/data.rs @@ -0,0 +1,152 @@ +use std::{path::PathBuf, sync::Arc}; + +use druid::{im::Vector, Data, Lens}; +use dtmt_shared::ModConfig; + +use super::SelectedModLens; + +#[derive(Copy, Clone, Data, Debug, PartialEq)] +pub(crate) enum View { + Mods, + Settings, +} + +impl Default for View { + fn default() -> Self { + Self::Mods + } +} + +#[derive(Clone, Data, Debug)] +pub struct PackageInfo { + pub name: String, + pub files: Vector, +} + +impl PackageInfo { + pub fn new(name: String, files: Vector) -> Self { + Self { name, files } + } +} + +#[derive(Clone, Debug)] 
+pub(crate) struct ModResourceInfo { + pub init: PathBuf, + pub data: Option, + pub localization: Option, +} + +#[derive(Clone, Data, Debug, Lens)] +pub(crate) struct ModInfo { + pub id: String, + pub name: String, + pub description: Arc, + pub enabled: bool, + #[lens(ignore)] + #[data(ignore)] + pub packages: Vector, + #[lens(ignore)] + #[data(ignore)] + pub resources: ModResourceInfo, +} + +impl ModInfo { + pub fn new(cfg: ModConfig, packages: Vector) -> Self { + Self { + id: cfg.id, + name: cfg.name, + description: Arc::new(cfg.description), + enabled: false, + packages, + resources: ModResourceInfo { + init: cfg.resources.init, + data: cfg.resources.data, + localization: cfg.resources.localization, + }, + } + } +} + +impl PartialEq for ModInfo { + fn eq(&self, other: &Self) -> bool { + self.name.eq(&other.name) + } +} + +#[derive(Clone, Data, Lens)] +pub(crate) struct State { + pub current_view: View, + pub mods: Vector, + pub selected_mod_index: Option, + pub is_deployment_in_progress: bool, + pub is_reset_in_progress: bool, + pub is_save_in_progress: bool, + pub is_next_save_pending: bool, + pub game_dir: Arc, + pub data_dir: Arc, + pub log: Arc, + + #[lens(ignore)] + #[data(ignore)] + pub config_path: Arc, + #[lens(ignore)] + #[data(ignore)] + pub ctx: Arc, +} + +impl State { + #[allow(non_upper_case_globals)] + pub const selected_mod: SelectedModLens = SelectedModLens; + + pub fn new(config_path: PathBuf, game_dir: PathBuf, data_dir: PathBuf) -> Self { + let ctx = sdk::Context::new(); + + Self { + ctx: Arc::new(ctx), + current_view: View::default(), + mods: Vector::new(), + selected_mod_index: None, + is_deployment_in_progress: false, + is_reset_in_progress: false, + is_save_in_progress: false, + is_next_save_pending: false, + config_path: Arc::new(config_path), + game_dir: Arc::new(game_dir), + data_dir: Arc::new(data_dir), + log: Arc::new(String::new()), + } + } + + pub fn select_mod(&mut self, index: usize) { + self.selected_mod_index = Some(index); + } + + pub fn add_mod(&mut self, info: ModInfo) { + if let Some(pos) = self.mods.index_of(&info) { + self.mods.set(pos, info); + self.selected_mod_index = Some(pos); + } else { + self.mods.push_back(info); + self.selected_mod_index = Some(self.mods.len() - 1); + } + } + + pub fn can_move_mod_down(&self) -> bool { + self.selected_mod_index + .map(|i| i < (self.mods.len().saturating_sub(1))) + .unwrap_or(false) + } + + pub fn can_move_mod_up(&self) -> bool { + self.selected_mod_index.map(|i| i > 0).unwrap_or(false) + } + + pub(crate) fn get_mod_dir(&self) -> PathBuf { + self.data_dir.join("mods") + } + + pub(crate) fn add_log_line(&mut self, line: String) { + let log = Arc::make_mut(&mut self.log); + log.push_str(&line); + } +} diff --git a/crates/dtmm/src/state/delegate.rs b/crates/dtmm/src/state/delegate.rs new file mode 100644 index 0000000..08d17b0 --- /dev/null +++ b/crates/dtmm/src/state/delegate.rs @@ -0,0 +1,237 @@ +use druid::{ + AppDelegate, Command, DelegateCtx, Env, FileInfo, Handled, Selector, SingleUse, Target, +}; +use tokio::sync::mpsc::UnboundedSender; + +use super::{ModInfo, State}; + +pub(crate) const ACTION_SELECT_MOD: Selector = Selector::new("dtmm.action.select-mod"); +pub(crate) const ACTION_SELECTED_MOD_UP: Selector = Selector::new("dtmm.action.selected-mod-up"); +pub(crate) const ACTION_SELECTED_MOD_DOWN: Selector = + Selector::new("dtmm.action.selected-mod-down"); +pub(crate) const ACTION_START_DELETE_SELECTED_MOD: Selector> = + Selector::new("dtmm.action.srart-delete-selected-mod"); +pub(crate) const 
ACTION_FINISH_DELETE_SELECTED_MOD: Selector> = + Selector::new("dtmm.action.finish-delete-selected-mod"); + +pub(crate) const ACTION_START_DEPLOY: Selector = Selector::new("dtmm.action.start-deploy"); +pub(crate) const ACTION_FINISH_DEPLOY: Selector = Selector::new("dtmm.action.finish-deploy"); + +pub(crate) const ACTION_START_RESET_DEPLOYMENT: Selector = + Selector::new("dtmm.action.start-reset-deployment"); +pub(crate) const ACTION_FINISH_RESET_DEPLOYMENT: Selector = + Selector::new("dtmm.action.finish-reset-deployment"); + +pub(crate) const ACTION_ADD_MOD: Selector = Selector::new("dtmm.action.add-mod"); +pub(crate) const ACTION_FINISH_ADD_MOD: Selector> = + Selector::new("dtmm.action.finish-add-mod"); + +pub(crate) const ACTION_LOG: Selector> = Selector::new("dtmm.action.log"); + +pub(crate) const ACTION_START_SAVE_SETTINGS: Selector = + Selector::new("dtmm.action.start-save-settings"); +pub(crate) const ACTION_FINISH_SAVE_SETTINGS: Selector = + Selector::new("dtmm.action.finish-save-settings"); + +pub(crate) enum AsyncAction { + DeployMods(State), + ResetDeployment(State), + AddMod((State, FileInfo)), + DeleteMod((State, ModInfo)), + SaveSettings(State), +} + +pub(crate) struct Delegate { + sender: UnboundedSender, +} + +impl Delegate { + pub fn new(sender: UnboundedSender) -> Self { + Self { sender } + } +} + +impl AppDelegate for Delegate { + #[tracing::instrument(name = "Delegate", skip_all)] + fn command( + &mut self, + ctx: &mut DelegateCtx, + _target: Target, + cmd: &Command, + state: &mut State, + _env: &Env, + ) -> Handled { + if cfg!(debug_assertions) && !cmd.is(ACTION_LOG) { + tracing::trace!(?cmd); + } + + match cmd { + cmd if cmd.is(ACTION_START_DEPLOY) => { + if self + .sender + .send(AsyncAction::DeployMods(state.clone())) + .is_ok() + { + state.is_deployment_in_progress = true; + } else { + tracing::error!("Failed to queue action to deploy mods"); + } + + Handled::Yes + } + cmd if cmd.is(ACTION_FINISH_DEPLOY) => { + state.is_deployment_in_progress = false; + Handled::Yes + } + cmd if cmd.is(ACTION_START_RESET_DEPLOYMENT) => { + if self + .sender + .send(AsyncAction::ResetDeployment(state.clone())) + .is_ok() + { + state.is_reset_in_progress = true; + } else { + tracing::error!("Failed to queue action to reset mod deployment"); + } + + Handled::Yes + } + cmd if cmd.is(ACTION_FINISH_RESET_DEPLOYMENT) => { + state.is_reset_in_progress = false; + Handled::Yes + } + cmd if cmd.is(ACTION_SELECT_MOD) => { + let index = cmd + .get(ACTION_SELECT_MOD) + .expect("command type matched but didn't contain the expected value"); + + state.select_mod(*index); + // ctx.submit_command(ACTION_START_SAVE_SETTINGS); + Handled::Yes + } + cmd if cmd.is(ACTION_SELECTED_MOD_UP) => { + let Some(i) = state.selected_mod_index else { + return Handled::No; + }; + + let len = state.mods.len(); + if len == 0 || i == 0 { + return Handled::No; + } + + state.mods.swap(i, i - 1); + state.selected_mod_index = Some(i - 1); + // ctx.submit_command(ACTION_START_SAVE_SETTINGS); + Handled::Yes + } + cmd if cmd.is(ACTION_SELECTED_MOD_DOWN) => { + let Some(i) = state.selected_mod_index else { + return Handled::No; + }; + + let len = state.mods.len(); + if len == 0 || i == usize::MAX || i >= len - 1 { + return Handled::No; + } + + state.mods.swap(i, i + 1); + state.selected_mod_index = Some(i + 1); + // ctx.submit_command(ACTION_START_SAVE_SETTINGS); + Handled::Yes + } + cmd if cmd.is(ACTION_START_DELETE_SELECTED_MOD) => { + let info = cmd + .get(ACTION_START_DELETE_SELECTED_MOD) + .and_then(|info| info.take()) + 
.expect("command type matched but didn't contain the expected value"); + if self + .sender + .send(AsyncAction::DeleteMod((state.clone(), info))) + .is_err() + { + tracing::error!("Failed to queue action to deploy mods"); + } + + Handled::Yes + } + cmd if cmd.is(ACTION_FINISH_DELETE_SELECTED_MOD) => { + let info = cmd + .get(ACTION_FINISH_DELETE_SELECTED_MOD) + .and_then(|info| info.take()) + .expect("command type matched but didn't contain the expected value"); + let found = state.mods.iter().enumerate().find(|(_, i)| i.id == info.id); + let Some((index, _)) = found else { + return Handled::No; + }; + + state.mods.remove(index); + // ctx.submit_command(ACTION_START_SAVE_SETTINGS); + + Handled::Yes + } + cmd if cmd.is(ACTION_ADD_MOD) => { + let info = cmd + .get(ACTION_ADD_MOD) + .expect("command type matched but didn't contain the expected value"); + if self + .sender + .send(AsyncAction::AddMod((state.clone(), info.clone()))) + .is_err() + { + tracing::error!("Failed to queue action to add mod"); + } + Handled::Yes + } + cmd if cmd.is(ACTION_FINISH_ADD_MOD) => { + let info = cmd + .get(ACTION_FINISH_ADD_MOD) + .expect("command type matched but didn't contain the expected value"); + if let Some(info) = info.take() { + state.add_mod(info); + // ctx.submit_command(ACTION_START_SAVE_SETTINGS); + } + Handled::Yes + } + cmd if cmd.is(ACTION_LOG) => { + let line = cmd + .get(ACTION_LOG) + .expect("command type matched but didn't contain the expected value"); + if let Some(line) = line.take() { + state.add_log_line(line); + } + Handled::Yes + } + cmd if cmd.is(ACTION_START_SAVE_SETTINGS) => { + if state.is_save_in_progress { + state.is_next_save_pending = true; + } else if self + .sender + .send(AsyncAction::SaveSettings(state.clone())) + .is_ok() + { + state.is_save_in_progress = true; + } else { + tracing::error!("Failed to queue action to save settings"); + } + + Handled::Yes + } + cmd if cmd.is(ACTION_FINISH_SAVE_SETTINGS) => { + state.is_save_in_progress = false; + + if state.is_next_save_pending { + state.is_next_save_pending = false; + ctx.submit_command(ACTION_START_SAVE_SETTINGS); + } + + Handled::Yes + } + cmd => { + if cfg!(debug_assertions) { + tracing::warn!("Unknown command: {:?}", cmd); + } + Handled::No + } + } + } +} diff --git a/crates/dtmm/src/state/lens.rs b/crates/dtmm/src/state/lens.rs new file mode 100644 index 0000000..6c457a4 --- /dev/null +++ b/crates/dtmm/src/state/lens.rs @@ -0,0 +1,73 @@ +use druid::im::Vector; +use druid::{Data, Lens}; + +use super::{ModInfo, State}; + +pub(crate) struct SelectedModLens; + +impl Lens> for SelectedModLens { + #[tracing::instrument(name = "SelectedModLens::with", skip_all)] + fn with) -> V>(&self, data: &State, f: F) -> V { + let info = data + .selected_mod_index + .and_then(|i| data.mods.get(i).cloned()); + + f(&info) + } + + #[tracing::instrument(name = "SelectedModLens::with_mut", skip_all)] + fn with_mut) -> V>(&self, data: &mut State, f: F) -> V { + match data.selected_mod_index { + Some(i) => { + let mut info = data.mods.get_mut(i).cloned(); + let ret = f(&mut info); + + if let Some(info) = info { + // TODO: Figure out a way to check for equality and + // only update when needed + data.mods.set(i, info); + } else { + data.selected_mod_index = None; + } + + ret + } + None => f(&mut None), + } + } +} + +/// A Lens that maps an `im::Vector` to `im::Vector<(usize, T)>`, +/// where each element in the destination vector includes its index in the +/// source vector. 
+pub(crate) struct IndexedVectorLens; + +impl Lens, Vector<(usize, T)>> for IndexedVectorLens { + #[tracing::instrument(name = "IndexedVectorLens::with", skip_all)] + fn with) -> V>(&self, values: &Vector, f: F) -> V { + let indexed = values + .iter() + .enumerate() + .map(|(i, val)| (i, val.clone())) + .collect(); + f(&indexed) + } + + #[tracing::instrument(name = "IndexedVectorLens::with_mut", skip_all)] + fn with_mut) -> V>( + &self, + values: &mut Vector, + f: F, + ) -> V { + let mut indexed = values + .iter() + .enumerate() + .map(|(i, val)| (i, val.clone())) + .collect(); + let ret = f(&mut indexed); + + *values = indexed.into_iter().map(|(_i, val)| val).collect(); + + ret + } +} diff --git a/crates/dtmm/src/state/mod.rs b/crates/dtmm/src/state/mod.rs new file mode 100644 index 0000000..f0eb8c3 --- /dev/null +++ b/crates/dtmm/src/state/mod.rs @@ -0,0 +1,7 @@ +mod data; +mod delegate; +mod lens; + +pub(crate) use data::*; +pub(crate) use delegate::*; +pub(crate) use lens::*; diff --git a/crates/dtmm/src/ui/mod.rs b/crates/dtmm/src/ui/mod.rs new file mode 100644 index 0000000..cf8554f --- /dev/null +++ b/crates/dtmm/src/ui/mod.rs @@ -0,0 +1,5 @@ +pub mod theme; +pub mod widget; +pub mod window { + pub mod main; +} diff --git a/crates/dtmm/src/ui/theme.rs b/crates/dtmm/src/ui/theme.rs new file mode 100644 index 0000000..7658f3f --- /dev/null +++ b/crates/dtmm/src/ui/theme.rs @@ -0,0 +1,4 @@ +use druid::{Color, Insets}; + +pub const TOP_BAR_BACKGROUND_COLOR: Color = Color::rgba8(255, 255, 255, 50); +pub const TOP_BAR_INSETS: Insets = Insets::uniform(5.0); diff --git a/crates/dtmm/src/ui/widget/controller.rs b/crates/dtmm/src/ui/widget/controller.rs new file mode 100644 index 0000000..ce18d5b --- /dev/null +++ b/crates/dtmm/src/ui/widget/controller.rs @@ -0,0 +1,82 @@ +use druid::widget::{Button, Controller, Scroll}; +use druid::{Data, Env, Event, EventCtx, Rect, UpdateCtx, Widget}; + +use crate::state::{State, ACTION_START_SAVE_SETTINGS}; + +pub struct DisabledButtonController; + +impl Controller> for DisabledButtonController { + fn event( + &mut self, + child: &mut Button, + ctx: &mut EventCtx, + event: &Event, + data: &mut T, + env: &Env, + ) { + if !ctx.is_disabled() { + ctx.set_disabled(true); + ctx.request_paint(); + } + child.event(ctx, event, data, env) + } + + fn update( + &mut self, + child: &mut Button, + ctx: &mut UpdateCtx, + old_data: &T, + data: &T, + env: &Env, + ) { + if !ctx.is_disabled() { + ctx.set_disabled(true); + ctx.request_paint(); + } + child.update(ctx, old_data, data, env) + } +} + +pub struct AutoScrollController; + +impl> Controller> for AutoScrollController { + fn update( + &mut self, + child: &mut Scroll, + ctx: &mut UpdateCtx, + old_data: &T, + data: &T, + env: &Env, + ) { + if !ctx.is_disabled() { + let size = child.child_size(); + let end_region = Rect::new(size.width - 1., size.height - 1., size.width, size.height); + child.scroll_to(ctx, end_region); + } + child.update(ctx, old_data, data, env) + } +} + +/// A controller that submits the command to save settings every time its widget's +/// data changes. +pub struct SaveSettingsController; + +impl> Controller for SaveSettingsController { + fn update( + &mut self, + child: &mut W, + ctx: &mut UpdateCtx, + old_data: &State, + data: &State, + env: &Env, + ) { + // Only filter for the values that actually go into the settings file. 
+ if old_data.mods != data.mods + || old_data.game_dir != data.game_dir + || old_data.data_dir != data.data_dir + { + ctx.submit_command(ACTION_START_SAVE_SETTINGS); + } + child.update(ctx, old_data, data, env) + } +} diff --git a/crates/dtmm/src/ui/widget/fill_container.rs b/crates/dtmm/src/ui/widget/fill_container.rs new file mode 100644 index 0000000..540715e --- /dev/null +++ b/crates/dtmm/src/ui/widget/fill_container.rs @@ -0,0 +1,63 @@ +use std::f64::INFINITY; + +use druid::widget::prelude::*; +use druid::{Point, WidgetPod}; + +pub struct FillContainer { + child: WidgetPod>>, +} + +impl FillContainer { + pub fn new(child: impl Widget + 'static) -> Self { + Self { + child: WidgetPod::new(child).boxed(), + } + } +} + +impl Widget for FillContainer { + #[tracing::instrument(name = "FillContainer", level = "trace", skip_all)] + fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) { + self.child.event(ctx, event, data, env); + } + + #[tracing::instrument(name = "FillContainer", level = "trace", skip_all)] + fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) { + self.child.lifecycle(ctx, event, data, env) + } + + #[tracing::instrument(name = "FillContainer", level = "trace", skip_all)] + fn update(&mut self, ctx: &mut UpdateCtx, _: &T, data: &T, env: &Env) { + self.child.update(ctx, data, env); + } + + #[tracing::instrument(name = "FillContainer", level = "trace", skip_all)] + fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size { + bc.debug_check("FillContainer"); + + let child_size = self.child.layout(ctx, bc, data, env); + + let w = if bc.is_width_bounded() { + INFINITY + } else { + child_size.width + }; + + let h = if bc.is_height_bounded() { + INFINITY + } else { + child_size.height + }; + + let my_size = bc.constrain(Size::new(w, h)); + + self.child.set_origin(ctx, Point::new(0.0, 0.0)); + tracing::trace!("Computed layout: size={}", my_size); + my_size + } + + #[tracing::instrument(name = "FillContainer", level = "trace", skip_all)] + fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) { + self.child.paint(ctx, data, env); + } +} diff --git a/crates/dtmm/src/ui/widget/mod.rs b/crates/dtmm/src/ui/widget/mod.rs new file mode 100644 index 0000000..ebb634e --- /dev/null +++ b/crates/dtmm/src/ui/widget/mod.rs @@ -0,0 +1,38 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use druid::text::Formatter; +use druid::{Data, Widget}; + +pub mod controller; + +pub trait ExtraWidgetExt: Widget + Sized + 'static {} + +impl + 'static> ExtraWidgetExt for W {} + +pub(crate) struct PathBufFormatter; + +impl PathBufFormatter { + pub fn new() -> Self { + Self {} + } +} + +impl Formatter> for PathBufFormatter { + fn format(&self, value: &Arc) -> String { + value.display().to_string() + } + + fn validate_partial_input( + &self, + _input: &str, + _sel: &druid::text::Selection, + ) -> druid::text::Validation { + druid::text::Validation::success() + } + + fn value(&self, input: &str) -> Result, druid::text::ValidationError> { + let p = PathBuf::from(input); + Ok(Arc::new(p)) + } +} diff --git a/crates/dtmm/src/ui/widget/table_select.rs b/crates/dtmm/src/ui/widget/table_select.rs new file mode 100644 index 0000000..00321f8 --- /dev/null +++ b/crates/dtmm/src/ui/widget/table_select.rs @@ -0,0 +1,73 @@ +use druid::widget::{Controller, Flex}; +use druid::{Data, Widget}; + +pub struct TableSelect { + widget: Flex, + controller: TableSelectController, +} + +impl TableSelect { + pub fn new(values: impl 
IntoIterator + 'static)>) -> Self { + todo!(); + } +} + +impl Widget for TableSelect { + fn event( + &mut self, + ctx: &mut druid::EventCtx, + event: &druid::Event, + data: &mut T, + env: &druid::Env, + ) { + todo!() + } + + fn lifecycle( + &mut self, + ctx: &mut druid::LifeCycleCtx, + event: &druid::LifeCycle, + data: &T, + env: &druid::Env, + ) { + todo!() + } + + fn update(&mut self, ctx: &mut druid::UpdateCtx, old_data: &T, data: &T, env: &druid::Env) { + todo!() + } + + fn layout( + &mut self, + ctx: &mut druid::LayoutCtx, + bc: &druid::BoxConstraints, + data: &T, + env: &druid::Env, + ) -> druid::Size { + todo!() + } + + fn paint(&mut self, ctx: &mut druid::PaintCtx, data: &T, env: &druid::Env) { + todo!() + } +} + +struct TableSelectController { + inner: T, +} + +impl TableSelectController {} + +impl Controller> for TableSelectController {} + +pub struct TableItem { + inner: dyn Widget, +} + +impl TableItem { + pub fn new(inner: impl Widget) -> Self { + todo!(); + } +} + +impl Widget for TableItem {} diff --git a/crates/dtmm/src/ui/window/main.rs b/crates/dtmm/src/ui/window/main.rs new file mode 100644 index 0000000..a0ccaa2 --- /dev/null +++ b/crates/dtmm/src/ui/window/main.rs @@ -0,0 +1,316 @@ +use druid::im::Vector; +use druid::lens; +use druid::widget::{ + Button, Checkbox, CrossAxisAlignment, Flex, Label, LineBreaking, List, MainAxisAlignment, + Maybe, Scroll, SizedBox, Split, TextBox, ViewSwitcher, +}; +use druid::{ + Color, FileDialogOptions, FileSpec, FontDescriptor, FontFamily, Key, LensExt, SingleUse, + TextAlignment, Widget, WidgetExt, WindowDesc, +}; + +use crate::state::{ + ModInfo, State, View, ACTION_ADD_MOD, ACTION_SELECTED_MOD_DOWN, ACTION_SELECTED_MOD_UP, + ACTION_SELECT_MOD, ACTION_START_DELETE_SELECTED_MOD, ACTION_START_DEPLOY, + ACTION_START_RESET_DEPLOYMENT, +}; +use crate::ui::theme; +use crate::ui::widget::controller::{AutoScrollController, SaveSettingsController}; +use crate::ui::widget::PathBufFormatter; + +const TITLE: &str = "Darktide Mod Manager"; +const WINDOW_SIZE: (f64, f64) = (1080., 720.); +const MOD_DETAILS_MIN_WIDTH: f64 = 325.; + +const KEY_MOD_LIST_ITEM_BG_COLOR: Key = Key::new("dtmm.mod-list.item.background-color"); + +pub(crate) fn new() -> WindowDesc { + WindowDesc::new(build_window()) + .title(TITLE) + .window_size(WINDOW_SIZE) +} + +fn build_top_bar() -> impl Widget { + Flex::row() + .must_fill_main_axis(true) + .main_axis_alignment(MainAxisAlignment::SpaceBetween) + .with_child( + Flex::row() + .with_child( + Button::new("Mods") + .on_click(|_ctx, state: &mut State, _env| state.current_view = View::Mods), + ) + .with_default_spacer() + .with_child( + Button::new("Settings").on_click(|_ctx, state: &mut State, _env| { + state.current_view = View::Settings; + }), + ), + ) + .with_child( + Flex::row() + .with_child( + Button::new("Deploy Mods") + .on_click(|ctx, _state: &mut State, _env| { + ctx.submit_command(ACTION_START_DEPLOY); + }) + .disabled_if(|data, _| { + data.is_deployment_in_progress || data.is_reset_in_progress + }), + ) + .with_default_spacer() + .with_child( + Button::new("Reset Game") + .on_click(|ctx, _state: &mut State, _env| { + ctx.submit_command(ACTION_START_RESET_DEPLOYMENT); + }) + .disabled_if(|data, _| { + data.is_deployment_in_progress || data.is_reset_in_progress + }), + ), + ) + .padding(theme::TOP_BAR_INSETS) + .background(theme::TOP_BAR_BACKGROUND_COLOR) + // TODO: Add bottom border. 
Need a custom widget for that, as the built-in only provides + // uniform borders on all sides +} + +fn build_mod_list() -> impl Widget { + let list = List::new(|| { + let checkbox = + Checkbox::new("").lens(lens!((usize, ModInfo, bool), 1).then(ModInfo::enabled)); + let name = Label::raw().lens(lens!((usize, ModInfo, bool), 1).then(ModInfo::name)); + + Flex::row() + .must_fill_main_axis(true) + .with_child(checkbox) + .with_child(name) + .padding((5.0, 4.0)) + .background(KEY_MOD_LIST_ITEM_BG_COLOR) + .on_click(|ctx, (i, _, _), _env| ctx.submit_command(ACTION_SELECT_MOD.with(*i))) + .env_scope(|env, (i, _, selected)| { + if *selected { + env.set(KEY_MOD_LIST_ITEM_BG_COLOR, Color::NAVY); + } else if (i % 2) == 1 { + env.set(KEY_MOD_LIST_ITEM_BG_COLOR, Color::WHITE.with_alpha(0.05)); + } else { + env.set(KEY_MOD_LIST_ITEM_BG_COLOR, Color::TRANSPARENT); + } + }) + }); + + let scroll = Scroll::new(list).vertical().lens(lens::Identity.map( + |state: &State| { + state + .mods + .iter() + .enumerate() + .map(|(i, val)| (i, val.clone(), Some(i) == state.selected_mod_index)) + .collect::>() + }, + |state, infos| { + infos.into_iter().for_each(|(i, info, _)| { + state.mods.set(i, info); + }); + }, + )); + + Flex::column() + .must_fill_main_axis(true) + .with_child(Flex::row()) + .with_flex_child(scroll, 1.0) +} + +fn build_mod_details_buttons() -> impl Widget { + let button_move_up = Button::new("Move Up") + .on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_UP)) + .disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_up()); + + let button_move_down = Button::new("Move Down") + .on_click(|ctx, _state, _env| ctx.submit_command(ACTION_SELECTED_MOD_DOWN)) + .disabled_if(|state: &State, _env: &druid::Env| !state.can_move_mod_down()); + + let button_toggle_mod = Maybe::new( + || { + Button::dynamic(|enabled, _env| { + if *enabled { + "Disable Mod".into() + } else { + "Enable Mod".into() + } + }) + .on_click(|_ctx, enabled: &mut bool, _env| { + *enabled = !(*enabled); + }) + .lens(ModInfo::enabled) + }, + // TODO: Gray out + || Button::new("Enable Mod"), + ) + .disabled_if(|info: &Option, _env: &druid::Env| info.is_none()) + .lens(State::selected_mod); + + let button_add_mod = Button::new("Add Mod").on_click(|ctx, _state: &mut State, _env| { + let zip = FileSpec::new("Zip file", &["zip"]); + let opts = FileDialogOptions::new() + .allowed_types(vec![zip]) + .default_type(zip) + .name_label("Mod Archive") + .title("Choose a mod to add") + .accept_command(ACTION_ADD_MOD); + ctx.submit_command(druid::commands::SHOW_OPEN_PANEL.with(opts)) + }); + + let button_delete_mod = Button::new("Delete Mod") + .on_click(|ctx, data: &mut Option, _env| { + if let Some(info) = data { + ctx.submit_command( + ACTION_START_DELETE_SELECTED_MOD.with(SingleUse::new(info.clone())), + ); + } + }) + .disabled_if(|info: &Option, _env: &druid::Env| info.is_none()) + .lens(State::selected_mod); + + Flex::column() + .cross_axis_alignment(CrossAxisAlignment::Center) + .with_child( + Flex::row() + .main_axis_alignment(MainAxisAlignment::End) + .with_child(button_move_up) + .with_default_spacer() + .with_child(button_move_down), + ) + .with_default_spacer() + .with_child( + Flex::row() + .main_axis_alignment(MainAxisAlignment::End) + .with_child(button_toggle_mod) + .with_default_spacer() + .with_child(button_add_mod) + .with_default_spacer() + .with_child(button_delete_mod), + ) + .expand_width() +} + +fn build_mod_details_info() -> impl Widget { + Maybe::new( + || { + let name = Label::raw() + 
.with_text_alignment(TextAlignment::Center) + .with_text_size(24.) + // Force the label to take up the entire details' pane width, + // so that we can center-align it. + .expand_width() + .lens(ModInfo::name); + let description = Label::raw() + .with_line_break_mode(LineBreaking::WordWrap) + .lens(ModInfo::description); + + Flex::column() + .cross_axis_alignment(CrossAxisAlignment::Start) + .main_axis_alignment(MainAxisAlignment::Start) + .with_child(name) + .with_spacer(4.) + .with_child(description) + }, + Flex::column, + ) + .padding((4., 4.)) + .lens(State::selected_mod) +} + +fn build_mod_details() -> impl Widget { + Flex::column() + .must_fill_main_axis(true) + .cross_axis_alignment(CrossAxisAlignment::Start) + .main_axis_alignment(MainAxisAlignment::SpaceBetween) + .with_flex_child(build_mod_details_info(), 1.0) + .with_child(build_mod_details_buttons().padding(4.)) +} + +fn build_view_mods() -> impl Widget { + Split::columns(build_mod_list(), build_mod_details()) + .split_point(0.75) + .min_size(0.0, MOD_DETAILS_MIN_WIDTH) + .solid_bar(true) + .bar_size(2.0) + .draggable(true) +} + +fn build_view_settings() -> impl Widget { + let data_dir_setting = Flex::row() + .must_fill_main_axis(true) + .main_axis_alignment(MainAxisAlignment::Start) + .with_child(Label::new("Data Directory:")) + .with_default_spacer() + .with_flex_child( + TextBox::new() + .with_formatter(PathBufFormatter::new()) + .expand_width() + .lens(State::data_dir), + 1., + ) + .expand_width(); + + let game_dir_setting = Flex::row() + .must_fill_main_axis(true) + .main_axis_alignment(MainAxisAlignment::Start) + .with_child(Label::new("Game Directory:")) + .with_default_spacer() + .with_flex_child( + TextBox::new() + .with_formatter(PathBufFormatter::new()) + .expand_width() + .lens(State::game_dir), + 1., + ) + .expand_width(); + + let content = Flex::column() + .must_fill_main_axis(true) + .cross_axis_alignment(CrossAxisAlignment::Start) + .with_child(data_dir_setting) + .with_default_spacer() + .with_child(game_dir_setting); + + SizedBox::new(content) + .width(800.) + .expand_height() + .padding(5.) +} + +fn build_main() -> impl Widget { + ViewSwitcher::new( + |state: &State, _| state.current_view, + |selector, _, _| match selector { + View::Mods => Box::new(build_view_mods()), + View::Settings => Box::new(build_view_settings()), + }, + ) +} + +fn build_log_view() -> impl Widget { + let font = FontDescriptor::new(FontFamily::MONOSPACE); + let label = Label::raw() + .with_font(font) + .with_line_break_mode(LineBreaking::WordWrap) + .lens(State::log) + .padding(4.) 
+ .scroll() + .vertical() + .controller(AutoScrollController); + + SizedBox::new(label).expand_width().height(128.0) +} + +fn build_window() -> impl Widget { + // TODO: Add borders between the sections + Flex::column() + .must_fill_main_axis(true) + .with_child(build_top_bar()) + .with_flex_child(build_main(), 1.0) + .with_child(build_log_view()) + .controller(SaveSettingsController) +} diff --git a/crates/dtmm/src/util/config.rs b/crates/dtmm/src/util/config.rs new file mode 100644 index 0000000..d2ae44c --- /dev/null +++ b/crates/dtmm/src/util/config.rs @@ -0,0 +1,161 @@ +use std::io::ErrorKind; +use std::path::PathBuf; +use std::{fs, path::Path}; + +use clap::{parser::ValueSource, ArgMatches}; +use color_eyre::{eyre::Context, Result}; +use serde::{Deserialize, Serialize}; + +use crate::state::{ModInfo, State}; + +#[derive(Clone, Debug, Serialize)] +pub(crate) struct LoadOrderEntrySerialize<'a> { + pub id: &'a String, + pub enabled: bool, +} + +impl<'a> From<&'a ModInfo> for LoadOrderEntrySerialize<'a> { + fn from(info: &'a ModInfo) -> Self { + Self { + id: &info.id, + enabled: info.enabled, + } + } +} + +#[derive(Debug, Serialize)] +pub(crate) struct ConfigSerialize<'a> { + game_dir: &'a Path, + data_dir: &'a Path, + mod_order: Vec>, +} + +impl<'a> From<&'a State> for ConfigSerialize<'a> { + fn from(state: &'a State) -> Self { + Self { + game_dir: &state.game_dir, + data_dir: &state.data_dir, + mod_order: state + .mods + .iter() + .map(LoadOrderEntrySerialize::from) + .collect(), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub(crate) struct LoadOrderEntry { + pub id: String, + pub enabled: bool, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub(crate) struct Config { + #[serde(skip)] + pub path: PathBuf, + pub data_dir: Option, + pub game_dir: Option, + #[serde(default)] + pub mod_order: Vec, +} + +#[cfg(not(arget_os = "windows"))] +pub fn get_default_config_path() -> PathBuf { + let config_dir = std::env::var("XDG_CONFIG_DIR").unwrap_or_else(|_| { + let home = std::env::var("HOME").unwrap_or_else(|_| { + let user = std::env::var("USER").expect("user env variable not set"); + format!("/home/{user}") + }); + format!("{home}/.config") + }); + + PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg") +} + +#[cfg(target_os = "windows")] +pub fn get_default_config_path() -> PathBuf { + let config_dir = std::env::var("APPDATA").expect("appdata env var not set"); + PathBuf::from(config_dir).join("dtmm").join("dtmm.cfg") +} + +#[cfg(not(arget_os = "windows"))] +pub fn get_default_data_dir() -> PathBuf { + let data_dir = std::env::var("XDG_DATA_DIR").unwrap_or_else(|_| { + let home = std::env::var("HOME").unwrap_or_else(|_| { + let user = std::env::var("USER").expect("user env variable not set"); + format!("/home/{user}") + }); + format!("{home}/.local/share") + }); + + PathBuf::from(data_dir).join("dtmm") +} + +#[cfg(target_os = "windows")] +pub fn get_default_data_dir() -> PathBuf { + let data_dir = std::env::var("APPDATA").expect("appdata env var not set"); + PathBuf::from(data_dir).join("dtmm") +} + +#[tracing::instrument(skip(matches),fields(path = ?matches.get_one::("config")))] +pub(crate) fn read_config
<P>
(default: P, matches: &ArgMatches) -> Result +where + P: Into + std::fmt::Debug, +{ + let path = matches + .get_one::("config") + .expect("argument missing despite default"); + let default_path = default.into(); + + match fs::read(path) { + Ok(data) => { + let data = String::from_utf8(data).wrap_err_with(|| { + format!("config file {} contains invalid UTF-8", path.display()) + })?; + let mut cfg: Config = serde_sjson::from_str(&data) + .wrap_err_with(|| format!("invalid config file {}", path.display()))?; + + cfg.path = path.clone(); + Ok(cfg) + } + Err(err) if err.kind() == ErrorKind::NotFound => { + if matches.value_source("config") != Some(ValueSource::DefaultValue) { + return Err(err) + .wrap_err_with(|| format!("failed to read config file {}", path.display()))?; + } + + { + let parent = default_path + .parent() + .expect("a file path always has a parent directory"); + fs::create_dir_all(parent).wrap_err_with(|| { + format!("failed to create directories {}", parent.display()) + })?; + } + + let config = Config { + path: default_path, + data_dir: Some(get_default_data_dir()), + game_dir: None, + mod_order: Vec::new(), + }; + + { + let data = serde_sjson::to_string(&config) + .wrap_err("failed to serialize default config value")?; + fs::write(&config.path, data).wrap_err_with(|| { + format!( + "failed to write default config to {}", + config.path.display() + ) + })?; + } + + Ok(config) + } + Err(err) => { + Err(err).wrap_err_with(|| format!("failed to read config file {}", path.display())) + } + } +} diff --git a/crates/dtmm/src/util/log.rs b/crates/dtmm/src/util/log.rs new file mode 100644 index 0000000..e6a019e --- /dev/null +++ b/crates/dtmm/src/util/log.rs @@ -0,0 +1,65 @@ +use tokio::sync::mpsc::UnboundedSender; +use tracing_error::ErrorLayer; +use tracing_subscriber::filter::FilterFn; +use tracing_subscriber::fmt; +use tracing_subscriber::fmt::format::debug_fn; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; + +pub struct ChannelWriter { + tx: UnboundedSender, +} + +impl ChannelWriter { + pub fn new(tx: UnboundedSender) -> Self { + Self { tx } + } +} + +impl std::io::Write for ChannelWriter { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + let tx = self.tx.clone(); + let string = String::from_utf8_lossy(buf).to_string(); + + // The `send` errors when the receiving end has closed. + // But there's not much we can do at that point, so we just ignore it. 
+ let _ = tx.send(string); + + Ok(buf.len()) + } + + fn flush(&mut self) -> std::io::Result<()> { + Ok(()) + } +} + +pub fn create_tracing_subscriber(tx: UnboundedSender) { + let env_layer = if cfg!(debug_assertions) { + EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")) + } else { + EnvFilter::new("error,dtmm=info") + }; + + let stdout_layer = if cfg!(debug_assertions) { + let layer = fmt::layer().pretty(); + Some(layer) + } else { + None + }; + + let channel_layer = fmt::layer() + // TODO: Re-enable and implement a formatter for the Druid widget + .with_ansi(false) + .event_format(dtmt_shared::Formatter) + .fmt_fields(debug_fn(dtmt_shared::format_fields)) + .with_writer(move || ChannelWriter::new(tx.clone())) + .with_filter(FilterFn::new(dtmt_shared::filter_fields)); + + tracing_subscriber::registry() + .with(env_layer) + .with(channel_layer) + .with(stdout_layer) + .with(ErrorLayer::new(fmt::format::Pretty::default())) + .init(); +} diff --git a/crates/dtmt/Cargo.toml b/crates/dtmt/Cargo.toml index 73d2d72..f20e062 100644 --- a/crates/dtmt/Cargo.toml +++ b/crates/dtmt/Cargo.toml @@ -5,27 +5,30 @@ edition = "2021" [dependencies] clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "unicode"] } +cli-table = { version = "0.4.7", default-features = false, features = ["derive"] } color-eyre = "0.6.2" +confy = "0.5.1" csv-async = { version = "1.2.4", features = ["tokio", "serde"] } -sdk = { path = "../../lib/sdk", version = "0.2.0" } +dtmt-shared = { path = "../../lib/dtmt-shared", version = "*" } futures = "0.3.25" futures-util = "0.3.24" glob = "0.3.0" libloading = "0.7.4" nanorand = "0.7.0" +oodle = { path = "../../lib/oodle", version = "*" } pin-project-lite = "0.2.9" -serde = { version = "1.0.147", features = ["derive"] } -oodle-sys = { path = "../../lib/oodle-sys", version = "*" } +promptly = "0.3.1" +sdk = { path = "../../lib/sdk", version = "0.2.0" } serde_sjson = { path = "../../lib/serde_sjson", version = "*" } -tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] } +serde = { version = "1.0.147", features = ["derive"] } +string_template = "0.2.1" tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] } -tracing = { version = "0.1.37", features = ["async-await"] } +tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] } tracing-error = "0.2.0" tracing-subscriber = { version = "0.3.16", features = ["env-filter"] } -confy = "0.5.1" +tracing = { version = "0.1.37", features = ["async-await"] } zip = "0.6.3" -string_template = "0.2.1" -promptly = "0.3.1" +path-clean = "1.0.1" [dev-dependencies] tempfile = "3.3.0" diff --git a/crates/dtmt/README.adoc b/crates/dtmt/README.adoc new file mode 100644 index 0000000..4304805 --- /dev/null +++ b/crates/dtmt/README.adoc @@ -0,0 +1,32 @@ += Darktide Mod Tools (DTMT) +:idprefix: +:idseparator: +:toc: macro +:toclevels: 1 +:!toc-title: +:caution-caption: :fire: +:important-caption: :exclamtion: +:note-caption: :paperclip: +:tip-caption: :bulb: +:warning-caption: :warning: + +A set of tools to develop mods for the newest generation of the Bitsquid game engine that powers the game _Warhammer 40.000: Darktide_. + +== Quickstart + +1. Head to the latest https://git.sclu1034.dev/bitsquid_dt/dtmt/releases/[release] and download the `dtmt` binary for your platform. +2. Place the binary and `dictionary.csv` next to each other. +3. 
Open a command prompt, navigate to the downloaded binary and run `dtmt.exe help`. +4. Use the `help` command (it works for subcommands, too) and the https://git.sclu1034.dev/bitsquid_dt/dtmt/wiki/CLI-Reference[CLI Reference]. + +== Runtime dependencies + +The LuaJit decompiler (short "ljd") is used to decompile Lua files. A version tailored specifically to Bitsquid may be found here: https://github.com/Aussiemon/ljd. + +A custom executable location may be passed via the `--ljd` flag during extraction, otherwise decompilation expects `ljd` to be found via the `PATH` environmental variable. + +== Building + +1. Install Rust from https://www.rust-lang.org/learn/get-started[rust-lang.org] or via the preferred means for your system. +2. Download or clone this source code. Make sure to include the submodules in `lib/`. +3. Run `cargo build`. diff --git a/crates/dtmt/src/cmd/build.rs b/crates/dtmt/src/cmd/build.rs index 618d9d2..f077094 100644 --- a/crates/dtmt/src/cmd/build.rs +++ b/crates/dtmt/src/cmd/build.rs @@ -4,11 +4,11 @@ use std::sync::Arc; use clap::{value_parser, Arg, ArgMatches, Command}; use color_eyre::eyre::{self, Context, Result}; use color_eyre::{Help, Report}; +use dtmt_shared::ModConfig; use futures::future::try_join_all; use futures::StreamExt; use sdk::filetype::package::Package; use sdk::{Bundle, BundleFile}; -use serde::Deserialize; use tokio::fs::{self, File}; use tokio::io::AsyncReadExt; @@ -25,7 +25,7 @@ pub(crate) fn command_definition() -> Command { .value_parser(value_parser!(PathBuf)) .help( "The path to the project to build. \ - If omitted, dtmt will search from the current working directory upward.", + If omitted, dtmt will search from the current working directory upward.", ), ) .arg(Arg::new("oodle").long("oodle").help( @@ -36,16 +36,8 @@ pub(crate) fn command_definition() -> Command { )) } -#[derive(Debug, Default, Deserialize)] -struct ProjectConfig { - #[serde(skip)] - dir: PathBuf, - name: String, - packages: Vec, -} - #[tracing::instrument] -async fn find_project_config(dir: Option) -> Result { +async fn find_project_config(dir: Option) -> Result { let (path, mut file) = if let Some(path) = dir { let file = File::open(&path.join(PROJECT_CONFIG_NAME)) .await @@ -81,9 +73,12 @@ async fn find_project_config(dir: Option) -> Result { }; let mut buf = String::new(); - file.read_to_string(&mut buf).await?; + file.read_to_string(&mut buf) + .await + .wrap_err("invalid UTF-8")?; - let mut cfg: ProjectConfig = serde_sjson::from_str(&buf)?; + let mut cfg: ModConfig = + serde_sjson::from_str(&buf).wrap_err("failed to deserialize mod config")?; cfg.dir = path; Ok(cfg) } @@ -169,19 +164,79 @@ where .wrap_err("failed to build bundle") } -#[tracing::instrument(skip_all)] -pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - unsafe { - oodle_sys::init(matches.get_one::("oodle")); +fn normalize_file_path>(path: P) -> Result { + let path = path.as_ref(); + + if path.is_absolute() || path.has_root() { + let err = eyre::eyre!("path is absolute: {}", path.display()); + return Err(err).with_suggestion(|| "Specify a relative file path.".to_string()); } + let path = path_clean::clean(path); + + if path.starts_with("..") { + eyre::bail!("path starts with a parent component: {}", path.display()); + } + + Ok(path) +} + +#[tracing::instrument(skip_all)] +pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { let cfg = { let dir = matches.get_one::("directory").cloned(); - find_project_config(dir).await? 
+ let mut cfg = find_project_config(dir).await?; + + cfg.resources.init = normalize_file_path(cfg.resources.init) + .wrap_err("invalid config field 'resources.init'") + .with_suggestion(|| { + "Specify a file path relative to and child path of the \ + directory where 'dtmt.cfg' is." + .to_string() + }) + .with_suggestion(|| { + "Use 'dtmt new' in a separate directory to generate \ + a valid mod template." + .to_string() + })?; + + if let Some(path) = cfg.resources.data { + let path = normalize_file_path(path) + .wrap_err("invalid config field 'resources.data'") + .with_suggestion(|| { + "Specify a file path relative to and child path of the \ + directory where 'dtmt.cfg' is." + .to_string() + }) + .with_suggestion(|| { + "Use 'dtmt new' in a separate directory to generate \ + a valid mod template." + .to_string() + })?; + cfg.resources.data = Some(path); + } + + if let Some(path) = cfg.resources.localization { + let path = normalize_file_path(path) + .wrap_err("invalid config field 'resources.localization'") + .with_suggestion(|| { + "Specify a file path relative to and child path of the \ + directory where 'dtmt.cfg' is." + .to_string() + }) + .with_suggestion(|| { + "Use 'dtmt new' in a separate directory to generate \ + a valid mod template." + .to_string() + })?; + cfg.resources.localization = Some(path); + } + + cfg }; let dest = { - let mut path = PathBuf::from(&cfg.name); + let mut path = PathBuf::from(&cfg.id); path.set_extension("zip"); Arc::new(path) }; @@ -210,21 +265,24 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> }) }); - let bundles = try_join_all(tasks).await?; + let bundles = try_join_all(tasks) + .await + .wrap_err("failed to build mod bundles")?; - let mod_file = { - let mut path = cfg.dir.join(&cfg.name); - path.set_extension("mod"); - fs::read(path).await? + let config_file = { + let path = cfg.dir.join("dtmt.cfg"); + fs::read(&path) + .await + .wrap_err_with(|| format!("failed to read mod config at {}", path.display()))? }; { let dest = dest.clone(); - let name = cfg.name.clone(); + let id = cfg.id.clone(); tokio::task::spawn_blocking(move || { - let mut archive = Archive::new(name); + let mut archive = Archive::new(id); - archive.add_mod_file(mod_file); + archive.add_config(config_file); for bundle in bundles { archive.add_bundle(bundle); diff --git a/crates/dtmt/src/cmd/bundle/inject.rs b/crates/dtmt/src/cmd/bundle/inject.rs index 6d583b7..9c47686 100644 --- a/crates/dtmt/src/cmd/bundle/inject.rs +++ b/crates/dtmt/src/cmd/bundle/inject.rs @@ -58,14 +58,14 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { Bundle::from_binary(&ctx, name, binary).wrap_err("Failed to open bundle file")? 
}; - if let Some(_name) = matches.get_one::("replace") { + if let Some(name) = matches.get_one::("replace") { let mut file = File::open(&file_path) .await .wrap_err_with(|| format!("failed to open '{}'", file_path.display()))?; if let Some(variant) = bundle .files_mut() - .filter(|file| file.matches_name(_name)) + .filter(|file| file.matches_name(name.clone())) // TODO: Handle file variants .find_map(|file| file.variants_mut().next()) { @@ -75,7 +75,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { .wrap_err("failed to read input file")?; variant.set_data(data); } else { - let err = eyre::eyre!("No file '{}' in this bundle.", _name) + let err = eyre::eyre!("No file '{}' in this bundle.", name) .with_suggestion(|| { format!( "Run '{} bundle list {}' to list the files in this bundle.", @@ -87,7 +87,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { format!( "Use '{} bundle inject --add {} {} {}' to add it as a new file", clap::crate_name!(), - _name, + name, bundle_path.display(), file_path.display() ) diff --git a/crates/dtmt/src/cmd/bundle/list.rs b/crates/dtmt/src/cmd/bundle/list.rs index ec869ba..b985ad2 100644 --- a/crates/dtmt/src/cmd/bundle/list.rs +++ b/crates/dtmt/src/cmd/bundle/list.rs @@ -50,13 +50,13 @@ where match fmt { OutputFormat::Text => { - println!("Bundle: {}", bundle.name()); + println!("Bundle: {}", bundle.name().display()); for f in bundle.files().iter() { if f.variants().len() != 1 { let err = eyre::eyre!("Expected exactly one version for this file.") .with_section(|| f.variants().len().to_string().header("Bundle:")) - .with_section(|| bundle.name().clone().header("Bundle:")); + .with_section(|| bundle.name().display().header("Bundle:")); tracing::error!("{:#}", err); } @@ -64,7 +64,7 @@ where let v = &f.variants()[0]; println!( "\t{}.{}: {} bytes", - f.base_name(), + f.base_name().display(), f.file_type().ext_name(), v.size() ); diff --git a/crates/dtmt/src/cmd/bundle/mod.rs b/crates/dtmt/src/cmd/bundle/mod.rs index 03ab3f5..6baf860 100644 --- a/crates/dtmt/src/cmd/bundle/mod.rs +++ b/crates/dtmt/src/cmd/bundle/mod.rs @@ -24,10 +24,6 @@ pub(crate) fn command_definition() -> Command { #[tracing::instrument(skip_all)] pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { - unsafe { - oodle_sys::init(matches.get_one::("oodle")); - } - match matches.subcommand() { Some(("decompress", sub_matches)) => decompress::run(ctx, sub_matches).await, Some(("extract", sub_matches)) => extract::run(ctx, sub_matches).await, diff --git a/crates/dtmt/src/cmd/dictionary.rs b/crates/dtmt/src/cmd/dictionary.rs index 22a225b..20ec1fc 100644 --- a/crates/dtmt/src/cmd/dictionary.rs +++ b/crates/dtmt/src/cmd/dictionary.rs @@ -1,8 +1,10 @@ use std::path::PathBuf; use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum}; +use cli_table::{print_stdout, WithTitle}; use color_eyre::eyre::{Context, Result}; use color_eyre::{Help, SectionExt}; +use sdk::murmur::{IdString64, Murmur32, Murmur64}; use tokio::fs::File; use tokio::io::{AsyncBufReadExt, BufReader}; use tokio_stream::wrappers::LinesStream; @@ -27,6 +29,40 @@ impl From for sdk::murmur::HashGroup { } } +impl std::fmt::Display for HashGroup { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + HashGroup::Filename => write!(f, "filename"), + HashGroup::Filetype => write!(f, "filetype"), + HashGroup::Strings => write!(f, "strings"), + HashGroup::Other => write!(f, "other"), + } + } +} + 
+#[derive(cli_table::Table)] +struct TableRow { + #[table(title = "Value")] + value: String, + #[table(title = "Murmur64")] + long: Murmur64, + #[table(title = "Murmur32")] + short: Murmur32, + #[table(title = "Group")] + group: sdk::murmur::HashGroup, +} + +impl From<&sdk::murmur::Entry> for TableRow { + fn from(entry: &sdk::murmur::Entry) -> Self { + Self { + value: entry.value().clone(), + long: entry.long(), + short: entry.short(), + group: entry.group(), + } + } +} + pub(crate) fn command_definition() -> Command { Command::new("dictionary") .about("Manipulate a hash dictionary file.") @@ -43,7 +79,8 @@ pub(crate) fn command_definition() -> Command { .short('g') .long("group") .action(ArgAction::Append) - .value_parser(value_parser!(HashGroup)), + .value_parser(value_parser!(HashGroup)) + .default_values(["other", "filename", "filetype", "strings"]), ), ) .subcommand( @@ -67,6 +104,7 @@ pub(crate) fn command_definition() -> Command { .value_parser(value_parser!(PathBuf)), ), ) + .subcommand(Command::new("show").about("Show the contents of the dictionary")) .subcommand(Command::new("save").about( "Save back the currently loaded dictionary, with hashes pre-computed. \ Pre-computing hashes speeds up loading large dictionaries, as they would \ @@ -78,17 +116,23 @@ pub(crate) fn command_definition() -> Command { pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<()> { match matches.subcommand() { Some(("lookup", sub_matches)) => { - let hash = sub_matches - .get_one::("hash") - .expect("required argument not found"); + let hash = { + let s = sub_matches + .get_one::("hash") + .expect("required argument not found"); + + u64::from_str_radix(s, 16) + .wrap_err("failed to parse argument as hexadecimal string")? + }; let groups = sub_matches .get_many::("group") .unwrap_or_default(); for group in groups { - let value = ctx.lookup_hash(*hash, (*group).into()); - println!("{value}"); + if let IdString64::String(value) = ctx.lookup_hash(hash, (*group).into()) { + println!("{group}: {value}"); + } } Ok(()) @@ -176,6 +220,14 @@ pub(crate) async fn run(mut ctx: sdk::Context, matches: &ArgMatches) -> Result<( .await .wrap_err("Failed to write dictionary to disk") } + Some(("show", _)) => { + let lookup = &ctx.lookup; + let rows: Vec<_> = lookup.entries().iter().map(TableRow::from).collect(); + + print_stdout(rows.with_title())?; + + Ok(()) + } _ => unreachable!( "clap is configured to require a subcommand, and they're all handled above" ), diff --git a/crates/dtmt/src/cmd/new.rs b/crates/dtmt/src/cmd/new.rs index a7a66ca..187706c 100644 --- a/crates/dtmt/src/cmd/new.rs +++ b/crates/dtmt/src/cmd/new.rs @@ -8,15 +8,22 @@ use futures::{StreamExt, TryStreamExt}; use string_template::Template; use tokio::fs::{self, DirBuilder}; -const TEMPLATES: [(&str, &str); 6] = [ +const TEMPLATES: [(&str, &str); 5] = [ ( "dtmt.cfg", - r#"name = "{{name}}" -description = "An elaborate description of my cool game mod!" + r#"id = "{{id}}" +name = "{{name}}" +description = "This is my new mod '{{name}}'!" 
version = "0.1.0" +resources = { + init = "scripts/mods/{{id}}/init" + data = "scripts/mods/{{id}}/data" + localization = "scripts/mods/{{id}}/localization" +} + packages = [ - "packages/{{name}}" + "packages/{{id}}" ] depends = [ @@ -25,50 +32,35 @@ depends = [ "#, ), ( - "{{name}}.mod", - r#"return { - run = function() - fassert(rawget(_G, "new_mod"), "`{{title}}` encountered an error loading the Darktide Mod Framework.") - - new_mod("{{name}}", { - mod_script = "scripts/mods/{{name}}/{{name}}", - mod_data = "scripts/mods/{{name}}/{{name}}_data", - mod_localization = "scripts/mods/{{name}}/{{name}}_localization", - }) - end, - packages = {}, -}"#, - ), - ( - "packages/{{name}}.package", + "packages/{{id}}.package", r#"lua = [ - "scripts/mods/{{name}}/*" + "scripts/mods/{{id}}/*" ] "#, ), ( - "scripts/mods/{{name}}/{{name}}.lua", - r#"local mod = get_mod("{{name}}") + "scripts/mods/{{id}}/init.lua", + r#"local mod = get_mod("{{id}}") -- Your mod code goes here. -- https://vmf-docs.verminti.de "#, ), ( - "scripts/mods/{{name}}/{{name}}_data.lua", - r#"local mod = get_mod("{{name}}") + "scripts/mods/{{id}}/data.lua", + r#"local mod = get_mod("{{id}}") return { - name = "{{title}}", + name = "{{name}}", description = mod:localize("mod_description"), is_togglable = true, }"#, ), ( - "scripts/mods/{{name}}/{{name}}_localization.lua", + "scripts/mods/{{id}}/localization.lua", r#"return { mod_description = { - en = "An elaborate description of my cool game mod!", + en = "This is my new mod '{{name}}'!", }, }"#, ), @@ -78,8 +70,8 @@ pub(crate) fn command_definition() -> Command { Command::new("new") .about("Create a new project") .arg( - Arg::new("title") - .long("title") + Arg::new("name") + .long("name") .help("The display name of the new mod."), ) .arg(Arg::new("root").help( @@ -107,14 +99,14 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> } }; - let title = if let Some(title) = matches.get_one::("title") { - title.clone() + let name = if let Some(name) = matches.get_one::("name") { + name.clone() } else { - promptly::prompt("The mod display name")? + promptly::prompt("The display name")? }; - let name = { - let default = title + let id = { + let default = name .chars() .map(|c| { if c.is_ascii_alphanumeric() { @@ -124,15 +116,14 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> } }) .collect::(); - promptly::prompt_default("The mod identifier name", default)? + promptly::prompt_default("The unique mod ID", default)? 
}; - tracing::debug!(root = %root.display()); - tracing::debug!(title, name); + tracing::debug!(root = %root.display(), name, id); let mut data = HashMap::new(); data.insert("name", name.as_str()); - data.insert("title", title.as_str()); + data.insert("id", id.as_str()); let templates = TEMPLATES .iter() @@ -168,7 +159,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()> tracing::info!( "Created {} files for mod '{}' in '{}'.", TEMPLATES.len(), - title, + name, root.display() ); diff --git a/crates/dtmt/src/main.rs b/crates/dtmt/src/main.rs index 830550d..dc4853e 100644 --- a/crates/dtmt/src/main.rs +++ b/crates/dtmt/src/main.rs @@ -13,9 +13,6 @@ use serde::{Deserialize, Serialize}; use tokio::fs::File; use tokio::io::BufReader; use tokio::sync::RwLock; -use tracing_error::ErrorLayer; -use tracing_subscriber::prelude::*; -use tracing_subscriber::EnvFilter; mod cmd { pub mod build; @@ -62,19 +59,7 @@ async fn main() -> Result<()> { // .subcommand(cmd::watch::command_definition()) .get_matches(); - { - let fmt_layer = tracing_subscriber::fmt::layer().pretty(); - let filter_layer = - EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?; - - tracing_subscriber::registry() - .with(filter_layer) - .with(fmt_layer) - .with(ErrorLayer::new( - tracing_subscriber::fmt::format::Pretty::default(), - )) - .init(); - } + dtmt_shared::create_tracing_subscriber(); // TODO: Move this into a `Context::init` method? let ctx = sdk::Context::new(); diff --git a/crates/dtmt/src/mods/archive.rs b/crates/dtmt/src/mods/archive.rs index 16d74d8..37fec19 100644 --- a/crates/dtmt/src/mods/archive.rs +++ b/crates/dtmt/src/mods/archive.rs @@ -5,14 +5,14 @@ use std::path::{Path, PathBuf}; use color_eyre::eyre::{self, Context}; use color_eyre::Result; -use sdk::murmur::Murmur64; +use sdk::murmur::IdString64; use sdk::Bundle; use zip::ZipWriter; pub struct Archive { name: String, bundles: Vec, - mod_file: Option>, + config_file: Option>, } impl Archive { @@ -20,7 +20,7 @@ impl Archive { Self { name, bundles: Vec::new(), - mod_file: None, + config_file: None, } } @@ -28,18 +28,18 @@ impl Archive { self.bundles.push(bundle) } - pub fn add_mod_file(&mut self, content: Vec) { - self.mod_file = Some(content); + pub fn add_config(&mut self, content: Vec) { + self.config_file = Some(content); } pub fn write
<P>
(&self, path: P) -> Result<()> where P: AsRef, { - let mod_file = self - .mod_file + let config_file = self + .config_file .as_ref() - .ok_or_else(|| eyre::eyre!("Mod file is missing from mod archive"))?; + .ok_or_else(|| eyre::eyre!("Config file is missing in mod archive"))?; let f = File::create(path.as_ref()).wrap_err_with(|| { format!( @@ -54,16 +54,18 @@ impl Archive { let base_path = PathBuf::from(&self.name); { - let mut name = base_path.join(&self.name); - name.set_extension("mod"); + let name = base_path.join("dtmt.cfg"); zip.start_file(name.to_string_lossy(), Default::default())?; - zip.write_all(mod_file)?; + zip.write_all(config_file)?; } let mut file_map = HashMap::new(); for bundle in self.bundles.iter() { - let bundle_name = bundle.name().clone(); + let bundle_name = match bundle.name() { + IdString64::Hash(_) => eyre::bail!("bundle name must be known as string. got hash"), + IdString64::String(s) => s, + }; let map_entry: &mut HashSet<_> = file_map.entry(bundle_name).or_default(); @@ -71,7 +73,7 @@ impl Archive { map_entry.insert(file.name(false, None)); } - let name = Murmur64::hash(bundle.name().as_bytes()); + let name = bundle.name().to_murmur64(); let path = base_path.join(name.to_string().to_ascii_lowercase()); zip.start_file(path.to_string_lossy(), Default::default())?; diff --git a/docs/screenshots/dtmm.png b/docs/screenshots/dtmm.png new file mode 100644 index 0000000..af2a980 Binary files /dev/null and b/docs/screenshots/dtmm.png differ diff --git a/lib/dtmt-shared/Cargo.toml b/lib/dtmt-shared/Cargo.toml new file mode 100644 index 0000000..0f8ed63 --- /dev/null +++ b/lib/dtmt-shared/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "dtmt-shared" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +serde = "1.0.152" +time = { version = "0.3.19", features = ["formatting", "local-offset", "macros"] } +tracing = "0.1.37" +tracing-error = "0.2.0" +tracing-subscriber = "0.3.16" diff --git a/lib/dtmt-shared/README.adoc b/lib/dtmt-shared/README.adoc new file mode 100644 index 0000000..01b26ec --- /dev/null +++ b/lib/dtmt-shared/README.adoc @@ -0,0 +1,13 @@ += dtmt-shared +:idprefix: +:idseparator: +:toc: macro +:toclevels: 1 +:!toc-title: +:caution-caption: :fire: +:important-caption: :exclamtion: +:note-caption: :paperclip: +:tip-caption: :bulb: +:warning-caption: :warning: + +A set of types and functions shared between multiple crates within _Darktide Mod Tools_ that don't fit into the engine SDK. 
diff --git a/lib/dtmt-shared/src/lib.rs b/lib/dtmt-shared/src/lib.rs new file mode 100644 index 0000000..3c8690d --- /dev/null +++ b/lib/dtmt-shared/src/lib.rs @@ -0,0 +1,28 @@ +mod log; + +use std::path::PathBuf; + +pub use log::*; + +#[derive(Clone, Debug, Default, serde::Deserialize)] +pub struct ModConfigResources { + pub init: PathBuf, + #[serde(default)] + pub data: Option, + #[serde(default)] + pub localization: Option, +} + +#[derive(Clone, Debug, Default, serde::Deserialize)] +pub struct ModConfig { + #[serde(skip)] + pub dir: std::path::PathBuf, + pub id: String, + pub name: String, + pub description: String, + pub version: String, + pub packages: Vec, + pub resources: ModConfigResources, + #[serde(default)] + pub depends: Vec, +} diff --git a/lib/dtmt-shared/src/log.rs b/lib/dtmt-shared/src/log.rs new file mode 100644 index 0000000..3c46a4b --- /dev/null +++ b/lib/dtmt-shared/src/log.rs @@ -0,0 +1,87 @@ +use std::fmt::Result; + +use time::format_description::FormatItem; +use time::macros::format_description; +use time::OffsetDateTime; +use tracing::field::Field; +use tracing::{Event, Metadata, Subscriber}; +use tracing_error::ErrorLayer; +use tracing_subscriber::filter::FilterFn; +use tracing_subscriber::fmt::format::{debug_fn, Writer}; +use tracing_subscriber::fmt::{self, FmtContext, FormatEvent, FormatFields}; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::prelude::*; +use tracing_subscriber::registry::LookupSpan; +use tracing_subscriber::EnvFilter; + +pub const TIME_FORMAT: &[FormatItem] = format_description!("[hour]:[minute]:[second]"); + +pub fn format_fields(w: &mut Writer<'_>, field: &Field, val: &dyn std::fmt::Debug) -> Result { + if field.name() == "message" { + write!(w, "{:?}", val) + } else { + Ok(()) + } +} + +pub fn filter_fields(metadata: &Metadata<'_>) -> bool { + metadata + .fields() + .iter() + .any(|field| field.name() == "message") +} + +pub struct Formatter; + +impl FormatEvent for Formatter +where + S: Subscriber + for<'a> LookupSpan<'a>, + N: for<'a> FormatFields<'a> + 'static, +{ + fn format_event( + &self, + ctx: &FmtContext<'_, S, N>, + mut writer: Writer<'_>, + event: &Event<'_>, + ) -> Result { + let meta = event.metadata(); + + let time = OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()); + let time = time.format(TIME_FORMAT).map_err(|_| std::fmt::Error)?; + + write!(writer, "[{}] [{:>5}] ", time, meta.level())?; + + ctx.field_format().format_fields(writer.by_ref(), event)?; + + writeln!(writer) + } +} + +pub fn create_tracing_subscriber() { + let env_layer = + EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::try_new("info").unwrap()); + + let (dev_stdout_layer, prod_stdout_layer, filter_layer) = if cfg!(debug_assertions) { + let fmt_layer = fmt::layer().pretty(); + (Some(fmt_layer), None, None) + } else { + // Creates a layer that + // - only prints events that contain a message + // - does not print fields + // - does not print spans/targets + // - only prints time, not date + let fmt_layer = fmt::layer() + .event_format(Formatter) + .fmt_fields(debug_fn(format_fields)); + + (None, Some(fmt_layer), Some(FilterFn::new(filter_fields))) + }; + + tracing_subscriber::registry() + .with(filter_layer) + .with(env_layer) + .with(dev_stdout_layer) + .with(prod_stdout_layer) + .with(ErrorLayer::new(fmt::format::Pretty::default())) + .init(); +} diff --git a/lib/oodle-sys/.gitignore b/lib/oodle-sys/.gitignore deleted file mode 100644 index 4fffb2f..0000000 --- a/lib/oodle-sys/.gitignore +++ 
/dev/null @@ -1,2 +0,0 @@ -/target -/Cargo.lock diff --git a/lib/oodle-sys/src/lib.rs b/lib/oodle-sys/src/lib.rs deleted file mode 100644 index 8346c5f..0000000 --- a/lib/oodle-sys/src/lib.rs +++ /dev/null @@ -1,77 +0,0 @@ -#![feature(c_size_t)] -#![feature(once_cell)] - -use std::ffi::OsStr; -use std::sync::OnceLock; - -mod library; -mod types; - -pub use library::Library; -pub use library::CHUNK_SIZE; -pub use types::*; - -#[derive(thiserror::Error, Debug)] -pub enum OodleError { - #[error("{0}")] - Oodle(String), - #[error(transparent)] - Library(#[from] libloading::Error), -} - -type Result = std::result::Result; - -static LIB: OnceLock = OnceLock::new(); - -/// Initialize the global library handle that this module's -/// functions operate on. -/// -/// # Safety -/// -/// The safety concerns as described by [`libloading::Library::new`] apply. -pub unsafe fn init>(name: Option
<P>
) { - let lib = match name { - Some(name) => Library::with_name(name), - None => Library::new(), - }; - - let lib = lib.expect("Failed to load library."); - if LIB.set(lib).is_err() { - panic!("Library was already initialized. Did you call `init` twice?"); - } -} - -fn get() -> Result<&'static Library> { - match LIB.get() { - Some(lib) => Ok(lib), - None => { - let err = OodleError::Oodle(String::from("Library has not been initialized, yet.")); - Err(err) - } - } -} - -pub fn decompress( - data: I, - fuzz_safe: OodleLZ_FuzzSafe, - check_crc: OodleLZ_CheckCRC, -) -> Result> -where - I: AsRef<[u8]>, -{ - let lib = get()?; - lib.decompress(data, fuzz_safe, check_crc) -} - -pub fn compress(data: I) -> Result> -where - I: AsRef<[u8]>, -{ - let lib = get()?; - lib.compress(data) -} - -pub fn get_decode_buffer_size(raw_size: usize, corruption_possible: bool) -> Result { - let lib = get()?; - lib.get_decode_buffer_size(raw_size, corruption_possible) -} diff --git a/lib/oodle-sys/src/library.rs b/lib/oodle-sys/src/library.rs deleted file mode 100644 index ef773e4..0000000 --- a/lib/oodle-sys/src/library.rs +++ /dev/null @@ -1,154 +0,0 @@ -use std::{ffi::OsStr, ptr}; - -use libloading::Symbol; - -use super::Result; -use crate::{types::*, OodleError}; - -// Hardcoded chunk size of Bitsquid's bundle compression -pub const CHUNK_SIZE: usize = 512 * 1024; -pub const COMPRESSOR: OodleLZ_Compressor = OodleLZ_Compressor::Kraken; -pub const LEVEL: OodleLZ_CompressionLevel = OodleLZ_CompressionLevel::Optimal2; - -#[cfg(target_os = "windows")] -const OODLE_LIB_NAME: &str = "oo2core_8_win64"; - -#[cfg(target_os = "linux")] -const OODLE_LIB_NAME: &str = "liboo2corelinux64.so"; - -pub struct Library { - inner: libloading::Library, -} - -impl Library { - /// Load the Oodle library by its default name. - /// - /// The default name is platform-specific: - /// - Windows: `oo2core_8_win64` - /// - Linux: `liboo2corelinux64.so` - /// - /// # Safety - /// - /// The safety concerns as described by [`libloading::Library::new`] apply. - pub unsafe fn new() -> Result { - Self::with_name(OODLE_LIB_NAME) - } - - /// Load the Oodle library by the given name or path. - /// - /// See [`libloading::Library::new`] for how the `name` parameter is handled. - /// - /// # Safety - /// - /// The safety concerns as described by [`libloading::Library::new`] apply. 
- pub unsafe fn with_name>(name: P) -> Result { - let inner = libloading::Library::new(name)?; - Ok(Self { inner }) - } - - #[tracing::instrument(skip(self, data))] - pub fn decompress( - &self, - data: I, - fuzz_safe: OodleLZ_FuzzSafe, - check_crc: OodleLZ_CheckCRC, - ) -> Result> - where - I: AsRef<[u8]>, - { - let data = data.as_ref(); - let mut out = vec![0; CHUNK_SIZE]; - - let verbosity = if tracing::enabled!(tracing::Level::INFO) { - OodleLZ_Verbosity::Minimal - } else if tracing::enabled!(tracing::Level::DEBUG) { - OodleLZ_Verbosity::Some - } else if tracing::enabled!(tracing::Level::TRACE) { - OodleLZ_Verbosity::Lots - } else { - OodleLZ_Verbosity::None - }; - - let ret = unsafe { - let decompress: Symbol = self.inner.get(b"OodleLZ_Decompress\0")?; - - decompress( - data.as_ptr() as *const _, - data.len(), - out.as_mut_ptr() as *mut _, - out.len(), - fuzz_safe, - check_crc, - verbosity, - ptr::null_mut(), - 0, - ptr::null_mut(), - ptr::null_mut(), - ptr::null_mut(), - 0, - OodleLZ_Decode_ThreadPhase::UNTHREADED, - ) - }; - - if ret == 0 { - let err = OodleError::Oodle(String::from("Decompression failed.")); - return Err(err); - } - - Ok(out) - } - - #[tracing::instrument(name = "Oodle::compress", skip(self, data))] - pub fn compress(&self, data: I) -> Result> - where - I: AsRef<[u8]>, - { - let mut raw = Vec::from(data.as_ref()); - raw.resize(CHUNK_SIZE, 0); - - // TODO: Query oodle for buffer size - let mut out = vec![0u8; CHUNK_SIZE]; - - let ret = unsafe { - let compress: Symbol = self.inner.get(b"OodleLZ_Compress\0")?; - - compress( - COMPRESSOR, - raw.as_ptr() as *const _, - raw.len(), - out.as_mut_ptr() as *mut _, - LEVEL, - ptr::null_mut(), - 0, - ptr::null_mut(), - ptr::null_mut(), - 0, - ) - }; - - tracing::debug!(compressed_size = ret, "Compressed chunk"); - - if ret == 0 { - let err = OodleError::Oodle(String::from("Compression failed.")); - return Err(err); - } - - out.resize(ret as usize, 0); - - Ok(out) - } - - pub fn get_decode_buffer_size( - &self, - raw_size: usize, - corruption_possible: bool, - ) -> Result { - unsafe { - let f: Symbol = - self.inner.get(b"OodleLZ_GetDecodeBufferSize\0")?; - - let size = f(COMPRESSOR, raw_size, corruption_possible); - Ok(size) - } - } -} diff --git a/lib/oodle-sys/src/types.rs b/lib/oodle-sys/src/types.rs deleted file mode 100644 index 5d306f8..0000000 --- a/lib/oodle-sys/src/types.rs +++ /dev/null @@ -1,197 +0,0 @@ -#![allow(dead_code)] -use core::ffi::{c_char, c_int, c_size_t, c_ulonglong, c_void}; - -// Type definitions taken from Unreal Engine's `oodle2.h` - -#[repr(C)] -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug)] -pub enum OodleLZ_FuzzSafe { - No = 0, - Yes = 1, -} - -impl From for OodleLZ_FuzzSafe { - fn from(value: bool) -> Self { - if value { - Self::Yes - } else { - Self::No - } - } -} - -#[repr(C)] -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug)] -pub enum OodleLZ_CheckCRC { - No = 0, - Yes = 1, - Force32 = 0x40000000, -} - -impl From for OodleLZ_CheckCRC { - fn from(value: bool) -> Self { - if value { - Self::Yes - } else { - Self::No - } - } -} - -#[repr(C)] -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug)] -pub enum OodleLZ_Verbosity { - None = 0, - Minimal = 1, - Some = 2, - Lots = 3, - Force32 = 0x40000000, -} - -#[repr(C)] -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug)] -pub enum OodleLZ_Decode_ThreadPhase { - Phase1 = 1, - Phase2 = 2, - PhaseAll = 3, -} - -impl OodleLZ_Decode_ThreadPhase { - pub const UNTHREADED: Self = 
OodleLZ_Decode_ThreadPhase::PhaseAll; -} - -#[repr(C)] -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug)] -pub enum OodleLZ_Compressor { - Invalid = -1, - // None = memcpy, pass through uncompressed bytes - None = 3, - - // NEW COMPRESSORS: - // Fast decompression and high compression ratios, amazing! - Kraken = 8, - // Leviathan = Kraken's big brother with higher compression, slightly slower decompression. - Leviathan = 13, - // Mermaid is between Kraken & Selkie - crazy fast, still decent compression. - Mermaid = 9, - // Selkie is a super-fast relative of Mermaid. For maximum decode speed. - Selkie = 11, - // Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra) - Hydra = 12, - BitKnit = 10, - // DEPRECATED but still supported - Lzb16 = 4, - Lzna = 7, - Lzh = 0, - Lzhlw = 1, - Lznib = 2, - Lzblw = 5, - Lza = 6, - Count = 14, - Force32 = 0x40000000, -} - -#[repr(C)] -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug)] -pub enum OodleLZ_CompressionLevel { - // don't compress, just copy raw bytes - None = 0, - // super fast mode, lower compression ratio - SuperFast = 1, - // fastest LZ mode with still decent compression ratio - VeryFast = 2, - // fast - good for daily use - Fast = 3, - // standard medium speed LZ mode - Normal = 4, - // optimal parse level 1 (faster optimal encoder) - Optimal1 = 5, - // optimal parse level 2 (recommended baseline optimal encoder) - Optimal2 = 6, - // optimal parse level 3 (slower optimal encoder) - Optimal3 = 7, - // optimal parse level 4 (very slow optimal encoder) - Optimal4 = 8, - // optimal parse level 5 (don't care about encode speed, maximum compression) - Optimal5 = 9, - // faster than SuperFast, less compression - HyperFast1 = -1, - // faster than HyperFast1, less compression - HyperFast2 = -2, - // faster than HyperFast2, less compression - HyperFast3 = -3, - // fastest, less compression - HyperFast4 = -4, - Force32 = 0x40000000, -} - -impl OodleLZ_CompressionLevel { - // alias hyperfast base level - pub const HYPERFAST: Self = OodleLZ_CompressionLevel::HyperFast1; - // alias optimal standard level - pub const OPTIMAL: Self = OodleLZ_CompressionLevel::Optimal2; - // maximum compression level - pub const MAX: Self = OodleLZ_CompressionLevel::Optimal5; - // fastest compression level - pub const MIN: Self = OodleLZ_CompressionLevel::HyperFast4; - pub const INVALID: Self = OodleLZ_CompressionLevel::Force32; -} - -#[allow(non_camel_case_types)] -pub type t_fp_OodleCore_Plugin_Printf = - extern "C" fn(level: c_int, file: *const c_char, line: c_int, fmt: *const c_char); - -#[allow(non_camel_case_types)] -pub type OodleLZ_Decompress = extern "C" fn( - compressed_buffer: *const c_void, - compressed_length: c_size_t, - raw_buffer: *mut c_void, - raw_length: c_size_t, - fuzz_safe: OodleLZ_FuzzSafe, - check_crc: OodleLZ_CheckCRC, - verbosity: OodleLZ_Verbosity, - decBufBase: *mut c_void, - decBufSize: c_size_t, - callback: *const c_void, - callback_user_data: *const c_void, - decoder_memory: *mut c_void, - decoder_memory_size: c_size_t, - thread_phase: OodleLZ_Decode_ThreadPhase, -) -> c_ulonglong; - -#[allow(non_camel_case_types)] -pub type OodleLZ_Compress = extern "C" fn( - compressor: OodleLZ_Compressor, - raw_buffer: *const c_void, - raw_len: c_size_t, - compressed_buffer: *mut c_void, - level: OodleLZ_CompressionLevel, - options: *const c_void, - dictionary_base: c_size_t, - lrm: *const c_void, - scratch_memory: *mut c_void, - scratch_size: c_size_t, -) -> c_ulonglong; - 
-#[allow(non_camel_case_types)] -pub type OodleLZ_GetDecodeBufferSize = extern "C" fn( - compressor: OodleLZ_Compressor, - raw_size: c_size_t, - corruption_possible: bool, -) -> c_size_t; - -#[allow(non_camel_case_types)] -pub type OodleCore_Plugins_SetPrintf = - extern "C" fn(f: t_fp_OodleCore_Plugin_Printf) -> t_fp_OodleCore_Plugin_Printf; - -#[allow(non_camel_case_types)] -pub type OodleCore_Plugin_Printf_Verbose = t_fp_OodleCore_Plugin_Printf; - -#[allow(non_camel_case_types)] -pub type OodleCore_Plugin_Printf_Default = t_fp_OodleCore_Plugin_Printf; diff --git a/lib/oodle-sys/Cargo.toml b/lib/oodle/Cargo.toml similarity index 69% rename from lib/oodle-sys/Cargo.toml rename to lib/oodle/Cargo.toml index 539427d..3283592 100644 --- a/lib/oodle-sys/Cargo.toml +++ b/lib/oodle/Cargo.toml @@ -1,11 +1,13 @@ [package] -name = "oodle-sys" +name = "oodle" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -libloading = "0.7.4" -thiserror = "1.0.38" +color-eyre = "0.6.2" tracing = "0.1.37" + +[build-dependencies] +bindgen = "0.64.0" diff --git a/lib/oodle/build.rs b/lib/oodle/build.rs new file mode 100644 index 0000000..2a48078 --- /dev/null +++ b/lib/oodle/build.rs @@ -0,0 +1,44 @@ +extern crate bindgen; + +use std::env; +use std::path::PathBuf; + +fn main() { + // Tell cargo to look for shared libraries in the specified directory + // println!("cargo:rustc-link-search=/path/to/lib"); + + // Tell cargo to tell rustc to link the system bzip2 + // shared library. + if cfg!(target_os = "windows") { + println!("cargo:rustc-link-lib=oo2core_8_win64"); + } else { + println!("cargo:rustc-link-lib=oo2corelinux64"); + } + + // Tell cargo to invalidate the built crate whenever the wrapper changes + println!("cargo:rerun-if-changed=oodle2.h"); + + // The bindgen::Builder is the main entry point + // to bindgen, and lets you build up options for + // the resulting bindings. + let bindings = bindgen::Builder::default() + // The input header we would like to generate + // bindings for. + .header("oodle2base.h") + .header("oodle2.h") + .blocklist_file("stdint.h") + .blocklist_file("stdlib.h") + // Tell cargo to invalidate the built crate whenever any of the + // included header files changed. + .parse_callbacks(Box::new(bindgen::CargoCallbacks)) + // Finish the builder and generate the bindings. + .generate() + // Unwrap the Result and panic on failure. + .expect("Unable to generate bindings"); + + // Write the bindings to the $OUT_DIR/bindings.rs file. 
+ let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); + bindings + .write_to_file(out_path.join("bindings.rs")) + .expect("Couldn't write bindings!"); +} diff --git a/lib/oodle/oodle2.h b/lib/oodle/oodle2.h new file mode 100644 index 0000000..ffe4152 --- /dev/null +++ b/lib/oodle/oodle2.h @@ -0,0 +1,1643 @@ + +//=================================================== +// Oodle2 Core header +// (C) Copyright 1994-2021 Epic Games Tools LLC +//=================================================== + +#ifndef __OODLE2_H_INCLUDED__ +#define __OODLE2_H_INCLUDED__ + +#ifndef OODLE2_PUBLIC_HEADER +#define OODLE2_PUBLIC_HEADER 1 +#endif + +#ifndef __OODLE2BASE_H_INCLUDED__ +#include "oodle2base.h" +#endif + +#ifdef _MSC_VER +#pragma pack(push, Oodle, 8) + +#pragma warning(push) +#pragma warning(disable : 4127) // conditional is constant +#endif + +// header version : +// the DLL is incompatible when MAJOR is bumped +// MINOR is for internal revs and bug fixes that don't affect API compatibility +#define OODLE2_VERSION_MAJOR 9 +#define OODLE2_VERSION_MINOR 5 + +// OodleVersion string is 1 . MAJOR . MINOR +// don't make it from macros cuz the doc tool has to parse the string literal + +#define OodleVersion "2.9.5" /* +*/ + +//----------------------------------------------------- +// OodleLZ + +#if 0 +#define OODLE_ALLOW_DEPRECATED_COMPRESSORS /* If you need to encode with the deprecated compressors, define this before including oodle2.h + + You may still decode with them without defining this. +*/ +#endif + +// Default verbosity selection of 0 will not even log when it sees corruption +typedef enum OodleLZ_Verbosity +{ + OodleLZ_Verbosity_None = 0, + OodleLZ_Verbosity_Minimal = 1, + OodleLZ_Verbosity_Some = 2, + OodleLZ_Verbosity_Lots = 3, + OodleLZ_Verbosity_Force32 = 0x40000000 +} OodleLZ_Verbosity; +/* Verbosity of LZ functions + LZ functions print information to the function set by $OodleCore_Plugins_SetPrintf + or $OodleXLog_Printf if using OodleX. +*/ + +OO_COMPILER_ASSERT( sizeof(OodleLZ_Verbosity) == 4 ); + +typedef enum OodleLZ_Compressor +{ + OodleLZ_Compressor_Invalid = -1, + OodleLZ_Compressor_None = 3, // None = memcpy, pass through uncompressed bytes + + // NEW COMPRESSORS : + OodleLZ_Compressor_Kraken = 8, // Fast decompression and high compression ratios, amazing! + OodleLZ_Compressor_Leviathan = 13,// Leviathan = Kraken's big brother with higher compression, slightly slower decompression. + OodleLZ_Compressor_Mermaid = 9, // Mermaid is between Kraken & Selkie - crazy fast, still decent compression. + OodleLZ_Compressor_Selkie = 11, // Selkie is a super-fast relative of Mermaid. For maximum decode speed. 
+ OodleLZ_Compressor_Hydra = 12, // Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra) + +#ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS + OodleLZ_Compressor_BitKnit = 10, // no longer supported as of Oodle 2.9.0 + OodleLZ_Compressor_LZB16 = 4, // DEPRECATED but still supported + OodleLZ_Compressor_LZNA = 7, // no longer supported as of Oodle 2.9.0 + OodleLZ_Compressor_LZH = 0, // no longer supported as of Oodle 2.9.0 + OodleLZ_Compressor_LZHLW = 1, // no longer supported as of Oodle 2.9.0 + OodleLZ_Compressor_LZNIB = 2, // no longer supported as of Oodle 2.9.0 + OodleLZ_Compressor_LZBLW = 5, // no longer supported as of Oodle 2.9.0 + OodleLZ_Compressor_LZA = 6, // no longer supported as of Oodle 2.9.0 +#endif + + OodleLZ_Compressor_Count = 14, + OodleLZ_Compressor_Force32 = 0x40000000 +} OodleLZ_Compressor; +/* Selection of compression algorithm. + + Each compressor provides a different balance of speed vs compression ratio. + + New Oodle users should only use the new sea monster family of compressors. + + The OODLE_ALLOW_DEPRECATED_COMPRESSORS set of compressors is no longer supported + as of Oodle 2.9.0 ; see $Oodle_FAQ_deprecated_compressors + + The sea monsters are all fuzz safe and use whole-block quantum (not the 16k quantum) + ($OodleLZ_Compressor_UsesWholeBlockQuantum) + + If you need to encode the deprecated compressors, define $OODLE_ALLOW_DEPRECATED_COMPRESSORS before + including oodle2.h + + See $Oodle_FAQ_WhichLZ for a quick FAQ on which compressor to use + + See $OodleLZ_About for discussion of how to choose a compressor. +*/ + +OO_COMPILER_ASSERT( sizeof(OodleLZ_Compressor) == 4 ); + +typedef enum OodleLZ_PackedRawOverlap +{ + OodleLZ_PackedRawOverlap_No = 0, + OodleLZ_PackedRawOverlap_Yes = 1, + OodleLZ_PackedRawOverlap_Force32 = 0x40000000 +} OodleLZ_PackedRawOverlap; +/* Bool enum +*/ + +typedef enum OodleLZ_CheckCRC +{ + OodleLZ_CheckCRC_No = 0, + OodleLZ_CheckCRC_Yes = 1, + OodleLZ_CheckCRC_Force32 = 0x40000000 +} OodleLZ_CheckCRC; +/* Bool enum for the LZ decoder - should it check CRC before decoding or not? + + NOTE : the CRC's in the LZH decompress checks are the CRC's of the *compressed* bytes. This allows checking the CRc + prior to decompression, so corrupted data cannot be fed to the compressor. + + To use OodleLZ_CheckCRC_Yes, the compressed data must have been made with $(OodleLZ_CompressOptions:sendQuantumCRCs) set to true. 
+ + If you want a CRC of the raw bytes, there is one optionally stored in the $OodleLZ_SeekTable and can be confirmed with + $OodleLZ_CheckSeekTableCRCs +*/ + + +typedef enum OodleLZ_Profile +{ + OodleLZ_Profile_Main=0, // Main profile (all current features allowed) + OodleLZ_Profile_Reduced=1, // Reduced profile (Kraken only, limited feature set) + OodleLZ_Profile_Force32 = 0x40000000 +} OodleLZ_Profile; +/* Decode profile to target */ + +// Not flagged for idoc and done using a #define since it's internal (testing) use only +#define OodleLZ_Profile_Internal_Custom ((OodleLZ_Profile)100) + +OO_COMPILER_ASSERT( sizeof(OodleLZ_Profile) == 4 ); + +typedef enum OodleDecompressCallbackRet +{ + OodleDecompressCallbackRet_Continue=0, + OodleDecompressCallbackRet_Cancel=1, + OodleDecompressCallbackRet_Invalid=2, + OodleDecompressCallbackRet_Force32 = 0x40000000 +} OodleDecompressCallbackRet; +/* Return value for $OodleDecompressCallback + return OodleDecompressCallbackRet_Cancel to abort the in-progress decompression +*/ + +OODEFFUNC typedef OodleDecompressCallbackRet (OODLE_CALLBACK OodleDecompressCallback)(void * userdata, const OO_U8 * rawBuf,OO_SINTa rawLen,const OO_U8 * compBuf,OO_SINTa compBufferSize , OO_SINTa rawDone, OO_SINTa compUsed); +/* User-provided callback for decompression + + $:userdata the data you passed for _pcbData_ + $:rawBuf the decompressed buffer + $:rawLen the total decompressed length + $:compBuf the compressed buffer + $:compBufferSize the total compressed length + $:rawDone number of bytes in rawBuf decompressed so far + $:compUsed number of bytes in compBuf consumed so far + + OodleDecompressCallback is called incrementally during decompression. +*/ + +typedef enum OodleLZ_CompressionLevel +{ + OodleLZ_CompressionLevel_None=0, // don't compress, just copy raw bytes + OodleLZ_CompressionLevel_SuperFast=1, // super fast mode, lower compression ratio + OodleLZ_CompressionLevel_VeryFast=2, // fastest LZ mode with still decent compression ratio + OodleLZ_CompressionLevel_Fast=3, // fast - good for daily use + OodleLZ_CompressionLevel_Normal=4, // standard medium speed LZ mode + + OodleLZ_CompressionLevel_Optimal1=5, // optimal parse level 1 (faster optimal encoder) + OodleLZ_CompressionLevel_Optimal2=6, // optimal parse level 2 (recommended baseline optimal encoder) + OodleLZ_CompressionLevel_Optimal3=7, // optimal parse level 3 (slower optimal encoder) + OodleLZ_CompressionLevel_Optimal4=8, // optimal parse level 4 (very slow optimal encoder) + OodleLZ_CompressionLevel_Optimal5=9, // optimal parse level 5 (don't care about encode speed, maximum compression) + + OodleLZ_CompressionLevel_HyperFast1=-1, // faster than SuperFast, less compression + OodleLZ_CompressionLevel_HyperFast2=-2, // faster than HyperFast1, less compression + OodleLZ_CompressionLevel_HyperFast3=-3, // faster than HyperFast2, less compression + OodleLZ_CompressionLevel_HyperFast4=-4, // fastest, less compression + + // aliases : + OodleLZ_CompressionLevel_HyperFast=OodleLZ_CompressionLevel_HyperFast1, // alias hyperfast base level + OodleLZ_CompressionLevel_Optimal = OodleLZ_CompressionLevel_Optimal2, // alias optimal standard level + OodleLZ_CompressionLevel_Max = OodleLZ_CompressionLevel_Optimal5, // maximum compression level + OodleLZ_CompressionLevel_Min = OodleLZ_CompressionLevel_HyperFast4, // fastest compression level + + OodleLZ_CompressionLevel_Force32 = 0x40000000, + OodleLZ_CompressionLevel_Invalid = OodleLZ_CompressionLevel_Force32 +} OodleLZ_CompressionLevel; +/* Selection of compression 
encoder complexity + + Higher numerical value of CompressionLevel = slower compression, but smaller compressed data. + + The compressed stream is always decodable with the same decompressors. + CompressionLevel controls the amount of work the encoder does to find the best compressed bit stream. + CompressionLevel does not primary affect decode speed, it trades off encode speed for compressed bit stream quality. + + I recommend starting with OodleLZ_CompressionLevel_Normal, then try up or down if you want + faster encoding or smaller output files. + + The Optimal levels are good for distribution when you compress rarely and decompress often; + they provide very high compression ratios but are slow to encode. Optimal2 is the recommended level + to start with of the optimal levels. + Optimal4 and 5 are not recommended for common use, they are very slow and provide the maximum compression ratio, + but the gain over Optimal3 is usually small. + + The HyperFast levels have negative numeric CompressionLevel values. + They are faster than SuperFast for when you're encoder CPU time constrained or want + something closer to symmetric compression vs. decompression time. + The HyperFast levels are currently only available in Kraken, Mermaid & Selkie. + Higher levels of HyperFast are faster to encode, eg. HyperFast4 is the fastest. + + The CompressionLevel does not affect decode speed much. Higher compression level does not mean + slower to decode. To trade off decode speed vs ratio, use _spaceSpeedTradeoffBytes_ in $OodleLZ_CompressOptions + +*/ + +OO_COMPILER_ASSERT( sizeof(OodleLZ_CompressionLevel) == 4 ); + +typedef enum OodleLZ_Jobify +{ + OodleLZ_Jobify_Default=0, // Use compressor default for level of internal job usage + OodleLZ_Jobify_Disable=1, // Don't use jobs at all + OodleLZ_Jobify_Normal=2, // Try to balance parallelism with increased memory usage + OodleLZ_Jobify_Aggressive=3, // Maximize parallelism even when doing so requires large amounts of memory + OodleLZ_Jobify_Count=4, + + OodleLZ_Jobify_Force32 = 0x40000000, +} OodleLZ_Jobify; +/* Controls the amount of internal threading in $OodleLZ_Compress calls + + Once you install a pluggable job system via $OodleCore_Plugins_SetJobSystem, Oodle can internally break + heavy-weight compression tasks into smaller jobs that can run in parallel. This can speed up + compression of large blocks of data at Optimal1 and higher levels substantially. + + The trade-off is that running more jobs concurrently rather than sequentially can greatly increase + memory requirements when there are multiple outstanding memory-intensive jobs. + + OodleLZ_Jobify_Default lets the compressor decide; typically compressors will default to "Normal" + when a pluggable job system has been installed, and "Disable" otherwise. + + OodleLZ_Jobify_Disable disables use of internal jobs entirely; all compression work is done on + the calling thread. This minimizes the amount of memory used, and is also appropriate when you're + getting parallelism in other ways, e.g. by running OodleLZ_Compress on many threads yourself. + + OodleLZ_Jobify_Normal uses jobs to increase compressor parallelism and speeds up compression of + large blocks of data, but avoids handing out many concurrent jobs for tasks that are memory-intensive. + + OodleLZ_Jobify_Aggressive will use concurrent jobs even for highly memory-intensive tasks. This + can speed up things further, but at a potentially significant increase in the amount of memory used + by Oodle. 
+ +*/ + +#define OODLELZ_LOCALDICTIONARYSIZE_MAX (1<<30) /* Maximum value of maxLocalDictionarySize in OodleLZ_CompressOptions +*/ + +#define OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT (256) /* Default value of spaceSpeedTradeoffBytes in OodleLZ_CompressOptions + Changes how the encoder makes decisions in the bit stream + Higher spaceSpeedTradeoffBytes favors decode speed more (larger compressed files) + Lower spaceSpeedTradeoffBytes favors smaller compressed files (slower decoder) + Goes in a power of 2 scale; so try 64,128 and 512,1024 + (OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT/2) or (OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT*2) +*/ + + +typedef OOSTRUCT OodleLZ_CompressOptions +{ + OO_U32 unused_was_verbosity; // unused ; was verbosity (set to zero) + OO_S32 minMatchLen; // minimum match length ; cannot be used to reduce a compressor's default MML, but can be higher. On some types of data, a large MML (6 or 8) is a space-speed win. + OO_BOOL seekChunkReset; // whether chunks should be independent, for seeking and parallelism + OO_S32 seekChunkLen; // length of independent seek chunks (if seekChunkReset) ; must be a power of 2 and >= $OODLELZ_BLOCK_LEN ; you can use $OodleLZ_MakeSeekChunkLen + OodleLZ_Profile profile; // decoder profile to target (set to zero) + OO_S32 dictionarySize; // sets a maximum offset for matches, if lower than the maximum the format supports. <= 0 means infinite (use whole buffer). Often power of 2 but doesn't have to be. + OO_S32 spaceSpeedTradeoffBytes; // this is a number of bytes; I must gain at least this many bytes of compressed size to accept a speed-decreasing decision + OO_S32 unused_was_maxHuffmansPerChunk; // unused ; was maxHuffmansPerChunk + OO_BOOL sendQuantumCRCs; // should the encoder send a CRC of each compressed quantum, for integrity checks; this is necessary if you want to use OodleLZ_CheckCRC_Yes on decode + OO_S32 maxLocalDictionarySize; // (Optimals) size of local dictionary before needing a long range matcher. This does not set a window size for the decoder; it's useful to limit memory use and time taken in the encoder. maxLocalDictionarySize must be a power of 2. Must be <= OODLELZ_LOCALDICTIONARYSIZE_MAX + OO_BOOL makeLongRangeMatcher; // (Optimals) should the encoder find matches beyond maxLocalDictionarySize using an LRM + OO_S32 matchTableSizeLog2; //(non-Optimals) when variable, sets the size of the match finder structure (often a hash table) ; use 0 for the compressor's default + + OodleLZ_Jobify jobify; // controls internal job usage by compressors + void * jobifyUserPtr; // user pointer passed through to RunJob and WaitJob callbacks + + OO_S32 farMatchMinLen; // far matches must be at least this len + OO_S32 farMatchOffsetLog2; // if not zero, the log2 of an offset that must meet farMatchMinLen + + OO_U32 reserved[4]; // reserved space for adding more options; zero these! +} OodleLZ_CompressOptions; +/* Options for the compressor + + Typically filled by calling $OodleLZ_CompressOptions_GetDefault , then individual options may be modified, like : + + OodleLZ_CompressOptions my_options = *OodleLZ_CompressOptions_GetDefault() + + To ensure you have set up the options correctly, call $OodleLZ_CompressOptions_Validate. + + _unused_was_verbosity_ : place holder, set to zero + + _minMatchLen_ : rarely useful. Default value of 0 means let the compressor decide. On some types of data, + bumping this up to 4,6, or 8 can improve decode speed with little effect on compression ratio. 
Most of the + Oodle compressors use a default MML of 4 at levels below 7, and MML 3 at levels >= 7. If you want to keep MML 4 + at the higher levels, set _minMatchLen_ here to 4. _minMatchLen_ cannot be used to reduce the base MML of the compressor, only to increase it. + + _seekChunkReset_ must be true if you want the decode to be able to run "Wide", with pieces that can be + decoded independently (not keeping previous pieces in memory for match references). + + _seekChunkLen_ : length of independent seek chunks (if seekChunkReset) ; must be a power of 2 and >= $OODLELZ_BLOCK_LEN ; you can use $OodleLZ_MakeSeekChunkLen + + _profile_ : tells the encoder to target alternate bitstream profile. Default value of zero for normal use. + + _dictionarySize_ : limits the encoder to partial buffer access for matches. Can be useful for decoding incrementally + without keeping the entire output buffer in memory. + + _spaceSpeedTradeoffBytes_ is a way to trade off compression ratio for decode speed. If you make it smaller, + you get more compression ratio and slower decodes. It's the number of bytes that a decision must save to + be worth a slower decode. Default is 256 (OODLELZ_SPACESPEEDTRADEOFFBYTES_DEFAULT). So that means the encoder must be able to save >= 256 bytes to + accept something that will slow down decoding (like adding another Huffman table). The typical range is + 64-1024. + + Lower _spaceSpeedTradeoffBytes_ = more compression, slower decode + Higher _spaceSpeedTradeoffBytes_ = less compression, faster decode + + _spaceSpeedTradeoffBytes_ is the primary parameter for controlling Hydra. The default value of 256 will make + Hydra decodes that are just a little bit faster than Kraken. You get Kraken speeds around 200, and Mermaid + speeds around 1200. + + At the extreme, a _spaceSpeedTradeoffBytes_ of zero would mean all you care about is compression ratio, not decode + speed, you want the encoder to make the smallest possible output. (you cannot actually set zero, as zero values + always mean "use default" in this struct; you never really want zero anyway) + Generally _spaceSpeedTradeoffBytes_ below 16 provides diminishing gains in size with pointless decode speed loss. + + _spaceSpeedTradeoffBytes_ is on sort of powers of 2 scale, so you might want to experiment with 32,64,128,256,512 + + _spaceSpeedTradeoffBytes_ outside the range [16 - 2048] is not recommended. + + _unused_was_maxHuffmansPerChunk_ : place holder, set to zero + + _sendQuantumCRCs_ : send hashes of the compressed data to verify in the decoder; not recommended, if you need data + verification, use your own system outside of Oodle. DEPRECATED, not recommended. For backwards compatibility only. + + _maxLocalDictionarySize_ : only applies to optimal parsers at level >= Optimal2. This limits the encoder memory use. + Making it larger = more compression, higher memory use. Matches within maxLocalDictionarySize are found exactly, + outside the maxLocalDictionarySize window an approximate long range matcher is used. + + _makeLongRangeMatcher_ : whether an LRM should be used to find matches outside the _maxLocalDictionarySize_ window + (Optimal levels only) + + _matchTableSizeLog2_ : for non-optimal levels (level <= Normal), controls the hash table size. Making this very + small can sometimes boost encoder speed. For the very fastest encoding, use the SuperFast level and change + _matchTableSizeLog2_ to 12 or 13. 
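+
+   As an illustration of the GetDefault-then-modify pattern described above (a sketch added for this
+   review, not original SDK text; _rawLen_ stands for your uncompressed size) :
+
+       OodleLZ_CompressOptions opts = *OodleLZ_CompressOptions_GetDefault(OodleLZ_Compressor_Invalid,
+                                                                          OodleLZ_CompressionLevel_Normal);
+       opts.seekChunkReset          = 1;     // allow independent (seekable / parallel) decode chunks
+       opts.seekChunkLen            = OodleLZ_MakeSeekChunkLen(rawLen, 16);
+       opts.spaceSpeedTradeoffBytes = 128;   // favor ratio a little over decode speed
+       OodleLZ_CompressOptions_Validate(&opts);   // clamp values and fill zero "use default" fields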
+ + _matchTableSizeLog2_ should usually be left zero to use the encoder's default + + _matchTableSizeLog2_ allows you to limit memory use of the non-Optimal encoder levels. Memory use is roughly + ( 1 MB + 4 << matchTableSizeLog2 ) + + _jobify_ tells compressors how to use internal jobs for compression tasks. Jobs can be run in parallel using the + job system plugins set with $OodleCore_Plugins_SetJobSystem. Not all compressors or compression level support + jobs, but the slower ones generally do. The default value of jobify is to use a thread system if one is installed. + + _farMatchMinLen_ and _farMatchOffsetLog2_ can be used to tune the encoded stream for a known cache size on the + decoding hardware. If set, then offsets with log2 greater or each to _farMatchOffsetLog2_ must have a minimum + length of _farMatchMinLen_. For example to target a machine with a 2 MB cache, set _farMatchOffsetLog2_ to 21, + and _farMatchMinLen_ to something large, like 16 or 20. + + Without _farMatchMinLen_ and _farMatchOffsetLog2_ set, the Oodle encoders tune for a blend of cache sizes that works + well on most machines. _dictionarySize_ can also be used to tune for cache size, but cuts off all matches + beyond a certain distance. That may be more appropriate when you don't want to go out of cache at all. + _farMatchMinLen_ can only be used to make the standard blend target more restrictive; it can reduce the target cache size + but can't make it larger (or it can raise min match len outside cache but can't make it shorter). + + For help on setting up OodleLZ_CompressOptions contact support at oodle@radgametools.com + + NOTE : fields you do not set should always be zero initialized. In particular the _reserved_ fields should be zeroed. + Zero always means "use default" and is a future-portable initialization value. + + If you set fields to zero to mean "use default" you can call $OodleLZ_CompressOptions_Validate to change them + to default values. This is done automatically internally if you don't do it explicitly. + +*/ + +typedef enum OodleLZ_Decode_ThreadPhase +{ + OodleLZ_Decode_ThreadPhase1 = 1, + OodleLZ_Decode_ThreadPhase2 = 2, + OodleLZ_Decode_ThreadPhaseAll = 3, + OodleLZ_Decode_Unthreaded = OodleLZ_Decode_ThreadPhaseAll +} OodleLZ_Decode_ThreadPhase; +/* ThreadPhase for threaded Oodle decode + + Check $OodleLZ_Compressor_CanDecodeThreadPhased + (currently only used by Kraken) + + See $OodleLZ_About_ThreadPhasedDecode + +*/ + +typedef enum OodleLZ_FuzzSafe +{ + OodleLZ_FuzzSafe_No = 0, + OodleLZ_FuzzSafe_Yes = 1 +} OodleLZ_FuzzSafe; +/* OodleLZ_FuzzSafe (deprecated) + + About fuzz safety: + + Fuzz Safe decodes will not crash on corrupt data. They may or may not return failure, and produce garbage output. + + Fuzz safe decodes will not read out of bounds. They won't put data on the stack or previously in memory + into the output buffer. + + As of Oodle 2.9.0 all compressors supported are fuzzsafe, so OodleLZ_FuzzSafe_Yes should always be used and this + enum is deprecated. + +*/ + +#define OODLELZ_BLOCK_LEN (1<<18) /* The number of raw bytes per "seek chunk" + Seek chunks can be decompressed independently if $(OodleLZ_CompressOptions:seekChunkReset) is set. +*/ + +#define OODLELZ_BLOCK_MAXIMUM_EXPANSION (2) +#define OODLELZ_BLOCK_MAX_COMPLEN (OODLELZ_BLOCK_LEN+OODLELZ_BLOCK_MAXIMUM_EXPANSION) /* Maximum expansion per $OODLELZ_BLOCK_LEN is 1 byte. 
+ Note that the compressed buffer must be allocated bigger than this (use $OodleLZ_GetCompressedBufferSizeNeeded)
+*/
+
+#define OODLELZ_QUANTUM_LEN (1<<14) /* Minimum decompression quantum (for old legacy codecs only)
+
+ Deprecated.
+
+ The new sea monster family of compressors uses a whole block quantum (OODLELZ_BLOCK_LEN).
+ Check $OodleLZ_Compressor_UsesWholeBlockQuantum
+*/
+
+// 5 byte expansion per-quantum with CRC's
+#define OODLELZ_QUANTUM_MAXIMUM_EXPANSION (5)
+
+#define OODLELZ_QUANTUM_MAX_COMPLEN (OODLELZ_QUANTUM_LEN+OODLELZ_QUANTUM_MAXIMUM_EXPANSION)
+
+#define OODLELZ_SEEKCHUNKLEN_MIN OODLELZ_BLOCK_LEN
+#define OODLELZ_SEEKCHUNKLEN_MAX (1<<29) // half GB
+
+typedef OOSTRUCT OodleLZ_DecodeSome_Out
+{
+ OO_S32 decodedCount; // number of uncompressed bytes decoded
+ OO_S32 compBufUsed; // number of compressed bytes consumed
+
+
+ OO_S32 curQuantumRawLen; // tells you the current quantum size. you must have at least this much room available in the output buffer to be able to decode anything.
+ OO_S32 curQuantumCompLen; // if you didn't pass in enough data, nothing will decode (decodedCount will be 0), and this will tell you how much is needed
+} OodleLZ_DecodeSome_Out;
+/* Output value of $OodleLZDecoder_DecodeSome
+*/
+
+//---------------------------------------------
+
+//=======================================================
+
+typedef OOSTRUCT OodleLZ_SeekTable
+{
+ OodleLZ_Compressor compressor; // which compressor was used
+ OO_BOOL seekChunksIndependent; // are the seek chunks independent, or must they be decompressed in sequence
+
+ OO_S64 totalRawLen; // total uncompressed data length
+ OO_S64 totalCompLen; // sum of seekChunkCompLens
+
+ OO_S32 numSeekChunks; // derived from rawLen & seekChunkLen
+ OO_S32 seekChunkLen; // multiple of OODLELZ_BLOCK_LEN
+
+ OO_U32 * seekChunkCompLens; // array of compressed lengths of seek chunks
+ OO_U32 * rawCRCs; // crc of the raw bytes of the chunk (optional; NULL unless $OodleLZSeekTable_Flags_MakeRawCRCs was specified)
+} OodleLZ_SeekTable;
+
+typedef enum OodleLZSeekTable_Flags
+{
+ OodleLZSeekTable_Flags_None = 0, // default
+ OodleLZSeekTable_Flags_MakeRawCRCs = 1, // make the _rawCRCs_ member of $OodleLZ_SeekTable
+ OodleLZSeekTable_Flags_Force32 = 0x40000000
+} OodleLZSeekTable_Flags;
+
+//=====================================================
+
+
+typedef OOSTRUCT OodleConfigValues
+{
+ OO_S32 m_OodleLZ_LW_LRM_step; // LZHLW LRM : bytes between LRM entries
+ OO_S32 m_OodleLZ_LW_LRM_hashLength; // LZHLW LRM : bytes hashed for each LRM entry
+ OO_S32 m_OodleLZ_LW_LRM_jumpbits; // LZHLW LRM : bits of hash used for jump table
+
+ OO_S32 m_OodleLZ_Decoder_Max_Stack_Size; // if OodleLZ_Decompress needs to allocate a Decoder object, and it's smaller than this size, it's put on the stack instead of the heap
+ OO_S32 m_OodleLZ_Small_Buffer_LZ_Fallback_Size_Unused; // deprecated
+ OO_S32 m_OodleLZ_BackwardsCompatible_MajorVersion; // if you need to encode streams that can be read with an older version of Oodle, set this to the Oodle2 MAJOR version number that you need compatibility with. eg to be compatible with oodle 2.7.3 you would put 7 here
+
+ OO_U32 m_oodle_header_version; // = OODLE_HEADER_VERSION
+
+} OodleConfigValues;
+/* OodleConfigValues
+
+ Struct of user-settable low level config values. See $Oodle_SetConfigValues.
+
+ May have different defaults per platform.
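+
+   Illustrative sketch (added for this review, not original SDK text) of the get / modify / set pattern
+   described under $Oodle_SetConfigValues below, done before any other Oodle call :
+
+       OodleConfigValues cfg;
+       Oodle_GetConfigValues(&cfg);
+       cfg.m_OodleLZ_BackwardsCompatible_MajorVersion = 8;   // e.g. emit streams readable by Oodle 2.8.x
+       Oodle_SetConfigValues(&cfg);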
+*/ + +OOFUNC1 void OOFUNC2 Oodle_GetConfigValues(OodleConfigValues * ptr); +/* Get $OodleConfigValues + + $:ptr filled with OodleConfigValues + + Gets the current $OodleConfigValues. + + May be different per platform. +*/ + +OOFUNC1 void OOFUNC2 Oodle_SetConfigValues(const OodleConfigValues * ptr); +/* Set $OodleConfigValues + + $:ptr your desired OodleConfigValues + + Sets the global $OodleConfigValues from your struct. + + You should call $Oodle_GetConfigValues to fill the struct, then change the values you + want to change, then call $Oodle_SetConfigValues. + + This should generally be done before doing anything with Oodle (eg. even before OodleX_Init). + Changing OodleConfigValues after Oodle has started has undefined effects. +*/ + +typedef enum Oodle_UsageWarnings +{ + Oodle_UsageWarnings_Enabled = 0, + Oodle_UsageWarnings_Disabled = 1, + Oodle_UsageWarnings_Force32 = 0x40000000 +} Oodle_UsageWarnings; +/* Whether Oodle usage warnings are enable or disabled. */ + +OOFUNC1 void OOFUNC2 Oodle_SetUsageWarnings(Oodle_UsageWarnings state); +/* Enables or disables Oodle usage warnings. + + $:state whether usage warnings should be enabled or disabled. + + Usage warnings are enabled by default and try to be low-noise, but in case you want to + disable them, this is how. + + This should generally be done once at startup. Setting this state while there are Oodle + calls running on other threads has undefined results. +*/ + +// function pointers to mallocs needed : + +OODEFFUNC typedef void * (OODLE_CALLBACK t_fp_OodleCore_Plugin_MallocAligned)( OO_SINTa bytes, OO_S32 alignment); +/* Function pointer type for OodleMallocAligned + + $:bytes number of bytes to allocate + $:alignment required alignment of returned pointer + $:return pointer to memory allocated (must not be NULL) + + _alignment_ will always be a power of two + + _alignment_ will always be >= $OODLE_MALLOC_MINIMUM_ALIGNMENT + +*/ + +OODEFFUNC typedef void (OODLE_CALLBACK t_fp_OodleCore_Plugin_Free)( void * ptr ); +/* Function pointer type for OodleFree + + $:return pointer to memory to free + +*/ + +OOFUNC1 void OOFUNC2 OodleCore_Plugins_SetAllocators( + t_fp_OodleCore_Plugin_MallocAligned * fp_OodleMallocAligned, + t_fp_OodleCore_Plugin_Free * fp_OodleFree); +/* Set the function pointers for allocation needed by Oodle2 Core + + If these are not set, the default implementation on most platforms uses the C stdlib. + On Microsoft platforms the default implementation uses HeapAlloc. + + These must not be changed once they are set! Set them once then don't change them. + + NOTE: if you are using Oodle Ext, do NOT call this. OodleX_Init will install an allocator for Oodle Core. Do not mix your own allocator with the OodleX allocator. See $OodleXAPI_Malloc. + + If you want to ensure that Oodle is not doing any allocations, you can call OodleCore_Plugins_SetAllocators(NULL,NULL); + If you do that, then any time Oodle needs to allocate memory internally, it will stop the process. + It is STRONGLY not recommended that you ship that way. You can verify that Oodle is not allocating, but then leave some + fallback allocator installed when you actually ship just in case. + + Also note that on many consoles the standard allocation practices may not + leave much heap memory for the C stdlib malloc. In this case Oodle may fail to allocate. 
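+
+   A minimal sketch of an allocator hook (added for this review, not original SDK text; the names
+   my_OodleMallocAligned / my_OodleFree are placeholders, and it assumes <stdlib.h> and <stdint.h>) :
+
+       static void * OODLE_CALLBACK my_OodleMallocAligned(OO_SINTa bytes, OO_S32 alignment)
+       {
+           // over-allocate, align up, and stash the raw pointer just below the returned block;
+           // a real hook must also handle malloc failure, since the contract requires a non-NULL return
+           void * raw = malloc((size_t)bytes + alignment + sizeof(void *));
+           uintptr_t user = ((uintptr_t)raw + sizeof(void *) + (alignment - 1)) & ~(uintptr_t)(alignment - 1);
+           ((void **)user)[-1] = raw;
+           return (void *)user;
+       }
+       static void OODLE_CALLBACK my_OodleFree(void * ptr)
+       {
+           free(((void **)ptr)[-1]);
+       }
+
+       OodleCore_Plugins_SetAllocators(my_OodleMallocAligned, my_OodleFree);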
+ +*/ + +OODEFFUNC typedef OO_U64 (OODLE_CALLBACK t_fp_OodleCore_Plugin_RunJob)( t_fp_Oodle_Job * fp_job, void * job_data , OO_U64 * dependencies, int num_dependencies, void * user_ptr ); +/* Function pointer type for OodleCore_Plugins_SetJobSystem + + $:dependencies array of handles of other pending jobs. All guaranteed to be nonzero. + $:num_dependencies number of dependencies. Guaranteed to be no more than OODLE_JOB_MAX_DEPENDENCIES. + $:user_ptr is passed through from the OodleLZ_CompressOptions. + $:return handle to the async job, or 0 if it was run synchronously + + RunJob will call fp_job(job_data) + + it may be done on a thread, or it may run the function synchronously and return 0, indicating the job is already done. + The returned OO_U64 is a handle passed to WaitJob, unless it is 0, in which case WaitJob won't get called. + + fp_job should not run until all the dependencies are done. This function should not delete the dependencies. + + RunJob must be callable from within an Oodle Job, i.e. jobs may spawn their own sub-jobs directly. + However, the matching WaitJob calls will only ever occur on the thread that called the + internally threaded Oodle API function. + + See $Oodle_About_Job_Threading_Plugins +*/ + +OODEFFUNC typedef void (OODLE_CALLBACK t_fp_OodleCore_Plugin_WaitJob)( OO_U64 job_handle, void * user_ptr ); +/* Function pointer type for OodleCore_Plugins_SetJobSystem + + $:job_handle a job handle returned from RunJob. Never 0. + $:user_ptr is passed through from the OodleLZ_CompressOptions. + + Waits until the job specified by job_handle is done and cleans up any associated resources. Oodle + will call WaitJob exactly once for every RunJob call that didn't return 0. + + If job_handle was already completed, this should clean it up without waiting. + + A handle value should not be reused by another RunJob until WaitJob has been done with that value. + + WaitJob will not be called from running jobs. It will be only be called from the original thread that + invoked Oodle. If you are running Oodle from a worker thread, ensure that that thread is allowed to wait + on other job threads. + + See $Oodle_About_Job_Threading_Plugins +*/ + +OOFUNC1 void OOFUNC2 OodleCore_Plugins_SetJobSystem( + t_fp_OodleCore_Plugin_RunJob * fp_RunJob, + t_fp_OodleCore_Plugin_WaitJob * fp_WaitJob); +/* DEPRECATED use OodleCore_Plugins_SetJobSystemAndCount instead + + See $OodleCore_Plugins_SetJobSystemAndCount +*/ + + +OOFUNC1 void OOFUNC2 OodleCore_Plugins_SetJobSystemAndCount( + t_fp_OodleCore_Plugin_RunJob * fp_RunJob, + t_fp_OodleCore_Plugin_WaitJob * fp_WaitJob, + int target_parallelism); +/* Set the function pointers for async job system needed by Oodle2 Core + + $:fp_RunJob pointer to RunJob function + $:fp_WaitJob pointer to WaitJob function + $:target_parallelism goal of number of jobs to run simultaneously + + If these are not set, the default implementation runs jobs synchronously on the calling thread. + + These must not be changed once they are set! Set them once then don't change them. + + _target_parallelism_ allows you to tell Oodle how many Jobs it should try to keep in flight at once. + Depending on the operation it may not be able to split work into this many jobs (so fewer will be used), + but it will not exceed this count. + + For Oodle Data LZ work, typically _target_parallelism_ is usually best at the number of hardware cores + not including hyper threads). + + For Oodle Texture BCN encoding work, _target_parallelism_ is usually best as the full number of hyper cores. 
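+
+   A minimal sketch of the RunJob / WaitJob contract (added for this review, not original SDK text;
+   a real integration would hand fp_job to a thread pool and return a nonzero handle) :
+
+       static OO_U64 OODLE_CALLBACK my_RunJob(t_fp_Oodle_Job * fp_job, void * job_data,
+                                              OO_U64 * dependencies, int num_dependencies, void * user_ptr)
+       {
+           // run synchronously; because this always returns 0 ("already done"), no nonzero handles
+           // ever exist, so no dependencies are passed in and WaitJob is never called for these jobs
+           (void)dependencies; (void)num_dependencies; (void)user_ptr;
+           fp_job(job_data);
+           return 0;
+       }
+       static void OODLE_CALLBACK my_WaitJob(OO_U64 job_handle, void * user_ptr)
+       {
+           (void)job_handle; (void)user_ptr;   // unreachable with the RunJob above
+       }
+
+       OodleCore_Plugins_SetJobSystemAndCount(my_RunJob, my_WaitJob, 1);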
+ + In some cases you may wish to reduce _target_parallelism_ by 1 or 2 cores to leave some of the CPU free for + other work. + + For example on a CPU with 16 cores and 32 hardware threads, for LZ work you might set _target_parallelism_ to 15 + when calling OodleCorePlugins. For BC7 encoding you might set _target_parallelism_ to 30 when calling OodleTexPlugins. + + NOTE : if you are using Oodle Ext, do NOT call this. OodleX_Init will install a job system for Oodle Core. + Note OodleX only installs automatically to Oodle Core, not Net or Tex. See example_jobify.cpp for manual + plugin. + + Replaces deprecated $OodleCore_Plugins_SetJobSystem + + See $Oodle_About_Job_Threading_Plugins +*/ + +// the main func pointer for log : +OODEFFUNC typedef void (OODLE_CALLBACK t_fp_OodleCore_Plugin_Printf)(int verboseLevel,const char * file,int line,const char * fmt,...); +/* Function pointer to Oodle Core printf + + $:verboseLevel verbosity of the message; 0-2 ; lower = more important + $:file C file that sent the message + $:line C line that sent the message + $:fmt vararg printf format string + + The logging function installed here must parse varargs like printf. + + _verboseLevel_ may be used to omit verbose messages. +*/ + +OOFUNC1 t_fp_OodleCore_Plugin_Printf * OOFUNC2 OodleCore_Plugins_SetPrintf(t_fp_OodleCore_Plugin_Printf * fp_rrRawPrintf); +/* Install the callback used by Oodle Core for logging + + $:fp_rrRawPrintf function pointer to your log function; may be NULL to disable all logging + $:return returns the previous function pointer + + Use this function to install your own printf for Oodle Core. + + The default implementation in debug builds, if you install nothing, uses the C stdio printf for logging. + On Microsoft platforms, it uses OutputDebugString and not stdio. + + To disable all logging, call OodleCore_Plugins_SetPrintf(NULL) + + WARNING : this function is NOT thread safe! It should be done only once and done in a place where the caller can guarantee thread safety. + + In the debug build of Oodle, you can install OodleCore_Plugin_Printf_Verbose to get more verbose logging + +*/ + +OODEFFUNC typedef OO_BOOL (OODLE_CALLBACK t_fp_OodleCore_Plugin_DisplayAssertion)(const char * file,const int line,const char * function,const char * message); +/* Function pointer to Oodle Core assert callback + + $:file C file that triggered the assert + $:line C line that triggered the assert + $:function C function that triggered the assert (may be NULL) + $:message assert message + $:return true to break execution at the assertion site, false to continue + + This callback is called by Oodle Core when it detects an assertion condition. + + This will only happen in debug builds. + + +*/ + +OOFUNC1 t_fp_OodleCore_Plugin_DisplayAssertion * OOFUNC2 OodleCore_Plugins_SetAssertion(t_fp_OodleCore_Plugin_DisplayAssertion * fp_rrDisplayAssertion); +/* Install the callback used by Oodle Core for asserts + + $:fp_rrDisplayAssertion function pointer to your assert display function + $:return returns the previous function pointer + + Use this function to install your own display for Oodle Core assertions. + This will only happen in debug builds. + + The default implementation in debug builds, if you install nothing, uses the C stderr printf for logging, + except on Microsoft platforms where it uses OutputDebugString. + + WARNING : this function is NOT thread safe! It should be done only once and done in a place where the caller can guarantee thread safety. 
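+
+   Sketch of wiring up the logging and assert hooks described above (added for this review, not
+   original SDK text; assumes <stdio.h>) :
+
+       static OO_BOOL OODLE_CALLBACK my_OodleAssert(const char * file, const int line,
+                                                    const char * function, const char * message)
+       {
+           fprintf(stderr, "Oodle assert %s(%d) %s : %s\n", file, line, function ? function : "", message);
+           return 1;   // true = break execution at the assertion site
+       }
+
+       OodleCore_Plugins_SetAssertion(my_OodleAssert);
+       OodleCore_Plugins_SetPrintf(OodleCore_Plugin_Printf_Verbose);   // debug builds : verbose logging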
+ +*/ + +//============================================================= + + +OOFUNC1 void * OOFUNC2 OodleCore_Plugin_MallocAligned_Default(OO_SINTa size,OO_S32 alignment); +OOFUNC1 void OOFUNC2 OodleCore_Plugin_Free_Default(void * ptr); +OOFUNC1 void OOFUNC2 OodleCore_Plugin_Printf_Default(int verboseLevel,const char * file,int line,const char * fmt,...); +OOFUNC1 void OOFUNC2 OodleCore_Plugin_Printf_Verbose(int verboseLevel,const char * file,int line,const char * fmt,...); +OOFUNC1 OO_BOOL OOFUNC2 OodleCore_Plugin_DisplayAssertion_Default(const char * file,const int line,const char * function,const char * message); +OOFUNC1 OO_U64 OOFUNC2 OodleCore_Plugin_RunJob_Default( t_fp_Oodle_Job * fp_job, void * job_data, OO_U64 * dependencies, int num_dependencies, void * user_ptr ); +OOFUNC1 void OOFUNC2 OodleCore_Plugin_WaitJob_Default( OO_U64 job_handle, void * user_ptr ); + +//============================================================= + +//---------------------------------------------- +// OodleLZ + +#define OODLELZ_FAILED (0) /* Return value of OodleLZ_Decompress on failure +*/ + +//======================================================= + +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_Compress(OodleLZ_Compressor compressor, + const void * rawBuf,OO_SINTa rawLen,void * compBuf, + OodleLZ_CompressionLevel level, + const OodleLZ_CompressOptions * pOptions OODEFAULT(NULL), + const void * dictionaryBase OODEFAULT(NULL), + const void * lrm OODEFAULT(NULL), + void * scratchMem OODEFAULT(NULL), + OO_SINTa scratchSize OODEFAULT(0) ); +/* Compress some data from memory to memory, synchronously, with OodleLZ + + $:compressor which OodleLZ variant to use in compression + $:rawBuf raw data to compress + $:rawLen number of bytes in rawBuf to compress + $:compBuf pointer to write compressed data to ; should be at least $OodleLZ_GetCompressedBufferSizeNeeded + $:level OodleLZ_CompressionLevel controls how much CPU effort is put into maximizing compression + $:pOptions (optional) options; if NULL, $OodleLZ_CompressOptions_GetDefault is used + $:dictionaryBase (optional) if not NULL, provides preceding data to prime the dictionary; must be contiguous with rawBuf, the data between the pointers _dictionaryBase_ and _rawBuf_ is used as the preconditioning data. The exact same precondition must be passed to encoder and decoder. + $:lrm (optional) long range matcher + $:scratchMem (optional) pointer to scratch memory + $:scratchSize (optional) size of scratch memory (see $OodleLZ_GetCompressScratchMemBound) + $:return size of compressed data written, or $OODLELZ_FAILED for failure + + Performs synchronous memory to memory LZ compression. + + In tools, you should generally use $OodleXLZ_Compress_AsyncAndWait instead to get parallelism. (in the Oodle2 Ext lib) + + You can compress a large buffer in several calls by setting _dictionaryBase_ to the start + of the buffer, and then making _rawBuf_ and _rawLen_ select portions of that buffer. As long + as _rawLen_ is a multiple of $OODLELZ_BLOCK_LEN , the compressed chunks can simply be + concatenated together. + + If _scratchMem_ is provided, it will be used for the compressor's scratch memory needs before OodleMalloc is + called. If the scratch is big enough, no malloc will be done. If the scratch is not big enough, the compress + will not fail, instead OodleMalloc will be used. OodleMalloc should not return null. There is currently no way + to make compress fail cleanly due to using too much memory, it must either succeed or abort the process. 
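+
+   Example call (a sketch added for this review, not original SDK text; compBuf is assumed to have been
+   sized with $OodleLZ_GetCompressedBufferSizeNeeded, and the optional arguments are passed explicitly for C) :
+
+       OO_SINTa compLen = OodleLZ_Compress(OodleLZ_Compressor_Kraken,
+                                           rawBuf, rawLen, compBuf,
+                                           OodleLZ_CompressionLevel_Normal,
+                                           NULL,       // pOptions : use defaults
+                                           NULL, NULL, // dictionaryBase, lrm
+                                           NULL, 0);   // scratchMem, scratchSize
+       if ( compLen == OODLELZ_FAILED )
+       {
+           // handle failure
+       }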
+
+ If _scratchSize_ is at least $OodleLZ_GetCompressScratchMemBound , additional allocations will not be needed.
+
+ See $OodleLZ_About for tips on setting the compression options.
+
+ If _dictionaryBase_ is provided, the backup distance from _rawBuf_ must be a multiple of $OODLELZ_BLOCK_LEN
+
+ If $(OodleLZ_CompressOptions:seekChunkReset) is enabled, and _dictionaryBase_ is not NULL or _rawBuf_ , then the
+ seek chunk boundaries are relative to _dictionaryBase_, not to _rawBuf_.
+
+*/
+
+// Decompress returns raw (decompressed) len received
+// Decompress returns 0 (OODLELZ_FAILED) if it detects corruption
+OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_Decompress(const void * compBuf,OO_SINTa compBufSize,void * rawBuf,OO_SINTa rawLen,
+ OodleLZ_FuzzSafe fuzzSafe OODEFAULT(OodleLZ_FuzzSafe_Yes),
+ OodleLZ_CheckCRC checkCRC OODEFAULT(OodleLZ_CheckCRC_No),
+ OodleLZ_Verbosity verbosity OODEFAULT(OodleLZ_Verbosity_None),
+ void * decBufBase OODEFAULT(NULL),
+ OO_SINTa decBufSize OODEFAULT(0),
+ OodleDecompressCallback * fpCallback OODEFAULT(NULL),
+ void * callbackUserData OODEFAULT(NULL),
+ void * decoderMemory OODEFAULT(NULL),
+ OO_SINTa decoderMemorySize OODEFAULT(0),
+ OodleLZ_Decode_ThreadPhase threadPhase OODEFAULT(OodleLZ_Decode_Unthreaded)
+ );
+/* Decompress some data from memory to memory, synchronously.
+
+ $:compBuf pointer to compressed data
+ $:compBufSize number of compressed bytes available (must be greater or equal to the number consumed)
+ $:rawBuf pointer to output uncompressed data into
+ $:rawLen number of uncompressed bytes to output
+ $:fuzzSafe (optional) should the decode fail if it contains non-fuzz safe codecs?
+ $:checkCRC (optional) if data could be corrupted and you want to know about it, pass OodleLZ_CheckCRC_Yes
+ $:verbosity (optional) if not OodleLZ_Verbosity_None, logs some info
+ $:decBufBase (optional) if not NULL, provides preceding data to prime the dictionary; must be contiguous with rawBuf, the data between the pointers _dictionaryBase_ and _rawBuf_ is used as the preconditioning data. The exact same precondition must be passed to encoder and decoder. The decBufBase must be a reset point.
+ $:decBufSize (optional) size of decode buffer starting at decBufBase, if 0, _rawLen_ is assumed
+ $:fpCallback (optional) OodleDecompressCallback to call incrementally as decode proceeds
+ $:callbackUserData (optional) passed as userData to fpCallback
+ $:decoderMemory (optional) pre-allocated memory for the Decoder, of size _decoderMemorySize_
+ $:decoderMemorySize (optional) size of the buffer at _decoderMemory_; must be at least $OodleLZDecoder_MemorySizeNeeded bytes to be used
+ $:threadPhase (optional) for threaded decode; see $OodleLZ_About_ThreadPhasedDecode (default OodleLZ_Decode_Unthreaded)
+ $:return the number of decompressed bytes output, $OODLELZ_FAILED (0) if none can be decompressed
+
+ Decodes data encoded with any $OodleLZ_Compressor.
+
+ Note : _rawLen_ must be the actual number of bytes to output, the same as the number that was encoded with the corresponding
+ OodleLZ_Compress call. You must store this somewhere in your own header and pass it in to this call. _compBufSize_ does NOT
+ need to be the exact number of compressed bytes; it is the number of bytes available in the buffer, and it must be greater or equal to
+ the actual compressed length.
+
+ Note that the new compressors (Kraken,Mermaid,Selkie,BitKnit) are all fuzz safe and you can use OodleLZ_FuzzSafe_Yes
+ with them and no padding of the decode target buffer.
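+
+   Example call (a sketch added for this review, not original SDK text; rawLen is the stored uncompressed
+   size and the optional arguments are passed explicitly for C) :
+
+       OO_SINTa decLen = OodleLZ_Decompress(compBuf, compLen, rawBuf, rawLen,
+                                            OodleLZ_FuzzSafe_Yes, OodleLZ_CheckCRC_No,
+                                            OodleLZ_Verbosity_None,
+                                            NULL, 0,        // decBufBase, decBufSize
+                                            NULL, NULL,     // fpCallback, callbackUserData
+                                            NULL, 0,        // decoderMemory, decoderMemorySize
+                                            OodleLZ_Decode_Unthreaded);
+       if ( decLen != rawLen )
+       {
+           // corrupt or truncated data (with decBufBase == NULL, success returns rawLen)
+       }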
+ + If checkCRC is OodleLZ_CheckCRC_Yes, then corrupt data will be detected and the decode aborted. + If checkCRC is OodleLZ_CheckCRC_No, then corruption might result in invalid data, but no detection of any error (garbage in, garbage out). + + If corruption is possible, _fuzzSafe_ is No and _checkCRC_ is OodleLZ_CheckCRC_No, $OodleLZ_GetDecodeBufferSize must be used to allocate + _rawBuf_ large enough to prevent overrun. + + $OodleLZ_GetDecodeBufferSize should always be used to ensure _rawBuf_ is large enough, even when corruption is not + possible (when fuzzSafe is No). + + _compBuf_ and _rawBuf_ are allowed to overlap for "in place" decoding, but then _rawBuf_ must be allocated to + the size given by $OodleLZ_GetInPlaceDecodeBufferSize , and the compressed data must be at the end of that buffer. + + An easy way to take the next step to parallel decoding is with $OodleXLZ_Decompress_MakeSeekTable_Wide_Async (in the Oodle2 Ext lib) + + NOTE : the return value is the *total* number of decompressed bytes output so far. If rawBuf is > decBufBase, that means + the initial inset of (rawBuf - decBufBase) is included! (eg. you won't just get _rawLen_) + + If _decBufBase_ is provided, the backup distance from _rawBuf_ must be a multiple of $OODLELZ_BLOCK_LEN + + About fuzz safety: + + OodleLZ_Decompress is guaranteed not to crash even if the data is corrupted when _fuzzSafe_ is set to OodleLZ_FuzzSafe_Yes. + When _fuzzSafe_ is Yes, the target buffer (_rawBuf_ and _rawLen_) will never be overrun. Note that corrupted data might not + be detected (the return value might indicate success). + + Fuzz Safe decodes will not crash on corrupt data. They may or may not return failure, and produce garbage output. + + Fuzz safe decodes will not read out of bounds. They won't put data on the stack or previously in memory + into the output buffer. + + Fuzz safe decodes will not output more than the uncompressed size. (eg. the output buffer does not need to + be padded like OodleLZ_GetDecodeBufferSize) + + If you ask for a fuzz safe decode and the compressor doesn't satisfy OodleLZ_Compressor_CanDecodeFuzzSafe + then it will return failure. + + The _fuzzSafe_ argument should always be OodleLZ_FuzzSafe_Yes as of Oodle 2.9.0 ; older compressors did not + support fuzz safety but they now all do. + + Use of OodleLZ_FuzzSafe_No is deprecated. + +*/ + + +//------------------------------------------- +// Incremental Decoder functions : + +struct _OodleLZDecoder; +typedef struct _OodleLZDecoder OodleLZDecoder; +/* Opaque type for OodleLZDecoder + + See $OodleLZDecoder_Create +*/ + + +OOFUNC1 OodleLZDecoder * OOFUNC2 OodleLZDecoder_Create(OodleLZ_Compressor compressor,OO_S64 rawLen,void * memory, OO_SINTa memorySize); +/* Create a OodleLZDecoder + + $:compressor the type of data you will decode; use $OodleLZ_Compressor_Invalid if unknown + $:rawLen total raw bytes of the decode + $:memory (optional) provide memory for the OodleLZDecoder object (not the window) + $:memorySize (optional) if memory is provided, this is its size in bytes + $:return the OodleLZDecoder + + If memory is provided, it must be of size $OodleLZDecoder_MemorySizeNeeded. If it is NULL it will be + allocated with the malloc specified by $OodleAPI_OodleCore_Plugins. + + Free with $OodleLZDecoder_Destroy. You should Destroy even if you passed in the memory. + + Providing _compressor_ lets the OodleLZDecoder be the minimum size needed for that type of data. 
+ If you pass $OodleLZ_Compressor_Invalid, then any type of data may be decoded, and the Decoder is allocated
+ large enough to handle any of them.
+
+ If you are going to pass rawLen to OodleLZDecoder_Reset , then you can pass 0 to rawLen here.
+
+ See $OodleLZDecoder_DecodeSome for more.
+*/
+
+OOFUNC1 OO_S32 OOFUNC2 OodleLZDecoder_MemorySizeNeeded(OodleLZ_Compressor compressor OODEFAULT(OodleLZ_Compressor_Invalid), OO_SINTa rawLen OODEFAULT(-1));
+/* If you want to provide the memory needed by $OodleLZDecoder_Create , this tells you how big it must be.
+
+ $:compressor the type of data you will decode; use $OodleLZ_Compressor_Invalid if unknown
+ $:rawLen should almost always be -1, which supports any size of raw data decompression
+ $:return bytes to allocate or reserve, 0 for failure
+
+ NOTE : using $OodleLZ_Compressor_Invalid lets you decode any type of compressed data.
+ It requests as much memory as the largest compressor. This may be a *lot* more than your data needs;
+ try to use the correct compressor type.
+
+ If _rawLen_ is -1 (default) then the Decoder object created can be used on any length of raw data
+ decompression. If _rawLen_ is specified here, then you can only use it to decode data shorter than
+ the length you specified here. This use case is very rare; contact support for details.
+*/
+
+OOFUNC1 OO_S32 OOFUNC2 OodleLZ_ThreadPhased_BlockDecoderMemorySizeNeeded(void);
+/* Returns the size of the decoder needed for ThreadPhased decode
+
+ For use with $OodleLZ_Decode_ThreadPhase
+ See $OodleLZ_About_ThreadPhasedDecode
+*/
+
+OOFUNC1 void OOFUNC2 OodleLZDecoder_Destroy(OodleLZDecoder * decoder);
+/* Pairs with $OodleLZDecoder_Create
+
+ You should always call Destroy even if you provided the memory for $OodleLZDecoder_Create
+*/
+
+// Reset decoder - can reset to the start of any OODLELZ_BLOCK_LEN chunk
+OOFUNC1 OO_BOOL OOFUNC2 OodleLZDecoder_Reset(OodleLZDecoder * decoder, OO_SINTa decPos, OO_SINTa decLen OODEFAULT(0));
+/* Reset an OodleLZDecoder to restart at given pos
+
+ $:decoder the OodleLZDecoder, made by $OodleLZDecoder_Create
+ $:decPos position to reset to; must be a multiple of OODLELZ_BLOCK_LEN
+ $:decLen (optional) if not zero, change the length of the data we expect to decode
+ $:return true for success
+
+ If you are seeking in a packed stream, you must seek to a seek chunk reset point, as was made at compress time.
+
+ That is, $(OodleLZ_CompressOptions:seekChunkReset) must have been true, and
+ _decPos_ must be a multiple of $(OodleLZ_CompressOptions:seekChunkLen) that was used at compress time.
+
+ You can use $OodleLZ_GetChunkCompressor to verify that you are at a valid
+ independent chunk start point.
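+
+   Sketch of the Create / MemorySizeNeeded / Destroy sequence documented above, with caller-provided
+   memory (added for this review, not original SDK text; totalRawLen is the uncompressed size you
+   expect to decode, and it assumes <stdlib.h>) :
+
+       OO_S32 decMemSize = OodleLZDecoder_MemorySizeNeeded(OodleLZ_Compressor_Kraken, -1);
+       void * decMem     = malloc(decMemSize);
+       OodleLZDecoder * dec = OodleLZDecoder_Create(OodleLZ_Compressor_Kraken, totalRawLen, decMem, decMemSize);
+       // ... feed data through OodleLZDecoder_DecodeSome ...
+       OodleLZDecoder_Destroy(dec);   // Destroy is required even though we supplied the memory
+       free(decMem);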
+ +*/ + +// returns false if corruption detected +OOFUNC1 OO_BOOL OOFUNC2 OodleLZDecoder_DecodeSome( + OodleLZDecoder * decoder, + OodleLZ_DecodeSome_Out * out, + + // the decode sliding window : we output here & read from this for matches + void * decBuf, + OO_SINTa decBufPos, + OO_SINTa decBufferSize, // decBufferSize should be the result of OodleLZDecoder_MakeDecodeBufferSize() + OO_SINTa decBufAvail, // usually Size - Pos, but maybe less if you have pending IO flushes + + // compressed data : + const void * compPtr, + OO_SINTa compAvail, + + OodleLZ_FuzzSafe fuzzSafe OODEFAULT(OodleLZ_FuzzSafe_No), + OodleLZ_CheckCRC checkCRC OODEFAULT(OodleLZ_CheckCRC_No), + OodleLZ_Verbosity verbosity OODEFAULT(OodleLZ_Verbosity_None), + OodleLZ_Decode_ThreadPhase threadPhase OODEFAULT(OodleLZ_Decode_Unthreaded) + + ); +/* Incremental decode some LZ compressed data + + $:decoder the OodleLZDecoder, made by $OodleLZDecoder_Create + $:out filled with results + $:decBuf the decode buffer (window) + $:decBufPos the current position in the buffer + $:decBufferSize size of decBuf ; this must be either equal to the total decompressed size (_rawLen_ passed to $OodleLZDecoder_Create) or the result of $OodleLZDecoder_MakeValidCircularWindowSize + $:decBufAvail the number of bytes available after decBufPos in decBuf ; usually (decBufferSize - decBufPos), but can be less + $:compPtr pointer to compressed data to read + $:compAvail number of compressed bytes available at compPtr + $:fuzzSafe (optional) should the decode be fuzz safe + $:checkCRC (optional) if data could be corrupted and you want to know about it, pass OodleLZ_CheckCRC_Yes + $:verbosity (optional) if not OodleLZ_Verbosity_None, logs some info + $:threadPhase (optional) for threaded decode; see $OodleLZ_About_ThreadPhasedDecode (default OodleLZ_Decode_Unthreaded) + $:return true if success, false if invalid arguments or data is encountered + + Decodes data encoded with an OodleLZ compressor. + + Decodes an integer number of quanta; quanta are $OODLELZ_QUANTUM_LEN uncompressed bytes. + + _decBuf_ can either be a circular window or the whole _rawLen_ array. + In either case, _decBufPos_ should be in the range [0,_decBufferSize_). + If _decBuf_ is a circular window, then _decBufferSize_ should come from $OodleLZDecoder_MakeValidCircularWindowSize. + + (circular windows are deprecated as of 2.9.0) + + NOTE : all the new LZ codecs (Kraken, etc.) do not do circular windows. They can do sliding windows, see lz_test_11 in $example_lz. + They should always have decBufferSize = total raw size, even if the decode buffer is smaller than that. + + NOTE : insufficient data provided (with _compAvail_ > 0 but not enough to decode a quantum) is a *success* case + (return value of true), even though nothing is decoded. A return of false always indicates a non-recoverable error. + + If _decBufAvail_ or _compAvail_ is insufficient for any decompression, the "curQuantum" fields of $OodleLZ_DecodeSome_Out + will tell you how much you must provide to proceed. That is, if enough compressed bytes are provided to get a quantum header, but not enough to decode a quantum, this + function returns true and fills out the $OodleLZ_DecodeSome_Out structure with the size of the quantum. + + See $OodleLZ_Decompress about fuzz safety. + + NOTE : DecodeSome expect to decode either one full quantum (of len $OODLELZ_QUANTUM_LEN) or up to the length of the total buffer specified in the +call to $OodleLZDecoder_Create or $OodleLZDecoder_Reset. 
That total buffer length +must match what was use during compression (or be a seek-chunk portion thereof). +That is, you cannot decompress partial streams in intervals smaller than +$OODLELZ_QUANTUM_LEN except for the final partial quantum at the end of the stream. + +*/ + +// pass in how much you want to alloc and it will tell you a valid size as close that as possible +// the main use is just to call OodleLZDecoder_MakeDecodeBufferSize(0) to get the min size; the min size is a good size +OOFUNC1 OO_S32 OOFUNC2 OodleLZDecoder_MakeValidCircularWindowSize(OodleLZ_Compressor compressor,OO_S32 minWindowSize OODEFAULT(0)); +/* Get a valid "Window" size for an LZ + + $:compressor which compressor you will be decoding + $:minWindowSize (optional) minimum size of the window + + NOTE: circular windows are deprecated as of 2.9.0 + + Most common usage is OodleLZDecoder_MakeValidCircularWindowSize(0) to get the minimum window size. + + Only compressors which pass $OodleLZ_Compressor_CanDecodeInCircularWindow can be decoded in a circular window. + + WARNING : this is NOT the size to malloc the window! you need to call $OodleLZ_GetDecodeBufferSize() and + pass in the window size to get the malloc size. +*/ + +//======================================================= + +//======================================================= +// remember if you want to IO the SeekEntries you need to make them endian-independent +// see WriteOOZHeader for example + +#define OODLELZ_SEEKPOINTCOUNT_DEFAULT 16 + +OOFUNC1 OO_S32 OOFUNC2 OodleLZ_MakeSeekChunkLen(OO_S64 rawLen, OO_S32 desiredSeekPointCount); +/* Compute a valid seekChunkLen + + $:rawLen total length of uncompressed data + $:desiredSeekPointCount desired number of seek chunks + $:return a valid seekChunkLen for use in $OodleLZ_CreateSeekTable + + Returns a seekChunkLen which is close to (rawLen/desiredSeekPointCount) but is a power of two multiple of $OODLELZ_BLOCK_LEN + + _desiredSeekPointCount_ = 16 is good for parallel decompression. + (OODLELZ_SEEKPOINTCOUNT_DEFAULT) +*/ + +OOFUNC1 OO_S32 OOFUNC2 OodleLZ_GetNumSeekChunks(OO_S64 rawLen, OO_S32 seekChunkLen); +/* Compute the number of seek chunks + + $:rawLen total length of uncompressed data + $:seekChunkLen the length of a seek chunk (eg from $OodleLZ_MakeSeekChunkLen) + $:return the number of seek chunks + + returns (rawLen+seekChunkLen-1)/seekChunkLen +*/ + +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetSeekTableMemorySizeNeeded(OO_S32 numSeekChunks,OodleLZSeekTable_Flags flags); +/* Tells you the size in bytes to allocate the seekTable before calling $OodleLZ_FillSeekTable + + $:numSeekChunks number of seek chunks (eg from $OodleLZ_GetNumSeekChunks) + $:flags options that will be passed to $OodleLZ_CreateSeekTable + $:return size in bytes of memory needed for seek table + + If you wish to provide the memory for the seek table yourself, you may call this to get the required size, + allocate the memory, and then simply point a $OodleLZ_SeekTable at your memory. + Then use $OodleLZ_FillSeekTable to fill it out. + + Do NOT use sizeof(OodleLZ_SeekTable) ! 
+*/ + +OOFUNC1 OO_BOOL OOFUNC2 OodleLZ_FillSeekTable(OodleLZ_SeekTable * pTable,OodleLZSeekTable_Flags flags,OO_S32 seekChunkLen,const void * rawBuf, OO_SINTa rawLen,const void * compBuf,OO_SINTa compLen); +/* scan compressed LZ stream to fill the seek table + + $:pTable pointer to table to be filled + $:flags options + $:seekChunkLen the length of a seek chunk (eg from $OodleLZ_MakeSeekChunkLen) + $:rawBuf (optional) uncompressed buffer; used to compute the _rawCRCs_ member of $OodleLZ_SeekTable + $:rawLen size of rawBuf + $:compBuf compressed buffer + $:compLen size of compBuf + $:return true for success + + _pTable_ must be able to hold at least $OodleLZ_GetSeekTableMemorySizeNeeded + + _seekChunkLen_ must be a multiple of $OODLELZ_BLOCK_LEN. + _seekChunkLen_ must match what was in CompressOptions when the buffer was made, or any integer multiple thereof. +*/ + + +OOFUNC1 OodleLZ_SeekTable * OOFUNC2 OodleLZ_CreateSeekTable(OodleLZSeekTable_Flags flags,OO_S32 seekChunkLen,const void * rawBuf, OO_SINTa rawLen,const void * compBuf,OO_SINTa compLen); +/* allocate a table, then scan compressed LZ stream to fill the seek table + + $:flags options + $:seekChunkLen the length of a seek chunk (eg from $OodleLZ_MakeSeekChunkLen) + $:rawBuf (optional) uncompressed buffer; used to compute the _rawCRCs_ member of $OodleLZ_SeekTable + $:rawLen size of rawBuf + $:compBuf compressed buffer + $:compLen size of compBuf + $:return pointer to table if succeeded, null if failed + + Same as $OodleLZ_FillSeekTable , but allocates the memory for you. Use $OodleLZ_FreeSeekTable to free. + + _seekChunkLen_ must be a multiple of $OODLELZ_BLOCK_LEN. + _seekChunkLen_ must match what was in CompressOptions when the buffer was made, or any integer multiple thereof. + +*/ + +OOFUNC1 void OOFUNC2 OodleLZ_FreeSeekTable(OodleLZ_SeekTable * pTable); +/* Frees a table allocated by $OodleLZ_CreateSeekTable +*/ + +OOFUNC1 OO_BOOL OOFUNC2 OodleLZ_CheckSeekTableCRCs(const void * rawBuf,OO_SINTa rawLen, const OodleLZ_SeekTable * seekTable); +/* Check the CRC's in seekTable vs rawBuf + + $:rawBuf uncompressed buffer + $:rawLen size of rawBuf + $:seekTable result of $OodleLZ_CreateSeekTable + $:return true if the CRC's check out + + Note that $OodleLZ_Decompress option of $OodleLZ_CheckCRC checks the CRC of *compressed* data, + this call checks the CRC of the *raw* (uncompressed) data. + + OodleLZ data contains a CRC of the compressed data if it was made with $(OodleLZ_CompressOptions:sendQuantumCRCs). + The SeekTable contains a CRC of the raw data if it was made with $OodleLZSeekTable_Flags_MakeRawCRCs. + + Checking the CRC of compressed data is faster, but does not verify that the decompress succeeded. 
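+
+   Sketch tying the seek-table calls together (added for this review, not original SDK text; wantedRawPos
+   is a placeholder, and it assumes the stream was compressed with $(OodleLZ_CompressOptions:seekChunkReset)
+   so chunks decode independently) :
+
+       OO_S32 seekChunkLen = OodleLZ_MakeSeekChunkLen(rawLen, OODLELZ_SEEKPOINTCOUNT_DEFAULT);
+       OodleLZ_SeekTable * table = OodleLZ_CreateSeekTable(OodleLZSeekTable_Flags_MakeRawCRCs,
+                                                           seekChunkLen, rawBuf, rawLen, compBuf, compLen);
+       if ( table )
+       {
+           OO_S32 chunk   = OodleLZ_FindSeekEntry(wantedRawPos, table);     // which chunk holds wantedRawPos
+           OO_S64 compPos = OodleLZ_GetSeekEntryPackedPos(chunk, table);    // where that chunk starts in compBuf
+           // ... decode that chunk on its own, and optionally verify the raw CRCs :
+           OodleLZ_CheckSeekTableCRCs(rawBuf, rawLen, table);
+           OodleLZ_FreeSeekTable(table);
+       }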
+*/ + +OOFUNC1 OO_S32 OOFUNC2 OodleLZ_FindSeekEntry( OO_S64 rawPos, const OodleLZ_SeekTable * seekTable); +/* Find the seek entry that contains a raw position + + $:rawPos uncompressed position to look for + $:seekTable result of $OodleLZ_CreateSeekTable + $:return a seek entry index + + returns the index of the chunk that contains _rawPos_ +*/ + +OOFUNC1 OO_S64 OOFUNC2 OodleLZ_GetSeekEntryPackedPos( OO_S32 seekI , const OodleLZ_SeekTable * seekTable ); +/* Get the compressed position of a seek entry + + $:seekI seek entry index , in [0,numSeekEntries) + $:seekTable result of $OodleLZ_CreateSeekTable + $:return compressed buffer position of the start of this seek entry + + +*/ + +//============================================================= + +OOFUNC1 const char * OOFUNC2 OodleLZ_CompressionLevel_GetName(OodleLZ_CompressionLevel compressSelect); +/* Provides a string naming a $OodleLZ_CompressionLevel compressSelect +*/ + +OOFUNC1 const char * OOFUNC2 OodleLZ_Compressor_GetName(OodleLZ_Compressor compressor); +/* Provides a string naming a $OodleLZ_Compressor compressor +*/ + +OOFUNC1 const char * OOFUNC2 OodleLZ_Jobify_GetName(OodleLZ_Jobify jobify); +/* Provides a string naming a $OodleLZ_Jobify enum +*/ + +OOFUNC1 const OodleLZ_CompressOptions * OOFUNC2 OodleLZ_CompressOptions_GetDefault( + OodleLZ_Compressor compressor OODEFAULT(OodleLZ_Compressor_Invalid), + OodleLZ_CompressionLevel lzLevel OODEFAULT(OodleLZ_CompressionLevel_Normal)); +/* Provides a pointer to default compression options + + $:compressor deprecated, ignored + $:lzLevel deprecated, ignored + + Use to fill your own $OodleLZ_CompressOptions then change individual fields. + +*/ + +// after you fiddle with options, call this to ensure they are allowed +OOFUNC1 void OOFUNC2 OodleLZ_CompressOptions_Validate(OodleLZ_CompressOptions * pOptions); +/* Clamps the values in _pOptions_ to be in valid range + +*/ + +// inline functions for compressor property queries +OODEFSTART + +OO_BOOL OodleLZ_Compressor_UsesWholeBlockQuantum(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor is "whole block quantum" ; must decode in steps of + $OODLELZ_BLOCK_LEN , not $OODLELZ_QUANTUM_LEN like others. +*/ +OO_BOOL OodleLZ_Compressor_UsesLargeWindow(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor is "LargeWindow" or not, meaning it can benefit from + a Long-Range-Matcher and windows larger than $OODLELZ_BLOCK_LEN +*/ +OO_BOOL OodleLZ_Compressor_CanDecodeInCircularWindow(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor can be decoded using a fixed size circular window. + deprecated as of 2.9.0 +*/ +OO_BOOL OodleLZ_Compressor_CanDecodeThreadPhased(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor can be used with the $OodleLZ_Decode_ThreadPhase. + + See $OodleLZ_About_ThreadPhasedDecode +*/ +OO_BOOL OodleLZ_Compressor_CanDecodeInPlace(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor can be used with "in-place" decoding. + + This is now always true (all compressors support in-place decoding). The function is left + for backward compatibility. + + All compressors in the future will support in-place, you don't need to check this property. 
+ +*/ +OO_BOOL OodleLZ_Compressor_MustDecodeWithoutResets(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor must decode contiguous ranges of buffer with the same Decoder. + + That is, most of the compressors can be Reset and restart on any block, not just seek blocks, + as long as the correct window data is provided. That is, if this returns false then the only + state required across a non-reset block is the dictionary of previously decoded data. + + But if OodleLZ_Compressor_MustDecodeWithoutResets returns true, then you cannot do that, + because the Decoder object must carry state across blocks (except reset blocks). + + This does not apply to seek points - you can always reset and restart decompression at a seek point. +*/ +OO_BOOL OodleLZ_Compressor_CanDecodeFuzzSafe(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor is "fuzz safe" which means it can accept corrupted data + and won't crash or overrun any buffers. +*/ + +OO_BOOL OodleLZ_Compressor_RespectsDictionarySize(OodleLZ_Compressor compressor); +/* OodleLZ_Compressor properties helper. + + Tells you if this compressor obeys $(OodleLZ_CompressOptions:dictionarySize) which limits + match references to a finite bound. (eg. for sliding window decompression). + + All the new codecs do (Kraken,Mermaid,Selkie,Leviathan). Some old codecs don't. +*/ +//===================================================================== + +#define OODLELZ_COMPRESSOR_MASK(c) (((OO_U32)1)<<((OO_S32)(c))) +// OODLELZ_COMPRESSOR_BOOLBIT : extract a value of 1 or 0 so it maps to "bool" +#define OODLELZ_COMPRESSOR_BOOLBIT(s,c) (((s)>>(OO_S32)(c))&1) + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_IsNewLZFamily(OodleLZ_Compressor compressor) +{ + const OO_U32 set = + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Kraken) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Leviathan) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Mermaid) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Selkie) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Hydra); + return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeFuzzSafe(OodleLZ_Compressor compressor) +{ + #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS + const OO_U32 set = + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_None) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Kraken) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Leviathan) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Mermaid) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Selkie) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Hydra) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_BitKnit) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZB16); + return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); + #else + // all new compressors are fuzz safe + return compressor != OodleLZ_Compressor_Invalid; + #endif +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_RespectsDictionarySize(OodleLZ_Compressor compressor) +{ + #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS + const OO_U32 set = + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_None) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Kraken) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Leviathan) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Mermaid) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Selkie) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_Hydra) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZNA) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_BitKnit); + return 
OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); + #else + // all new compressors respect dictionarySize + return compressor != OodleLZ_Compressor_Invalid; + #endif +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_UsesWholeBlockQuantum(OodleLZ_Compressor compressor) +{ + return OodleLZ_Compressor_IsNewLZFamily(compressor); +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeThreadPhased(OodleLZ_Compressor compressor) +{ + return OodleLZ_Compressor_IsNewLZFamily(compressor); +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeInPlace(OodleLZ_Compressor compressor) +{ + // all compressors can now decode in place : + return compressor != OodleLZ_Compressor_Invalid; +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_CanDecodeInCircularWindow(OodleLZ_Compressor compressor) +{ + #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS + const OO_U32 set = + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZH) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZB16); + #else + const OO_U32 set = 0; + #endif + + return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_UsesLargeWindow(OodleLZ_Compressor compressor) +{ + // all but LZH and LZB16 now are large window + return ! OodleLZ_Compressor_CanDecodeInCircularWindow(compressor); +} + +OOINLINEFUNC OO_BOOL OodleLZ_Compressor_MustDecodeWithoutResets(OodleLZ_Compressor compressor) +{ + #ifdef OODLE_ALLOW_DEPRECATED_COMPRESSORS + const OO_U32 set = + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_BitKnit) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZA) | + OODLELZ_COMPRESSOR_MASK(OodleLZ_Compressor_LZNA); + #else + const OO_U32 set = 0; + #endif + + return OODLELZ_COMPRESSOR_BOOLBIT(set,compressor); +} + +OODEFEND + +//======================================================= + + +#define OODLELZ_SCRATCH_MEM_NO_BOUND (-1) /* Scratch mem size when bound is unknown. + Installed allocator may be used no matter how much scratch mem you provide. +*/ + +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetCompressScratchMemBound( + OodleLZ_Compressor compressor, + OodleLZ_CompressionLevel level, + OO_SINTa rawLen, + const OodleLZ_CompressOptions * pOptions OODEFAULT(NULL) + ); +/* Return the maximum amount of scratch mem that will be needed by OodleLZ_Compress + + $:compressor which OodleLZ variant to use in compression + $:level OodleLZ_CompressionLevel controls how much CPU effort is put into maximizing compression + $:rawLen maximum number of bytes you will compress (plus dictionary backup) + $:pOptions (optional) options; if NULL, $OodleLZ_CompressOptions_GetDefault is used + + If you pass scratch mem to $OodleLZ_Compress of this size, it is gauranteed to do no allocations. + (normally if it runs out of scratch mem, it falls back to the installed allocator) + + For _rawLen_ pass at least the maximum size you will ever encode. If your data is divided into chunks, + pass the chunk size. If you will encode full buffers of unbounded size, pass -1. + + The options must be the same as when you call $OodleLZ_Compress + + Some options and levels may not have simple finite bounds. Then $OODLELZ_SCRATCH_MEM_NO_BOUND is returned + and the call to $OodleLZ_Compress may use the allocator even if infinite scratch memory is provided. + Currently this applies to all the Optimal levels. + + When OODLELZ_SCRATCH_MEM_NO_BOUND is returned, you can still pass in scratch mem which will be used before + going to the plugin allocator. 
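The `oodle` wrapper crate in this diff does not pass scratch memory, but querying the bound through the generated bindings would look roughly like this sketch (`COMPRESSOR`, `LEVEL` and `CHUNK_SIZE` are the constants defined in `lib/oodle/src/lib.rs` below):

```rust
use std::ptr;

// Ask Oodle how much scratch memory compressing one CHUNK_SIZE chunk can need.
let bound = unsafe {
    bindings::OodleLZ_GetCompressScratchMemBound(
        COMPRESSOR,
        LEVEL,
        CHUNK_SIZE as isize,
        ptr::null(), // use the default compress options
    )
};

// -1 is OODLELZ_SCRATCH_MEM_NO_BOUND: no finite bound, the allocator may still be used.
let scratch = if bound >= 0 { vec![0u8; bound as usize] } else { Vec::new() };
```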
+ +*/ + +// get maximum expanded size for compBuf alloc : +// (note this is actually larger than the maximum compressed stream, it includes trash padding) +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetCompressedBufferSizeNeeded(OodleLZ_Compressor compressor,OO_SINTa rawSize); +/* Return the size you must malloc the compressed buffer + + $:compressor compressor used; OodleLZ_Compressor_Invalid to make it enough for any compressor + $:rawSize uncompressed size you will compress into this buffer + + The _compBuf_ passed to $OodleLZ_Compress must be allocated at least this big. + + note this is actually larger than the maximum size of a compressed stream, it includes overrun padding. + +*/ + +// decBuf needs to be a little larger than rawLen, +// this will tell you exactly how much : +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetDecodeBufferSize(OodleLZ_Compressor compressor,OO_SINTa rawSize,OO_BOOL corruptionPossible); +/* Get the size you must malloc the decode (raw) buffer + + $:compressor compressor used; OodleLZ_Compressor_Invalid to make it enough for any compressor + $:rawSize uncompressed (raw) size without padding + $:corruptionPossible true if it is possible for the decoder to get corrupted data + $:return size of buffer to malloc; slightly larger than rawSize if padding is needed + + As of Oodle 2.9.0 this function is deprecated. For all new codecs you can just use the size of the + uncompressed data for the decode buffer size (_rawSize_), no padding is needed. + + Note that LZB16 is still supported in 2.9.0 but does require padding when used in a circular + window (which is deprecated). + + This padding is necessary for the older compressors when FuzzSafe_No is used. The old compressors + and FuzzSafe_No are no longer supported. + + If _corruptionPossible_ is true, a slightly larger buffer size is returned. + + If _corruptionPossible_ is false, then you must ensure that the decoder does not get corrupted data, + either by passing $OodleLZ_CheckCRC_Yes , or by your own mechanism. + + Note about possible overrun in LZ decoding (applies to the old non-fuzz-safe compressors) : + as long as the compresseddata is not corrupted, + and you decode either the entire compressed buffer, or an integer number of "seek chunks" ($OODLELZ_BLOCK_LEN), + then there will be no overrun. So you can decode LZ data in place and it won't stomp any following bytes. + If those conditions are not true (eg. decoding only part of a larger compressed stream, decoding + around a circular window, decoding data that may be corrupted), then there may be some limited amount of + overrun on decode, as returned by $OodleLZ_GetDecodeBufferSize. + + +*/ + +// OodleLZ_GetInPlaceDecodeBufferSize : +// after compressing, ask how big the in-place buffer needs to be +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetInPlaceDecodeBufferSize(OodleLZ_Compressor compressor,OO_SINTa compLen, OO_SINTa rawLen); +/* Get the size of buffer needed for "in place" decode + + $:compressor compressor used; OodleLZ_Compressor_Invalid to make it enough for any compressor + $:compLen compressed data length + $:rawLen decompressed data length + $:return size of buffer needed for "in place" decode ; slighly larger than rawLen + + To do an "in place" decode, allocate a buffer of this size (or larger). Read the compressed data into the end of + the buffer, and decompress to the front of the buffer. The size returned here guarantees that the writes to the + front of the buffer don't conflict with the reads from the end. 
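In Rust terms, the in-place layout described here works out to something like the following sketch over the generated bindings (`compressed` and `raw_len` are placeholders; the decompress call itself is elided):

```rust
// Allocate one buffer sized for in-place decoding, put the compressed bytes at
// its tail, and let decompression fill the front.
let needed = unsafe {
    bindings::OodleLZ_GetInPlaceDecodeBufferSize(
        COMPRESSOR,
        compressed.len() as isize,
        raw_len as isize,
    )
} as usize;

let mut buf = vec![0u8; needed];
let comp_offset = needed - compressed.len();
buf[comp_offset..].copy_from_slice(&compressed);

// Decompress from &buf[comp_offset..] into the first raw_len bytes of the same
// buffer; the padding returned above guarantees the writes to the front never
// overtake the reads from the tail.
```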
+ + If _compressor_ is one of the new codecs (Kraken,Mermaid,Selkie,Leviathan), the padding for in place decodes can be + very small indeed. It is assumed you will be passing FuzzSafe_Yes to the decompress call. + + If _compLen_ is unknown, you want an in place buffer size that can accomodate any compressed data, then + pass compLen = 0. + + See $OodleLZ_Decompress for more. +*/ + +// GetCompressedStepForRawStep is at OODLELZ_QUANTUM_LEN granularity +// returns how many packed bytes to step to get the desired raw count step +OOFUNC1 OO_SINTa OOFUNC2 OodleLZ_GetCompressedStepForRawStep( + const void * compPtr, OO_SINTa compAvail, + OO_SINTa startRawPos, OO_SINTa rawSeekBytes, + OO_SINTa * pEndRawPos OODEFAULT(NULL), + OO_BOOL * pIndependent OODEFAULT(NULL) ); +/* How many bytes to step a compressed pointer to advance a certain uncompressed amount + + $:compPtr current compressed pointer + $:compAvail compressed bytes available at compPtr + $:startRawPos initial raw pos (corresponding to compPtr) + $:rawSeekBytes the desired step in raw bytes, must be a multiple of $OODLELZ_QUANTUM_LEN or $OODLELZ_BLOCK_LEN + $:pEndRawPos (optional) filled with the end raw pos actually reached + $:pIndependent (optional) filled with a bool that is true if the current chunk is independent from previous + $:return the number of compressed bytes to step + + You should try to use GetCompressedStepForRawStep only at block granularity - both _startRawPos_ and + _rawSeekBytes_ should be multiples of OODLELZ_BLOCK_LEN (except at the end of the stream). As long as you + do that, then *pEndRawPos will = startRawPos + rawSeekBytes. + + You can use it at quantum granularity (OODLELZ_QUANTUM_LEN), but there are some caveats. You cannot step + quanta inside uncompressed blocks, only in normal LZ blocks. If you try to seek quanta inside an uncompressed + block, you will get *pEndRawPos = the end of the block. + + You can only resume seeking from *pEndRawPos . + + returns 0 for valid not-enough-data case + returns -1 for error + + If _compAvail_ is not the whole compressed buffer, then the returned step may be less than the amount you requested. + eg. if the compressed data in _compAvail_ does not contain enough data to make a step of _rawSeekBytes_ a smaller + step will be taken. + NOTE : *can* return comp step > comp avail ! + + +*/ + +OOFUNC1 OodleLZ_Compressor OOFUNC2 OodleLZ_GetAllChunksCompressor(const void * compBuf,OO_SINTa compBufSize, + OO_SINTa rawLen); +/* ask who compressed all chunks in this buf chunk + + $:compBuf pointer to compressed data; must be the start of compressed buffer, or a step of $OODLELZ_BLOCK_LEN raw bytes + $:compBufSize size of _compBuf_ + $:rawLen rawlen of data in _compBuf_ + $:return the $OodleLZ_Compressor used to encode this chunk + + returns a simple compressor (for example OodleLZ_Compressor_Kraken) if that was used on all chunks + + returns OodleLZ_Compressor_Hydra if different NewLZ encoders were used (for example Kraken+Mermaid) + + returns OodleLZ_Compressor_Count if a heterogenous mix of compressors was used (not just NewLZ) + + returns OodleLZ_Compressor_Invalid on error + + note this is only for this chunk - later chunks may have different compressors (eg. 
with Hydra) + if you compressed all chunks the same it's up to you to store that info in your header + + returns OodleLZ_Compressor_Invalid if _compBufSize_ is too small or any chunk is corrupt +*/ + +OOFUNC1 OodleLZ_Compressor OOFUNC2 OodleLZ_GetFirstChunkCompressor(const void * compChunkPtr, + OO_SINTa compBufAvail, + OO_BOOL * pIndependent); +/* ask who compressed this chunk + + $:compChunkPtr pointer to compressed data; must be the start of compressed buffer, or a step of $OODLELZ_BLOCK_LEN raw bytes + $:compBufAvail number of bytes at _compChunkPtr_ available to read + $:pIndependent (optional) filled with a bool for whether this chunk is independent of predecessors + $:return the $OodleLZ_Compressor used to encode this chunk + + note this is only for this chunk - later chunks may have different compressors (eg. with Hydra) + if you compressed all chunks the same it's up to you to store that info in your header + + Use $OodleLZ_GetAllChunksCompressor for data that might be mixed compressors. + + This replaces the deprecated function $OodleLZ_GetChunkCompressor + + returns OodleLZ_Compressor_Invalid if _compBufAvail_ is too small or the chunk is corrupt +*/ + +OOFUNC1 OodleLZ_Compressor OOFUNC2 OodleLZ_GetChunkCompressor(const void * compChunkPtr, + OO_SINTa compBufAvail, + OO_BOOL * pIndependent); +/* Deprecated entry point for backwards compatibility + + Use $OodleLZ_GetFirstChunkCompressor or $OodleLZ_GetAllChunksCompressor + +*/ + +//======================================================= + +#define OODLE_HEADER_VERSION ((46<<24)|(OODLE2_VERSION_MAJOR<<16)|(OODLE2_VERSION_MINOR<<8)|(OO_U32)sizeof(OodleLZ_SeekTable)) /* OODLE_HEADER_VERSION is used to ensure the Oodle header matches the lib. Don't copy the value of this macro, it will change when + the header is rev'ed. + + This is what you pass to $OodleX_Init or $Oodle_CheckVersion +*/ + +OOFUNC1 OO_BOOL OOFUNC2 Oodle_CheckVersion(OO_U32 oodle_header_version, OO_U32 * pOodleLibVersion OODEFAULT(NULL)); +/* Check the Oodle lib version against the header you are compiling with + + $:oodle_header_version pass $OODLE_HEADER_VERSION here + $:pOodleLibVersion (optional) filled with the Oodle lib version + $:return false if $OODLE_HEADER_VERSION is not compatible with this lib + + If you use the Oodle2 Ext lib,, $OodleX_Init does it for you. But if you want to check that you have a + compatible lib before trying to Init, then use this. 
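For the chunk-compressor queries documented just above, a minimal FFI sketch (`comp` is a placeholder compressed buffer; `OO_BOOL` is an `i32` typedef, see oodle2base.h further down):

```rust
// Ask which compressor produced the chunk at the start of `comp`, and whether
// it can be decoded independently of earlier chunks.
let mut independent: i32 = 0; // OO_BOOL
let compressor = unsafe {
    bindings::OodleLZ_GetFirstChunkCompressor(
        comp.as_ptr() as *const _,
        comp.len() as isize,
        &mut independent,
    )
};

// Constant name assumed, following the same bindgen naming pattern as Kraken above.
if compressor == bindings::OodleLZ_Compressor_OodleLZ_Compressor_Invalid {
    // too little data available, or the chunk is corrupt
}
```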
+*/ + +OOFUNC1 void OOFUNC2 Oodle_LogHeader(void); +/* Log the Oodle version & copyright + + Uses the log set with $OodleCore_Plugins_SetPrintf +*/ + +// define old names so they still compile : +#define OODLECORE_PLUGIN_JOB_MAX_DEPENDENCIES OODLE_JOB_MAX_DEPENDENCIES +#define t_fp_OodleCore_Plugin_Job t_fp_Oodle_Job + +#ifdef _MSC_VER +#pragma warning(pop) +#pragma pack(pop, Oodle) +#endif + +#endif // __OODLE2_H_INCLUDED__ diff --git a/lib/oodle/oodle2base.h b/lib/oodle/oodle2base.h new file mode 100644 index 0000000..05f73f3 --- /dev/null +++ b/lib/oodle/oodle2base.h @@ -0,0 +1,167 @@ + +//=================================================== +// Oodle2 Base header +// (C) Copyright 1994-2021 Epic Games Tools LLC +//=================================================== + +#ifndef __OODLE2BASE_H_INCLUDED__ +#define __OODLE2BASE_H_INCLUDED__ + +#ifndef OODLE2BASE_PUBLIC_HEADER +#define OODLE2BASE_PUBLIC_HEADER 1 +#endif + +#ifdef _MSC_VER +#pragma pack(push, Oodle, 8) + +#pragma warning(push) +#pragma warning(disable : 4127) // conditional is constant +#endif + +#ifndef OODLE_BASE_TYPES_H +#define OODLE_BASE_TYPES_H + +#include + +#define OOCOPYRIGHT "Copyright (C) 1994-2021, Epic Games Tools LLC" + +// Typedefs +typedef int8_t OO_S8; +typedef uint8_t OO_U8; +typedef int16_t OO_S16; +typedef uint16_t OO_U16; +typedef int32_t OO_S32; +typedef uint32_t OO_U32; +typedef int64_t OO_S64; +typedef uint64_t OO_U64; +typedef float OO_F32; +typedef double OO_F64; +typedef intptr_t OO_SINTa; +typedef uintptr_t OO_UINTa; +typedef int32_t OO_BOOL; + +// Struct packing handling and inlining +#if defined(__GNUC__) || defined(__clang__) + #define OOSTRUCT struct __attribute__((__packed__)) + #define OOINLINEFUNC inline +#elif defined(_MSC_VER) + // on VC++, we use pragmas for the struct packing + #define OOSTRUCT struct + #define OOINLINEFUNC __inline +#endif + +// Linkage stuff +#if defined(_WIN32) + #define OOLINK __stdcall + #define OOEXPLINK __stdcall +#else + #define OOLINK + #define OOEXPLINK +#endif + +// C++ name demangaling +#ifdef __cplusplus + #define OODEFFUNC extern "C" + #define OODEFSTART extern "C" { + #define OODEFEND } + #define OODEFAULT( val ) =val +#else + #define OODEFFUNC + #define OODEFSTART + #define OODEFEND + #define OODEFAULT( val ) +#endif + +// ======================================================== +// Exported function declarations +#define OOEXPFUNC OODEFFUNC + +//=========================================================================== +// OO_STRING_JOIN joins strings in the preprocessor and works with LINESTRING +#define OO_STRING_JOIN(arg1, arg2) OO_STRING_JOIN_DELAY(arg1, arg2) +#define OO_STRING_JOIN_DELAY(arg1, arg2) OO_STRING_JOIN_IMMEDIATE(arg1, arg2) +#define OO_STRING_JOIN_IMMEDIATE(arg1, arg2) arg1 ## arg2 + +//=========================================================================== +// OO_NUMBERNAME is a macro to make a name unique, so that you can use it to declare +// variable names and they won't conflict with each other +// using __LINE__ is broken in MSVC with /ZI , but __COUNTER__ is an MSVC extension that works + +#ifdef _MSC_VER + #define OO_NUMBERNAME(name) OO_STRING_JOIN(name,__COUNTER__) +#else + #define OO_NUMBERNAME(name) OO_STRING_JOIN(name,__LINE__) +#endif + +//=================================================================== +// simple compiler assert +// this happens at declaration time, so if it's inside a function in a C file, drop {} around it +#ifndef OO_COMPILER_ASSERT + #if defined(__clang__) + #define OO_COMPILER_ASSERT_UNUSED 
__attribute__((unused)) // hides warnings when compiler_asserts are in a local scope + #else + #define OO_COMPILER_ASSERT_UNUSED + #endif + + #define OO_COMPILER_ASSERT(exp) typedef char OO_NUMBERNAME(_dummy_array) [ (exp) ? 1 : -1 ] OO_COMPILER_ASSERT_UNUSED +#endif + + +#endif + + + +// Oodle2 base header + +#ifndef OODLE2_PUBLIC_CORE_DEFINES +#define OODLE2_PUBLIC_CORE_DEFINES 1 + +#define OOFUNC1 OOEXPFUNC +#define OOFUNC2 OOEXPLINK +#define OOFUNCSTART +#define OODLE_CALLBACK OOLINK + +// Check build flags + #if defined(OODLE_BUILDING_LIB) || defined(OODLE_BUILDING_DLL) + #error Should not see OODLE_BUILDING set for users of oodle.h + #endif + +#ifndef NULL +#define NULL (0) +#endif + +// OODLE_MALLOC_MINIMUM_ALIGNMENT is 8 in 32-bit, 16 in 64-bit +#define OODLE_MALLOC_MINIMUM_ALIGNMENT ((OO_SINTa)(2*sizeof(void *))) + +typedef void (OODLE_CALLBACK t_OodleFPVoidVoid)(void); +/* void-void callback func pointer + takes void, returns void +*/ + +typedef void (OODLE_CALLBACK t_OodleFPVoidVoidStar)(void *); +/* void-void-star callback func pointer + takes void pointer, returns void +*/ + +#define OODLE_JOB_MAX_DEPENDENCIES (4) /* Maximum number of dependencies Oodle will ever pass to a RunJob callback +*/ + +#define OODLE_JOB_NULL_HANDLE (0) /* Value 0 of Jobify handles is reserved to mean none +* Wait(OODLE_JOB_NULL_HANDLE) is a nop +* if RunJob returns OODLE_JOB_NULL_HANDLE it means the job +* was run synchronously and no wait is required +*/ + +#define t_fp_Oodle_Job t_OodleFPVoidVoidStar /* Job function pointer for Plugin Jobify system + + takes void pointer returns void +*/ + +#endif // OODLE2_PUBLIC_CORE_DEFINES + +#ifdef _MSC_VER +#pragma warning(pop) +#pragma pack(pop, Oodle) +#endif + +#endif // __OODLE2BASE_H_INCLUDED__ diff --git a/lib/oodle/src/lib.rs b/lib/oodle/src/lib.rs new file mode 100644 index 0000000..76b1d16 --- /dev/null +++ b/lib/oodle/src/lib.rs @@ -0,0 +1,145 @@ +#![allow(non_upper_case_globals)] +#![allow(non_camel_case_types)] +#![allow(non_snake_case)] + +use std::ptr; + +use color_eyre::{eyre, Result}; + +#[allow(dead_code)] +mod bindings { + include!(concat!(env!("OUT_DIR"), "/bindings.rs")); +} + +// Hardcoded chunk size of Bitsquid's bundle compression +pub const CHUNK_SIZE: usize = 512 * 1024; +pub const COMPRESSOR: bindings::OodleLZ_Compressor = + bindings::OodleLZ_Compressor_OodleLZ_Compressor_Kraken; +pub const LEVEL: bindings::OodleLZ_CompressionLevel = + bindings::OodleLZ_CompressionLevel_OodleLZ_CompressionLevel_Optimal2; + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum OodleLZ_FuzzSafe { + Yes, + No, +} + +impl From for bindings::OodleLZ_FuzzSafe { + fn from(value: OodleLZ_FuzzSafe) -> Self { + match value { + OodleLZ_FuzzSafe::Yes => bindings::OodleLZ_FuzzSafe_OodleLZ_FuzzSafe_Yes, + OodleLZ_FuzzSafe::No => bindings::OodleLZ_FuzzSafe_OodleLZ_FuzzSafe_No, + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum OodleLZ_CheckCRC { + Yes, + No, +} + +impl From for bindings::OodleLZ_CheckCRC { + fn from(value: OodleLZ_CheckCRC) -> Self { + match value { + OodleLZ_CheckCRC::Yes => bindings::OodleLZ_CheckCRC_OodleLZ_CheckCRC_Yes, + OodleLZ_CheckCRC::No => bindings::OodleLZ_CheckCRC_OodleLZ_CheckCRC_No, + } + } +} + +#[tracing::instrument(skip(data))] +pub fn decompress( + data: I, + fuzz_safe: OodleLZ_FuzzSafe, + check_crc: OodleLZ_CheckCRC, +) -> Result> +where + I: AsRef<[u8]>, +{ + let data = data.as_ref(); + let mut out = vec![0; CHUNK_SIZE]; + + let verbosity = if tracing::enabled!(tracing::Level::INFO) { + 
bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Minimal + } else if tracing::enabled!(tracing::Level::DEBUG) { + bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Some + } else if tracing::enabled!(tracing::Level::TRACE) { + bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_Lots + } else { + bindings::OodleLZ_Verbosity_OodleLZ_Verbosity_None + }; + + let ret = unsafe { + bindings::OodleLZ_Decompress( + data.as_ptr() as *const _, + data.len() as isize, + out.as_mut_ptr() as *mut _, + out.len() as isize, + fuzz_safe.into(), + check_crc.into(), + verbosity, + ptr::null_mut(), + 0, + None, + ptr::null_mut(), + ptr::null_mut(), + 0, + bindings::OodleLZ_Decode_ThreadPhase_OodleLZ_Decode_Unthreaded, + ) + }; + + if ret == 0 { + eyre::bail!("Decompression failed"); + } + + Ok(out) +} + +#[tracing::instrument(skip(data))] +pub fn compress(data: I) -> Result> +where + I: AsRef<[u8]>, +{ + let mut raw = Vec::from(data.as_ref()); + raw.resize(CHUNK_SIZE, 0); + + // TODO: Query oodle for buffer size + let mut out = vec![0u8; CHUNK_SIZE]; + + let ret = unsafe { + bindings::OodleLZ_Compress( + COMPRESSOR, + raw.as_ptr() as *const _, + raw.len() as isize, + out.as_mut_ptr() as *mut _, + LEVEL, + ptr::null_mut(), + ptr::null_mut(), + ptr::null_mut(), + ptr::null_mut(), + 0, + ) + }; + + tracing::debug!(compressed_size = ret, "Compressed chunk"); + + if ret == 0 { + eyre::bail!("Compression failed"); + } + + out.resize(ret as usize, 0); + + Ok(out) +} + +pub fn get_decode_buffer_size(raw_size: usize, corruption_possible: bool) -> Result { + let size = unsafe { + bindings::OodleLZ_GetDecodeBufferSize( + COMPRESSOR, + raw_size as isize, + if corruption_possible { 1 } else { 0 }, + ) + }; + Ok(size as usize) +} diff --git a/lib/sdk/Cargo.toml b/lib/sdk/Cargo.toml index 7a1857b..46d02db 100644 --- a/lib/sdk/Cargo.toml +++ b/lib/sdk/Cargo.toml @@ -4,6 +4,7 @@ version = "0.2.0" edition = "2021" [dependencies] +bitflags = "1.3.2" byteorder = "1.4.3" color-eyre = "0.6.2" csv-async = { version = "1.2.4", features = ["tokio", "serde"] } @@ -16,9 +17,10 @@ nanorand = "0.7.0" pin-project-lite = "0.2.9" serde = { version = "1.0.147", features = ["derive"] } serde_sjson = { path = "../../lib/serde_sjson", version = "*" } -oodle-sys = { path = "../../lib/oodle-sys", version = "*" } +oodle = { path = "../../lib/oodle", version = "*" } tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] } tokio-stream = { version = "0.1.11", features = ["fs", "io-util"] } tracing = { version = "0.1.37", features = ["async-await"] } tracing-error = "0.2.0" +luajit2-sys = "0.0.2" async-recursion = "1.0.2" diff --git a/lib/sdk/src/binary.rs b/lib/sdk/src/binary.rs index 4782440..9ce3f11 100644 --- a/lib/sdk/src/binary.rs +++ b/lib/sdk/src/binary.rs @@ -1,3 +1,47 @@ +use std::io::{Cursor, Read, Seek, Write}; + +use color_eyre::Result; + +use self::sync::{ReadExt, WriteExt}; + +pub trait FromBinary: Sized { + fn from_binary(r: &mut R) -> Result; +} + +pub trait ToBinary { + fn to_binary(&self) -> Result>; +} + +impl ToBinary for Vec { + fn to_binary(&self) -> Result> { + // TODO: Allocations for the vector could be optimized by first + // serializing one value, then calculating the size from that. + let mut bin = Cursor::new(Vec::new()); + bin.write_u32(self.len() as u32)?; + + for val in self.iter() { + let buf = val.to_binary()?; + bin.write_all(&buf)?; + } + + Ok(bin.into_inner()) + } +} + +impl FromBinary for Vec { + fn from_binary(r: &mut R) -> Result { + let size = r.read_u32()? 
as usize; + + let mut list = Vec::with_capacity(size); + + for _ in 0..size { + list.push(T::from_binary(r)?); + } + + Ok(list) + } +} + pub mod sync { use std::io::{self, Read, Seek, SeekFrom}; diff --git a/lib/sdk/src/bundle/database.rs b/lib/sdk/src/bundle/database.rs new file mode 100644 index 0000000..6152ede --- /dev/null +++ b/lib/sdk/src/bundle/database.rs @@ -0,0 +1,252 @@ +use std::collections::HashMap; +use std::io::Cursor; +use std::io::Read; +use std::io::Seek; +use std::io::Write; + +use color_eyre::eyre; +use color_eyre::Result; + +use crate::binary::sync::*; +use crate::binary::FromBinary; +use crate::binary::ToBinary; +use crate::murmur::Murmur64; +use crate::Bundle; + +use super::file::BundleFileType; + +const DATABASE_VERSION: u32 = 0x6; +const FILE_VERSION: u32 = 0x4; + +pub struct BundleFile { + name: String, + stream: String, + platform_specific: bool, + file_time: u64, +} + +pub struct FileName { + extension: BundleFileType, + name: Murmur64, +} + +pub struct BundleDatabase { + stored_files: HashMap>, + resource_hashes: HashMap, + bundle_contents: HashMap>, +} + +impl BundleDatabase { + pub fn add_bundle(&mut self, bundle: &Bundle) { + let hash = bundle.name().to_murmur64(); + let name = hash.to_string(); + let stream = format!("{}.stream", &name); + + tracing::trace!( + "Adding bundle '{} ({:?} | {:016X})' to database. Hash exists: {}", + bundle.name().display(), + bundle.name(), + hash, + self.stored_files.contains_key(&hash) + ); + + { + let entry = self.stored_files.entry(hash).or_default(); + let existing = entry.iter().position(|f| f.name == name); + + let file = BundleFile { + name, + stream, + file_time: 0, + platform_specific: false, + }; + + entry.push(file); + + if let Some(pos) = existing { + tracing::debug!("Found bundle '{}' at {}. Replacing.", hash.to_string(), pos); + entry.swap_remove(pos); + } + } + + for f in bundle.files() { + let file_name = FileName { + extension: f.file_type(), + name: f.base_name().to_murmur64(), + }; + + // TODO: Compute actual resource hash + self.resource_hashes.insert(hash, 0); + + self.bundle_contents + .entry(hash) + .or_default() + .push(file_name); + } + } +} + +impl FromBinary for BundleDatabase { + #[tracing::instrument(name = "BundleDatabase::from_binary", skip_all)] + fn from_binary(r: &mut R) -> Result { + { + let format = r.read_u32()?; + eyre::ensure!( + format == DATABASE_VERSION, + "invalid file format, expected {:#X}, got {:#X}", + DATABASE_VERSION, + format + ); + } + + let num_entries = r.read_u32()? as usize; + let mut stored_files = HashMap::with_capacity(num_entries); + + for _ in 0..num_entries { + let hash = Murmur64::from(r.read_u64()?); + + let num_files = r.read_u32()? as usize; + let mut files = Vec::with_capacity(num_files); + + for _ in 0..num_files { + { + let version = r.read_u32()?; + eyre::ensure!( + version == FILE_VERSION, + "invalid file version, expected {:#X}, got {:#X}", + FILE_VERSION, + version + ); + } + + let len_name = r.read_u32()? as usize; + let mut buf = vec![0; len_name]; + r.read_exact(&mut buf)?; + + let name = String::from_utf8(buf)?; + + let len_stream = r.read_u32()? as usize; + let mut buf = vec![0; len_stream]; + r.read_exact(&mut buf)?; + + let stream = String::from_utf8(buf)?; + + let platform_specific = r.read_u8()? != 0; + + // TODO: Unknown what this is. In VT2's SDK, it's simply ignored, + // and always written as `0`, but in DT, it seems to be used. 
+ let mut buffer = [0; 20]; + r.read_exact(&mut buffer)?; + + if cfg!(debug_assertions) && buffer.iter().any(|b| *b != 0) { + tracing::warn!("Unknown value in 20-byte buffer: {:?}", buffer); + } + + let file_time = r.read_u64()?; + + let file = BundleFile { + name, + stream, + platform_specific, + file_time, + }; + + files.push(file); + } + + stored_files.insert(hash, files); + } + + let num_hashes = r.read_u32()? as usize; + let mut resource_hashes = HashMap::with_capacity(num_hashes); + + for _ in 0..num_hashes { + let name = Murmur64::from(r.read_u64()?); + let hash = r.read_u64()?; + + resource_hashes.insert(name, hash); + } + + let num_contents = r.read_u32()? as usize; + let mut bundle_contents = HashMap::with_capacity(num_contents); + + for _ in 0..num_contents { + let hash = Murmur64::from(r.read_u64()?); + + let num_files = r.read_u32()? as usize; + let mut files = Vec::with_capacity(num_files); + + for _ in 0..num_files { + let extension = BundleFileType::from(r.read_u64()?); + let name = Murmur64::from(r.read_u64()?); + + files.push(FileName { extension, name }); + } + + bundle_contents.insert(hash, files); + } + + Ok(Self { + stored_files, + resource_hashes, + bundle_contents, + }) + } +} + +impl ToBinary for BundleDatabase { + #[tracing::instrument(name = "BundleDatabase::to_binary", skip_all)] + fn to_binary(&self) -> Result> { + let mut binary = Vec::new(); + + { + let mut w = Cursor::new(&mut binary); + + w.write_u32(DATABASE_VERSION)?; + + w.write_u32(self.stored_files.len() as u32)?; + + for (hash, files) in self.stored_files.iter() { + w.write_u64((*hash).into())?; + w.write_u32(files.len() as u32)?; + + for f in files.iter() { + w.write_u32(FILE_VERSION)?; + w.write_u32(f.name.len() as u32)?; + w.write_all(f.name.as_bytes())?; + w.write_u32(f.stream.len() as u32)?; + w.write_all(f.stream.as_bytes())?; + + w.write_u8(if f.platform_specific { 1 } else { 0 })?; + + // TODO: Don't know what goes here + let buffer = [0; 20]; + w.write_all(&buffer)?; + + w.write_u64(f.file_time)?; + } + } + + w.write_u32(self.resource_hashes.len() as u32)?; + + for (name, hash) in self.resource_hashes.iter() { + w.write_u64((*name).into())?; + w.write_u64(*hash)?; + } + + w.write_u32(self.bundle_contents.len() as u32)?; + + for (hash, contents) in self.bundle_contents.iter() { + w.write_u64((*hash).into())?; + w.write_u32(contents.len() as u32)?; + + for FileName { extension, name } in contents.iter() { + w.write_u64((*extension).into())?; + w.write_u64((*name).into())?; + } + } + } + + Ok(binary) + } +} diff --git a/lib/sdk/src/bundle/file.rs b/lib/sdk/src/bundle/file.rs index d5f0c58..872b48b 100644 --- a/lib/sdk/src/bundle/file.rs +++ b/lib/sdk/src/bundle/file.rs @@ -1,6 +1,8 @@ +use std::ffi::CString; use std::io::{Cursor, Read, Seek, Write}; use std::path::Path; +use bitflags::bitflags; use color_eyre::eyre::Context; use color_eyre::{eyre, Result}; use futures::future::join_all; @@ -8,9 +10,7 @@ use serde::Serialize; use crate::binary::sync::*; use crate::filetype::*; -use crate::murmur::{HashGroup, Murmur64}; - -use super::EntryHeader; +use crate::murmur::{HashGroup, IdString64, Murmur64}; #[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] pub enum BundleFileType { @@ -397,7 +397,8 @@ impl From for u64 { } impl From for Murmur64 { fn from(t: BundleFileType) -> Murmur64 { - t.into() + let hash: u64 = t.into(); + Murmur64::from(hash) } } @@ -410,6 +411,7 @@ impl std::fmt::Display for BundleFileType { #[derive(Debug)] struct BundleFileHeader { variant: u32, + unknown_1: u8, size: usize, 
len_data_file_name: usize, } @@ -418,6 +420,8 @@ pub struct BundleFileVariant { property: u32, data: Vec, data_file_name: Option, + // Seems to be related to whether there is a data path. + unknown_1: u8, } impl BundleFileVariant { @@ -430,6 +434,7 @@ impl BundleFileVariant { property: 0, data: Vec::new(), data_file_name: None, + unknown_1: 0, } } @@ -459,47 +464,64 @@ impl BundleFileVariant { R: Read + Seek, { let variant = r.read_u32()?; - r.skip_u8(0)?; + let unknown_1 = r.read_u8()?; let size = r.read_u32()? as usize; r.skip_u8(1)?; let len_data_file_name = r.read_u32()? as usize; Ok(BundleFileHeader { size, + unknown_1, variant, len_data_file_name, }) } #[tracing::instrument(skip_all)] - fn write_header(&self, w: &mut W) -> Result<()> + fn write_header(&self, w: &mut W, props: Properties) -> Result<()> where W: Write + Seek, { w.write_u32(self.property)?; - w.write_u8(0)?; - w.write_u32(self.data.len() as u32)?; - w.write_u8(1)?; + w.write_u8(self.unknown_1)?; let len_data_file_name = self.data_file_name.as_ref().map(|s| s.len()).unwrap_or(0); - w.write_u32(len_data_file_name as u32)?; + + if props.contains(Properties::DATA) { + w.write_u32(len_data_file_name as u32)?; + w.write_u8(1)?; + w.write_u32(0)?; + } else { + w.write_u32(self.data.len() as u32)?; + w.write_u8(1)?; + w.write_u32(len_data_file_name as u32)?; + } Ok(()) } } +bitflags! { + #[derive(Default)] + pub struct Properties: u32 { + const DATA = 0b100; + } +} + pub struct BundleFile { file_type: BundleFileType, - name: String, + name: IdString64, variants: Vec, + props: Properties, } impl BundleFile { pub fn new(name: String, file_type: BundleFileType) -> Self { Self { file_type, - name, + name: name.into(), variants: Vec::new(), + props: Properties::empty(), } } @@ -507,12 +529,8 @@ impl BundleFile { self.variants.push(variant) } - #[tracing::instrument( - name = "File::read", - skip_all, - fields(name = %meta.name_hash, ext = %meta.extension_hash, flags = meta.flags) - )] - pub fn from_reader(ctx: &crate::Context, r: &mut R, meta: &EntryHeader) -> Result + #[tracing::instrument(name = "File::read", skip(ctx, r))] + pub fn from_reader(ctx: &crate::Context, r: &mut R, props: Properties) -> Result where R: Read + Seek, { @@ -521,36 +539,64 @@ impl BundleFile { let name = ctx.lookup_hash(hash, HashGroup::Filename); let header_count = r.read_u32()? 
as usize; + tracing::trace!(header_count); let mut headers = Vec::with_capacity(header_count); r.skip_u32(0)?; - for _ in 0..header_count { - let header = BundleFileVariant::read_header(r)?; + for i in 0..header_count { + let span = tracing::debug_span!("Read file header", i); + let _enter = span.enter(); + + let header = BundleFileVariant::read_header(r) + .wrap_err_with(|| format!("failed to read header {i}"))?; + + // TODO: Figure out how `header.unknown_1` correlates to `properties::DATA` + // if props.contains(Properties::DATA) { + // tracing::debug!("props: {props:?} | unknown_1: {}", header.unknown_1) + // } + headers.push(header); } let mut variants = Vec::with_capacity(header_count); for (i, header) in headers.into_iter().enumerate() { - let span = tracing::info_span!("Read file header {}", i, size = header.size); + let span = tracing::debug_span!( + "Read file data {}", + i, + size = header.size, + len_data_file_name = header.len_data_file_name + ); let _enter = span.enter(); - let mut data = vec![0; header.size]; - r.read_exact(&mut data) - .wrap_err_with(|| format!("failed to read header {i}"))?; - - let data_file_name = if header.len_data_file_name > 0 { + let (data, data_file_name) = if props.contains(Properties::DATA) { + let data = vec![]; let s = r - .read_string_len(header.len_data_file_name) + .read_string_len(header.size) .wrap_err("failed to read data file name")?; - Some(s) + + (data, Some(s)) } else { - None + let mut data = vec![0; header.size]; + r.read_exact(&mut data) + .wrap_err_with(|| format!("failed to read file {i}"))?; + + let data_file_name = if header.len_data_file_name > 0 { + let s = r + .read_string_len(header.len_data_file_name) + .wrap_err("failed to read data file name")?; + Some(s) + } else { + None + }; + + (data, data_file_name) }; let variant = BundleFileVariant { property: header.variant, data, data_file_name, + unknown_1: header.unknown_1, }; variants.push(variant); @@ -560,6 +606,7 @@ impl BundleFile { variants, file_type, name, + props, }) } @@ -568,7 +615,7 @@ impl BundleFile { let mut w = Cursor::new(Vec::new()); w.write_u64(self.file_type.hash().into())?; - w.write_u64(Murmur64::hash(self.name.as_bytes()).into())?; + w.write_u64(self.name.to_murmur64().into())?; w.write_u32(self.variants.len() as u32)?; // TODO: Figure out what this is @@ -576,16 +623,26 @@ impl BundleFile { for variant in self.variants.iter() { w.write_u32(variant.property())?; - w.write_u8(0)?; - w.write_u32(variant.size() as u32)?; - w.write_u8(1)?; + w.write_u8(variant.unknown_1)?; let len_data_file_name = variant.data_file_name().map(|s| s.len()).unwrap_or(0); - w.write_u32(len_data_file_name as u32)?; + + if self.props.contains(Properties::DATA) { + w.write_u32(len_data_file_name as u32)?; + w.write_u8(1)?; + w.write_u32(0)?; + } else { + w.write_u32(variant.size() as u32)?; + w.write_u8(1)?; + w.write_u32(len_data_file_name as u32)?; + } } for variant in self.variants.iter() { w.write_all(&variant.data)?; + if let Some(s) = &variant.data_file_name { + w.write_all(s.as_bytes())?; + } } Ok(w.into_inner()) @@ -603,7 +660,11 @@ impl BundleFile { S: AsRef, { match file_type { - BundleFileType::Lua => lua::compile(name, sjson).await, + BundleFileType::Lua => { + let sjson = + CString::new(sjson.as_ref()).wrap_err("failed to build CString from SJSON")?; + lua::compile(name, sjson) + } BundleFileType::Unknown(_) => { eyre::bail!("Unknown file type. 
Cannot compile from SJSON"); } @@ -616,12 +677,16 @@ impl BundleFile { } } - pub fn base_name(&self) -> &String { + pub fn props(&self) -> Properties { + self.props + } + + pub fn base_name(&self) -> &IdString64 { &self.name } pub fn name(&self, decompiled: bool, variant: Option) -> String { - let mut s = self.name.clone(); + let mut s = self.name.display().to_string(); s.push('.'); if let Some(variant) = variant { @@ -640,10 +705,18 @@ impl BundleFile { pub fn matches_name(&self, name: S) -> bool where - S: AsRef, + S: Into, { - let name = name.as_ref(); - self.name == name || self.name(false, None) == name || self.name(true, None) == name + let name = name.into(); + if self.name == name { + return true; + } + + if let IdString64::String(name) = name { + self.name(false, None) == name || self.name(true, None) == name + } else { + false + } } pub fn file_type(&self) -> BundleFileType { @@ -727,6 +800,12 @@ impl BundleFile { } } +impl PartialEq for BundleFile { + fn eq(&self, other: &Self) -> bool { + self.name == other.name && self.file_type == other.file_type + } +} + pub struct UserFile { // TODO: Might be able to avoid some allocations with a Cow here data: Vec, diff --git a/lib/sdk/src/bundle/mod.rs b/lib/sdk/src/bundle/mod.rs index 000df1c..1c08530 100644 --- a/lib/sdk/src/bundle/mod.rs +++ b/lib/sdk/src/bundle/mod.rs @@ -1,16 +1,19 @@ use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write}; +use std::mem::size_of; use std::path::Path; use color_eyre::eyre::{self, Context, Result}; use color_eyre::{Help, Report, SectionExt}; -use oodle_sys::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE}; +use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE}; use crate::binary::sync::*; -use crate::murmur::{HashGroup, Murmur64}; +use crate::bundle::file::Properties; +use crate::murmur::{HashGroup, IdString64, Murmur64}; +pub(crate) mod database; pub(crate) mod file; -pub use file::{BundleFile, BundleFileType}; +pub use file::{BundleFile, BundleFileType, BundleFileVariant}; #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] enum BundleFormat { @@ -39,72 +42,24 @@ impl From for u32 { } } -pub struct EntryHeader { - name_hash: Murmur64, - extension_hash: Murmur64, - flags: u32, -} - -impl EntryHeader { - #[tracing::instrument(name = "EntryHeader::from_reader", skip_all)] - fn from_reader(r: &mut R) -> Result - where - R: Read + Seek, - { - let extension_hash = Murmur64::from(r.read_u64()?); - let name_hash = Murmur64::from(r.read_u64()?); - let flags = r.read_u32()?; - - // NOTE: Known values so far: - // - 0x0: seems to be the default - // - 0x4: seems to be used for files that point to something in `data/` - // seems to correspond to a change in value in the header's 'unknown_3' - if flags != 0x0 { - tracing::debug!( - flags, - "Unexpected meta flags for file {name_hash:016X}.{extension_hash:016X}", - ); - } - - Ok(Self { - name_hash, - extension_hash, - flags, - }) - } - - #[tracing::instrument(name = "EntryHeader::to_writer", skip_all)] - fn to_writer(&self, w: &mut W) -> Result<()> - where - W: Write + Seek, - { - w.write_u64(self.extension_hash.into())?; - w.write_u64(self.name_hash.into())?; - w.write_u32(self.flags)?; - Ok(()) - } -} - pub struct Bundle { format: BundleFormat, properties: [Murmur64; 32], - headers: Vec, files: Vec, - name: String, + name: IdString64, } impl Bundle { - pub fn new(name: String) -> Self { + pub fn new>(name: S) -> Self { Self { - name, + name: name.into(), format: BundleFormat::F8, properties: [0.into(); 32], - headers: Vec::new(), files: Vec::new(), 
} } - pub fn get_name_from_path<P>(ctx: &crate::Context, path: P) -> String + pub fn get_name_from_path<P>
(ctx: &crate::Context, path: P) -> IdString64 where P: AsRef, { @@ -113,28 +68,31 @@ impl Bundle { .and_then(|name| name.to_str()) .and_then(|name| Murmur64::try_from(name).ok()) .map(|hash| ctx.lookup_hash(hash, HashGroup::Filename)) - .unwrap_or_else(|| path.display().to_string()) + .unwrap_or_else(|| path.display().to_string().into()) } pub fn add_file(&mut self, file: BundleFile) { tracing::trace!("Adding file {}", file.name(false, None)); - let header = EntryHeader { - extension_hash: file.file_type().into(), - name_hash: Murmur64::hash(file.base_name().as_bytes()), - // TODO: Hard coded until we know what this is - flags: 0x0, - }; + let existing_index = self + .files + .iter() + .enumerate() + .find(|(_, f)| **f == file) + .map(|val| val.0); self.files.push(file); - self.headers.push(header); + + if let Some(i) = existing_index { + self.files.swap_remove(i); + } } #[tracing::instrument(skip(ctx, binary), fields(len_binary = binary.as_ref().len()))] - pub fn from_binary(ctx: &crate::Context, name: String, binary: B) -> Result + pub fn from_binary(ctx: &crate::Context, name: S, binary: B) -> Result where B: AsRef<[u8]>, + S: Into + std::fmt::Debug, { - let bundle_name = name; let mut r = BufReader::new(Cursor::new(binary)); let format = r.read_u32().and_then(BundleFormat::try_from)?; @@ -153,9 +111,13 @@ impl Bundle { *prop = Murmur64::from(r.read_u64()?); } - let mut headers = Vec::with_capacity(num_entries); + let mut file_props = Vec::with_capacity(num_entries); for _ in 0..num_entries { - headers.push(EntryHeader::from_reader(&mut r)?); + // Skip two u64 that contain the extension hash and file name hash. + // We don't need them here, since we're reading the whole bundle into memory + // anyways. + r.seek(SeekFrom::Current((2 * size_of::()) as i64))?; + file_props.push(Properties::from_bits_truncate(r.read_u32()?)); } let num_chunks = r.read_u32()? as usize; @@ -197,7 +159,7 @@ impl Bundle { decompressed.append(&mut compressed_buffer); } else { // TODO: Optimize to not reallocate? 
- let mut raw_buffer = oodle_sys::decompress( + let mut raw_buffer = oodle::decompress( &compressed_buffer, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No, @@ -210,8 +172,6 @@ impl Bundle { unpacked_size_tracked -= CHUNK_SIZE; } - tracing::trace!(raw_size = raw_buffer.len()); - decompressed.append(&mut raw_buffer); } } @@ -226,17 +186,19 @@ impl Bundle { let mut r = Cursor::new(decompressed); let mut files = Vec::with_capacity(num_entries); - for i in 0..num_entries { - let meta = headers.get(i).unwrap(); - let file = BundleFile::from_reader(ctx, &mut r, meta) + tracing::trace!(num_files = num_entries); + for (i, props) in file_props.iter().enumerate() { + let span = tracing::debug_span!("Read file {}", i); + let _enter = span.enter(); + + let file = BundleFile::from_reader(ctx, &mut r, *props) .wrap_err_with(|| format!("failed to read file {i}"))?; files.push(file); } Ok(Self { - name: bundle_name, + name: name.into(), format, - headers, files, properties, }) @@ -254,8 +216,10 @@ impl Bundle { w.write_u64((*prop).into())?; } - for meta in self.headers.iter() { - meta.to_writer(&mut w)?; + for file in self.files.iter() { + w.write_u64(file.file_type().into())?; + w.write_u64(file.base_name().to_murmur64().into())?; + w.write_u32(file.props().bits())?; } let unpacked_data = { @@ -293,7 +257,7 @@ impl Bundle { let mut chunk_sizes = Vec::with_capacity(num_chunks); for chunk in chunks { - let compressed = oodle_sys::compress(chunk)?; + let compressed = oodle::compress(chunk)?; tracing::trace!( raw_chunk_size = chunk.len(), compressed_chunk_size = compressed.len() @@ -313,7 +277,7 @@ impl Bundle { Ok(w.into_inner()) } - pub fn name(&self) -> &String { + pub fn name(&self) -> &IdString64 { &self.name } @@ -395,7 +359,7 @@ where r.read_exact(&mut compressed_buffer)?; // TODO: Optimize to not reallocate? 
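Taken together, the bundle-reading hunks above boil down to a loop like the following sketch over the new `oodle` wrapper. It assumes, as the surrounding hunk suggests, that a chunk whose stored size already equals `CHUNK_SIZE` is kept uncompressed; the real code additionally truncates the final chunk to the remaining unpacked size:

```rust
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe, CHUNK_SIZE};

fn unpack_chunks(chunks: &[Vec<u8>]) -> color_eyre::Result<Vec<u8>> {
    let mut decompressed = Vec::with_capacity(chunks.len() * CHUNK_SIZE);

    for chunk in chunks {
        if chunk.len() == CHUNK_SIZE {
            // Stored uncompressed: copy straight through.
            decompressed.extend_from_slice(chunk);
        } else {
            // The bundle format carries no per-chunk CRCs here, so CRC checking is off.
            let raw = oodle::decompress(chunk, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No)?;
            decompressed.extend_from_slice(&raw);
        }
    }

    Ok(decompressed)
}
```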
- let mut raw_buffer = oodle_sys::decompress( + let mut raw_buffer = oodle::decompress( &compressed_buffer, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No, diff --git a/lib/sdk/src/context.rs b/lib/sdk/src/context.rs index 0116c4a..b0de6dc 100644 --- a/lib/sdk/src/context.rs +++ b/lib/sdk/src/context.rs @@ -1,6 +1,6 @@ use std::path::PathBuf; -use crate::murmur::{Dictionary, HashGroup, Murmur32, Murmur64}; +use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64}; pub struct Context { pub lookup: Dictionary, @@ -21,17 +21,17 @@ impl Context { } } - pub fn lookup_hash(&self, hash: M, group: HashGroup) -> String + pub fn lookup_hash(&self, hash: M, group: HashGroup) -> IdString64 where M: Into, { let hash = hash.into(); if let Some(s) = self.lookup.lookup(hash, group) { tracing::debug!(%hash, string = s, "Murmur64 lookup successful"); - s.to_owned() + s.to_string().into() } else { tracing::debug!(%hash, "Murmur64 lookup failed"); - format!("{hash:016X}") + hash.into() } } diff --git a/lib/sdk/src/filetype/lua.rs b/lib/sdk/src/filetype/lua.rs index 87ac629..68b95e3 100644 --- a/lib/sdk/src/filetype/lua.rs +++ b/lib/sdk/src/filetype/lua.rs @@ -1,13 +1,16 @@ -use std::io::{Cursor, Write}; +use std::ffi::CStr; +use std::ffi::CString; +use std::io::Cursor; +use std::io::Write; -use color_eyre::{eyre::Context, Result}; -use tokio::{fs, process::Command}; +use color_eyre::eyre; +use color_eyre::eyre::Context; +use color_eyre::Result; +use luajit2_sys as lua; -use crate::{ - binary::sync::WriteExt, - bundle::file::{BundleFileVariant, UserFile}, - BundleFile, BundleFileType, -}; +use crate::binary::sync::WriteExt; +use crate::bundle::file::{BundleFileVariant, UserFile}; +use crate::{BundleFile, BundleFileType}; #[tracing::instrument(skip_all, fields(buf_len = data.as_ref().len()))] pub(crate) async fn decompile(_ctx: &crate::Context, data: T) -> Result> @@ -19,67 +22,85 @@ where } #[tracing::instrument(skip_all)] -pub(crate) async fn compile(name: String, code: S) -> Result +pub fn compile(name: S, code: C) -> Result where - S: AsRef, + S: Into, + C: AsRef, { - let in_file_path = { - let mut path = std::env::temp_dir(); - let name: String = std::iter::repeat_with(fastrand::alphanumeric) - .take(10) - .collect(); - path.push(name + "-dtmt.lua"); + let name = name.into(); + let code = code.as_ref(); - path - }; - - let out_file_path = { - let mut path = std::env::temp_dir(); - - let name: String = std::iter::repeat_with(fastrand::alphanumeric) - .take(10) - .collect(); - path.push(name + "-dtmt.luab"); - - path - }; - - fs::write(&in_file_path, code.as_ref().as_bytes()) - .await - .wrap_err_with(|| format!("failed to write file {}", in_file_path.display()))?; - - // TODO: Make executable name configurable - Command::new("luajit") - .arg("-bg") - .arg("-F") - .arg(name.clone() + ".lua") - .arg("-o") - .arg("Windows") - .arg(&in_file_path) - .arg(&out_file_path) - .status() - .await - .wrap_err("failed to compile to LuaJIT byte code")?; - - let mut data = Cursor::new(Vec::new()); - - let bytecode = { - let mut data = fs::read(&out_file_path) - .await - .wrap_err_with(|| format!("failed to read file {}", out_file_path.display()))?; - - // Add Fatshark's custom magic bytes - data[1] = 0x46; - data[2] = 0x53; - data[3] = 0x82; - - data + let bytecode = unsafe { + let state = lua::luaL_newstate(); + lua::luaL_openlibs(state); + + lua::lua_pushstring(state, code.as_ptr() as _); + lua::lua_setglobal(state, b"code\0".as_ptr() as _); + + let name = CString::new(name.as_bytes()) + .wrap_err_with(|| 
format!("cannot convert name into CString: {}", name))?; + lua::lua_pushstring(state, name.as_ptr() as _); + lua::lua_setglobal(state, b"name\0".as_ptr() as _); + + let run = b"return string.dump(loadstring(code, \"@\" .. name), false)\0"; + match lua::luaL_loadstring(state, run.as_ptr() as _) as u32 { + lua::LUA_OK => {} + lua::LUA_ERRSYNTAX => { + let err = lua::lua_tostring(state, -1); + let err = CStr::from_ptr(err).to_string_lossy().to_string(); + + lua::lua_close(state); + + eyre::bail!("Invalid syntax: {}", err); + } + lua::LUA_ERRMEM => { + lua::lua_close(state); + eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode") + } + _ => unreachable!(), + } + + match lua::lua_pcall(state, 0, 1, 0) as u32 { + lua::LUA_OK => { + // The binary data is pretty much guaranteed to contain NUL bytes, + // so we can't rely on `lua_tostring` and `CStr` here. Instead we have to + // explicitely query the string length and build our vector from that. + // However, on the bright side, we don't have to go through any string types anymore, + // and can instead treat it as raw bytes immediately. + let mut len = 0; + let data = lua::lua_tolstring(state, -1, &mut len) as *const u8; + let data = std::slice::from_raw_parts(data, len).to_vec(); + + lua::lua_close(state); + + data + } + lua::LUA_ERRRUN => { + let err = lua::lua_tostring(state, -1); + let err = CStr::from_ptr(err).to_string_lossy().to_string(); + + lua::lua_close(state); + + eyre::bail!("Failed to compile LuaJIT bytecode: {}", err); + } + lua::LUA_ERRMEM => { + lua::lua_close(state); + eyre::bail!("Failed to allocate sufficient memory to compile LuaJIT bytecode") + } + // We don't use an error handler function, so this should be unreachable + lua::LUA_ERRERR => unreachable!(), + _ => unreachable!(), + } }; + let mut data = Cursor::new(Vec::with_capacity(bytecode.len() + 12)); data.write_u32(bytecode.len() as u32)?; - // I believe this is supposed to be a uleb128, but it seems to be always 0x2 in binary. 
- data.write_u64(0x2)?; - data.write_all(&bytecode)?; + // TODO: Figure out what these two values are + data.write_u32(0x2)?; + data.write_u32(0x0)?; + // Use Fatshark's custom magic bytes + data.write_all(&[0x1b, 0x46, 0x53, 0x82])?; + data.write_all(&bytecode[4..])?; let mut file = BundleFile::new(name, BundleFileType::Lua); let mut variant = BundleFileVariant::new(); diff --git a/lib/sdk/src/filetype/package.rs b/lib/sdk/src/filetype/package.rs index 63ec712..338cc8e 100644 --- a/lib/sdk/src/filetype/package.rs +++ b/lib/sdk/src/filetype/package.rs @@ -97,6 +97,7 @@ pub struct Package { _name: String, _root: PathBuf, inner: PackageType, + flags: u8, } impl Deref for Package { @@ -114,6 +115,15 @@ impl DerefMut for Package { } impl Package { + pub fn new(name: String, root: PathBuf) -> Self { + Self { + _name: name, + _root: root, + inner: Default::default(), + flags: 1, + } + } + fn len(&self) -> usize { self.values().fold(0, |total, files| total + files.len()) } @@ -171,6 +181,7 @@ impl Package { inner, _name: name, _root: root.to_path_buf(), + flags: 1, }; Ok(pkg) @@ -211,13 +222,25 @@ impl Package { let t = BundleFileType::from(r.read_u64()?); let hash = Murmur64::from(r.read_u64()?); let path = ctx.lookup_hash(hash, HashGroup::Filename); - inner.entry(t).or_default().insert(PathBuf::from(path)); + inner + .entry(t) + .or_default() + .insert(PathBuf::from(path.display().to_string())); + } + + let flags = r.read_u8()?; + + if cfg!(debug_assertions) && flags != 1 { + tracing::warn!("Unexpected value for package flags: {:0x}", flags); + } else if (flags & 0xFE) >= 2 { + tracing::warn!("Resource Package has common packages. Ignoring."); } let pkg = Self { inner, _name: name, _root: PathBuf::new(), + flags, }; Ok(pkg) @@ -240,6 +263,8 @@ impl Package { } } + w.write_u8(self.flags)?; + Ok(w.into_inner()) } } diff --git a/lib/sdk/src/lib.rs b/lib/sdk/src/lib.rs index 6890317..e229e28 100644 --- a/lib/sdk/src/lib.rs +++ b/lib/sdk/src/lib.rs @@ -4,6 +4,8 @@ mod context; pub mod filetype; pub mod murmur; +pub use binary::{FromBinary, ToBinary}; +pub use bundle::database::BundleDatabase; pub use bundle::decompress; -pub use bundle::{Bundle, BundleFile, BundleFileType}; +pub use bundle::{Bundle, BundleFile, BundleFileType, BundleFileVariant}; pub use context::Context; diff --git a/lib/sdk/src/murmur/dictionary.rs b/lib/sdk/src/murmur/dictionary.rs index 322dded..2d51af1 100644 --- a/lib/sdk/src/murmur/dictionary.rs +++ b/lib/sdk/src/murmur/dictionary.rs @@ -55,6 +55,24 @@ pub struct Entry { group: HashGroup, } +impl Entry { + pub fn value(&self) -> &String { + &self.value + } + + pub fn long(&self) -> Murmur64 { + self.long + } + + pub fn short(&self) -> Murmur32 { + self.short + } + + pub fn group(&self) -> HashGroup { + self.group + } +} + pub struct Dictionary { entries: Vec, } @@ -172,4 +190,8 @@ impl Dictionary { pub fn is_empty(&self) -> bool { self.entries.is_empty() } + + pub fn entries(&self) -> &Vec { + &self.entries + } } diff --git a/lib/sdk/src/murmur/mod.rs b/lib/sdk/src/murmur/mod.rs index 95e66fa..7ede170 100644 --- a/lib/sdk/src/murmur/mod.rs +++ b/lib/sdk/src/murmur/mod.rs @@ -13,8 +13,7 @@ mod murmurhash64; pub const SEED: u32 = 0; -pub use dictionary::Dictionary; -pub use dictionary::HashGroup; +pub use dictionary::{Dictionary, Entry, HashGroup}; pub use murmurhash64::hash; pub use murmurhash64::hash32; pub use murmurhash64::hash_inverse as inverse; @@ -67,6 +66,12 @@ impl fmt::UpperHex for Murmur64 { } } +impl fmt::LowerHex for Murmur64 { + fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { + fmt::LowerHex::fmt(&self.0, f) + } +} + impl fmt::Display for Murmur64 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::UpperHex::fmt(&self.0, f) @@ -237,3 +242,148 @@ impl<'de> Deserialize<'de> for Murmur32 { deserializer.deserialize_any(Self(0)) } } + +// This type encodes the fact that when reading in a bundle, we don't always have a dictionary +// entry for every hash in there. So we do want to have the real string available when needed, +// but at the same time retain the original hash information for when we don't. +// This is especially important when wanting to write back the read bundle, as the hashes need to +// stay the same. +// The previous system of always turning hashes into strings worked well for the purpose of +// displaying hashes, but would have made it very hard to turn a stringyfied hash back into +// an actual hash. +#[derive(Clone, Debug, Eq)] +pub enum IdString64 { + Hash(Murmur64), + String(String), +} + +impl IdString64 { + pub fn to_murmur64(&self) -> Murmur64 { + match self { + Self::Hash(hash) => *hash, + Self::String(s) => Murmur64::hash(s.as_bytes()), + } + } + + pub fn display(&self) -> IdString64Display { + let s = match self { + IdString64::Hash(hash) => hash.to_string(), + IdString64::String(s) => s.clone(), + }; + + IdString64Display(s) + } + + pub fn is_string(&self) -> bool { + match self { + IdString64::Hash(_) => false, + IdString64::String(_) => true, + } + } + + pub fn is_hash(&self) -> bool { + match self { + IdString64::Hash(_) => true, + IdString64::String(_) => false, + } + } +} + +impl> From for IdString64 { + fn from(value: S) -> Self { + Self::String(value.into()) + } +} + +impl From for IdString64 { + fn from(value: Murmur64) -> Self { + Self::Hash(value) + } +} + +impl From for Murmur64 { + fn from(value: IdString64) -> Self { + value.to_murmur64() + } +} + +impl PartialEq for IdString64 { + fn eq(&self, other: &Self) -> bool { + self.to_murmur64() == other.to_murmur64() + } +} + +impl std::hash::Hash for IdString64 { + fn hash(&self, state: &mut H) { + state.write_u64(self.to_murmur64().into()); + } +} + +impl serde::Serialize for IdString64 { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_u64(self.to_murmur64().into()) + } +} + +struct IdString64Visitor; + +impl<'de> serde::de::Visitor<'de> for IdString64Visitor { + type Value = IdString64; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("an u64 or a string") + } + + fn visit_u64(self, value: u64) -> Result + where + E: serde::de::Error, + { + Ok(IdString64::Hash(value.into())) + } + + fn visit_str(self, v: &str) -> Result + where + E: serde::de::Error, + { + Ok(IdString64::String(v.to_string())) + } + + fn visit_string(self, v: String) -> Result + where + E: serde::de::Error, + { + Ok(IdString64::String(v)) + } +} + +impl<'de> serde::Deserialize<'de> for IdString64 { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_u64(IdString64Visitor) + } +} + +pub struct IdString64Display(String); + +impl std::fmt::Display for IdString64Display { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl std::fmt::UpperHex for IdString64 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + std::fmt::UpperHex::fmt(&self.to_murmur64(), f) + } +} + +impl std::fmt::LowerHex for IdString64 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { + std::fmt::LowerHex::fmt(&self.to_murmur64(), f) + } +} diff --git a/lib/serde_sjson b/lib/serde_sjson index a6ef5a9..e94218d 160000 --- a/lib/serde_sjson +++ b/lib/serde_sjson @@ -1 +1 @@ -Subproject commit a6ef5a914e15f22d3ebcc475969b65182475139f +Subproject commit e94218d8f52a51529c83af33a99cc17f66caae2e
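As a closing illustration of the `IdString64` type added in `lib/sdk/src/murmur/mod.rs` above, a minimal usage sketch (the `sdk` crate/module path is an assumption):

```rust
use sdk::murmur::{IdString64, Murmur64};

fn main() {
    let named: IdString64 = String::from("packages/boot").into();
    let hashed: IdString64 = Murmur64::hash("packages/boot".as_bytes()).into();

    // Equality and hashing always go through the Murmur64 value, so a known name
    // and its bare hash compare equal...
    assert_eq!(named, hashed);

    // ...but only the string-backed variant can be rendered as a readable name;
    // the hash-backed one falls back to printing the hash itself.
    assert!(named.is_string());
    assert!(hashed.is_hash());
    println!("{} / {}", named.display(), hashed.display());
}
```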