Compare commits
8 commits
master
...
feat/textu
Author | SHA1 | Date | |
---|---|---|---|
04b6a43f9a | |||
cbb3709c89 | |||
94af8862e8 | |||
9f849ab3ec | |||
63fb0a1c08 | |||
58071958d2 | |||
67f313107e | |||
db27dd9f39 |
45 changed files with 1649 additions and 796 deletions
|
@ -1,8 +1,7 @@
|
|||
# https://jake-shadle.github.io/xwin/
|
||||
FROM debian:bullseye-slim AS xwin
|
||||
FROM debian:bullseye-slim as xwin
|
||||
|
||||
# renovate: datasource=github-releases depName=xwin packageName=Jake-Shadle/xwin
|
||||
ARG XWIN_VERSION=0.6.6
|
||||
ARG XWIN_VERSION=0.5.2
|
||||
ARG XWIN_PREFIX="xwin-$XWIN_VERSION-x86_64-unknown-linux-musl"
|
||||
ADD https://github.com/Jake-Shadle/xwin/releases/download/$XWIN_VERSION/$XWIN_PREFIX.tar.gz /root/$XWIN_PREFIX.tar.gz
|
||||
|
||||
|
@ -32,7 +31,7 @@ RUN set -eux; \
|
|||
# And to keep that to a minimum, we still delete the stuff we don't need.
|
||||
rm -rf /root/.xwin-cache;
|
||||
|
||||
FROM rust:slim-bullseye AS linux
|
||||
FROM rust:slim-bullseye as linux
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
|
@ -59,10 +58,9 @@ WORKDIR /src/dtmt
|
|||
|
||||
COPY lib/oodle/*.so lib/oodle/*.a /src/
|
||||
|
||||
FROM linux AS msvc
|
||||
FROM linux as msvc
|
||||
|
||||
# renovate: datasource=github-releases depName=llvm packageName=llvm/llvm-project
|
||||
ARG LLVM_VERSION=20
|
||||
ARG LLVM_VERSION=18
|
||||
ENV KEYRINGS /usr/local/share/keyrings
|
||||
|
||||
ADD https://apt.llvm.org/llvm-snapshot.gpg.key /root/llvm-snapshot.gpg.key
|
||||
|
|
6
.gitmodules
vendored
6
.gitmodules
vendored
|
@ -1,3 +1,6 @@
|
|||
[submodule "lib/luajit2-sys"]
|
||||
path = lib/luajit2-sys
|
||||
url = https://github.com/sclu1034/luajit2-sys.git
|
||||
[submodule "lib/color-eyre"]
|
||||
path = lib/color-eyre
|
||||
url = https://github.com/sclu1034/color-eyre.git
|
||||
|
@ -6,6 +9,3 @@
|
|||
path = lib/ansi-parser
|
||||
url = https://gitlab.com/lschwiderski/ansi-parser.git
|
||||
branch = "issue/outdated-nom"
|
||||
[submodule "lib/luajit2-sys/luajit"]
|
||||
path = lib/luajit2-sys/luajit
|
||||
url = https://github.com/LuaJIT/LuaJIT.git
|
||||
|
|
30
.renovaterc
30
.renovaterc
|
@ -10,35 +10,5 @@
|
|||
"baseBranches": [
|
||||
"$default",
|
||||
"/^release\\/.*/"
|
||||
],
|
||||
"ignorePaths": [
|
||||
"lib/color_eyre/**",
|
||||
"lib/ansi-parser/**",
|
||||
"lib/luajit2-sys/**",
|
||||
"**/target/**"
|
||||
],
|
||||
"customManagers": [
|
||||
{
|
||||
"customType": "regex",
|
||||
"description": "Update _VERSION variables in Dockerfiles",
|
||||
"fileMatch": [
|
||||
"(^|/|\\.)Dockerfile$",
|
||||
"(^|/)Dockerfile\\.[^/]*$"
|
||||
],
|
||||
"matchStrings": [
|
||||
"# renovate: datasource=(?<datasource>[a-z-]+?)(?: depName=(?<depName>.+?))? packageName=(?<packageName>.+?)(?: versioning=(?<versioning>[a-z-]+?))?\\s(?:ENV|ARG) .+?_VERSION=(?<currentValue>.+?)\\s"
|
||||
]
|
||||
}
|
||||
],
|
||||
"packageRules": [
|
||||
{
|
||||
"matchDatasources": [
|
||||
"github-releases"
|
||||
],
|
||||
"matchPackageNames": [
|
||||
"llvm/llvm-project"
|
||||
],
|
||||
"extractVersion": "^llvmorg-(?<version>\\d+)\\.\\d+\\.\\d+$"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
167
Cargo.lock
generated
167
Cargo.lock
generated
|
@ -229,9 +229,29 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.72.0"
|
||||
version = "0.70.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4f72209734318d0b619a5e0f5129918b848c416e122a3c4ce054e03cb87b726f"
|
||||
checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools",
|
||||
"log",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash 1.1.0",
|
||||
"shlex",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.71.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f58bf3d7db68cfbac37cfc485a8d711e87e064c3d0fe0435b92f7a407f9d6b3"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"cexpr",
|
||||
|
@ -359,9 +379,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.29"
|
||||
version = "1.1.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362"
|
||||
checksum = "72db2f7947ecee9b03b510377e8bb9077afa27176fdbff55c51027e976fdcc48"
|
||||
dependencies = [
|
||||
"jobserver",
|
||||
"libc",
|
||||
|
@ -406,9 +426,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.40"
|
||||
version = "4.5.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
|
||||
checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
|
@ -416,9 +436,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.40"
|
||||
version = "4.5.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
|
||||
checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
@ -430,9 +450,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.40"
|
||||
version = "4.5.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
|
||||
checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
|
@ -561,13 +581,13 @@ checksum = "9226dbc05df4fb986f48d730b001532580883c4c06c5d1c213f4b34c1c157178"
|
|||
|
||||
[[package]]
|
||||
name = "confy"
|
||||
version = "1.0.0"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f29222b549d4e3ded127989d523da9e928918d0d0d7f7c1690b439d0d538bae9"
|
||||
checksum = "45b1f4c00870f07dc34adcac82bb6a72cc5aabca8536ba1797e01df51d2ce9a0"
|
||||
dependencies = [
|
||||
"directories",
|
||||
"serde",
|
||||
"thiserror 2.0.12",
|
||||
"thiserror 1.0.63",
|
||||
"toml 0.8.19",
|
||||
]
|
||||
|
||||
|
@ -746,9 +766,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "directories"
|
||||
version = "6.0.0"
|
||||
version = "5.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
|
||||
checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
|
||||
dependencies = [
|
||||
"dirs-sys",
|
||||
]
|
||||
|
@ -765,14 +785,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "dirs-sys"
|
||||
version = "0.5.0"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
|
||||
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"option-ext",
|
||||
"redox_users 0.5.0",
|
||||
"windows-sys 0.59.0",
|
||||
"redox_users",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -782,7 +802,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"redox_users 0.4.6",
|
||||
"redox_users",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
|
@ -1098,6 +1118,7 @@ checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
|
|||
dependencies = [
|
||||
"crc32fast",
|
||||
"libz-rs-sys",
|
||||
"libz-sys",
|
||||
"miniz_oxide 0.8.8",
|
||||
]
|
||||
|
||||
|
@ -1994,17 +2015,6 @@ dependencies = [
|
|||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "io-uring"
|
||||
version = "0.7.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.9.0"
|
||||
|
@ -2164,9 +2174,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.174"
|
||||
version = "0.2.172"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
|
||||
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
|
@ -2198,6 +2208,17 @@ dependencies = [
|
|||
"zlib-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libz-sys"
|
||||
version = "1.1.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.14"
|
||||
|
@ -2232,7 +2253,7 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
|
|||
name = "luajit2-sys"
|
||||
version = "0.0.2"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"bindgen 0.70.1",
|
||||
"cc",
|
||||
"fs_extra",
|
||||
"libc",
|
||||
|
@ -2313,9 +2334,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "minijinja"
|
||||
version = "2.11.0"
|
||||
version = "2.10.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4e60ac08614cc09062820e51d5d94c2fce16b94ea4e5003bb81b99a95f84e876"
|
||||
checksum = "dd72e8b4e42274540edabec853f607c015c73436159b06c39c7af85a20433155"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
@ -2360,9 +2381,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "nanorand"
|
||||
version = "0.8.0"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6e3d189da485332e96ba8a5ef646a311871abd7915bf06ac848a9117f19cf6e4"
|
||||
checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3"
|
||||
|
||||
[[package]]
|
||||
name = "native-tls"
|
||||
|
@ -2491,6 +2512,17 @@ version = "0.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
|
||||
|
||||
[[package]]
|
||||
name = "num-derive"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.19"
|
||||
|
@ -2537,7 +2569,7 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
|||
name = "oodle"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"bindgen 0.71.1",
|
||||
"color-eyre",
|
||||
"tracing",
|
||||
]
|
||||
|
@ -3011,17 +3043,6 @@ dependencies = [
|
|||
"thiserror 1.0.63",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
|
||||
dependencies = [
|
||||
"getrandom 0.2.15",
|
||||
"libredox",
|
||||
"thiserror 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
|
@ -3068,9 +3089,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
|||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.12.22"
|
||||
version = "0.12.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
|
||||
checksum = "e98ff6b0dbbe4d5a37318f433d4fc82babd21631f194d370409ceb2e40b2f0b5"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
|
@ -3084,10 +3105,12 @@ dependencies = [
|
|||
"hyper-rustls",
|
||||
"hyper-tls",
|
||||
"hyper-util",
|
||||
"ipnet",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime",
|
||||
"native-tls",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"rustls-pki-types",
|
||||
|
@ -3341,16 +3364,20 @@ dependencies = [
|
|||
"color-eyre",
|
||||
"csv-async",
|
||||
"fastrand",
|
||||
"flate2",
|
||||
"futures",
|
||||
"futures-util",
|
||||
"glob",
|
||||
"luajit2-sys",
|
||||
"nanorand",
|
||||
"num-derive",
|
||||
"num-traits",
|
||||
"oodle",
|
||||
"path-slash",
|
||||
"pin-project-lite",
|
||||
"serde",
|
||||
"serde_sjson",
|
||||
"strum",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tracing",
|
||||
|
@ -3612,6 +3639,28 @@ version = "0.11.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "strum"
|
||||
version = "0.26.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
|
||||
dependencies = [
|
||||
"strum_macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
version = "0.26.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
version = "2.6.1"
|
||||
|
@ -3861,18 +3910,16 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.46.1"
|
||||
version = "1.45.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
|
||||
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
|
||||
dependencies = [
|
||||
"backtrace",
|
||||
"bytes",
|
||||
"io-uring",
|
||||
"libc",
|
||||
"mio",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"slab",
|
||||
"socket2",
|
||||
"tokio-macros",
|
||||
"tracing",
|
||||
|
@ -4006,9 +4053,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tower-http"
|
||||
version = "0.6.5"
|
||||
version = "0.6.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5cc2d9e086a412a451384326f521c8123a99a466b329941a9403696bff9b0da2"
|
||||
checksum = "0fdb0c213ca27a9f57ab69ddb290fd80d970922355b83ae380b395d3986b8a2e"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"bytes",
|
||||
|
@ -4996,9 +5043,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "4.2.0"
|
||||
version = "3.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "95ab361742de920c5535880f89bbd611ee62002bf11341d16a5f057bb8ba6899"
|
||||
checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308"
|
||||
dependencies = [
|
||||
"arbitrary",
|
||||
"bzip2",
|
||||
|
|
14
Cargo.toml
14
Cargo.toml
|
@ -16,31 +16,30 @@ ansi-parser = "0.9.1"
|
|||
ansi_term = "0.12.1"
|
||||
async-recursion = "1.0.5"
|
||||
bincode = "2.0.0"
|
||||
bindgen = "0.72.0"
|
||||
bitflags = "2.5.0"
|
||||
byteorder = "1.4.3"
|
||||
cc = { version = "1.2.27", features = ["parallel"] }
|
||||
clap = { version = "4.0.15", features = ["color", "derive", "std", "cargo", "string", "unicode"] }
|
||||
cli-table = { version = "0.5.0", default-features = false, features = ["derive"] }
|
||||
color-eyre = { path = "lib/color-eyre" }
|
||||
colors-transform = "0.2.11"
|
||||
confy = "1.0.0"
|
||||
confy = "0.6.1"
|
||||
csv-async = { version = "1.2.4", features = ["tokio", "serde"] }
|
||||
druid = { version = "0.8", features = ["im", "serde", "image", "png", "jpeg", "bmp", "webp", "svg"] }
|
||||
druid-widget-nursery = "0.1"
|
||||
dtmt-shared = { path = "lib/dtmt-shared" }
|
||||
fastrand = "2.1.0"
|
||||
fs_extra = "1.1.0"
|
||||
flate2 = { version = "1.0.30", features = ["zlib"] }
|
||||
futures = "0.3.25"
|
||||
futures-util = "0.3.24"
|
||||
glob = "0.3.0"
|
||||
interprocess = "2.1.0"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2.174"
|
||||
luajit2-sys = { path = "lib/luajit2-sys" }
|
||||
minijinja = { version = "2.0.1", default-features = false, features = ["serde"] }
|
||||
nanorand = "0.8.0"
|
||||
nanorand = "0.7.0"
|
||||
nexusmods = { path = "lib/nexusmods" }
|
||||
num-derive = "0.4.2"
|
||||
num-traits = "0.2.19"
|
||||
notify = "8.0.0"
|
||||
oodle = { path = "lib/oodle" }
|
||||
open = "5.0.1"
|
||||
|
@ -53,6 +52,7 @@ serde = { version = "1.0.152", features = ["derive", "rc"] }
|
|||
serde_sjson = "1.2.1"
|
||||
steamlocate = "2.0.0-beta.2"
|
||||
strip-ansi-escapes = "0.2.0"
|
||||
strum = { version = "0.26.3", features = ["derive", "strum_macros"] }
|
||||
time = { version = "0.3.20", features = ["serde", "serde-well-known", "local-offset", "formatting", "macros"] }
|
||||
tokio = { version = "1.23.0", features = ["rt-multi-thread", "fs", "process", "macros", "tracing", "io-util", "io-std"] }
|
||||
tokio-stream = { version = "0.1.12", features = ["fs", "io-util"] }
|
||||
|
@ -60,7 +60,7 @@ tracing = { version = "0.1.37", features = ["async-await"] }
|
|||
tracing-error = "0.2.0"
|
||||
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
|
||||
usvg = "0.25.0"
|
||||
zip = { version = "4.0.0", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
|
||||
zip = { version = "3.0.0", default-features = false, features = ["deflate", "bzip2", "zstd", "time"] }
|
||||
|
||||
[profile.dev.package.backtrace]
|
||||
opt-level = 3
|
||||
|
|
|
@ -469,7 +469,7 @@ async fn patch_boot_bundle(state: Arc<ActionState>, deployment_info: &str) -> Re
|
|||
}
|
||||
.instrument(tracing::trace_span!("read boot bundle"))
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read bundle '{BOOT_BUNDLE_NAME}'"))?;
|
||||
.wrap_err_with(|| format!("Failed to read bundle '{}'", BOOT_BUNDLE_NAME))?;
|
||||
|
||||
{
|
||||
tracing::trace!("Adding mod package file to boot bundle");
|
||||
|
|
|
@ -208,7 +208,7 @@ pub(crate) async fn reset_mod_deployment(state: ActionState) -> Result<()> {
|
|||
|
||||
for p in paths {
|
||||
let path = bundle_dir.join(p);
|
||||
let backup = bundle_dir.join(format!("{p}.bak"));
|
||||
let backup = bundle_dir.join(format!("{}.bak", p));
|
||||
|
||||
let res = async {
|
||||
tracing::debug!(
|
||||
|
|
|
@ -363,7 +363,7 @@ fn extract_legacy_mod<R: Read + Seek>(
|
|||
for i in 0..file_count {
|
||||
let mut f = archive
|
||||
.by_index(i)
|
||||
.wrap_err_with(|| format!("Failed to get file at index {i}"))?;
|
||||
.wrap_err_with(|| format!("Failed to get file at index {}", i))?;
|
||||
|
||||
let Some(name) = f.enclosed_name().map(|p| p.to_path_buf()) else {
|
||||
let err = eyre::eyre!("File name in archive is not a safe path value.").suggestion(
|
||||
|
@ -426,7 +426,7 @@ pub(crate) async fn import_from_file(state: ActionState, info: FileInfo) -> Resu
|
|||
let mod_info = api
|
||||
.mods_id(id)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to query mod {id} from Nexus"))?;
|
||||
.wrap_err_with(|| format!("Failed to query mod {} from Nexus", id))?;
|
||||
|
||||
let version = match api.file_version(id, timestamp).await {
|
||||
Ok(version) => version,
|
||||
|
@ -461,13 +461,13 @@ pub(crate) async fn import_from_file(state: ActionState, info: FileInfo) -> Resu
|
|||
pub(crate) async fn import_from_nxm(state: ActionState, uri: String) -> Result<ModInfo> {
|
||||
let url = uri
|
||||
.parse()
|
||||
.wrap_err_with(|| format!("Invalid Uri '{uri}'"))?;
|
||||
.wrap_err_with(|| format!("Invalid Uri '{}'", uri))?;
|
||||
|
||||
let api = NexusApi::new(state.nexus_api_key.to_string())?;
|
||||
let (mod_info, file_info, data) = api
|
||||
.handle_nxm(url)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to download mod from NXM uri '{uri}'"))?;
|
||||
.wrap_err_with(|| format!("Failed to download mod from NXM uri '{}'", uri))?;
|
||||
|
||||
let nexus = NexusInfo::from(mod_info);
|
||||
import_mod(state, Some((nexus, file_info.version)), data).await
|
||||
|
@ -524,7 +524,7 @@ pub(crate) async fn import_mod(
|
|||
let data = api
|
||||
.picture(url)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to download Nexus image from '{url}'"))?;
|
||||
.wrap_err_with(|| format!("Failed to download Nexus image from '{}'", url))?;
|
||||
|
||||
let img = image_data_to_buffer(&data)?;
|
||||
|
||||
|
|
|
@ -47,7 +47,7 @@ fn notify_nxm_download(
|
|||
.to_ns_name::<GenericNamespaced>()
|
||||
.expect("Invalid socket name"),
|
||||
)
|
||||
.wrap_err_with(|| format!("Failed to connect to '{IPC_ADDRESS}'"))
|
||||
.wrap_err_with(|| format!("Failed to connect to '{}'", IPC_ADDRESS))
|
||||
.suggestion("Make sure the main window is open.")?;
|
||||
|
||||
tracing::debug!("Connected to main process at '{}'", IPC_ADDRESS);
|
||||
|
@ -159,7 +159,7 @@ fn main() -> Result<()> {
|
|||
|
||||
loop {
|
||||
let res = server.accept().wrap_err_with(|| {
|
||||
format!("IPC server failed to listen on '{IPC_ADDRESS}'")
|
||||
format!("IPC server failed to listen on '{}'", IPC_ADDRESS)
|
||||
});
|
||||
|
||||
match res {
|
||||
|
|
|
@ -108,19 +108,20 @@ impl std::fmt::Debug for AsyncAction {
|
|||
match self {
|
||||
AsyncAction::DeployMods(_) => write!(f, "AsyncAction::DeployMods(_state)"),
|
||||
AsyncAction::ResetDeployment(_) => write!(f, "AsyncAction::ResetDeployment(_state)"),
|
||||
AsyncAction::AddMod(_, info) => write!(f, "AsyncAction::AddMod(_state, {info:?})"),
|
||||
AsyncAction::AddMod(_, info) => write!(f, "AsyncAction::AddMod(_state, {:?})", info),
|
||||
AsyncAction::DeleteMod(_, info) => {
|
||||
write!(f, "AsyncAction::DeleteMod(_state, {info:?})")
|
||||
write!(f, "AsyncAction::DeleteMod(_state, {:?})", info)
|
||||
}
|
||||
AsyncAction::SaveSettings(_) => write!(f, "AsyncAction::SaveSettings(_state)"),
|
||||
AsyncAction::CheckUpdates(_) => write!(f, "AsyncAction::CheckUpdates(_state)"),
|
||||
AsyncAction::LoadInitial((path, is_default)) => write!(
|
||||
f,
|
||||
"AsyncAction::LoadInitial(({path:?}, {is_default:?}))"
|
||||
"AsyncAction::LoadInitial(({:?}, {:?}))",
|
||||
path, is_default
|
||||
),
|
||||
AsyncAction::Log(_) => write!(f, "AsyncAction::Log(_)"),
|
||||
AsyncAction::NxmDownload(_, uri) => {
|
||||
write!(f, "AsyncAction::NxmDownload(_state, {uri})")
|
||||
write!(f, "AsyncAction::NxmDownload(_state, {})", uri)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -447,7 +448,7 @@ impl AppDelegate<State> for Delegate {
|
|||
if let Err(err) = open::that_detached(Arc::as_ref(url)) {
|
||||
tracing::error!(
|
||||
"{:?}",
|
||||
Report::new(err).wrap_err(format!("Failed to open url '{url}'"))
|
||||
Report::new(err).wrap_err(format!("Failed to open url '{}'", url))
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -76,7 +76,7 @@ impl ColorExt for Color {
|
|||
fn darken(&self, fac: f32) -> Self {
|
||||
let (r, g, b, a) = self.as_rgba();
|
||||
let rgb = Rgb::from(r as f32, g as f32, b as f32);
|
||||
let rgb = rgb.lighten(-fac);
|
||||
let rgb = rgb.lighten(-1. * fac);
|
||||
Self::rgba(
|
||||
rgb.get_red() as f64,
|
||||
rgb.get_green() as f64,
|
||||
|
|
|
@ -5,7 +5,6 @@ use druid::{
|
|||
|
||||
use crate::state::{State, ACTION_SET_DIRTY, ACTION_START_SAVE_SETTINGS};
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub struct DisabledButtonController;
|
||||
|
||||
impl<T: Data> Controller<T, Button<T>> for DisabledButtonController {
|
||||
|
|
|
@ -34,9 +34,9 @@ pub fn error<T: Data>(err: Report, _parent: WindowHandle) -> WindowDesc<T> {
|
|||
// The second to last one, the context to the root cause
|
||||
let context = err.chain().nth(count - 2).unwrap();
|
||||
|
||||
(format!("{first}!"), format!("{context}: {root}"))
|
||||
(format!("{first}!"), format!("{}: {}", context, root))
|
||||
} else {
|
||||
("An error occurred!".to_string(), format!("{first}: {root}"))
|
||||
("An error occurred!".to_string(), format!("{}: {}", first, root))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -348,7 +348,7 @@ fn build_mod_details_info() -> impl Widget<State> {
|
|||
let nexus_link = Maybe::or_empty(|| {
|
||||
let link = Label::raw().lens(NexusInfo::id.map(
|
||||
|id| {
|
||||
let url = format!("https://nexusmods.com/warhammer40kdarktide/mods/{id}");
|
||||
let url = format!("https://nexusmods.com/warhammer40kdarktide/mods/{}", id);
|
||||
let mut builder = RichTextBuilder::new();
|
||||
builder
|
||||
.push("Open on Nexusmods")
|
||||
|
|
|
@ -94,10 +94,10 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
|
||||
match bundle_name {
|
||||
IdString64::String(name) => {
|
||||
println!("{bundle_hash:016x} {name}");
|
||||
println!("{:016x} {}", bundle_hash, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("{hash:016x}");
|
||||
println!("{:016x}", hash);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -110,7 +110,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
println!("\t{:016x}.{:<12} {}", file.name, extension, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("\t{hash:016x}.{extension}");
|
||||
println!("\t{:016x}.{}", hash, extension);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -127,10 +127,10 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
|
||||
match bundle_name {
|
||||
IdString64::String(name) => {
|
||||
println!("{bundle_hash:016x} {name}");
|
||||
println!("{:016x} {}", bundle_hash, name);
|
||||
}
|
||||
IdString64::Hash(hash) => {
|
||||
println!("{hash:016x}");
|
||||
println!("{:016x}", hash);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -158,7 +158,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
|
||||
for bundle in bundles {
|
||||
found = true;
|
||||
println!("{bundle:016x}");
|
||||
println!("{:016x}", bundle);
|
||||
}
|
||||
|
||||
if !found {
|
||||
|
|
|
@ -275,7 +275,13 @@ struct ExtractOptions<'a> {
|
|||
|
||||
#[tracing::instrument(
|
||||
skip(ctx, options),
|
||||
fields(decompile = options.decompile, flatten = options.flatten, dry_run = options.dry_run)
|
||||
fields(
|
||||
bundle_name = tracing::field::Empty,
|
||||
bundle_hash = tracing::field::Empty,
|
||||
decompile = options.decompile,
|
||||
flatten = options.flatten,
|
||||
dry_run = options.dry_run,
|
||||
)
|
||||
)]
|
||||
async fn extract_bundle<P1, P2>(
|
||||
ctx: Arc<sdk::Context>,
|
||||
|
@ -318,6 +324,11 @@ where
|
|||
let bundle = {
|
||||
let data = fs::read(path.as_ref()).await?;
|
||||
let name = Bundle::get_name_from_path(&ctx, path.as_ref());
|
||||
{
|
||||
let span = tracing::span::Span::current();
|
||||
span.record("bundle_hash", format!("{:X}", name));
|
||||
span.record("bundle_name", name.display().to_string());
|
||||
}
|
||||
Bundle::from_binary(&ctx, name, data)?
|
||||
};
|
||||
|
||||
|
@ -473,7 +484,7 @@ where
|
|||
}
|
||||
}
|
||||
Err(err) => {
|
||||
let err = err.wrap_err(format!("Failed to decompile file {name}"));
|
||||
let err = err.wrap_err(format!("Failed to decompile file {}", name));
|
||||
tracing::error!("{:?}", err);
|
||||
}
|
||||
};
|
||||
|
|
|
@ -147,7 +147,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
|
||||
let patch_number = matches
|
||||
.get_one::<u16>("patch")
|
||||
.map(|num| format!("{num:03}"));
|
||||
.map(|num| format!("{:03}", num));
|
||||
|
||||
let output_path = matches
|
||||
.get_one::<PathBuf>("output")
|
||||
|
@ -156,7 +156,7 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
let mut output_path = bundle_path.clone();
|
||||
|
||||
if let Some(patch_number) = patch_number.as_ref() {
|
||||
output_path.set_extension(format!("patch_{patch_number:03}"));
|
||||
output_path.set_extension(format!("patch_{:03}", patch_number));
|
||||
}
|
||||
|
||||
output_path
|
||||
|
@ -196,19 +196,21 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
span.record("output_path", output_path.display().to_string());
|
||||
span.record("raw", sub_matches.get_flag("raw"));
|
||||
span.record("target_name", target_name.display().to_string());
|
||||
span.record("file_type", format!("{file_type:?}"));
|
||||
span.record("file_type", format!("{:?}", file_type));
|
||||
}
|
||||
}
|
||||
|
||||
let bundle_name = Bundle::get_name_from_path(&ctx, bundle_path);
|
||||
let mut bundle = {
|
||||
fs::read(bundle_path)
|
||||
.await
|
||||
.map_err(From::from)
|
||||
.and_then(|binary| Bundle::from_binary(&ctx, bundle_name.clone(), binary))
|
||||
let binary = fs::read(bundle_path).await?;
|
||||
Bundle::from_binary(&ctx, bundle_name.clone(), binary)
|
||||
.wrap_err_with(|| format!("Failed to open bundle '{}'", bundle_path.display()))?
|
||||
};
|
||||
|
||||
if op == "copy" {
|
||||
unimplemented!("Implement copying a file from one bundle to the other.");
|
||||
}
|
||||
|
||||
let output_bundle = match op {
|
||||
"replace" => {
|
||||
let Some(file) = bundle
|
||||
|
@ -278,9 +280,6 @@ pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
|||
"add" => {
|
||||
unimplemented!("Implement adding a new file to the bundle.");
|
||||
}
|
||||
"copy" => {
|
||||
unimplemented!("Implement copying a file from one bundle to the other.");
|
||||
}
|
||||
_ => unreachable!("no other operations exist"),
|
||||
};
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ enum OutputFormat {
|
|||
|
||||
fn format_byte_size(size: usize) -> String {
|
||||
if size < 1024 {
|
||||
format!("{size} Bytes")
|
||||
format!("{} Bytes", size)
|
||||
} else if size < 1024 * 1024 {
|
||||
format!("{} kB", size / 1024)
|
||||
} else if size < 1024 * 1024 * 1024 {
|
||||
|
|
21
crates/dtmt/src/cmd/experiment/mod.rs
Normal file
21
crates/dtmt/src/cmd/experiment/mod.rs
Normal file
|
@ -0,0 +1,21 @@
|
|||
use clap::{ArgMatches, Command};
|
||||
use color_eyre::Result;
|
||||
|
||||
mod texture_meta;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("experiment")
|
||||
.subcommand_required(true)
|
||||
.about("A collection of utilities and experiments.")
|
||||
.subcommand(texture_meta::command_definition())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
match matches.subcommand() {
|
||||
Some(("texture-meta", sub_matches)) => texture_meta::run(ctx, sub_matches).await,
|
||||
_ => unreachable!(
|
||||
"clap is configured to require a subcommand, and they're all handled above"
|
||||
),
|
||||
}
|
||||
}
|
121
crates/dtmt/src/cmd/experiment/texture_meta.rs
Normal file
121
crates/dtmt/src/cmd/experiment/texture_meta.rs
Normal file
|
@ -0,0 +1,121 @@
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::Result;
|
||||
use futures_util::StreamExt;
|
||||
use sdk::{Bundle, BundleFileType};
|
||||
use tokio::fs;
|
||||
|
||||
use crate::cmd::util::resolve_bundle_paths;
|
||||
|
||||
pub(crate) fn command_definition() -> Command {
|
||||
Command::new("texture-meta")
|
||||
.about(
|
||||
"Iterates over the provided bundles and lists certain meta data.
|
||||
Primarily intended to help spot patterns between dependend data fields and values.",
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bundle")
|
||||
.required(true)
|
||||
.action(ArgAction::Append)
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help(
|
||||
"Path to the bundle(s) to read. If this points to a directory instead \
|
||||
of a file, all files in that directory will be checked.",
|
||||
),
|
||||
)
|
||||
// TODO: Maybe provide JSON and CSV
|
||||
// TODO: Maybe allow toggling certain fields
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(ctx))]
|
||||
async fn handle_bundle(ctx: &sdk::Context, path: &PathBuf) -> Result<()> {
|
||||
let bundle = {
|
||||
let binary = fs::read(path).await?;
|
||||
let name = Bundle::get_name_from_path(ctx, path);
|
||||
Bundle::from_binary(ctx, name, binary)?
|
||||
};
|
||||
|
||||
let bundle_dir = ctx
|
||||
.game_dir
|
||||
.as_deref()
|
||||
.map(|dir| dir.join("bundle"))
|
||||
.or_else(|| path.parent().map(|p| p.to_path_buf()))
|
||||
.unwrap_or_default();
|
||||
|
||||
for f in bundle.files().iter() {
|
||||
if f.file_type() != BundleFileType::Texture {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (i, v) in f.variants().iter().enumerate() {
|
||||
let data_file_name = v.data_file_name();
|
||||
|
||||
let data_file_length = if let Some(file_name) = data_file_name {
|
||||
let path = bundle_dir.join(file_name);
|
||||
|
||||
match fs::metadata(&path).await {
|
||||
Ok(meta) => meta.len(),
|
||||
Err(err) => {
|
||||
return Err(err).wrap_err_with(|| {
|
||||
format!("Failed to open data file {}", path.display())
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
println!(
|
||||
"{},{},{},{},{:b},{},{},{:?},{},{:#010b}",
|
||||
bundle.name().display(),
|
||||
f.name(false, None),
|
||||
f.file_type().ext_name(),
|
||||
i,
|
||||
v.property(),
|
||||
v.data().len(),
|
||||
v.external(),
|
||||
data_file_name,
|
||||
data_file_length,
|
||||
v.unknown_1(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) async fn run(ctx: sdk::Context, matches: &ArgMatches) -> Result<()> {
|
||||
let bundles = matches
|
||||
.get_many::<PathBuf>("bundle")
|
||||
.unwrap_or_default()
|
||||
.cloned();
|
||||
|
||||
let paths = resolve_bundle_paths(bundles);
|
||||
|
||||
let ctx = Arc::new(ctx);
|
||||
|
||||
println!(
|
||||
"Bundle Name,File Name,File Type,Variant,Property,Bundle Data Length,External,Data File,Data File Length,Unknown 1"
|
||||
);
|
||||
|
||||
paths
|
||||
.for_each_concurrent(10, |p| async {
|
||||
let ctx = ctx.clone();
|
||||
async move {
|
||||
if let Err(err) = handle_bundle(&ctx, &p)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to list contents of bundle {}", p.display()))
|
||||
{
|
||||
tracing::error!("Failed to handle bundle: {}", format!("{:#}", err));
|
||||
}
|
||||
}
|
||||
.await;
|
||||
})
|
||||
.await;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -164,7 +164,7 @@ pub(crate) async fn run(_ctx: sdk::Context, matches: &ArgMatches) -> Result<()>
|
|||
.iter()
|
||||
.map(|(path_tmpl, content_tmpl)| {
|
||||
env.render_str(path_tmpl, &render_ctx)
|
||||
.wrap_err_with(|| format!("Failed to render template: {path_tmpl}"))
|
||||
.wrap_err_with(|| format!("Failed to render template: {}", path_tmpl))
|
||||
.and_then(|path| {
|
||||
env.render_named_str(&path, content_tmpl, &render_ctx)
|
||||
.wrap_err_with(|| format!("Failed to render template '{}'", &path))
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#![feature(io_error_more)]
|
||||
#![feature(let_chains)]
|
||||
#![feature(result_flattening)]
|
||||
#![feature(test)]
|
||||
#![windows_subsystem = "console"]
|
||||
|
||||
|
@ -21,6 +22,7 @@ mod cmd {
|
|||
pub mod build;
|
||||
pub mod bundle;
|
||||
pub mod dictionary;
|
||||
pub mod experiment;
|
||||
pub mod migrate;
|
||||
pub mod murmur;
|
||||
pub mod new;
|
||||
|
@ -67,6 +69,7 @@ async fn main() -> Result<()> {
|
|||
.subcommand(cmd::build::command_definition())
|
||||
.subcommand(cmd::bundle::command_definition())
|
||||
.subcommand(cmd::dictionary::command_definition())
|
||||
.subcommand(cmd::experiment::command_definition())
|
||||
.subcommand(cmd::migrate::command_definition())
|
||||
.subcommand(cmd::murmur::command_definition())
|
||||
.subcommand(cmd::new::command_definition())
|
||||
|
@ -145,6 +148,7 @@ async fn main() -> Result<()> {
|
|||
Some(("build", sub_matches)) => cmd::build::run(ctx, sub_matches).await?,
|
||||
Some(("bundle", sub_matches)) => cmd::bundle::run(ctx, sub_matches).await?,
|
||||
Some(("dictionary", sub_matches)) => cmd::dictionary::run(ctx, sub_matches).await?,
|
||||
Some(("experiment", sub_matches)) => cmd::experiment::run(ctx, sub_matches).await?,
|
||||
Some(("migrate", sub_matches)) => cmd::migrate::run(ctx, sub_matches).await?,
|
||||
Some(("murmur", sub_matches)) => cmd::murmur::run(ctx, sub_matches).await?,
|
||||
Some(("new", sub_matches)) => cmd::new::run(ctx, sub_matches).await?,
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit bdefeef09803df45bdf6dae7f3ae289e58427e3a
|
||||
Subproject commit 228b8ca37ee79ab9afa45c40da415e4dcb029751
|
|
@ -19,7 +19,7 @@ pub const TIME_FORMAT: &[FormatItem] = format_description!("[hour]:[minute]:[sec
|
|||
|
||||
pub fn format_fields(w: &mut Writer<'_>, field: &Field, val: &dyn std::fmt::Debug) -> Result {
|
||||
if field.name() == "message" {
|
||||
write!(w, "{val:?}")
|
||||
write!(w, "{:?}", val)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ where
|
|||
writer,
|
||||
"[{}] [{:>5}] ",
|
||||
time,
|
||||
color.bold().paint(format!("{level}"))
|
||||
color.bold().paint(format!("{}", level))
|
||||
)?;
|
||||
|
||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
||||
|
|
1
lib/luajit2-sys
Submodule
1
lib/luajit2-sys
Submodule
|
@ -0,0 +1 @@
|
|||
Subproject commit 6d94a4dd2c296bf1f044ee4c70fb10dca4c1c241
|
|
@ -1,20 +0,0 @@
|
|||
[package]
|
||||
name = "luajit2-sys"
|
||||
version = "0.0.2"
|
||||
description = "LuaJIT-2.1 FFI Bindings"
|
||||
authors = ["Aaron Loucks <aloucks@cofront.net>"]
|
||||
edition = "2021"
|
||||
keywords = ["lua", "luajit", "script"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/aloucks/luajit2-sys"
|
||||
documentation = "https://docs.rs/luajit2-sys"
|
||||
links = "luajit"
|
||||
|
||||
[dependencies]
|
||||
libc = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = { workspace = true }
|
||||
cc = { workspace = true }
|
||||
fs_extra = { workspace = true }
|
|
@ -1,201 +0,0 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -1,23 +0,0 @@
|
|||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -1,217 +0,0 @@
|
|||
use cc::Build;
|
||||
use fs_extra::dir;
|
||||
use fs_extra::dir::CopyOptions;
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
const LIB_NAME: &str = "luajit";
|
||||
const LUAJIT_HEADERS: [&str; 4] = ["lua.h", "lualib.h", "lauxlib.h", "luajit.h"];
|
||||
const LUAJIT_SRC: [&str; 65] = [
|
||||
// LJCORE_O
|
||||
// The MSVC toolchain cannot compile this assembler file,
|
||||
// as it contains GNU-specific directives
|
||||
// "lj_vm.S",
|
||||
"lj_gc.c",
|
||||
"lj_err.c",
|
||||
"lj_char.c",
|
||||
"lj_bc.c",
|
||||
"lj_obj.c",
|
||||
"lj_buf.c",
|
||||
"lj_str.c",
|
||||
"lj_tab.c",
|
||||
"lj_func.c",
|
||||
"lj_udata.c",
|
||||
"lj_meta.c",
|
||||
"lj_debug.c",
|
||||
"lj_state.c",
|
||||
"lj_dispatch.c",
|
||||
"lj_vmevent.c",
|
||||
"lj_vmmath.c",
|
||||
"lj_strscan.c",
|
||||
"lj_strfmt.c",
|
||||
"lj_strfmt_num.c",
|
||||
"lj_api.c",
|
||||
"lj_profile.c",
|
||||
"lj_lex.c",
|
||||
"lj_parse.c",
|
||||
"lj_bcread.c",
|
||||
"lj_bcwrite.c",
|
||||
"lj_load.c",
|
||||
"lj_ir.c",
|
||||
"lj_opt_mem.c",
|
||||
"lj_opt_fold.c",
|
||||
"lj_opt_narrow.c",
|
||||
"lj_opt_dce.c",
|
||||
"lj_opt_loop.c",
|
||||
"lj_opt_split.c",
|
||||
"lj_opt_sink.c",
|
||||
"lj_mcode.c",
|
||||
"lj_snap.c",
|
||||
"lj_record.c",
|
||||
"lj_crecord.c",
|
||||
"lj_ffrecord.c",
|
||||
"lj_asm.c",
|
||||
"lj_trace.c",
|
||||
"lj_gdbjit.c",
|
||||
"lj_ctype.c",
|
||||
"lj_cdata.c",
|
||||
"lj_cconv.c",
|
||||
"lj_ccall.c",
|
||||
"lj_ccallback.c",
|
||||
"lj_carith.c",
|
||||
"lj_clib.c",
|
||||
"lj_cparse.c",
|
||||
"lj_lib.c",
|
||||
"lj_alloc.c",
|
||||
// LJLIB_O
|
||||
"lib_aux.c",
|
||||
"lib_base.c",
|
||||
"lib_math.c",
|
||||
"lib_bit.c",
|
||||
"lib_string.c",
|
||||
"lib_table.c",
|
||||
"lib_io.c",
|
||||
"lib_os.c",
|
||||
"lib_package.c",
|
||||
"lib_debug.c",
|
||||
"lib_jit.c",
|
||||
"lib_ffi.c",
|
||||
"lib_init.c",
|
||||
];
|
||||
|
||||
fn build_gcc(src_dir: &str) {
|
||||
let mut buildcmd = Command::new("make");
|
||||
if let Ok(flags) = env::var("CARGO_MAKEFLAGS") {
|
||||
buildcmd.env("MAKEFLAGS", flags);
|
||||
} else {
|
||||
buildcmd.arg("-j8");
|
||||
}
|
||||
buildcmd.current_dir(src_dir);
|
||||
buildcmd.stderr(Stdio::inherit());
|
||||
buildcmd.arg("--no-silent");
|
||||
|
||||
// We do need to cross-compile even here, so that `lj_vm.o` is created
|
||||
// for the correct architecture.
|
||||
if env::var("CARGO_CFG_WINDOWS").is_ok() {
|
||||
buildcmd.arg("TARGET_SYS=Windows");
|
||||
buildcmd.arg("CROSS=x86_64-w64-mingw32-");
|
||||
}
|
||||
|
||||
if cfg!(target_pointer_width = "32") {
|
||||
buildcmd.arg("HOST_CC='gcc -m32'");
|
||||
buildcmd.arg("-e");
|
||||
} else {
|
||||
buildcmd.arg("HOST_CC='gcc'");
|
||||
}
|
||||
|
||||
let mut child = buildcmd.spawn().expect("failed to run make");
|
||||
|
||||
child
|
||||
.wait()
|
||||
.map(|status| status.success())
|
||||
.expect("Failed to build LuaJIT");
|
||||
}
|
||||
|
||||
fn build_msvc(src_dir: &str, out_dir: &str) {
|
||||
let mut cc = Build::new();
|
||||
// cc can't handle many of the `clang-dl`-specific flags, so
|
||||
// we need to port them manually from a `make -n` run.
|
||||
cc.out_dir(out_dir)
|
||||
// `llvm-as` (which the clang-based toolchain for MSVC would use to compile `lj_vm.S`
|
||||
// assembler) doesn't support some of the GNU-specific directives.
|
||||
// However, the previous host-targeted compilation already created the
|
||||
// object, so we simply link that.
|
||||
.object(format!("{src_dir}/lj_vm.o"))
|
||||
.define("_FILE_OFFSET_BITS", "64")
|
||||
.define("_LARGEFILE_SOURCE", None)
|
||||
.define("LUA_MULTILIB", "\"lib\"")
|
||||
.define("LUAJIT_UNWIND_EXTERNAL", None)
|
||||
.flag("-fcolor-diagnostics")
|
||||
// Disable warnings
|
||||
.flag("/W0")
|
||||
.flag("/U _FORTIFY_SOURCE")
|
||||
// Link statically
|
||||
.flag("/MT")
|
||||
// Omit frame pointers
|
||||
.flag("/Oy");
|
||||
|
||||
for f in LUAJIT_SRC {
|
||||
cc.file(format!("{src_dir}/{f}"));
|
||||
}
|
||||
|
||||
cc.compile(LIB_NAME);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let luajit_dir = format!("{}/luajit", env!("CARGO_MANIFEST_DIR"));
|
||||
let out_dir = env::var("OUT_DIR").unwrap();
|
||||
let src_dir = format!("{out_dir}/luajit/src");
|
||||
|
||||
dbg!(&luajit_dir);
|
||||
dbg!(&out_dir);
|
||||
dbg!(&src_dir);
|
||||
|
||||
let mut copy_options = CopyOptions::new();
|
||||
copy_options.overwrite = true;
|
||||
|
||||
dir::copy(&luajit_dir, &out_dir, ©_options).expect("Failed to copy LuaJIT source");
|
||||
|
||||
// The first run builds with and for the host architecture.
|
||||
// This also creates all the tools and generated sources that a compilation needs.
|
||||
build_gcc(&src_dir);
|
||||
|
||||
// Then, for cross-compilation, we can utilize those generated
|
||||
// sources to re-compile just the library.
|
||||
if env::var("CARGO_CFG_WINDOWS").is_ok() {
|
||||
build_msvc(&src_dir, &out_dir);
|
||||
println!("cargo:rustc-link-search={out_dir}");
|
||||
} else {
|
||||
println!("cargo:rustc-link-search=native={src_dir}");
|
||||
}
|
||||
|
||||
println!("cargo:lib-name={LIB_NAME}");
|
||||
println!("cargo:include={src_dir}");
|
||||
println!("cargo:rustc-link-lib=static={LIB_NAME}");
|
||||
|
||||
let mut bindings = bindgen::Builder::default();
|
||||
|
||||
for header in LUAJIT_HEADERS {
|
||||
println!("cargo:rerun-if-changed={luajit_dir}/src/{header}");
|
||||
bindings = bindings.header(format!("{luajit_dir}/src/{header}"));
|
||||
}
|
||||
|
||||
let bindings = bindings
|
||||
.allowlist_var("LUA.*")
|
||||
.allowlist_var("LUAJIT.*")
|
||||
.allowlist_type("lua_.*")
|
||||
.allowlist_type("luaL_.*")
|
||||
.allowlist_function("lua_.*")
|
||||
.allowlist_function("luaL_.*")
|
||||
.allowlist_function("luaJIT.*")
|
||||
.ctypes_prefix("libc")
|
||||
.impl_debug(true)
|
||||
.use_core()
|
||||
.detect_include_paths(true)
|
||||
.formatter(bindgen::Formatter::Rustfmt)
|
||||
.sort_semantically(true)
|
||||
.merge_extern_blocks(true)
|
||||
.parse_callbacks(Box::new(bindgen::CargoCallbacks::new()));
|
||||
|
||||
let bindings = if env::var("CARGO_CFG_WINDOWS").is_ok() {
|
||||
bindings
|
||||
.clang_arg("-I/xwin/sdk/include/ucrt")
|
||||
.clang_arg("-I/xwin/sdk/include/um")
|
||||
.clang_arg("-I/xwin/sdk/include/shared")
|
||||
.clang_arg("-I/xwin/crt/include")
|
||||
.generate()
|
||||
.expect("Failed to generate bindings")
|
||||
} else {
|
||||
bindings.generate().expect("Failed to generate bindings")
|
||||
};
|
||||
|
||||
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
|
||||
bindings
|
||||
.write_to_file(out_path.join("bindings.rs"))
|
||||
.expect("Failed to write bindings");
|
||||
}
|
|
@ -1 +0,0 @@
|
|||
Subproject commit 70f4b15ee45a6137fe6b48b941faea79d72f7159
|
|
@ -1,167 +0,0 @@
|
|||
#![no_std]
|
||||
#![allow(non_snake_case)]
|
||||
#![allow(non_camel_case_types)]
|
||||
#![allow(clippy::deprecated_semver)]
|
||||
#![allow(clippy::missing_safety_doc)]
|
||||
|
||||
//! # LuaJIT 2.1
|
||||
//!
|
||||
//! <http://luajit.org>
|
||||
//!
|
||||
//! <http://www.lua.org/manual/5.1/manual.html>
|
||||
//!
|
||||
//! ## Performance considerations
|
||||
//!
|
||||
//! The _Not Yet Implemented_ guide documents which language features will be JIT compiled
|
||||
//! into native machine code.
|
||||
//!
|
||||
//! <http://wiki.luajit.org/NYI>
|
||||
|
||||
mod ffi {
|
||||
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
||||
}
|
||||
pub use ffi::*;
|
||||
|
||||
use core::ptr;
|
||||
|
||||
// These are defined as macros
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_pop>
|
||||
#[inline]
|
||||
pub unsafe fn lua_pop(L: *mut lua_State, idx: libc::c_int) {
|
||||
lua_settop(L, -(idx) - 1)
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_newtable>
|
||||
#[inline]
|
||||
pub unsafe fn lua_newtable(L: *mut lua_State) {
|
||||
lua_createtable(L, 0, 0)
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_register>
|
||||
#[inline]
|
||||
pub unsafe fn lua_register(L: *mut lua_State, name: *const libc::c_char, f: lua_CFunction) {
|
||||
lua_pushcfunction(L, f);
|
||||
lua_setglobal(L, name);
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_pushcfunction>
|
||||
#[inline]
|
||||
pub unsafe fn lua_pushcfunction(L: *mut lua_State, f: lua_CFunction) {
|
||||
lua_pushcclosure(L, f, 0);
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_strlen>
|
||||
#[inline]
|
||||
pub unsafe fn lua_strlen(L: *mut lua_State, idx: libc::c_int) -> usize {
|
||||
lua_objlen(L, idx)
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_isfunction>
|
||||
#[inline]
|
||||
pub unsafe fn lua_isfunction(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TFUNCTION as i32) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_istable>
|
||||
#[inline]
|
||||
pub unsafe fn lua_istable(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TTABLE as i32) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_islightuserdata>
|
||||
#[inline]
|
||||
pub unsafe fn lua_islightuserdata(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TLIGHTUSERDATA as i32) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_isnil>
|
||||
#[inline]
|
||||
pub unsafe fn lua_isnil(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TNIL as i32) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_isboolean>
|
||||
#[inline]
|
||||
pub unsafe fn lua_isboolean(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TBOOLEAN as i32) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_isthread>
|
||||
#[inline]
|
||||
pub unsafe fn lua_isthread(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TTHREAD as i32) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_isnone>
|
||||
#[inline]
|
||||
pub unsafe fn lua_isnone(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) == LUA_TNONE) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_isnoneornil>
|
||||
#[inline]
|
||||
pub unsafe fn lua_isnoneornil(L: *mut lua_State, idx: libc::c_int) -> libc::c_int {
|
||||
(lua_type(L, idx) <= 0) as i32
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_pushliteral>
|
||||
#[inline]
|
||||
pub unsafe fn lua_pushliteral(L: *mut lua_State, s: &str) {
|
||||
lua_pushlstring(L, s.as_ptr() as _, s.len() as _);
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_setglobal>
|
||||
#[inline]
|
||||
pub unsafe fn lua_setglobal(L: *mut lua_State, k: *const libc::c_char) {
|
||||
lua_setfield(L, LUA_GLOBALSINDEX, k);
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_getglobal>
|
||||
#[inline]
|
||||
pub unsafe fn lua_getglobal(L: *mut lua_State, k: *const libc::c_char) {
|
||||
lua_getfield(L, LUA_GLOBALSINDEX, k)
|
||||
}
|
||||
|
||||
/// <https://www.lua.org/manual/5.1/manual.html#lua_tostring>
|
||||
#[inline]
|
||||
pub unsafe fn lua_tostring(L: *mut lua_State, idx: libc::c_int) -> *const libc::c_char {
|
||||
lua_tolstring(L, idx, ptr::null_mut())
|
||||
}
|
||||
|
||||
// Additional compatibility items that are defined as macros
|
||||
|
||||
/// `luaL_newstate()`
|
||||
#[inline]
|
||||
#[deprecated(since = "Lua 5.1", note = "replace with `luaL_newstate()`")]
|
||||
pub unsafe fn lua_open() -> *mut lua_State {
|
||||
luaL_newstate()
|
||||
}
|
||||
|
||||
/// `lua_pushvalue(L, LUA_REGISTRYINDEX)`
|
||||
#[inline]
|
||||
#[deprecated(
|
||||
since = "Lua 5.1",
|
||||
note = "replace with `lua_pushvalue(L, LUA_REGISTRYINDEX)`"
|
||||
)]
|
||||
pub unsafe fn lua_getregistry(L: *mut lua_State) {
|
||||
lua_pushvalue(L, LUA_REGISTRYINDEX)
|
||||
}
|
||||
|
||||
/// `lua_gc(L, LUA_GCCOUNT as _, 0)`
|
||||
#[inline]
|
||||
#[deprecated(
|
||||
since = "Lua 5.1",
|
||||
note = "replace with `lua_gc(L, LUA_GCCOUNT as _, 0)`"
|
||||
)]
|
||||
pub unsafe fn lua_getgccount(L: *mut lua_State) -> libc::c_int {
|
||||
lua_gc(L, LUA_GCCOUNT as _, 0)
|
||||
}
|
||||
|
||||
/// `lua_Reader`
|
||||
#[deprecated(since = "Lua 5.1", note = "replace with `lua_Reader`")]
|
||||
pub type lua_Chunkreader = lua_Reader;
|
||||
|
||||
/// `lua_Writer`
|
||||
#[deprecated(since = "Lua 5.1", note = "replace with `lua_Writer`")]
|
||||
pub type lua_Chunkwriter = lua_Writer;
|
|
@ -99,7 +99,7 @@ impl Api {
|
|||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn mods_id(&self, id: u64) -> Result<Mod> {
|
||||
let url = BASE_URL_GAME.join(&format!("mods/{id}.json"))?;
|
||||
let url = BASE_URL_GAME.join(&format!("mods/{}.json", id))?;
|
||||
let req = self.client.get(url);
|
||||
self.send(req).await
|
||||
}
|
||||
|
|
|
@ -10,4 +10,4 @@ color-eyre = { workspace = true }
|
|||
tracing = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = "0.72.0"
|
||||
bindgen = "0.71.0"
|
||||
|
|
|
@ -11,7 +11,7 @@ fn main() {
|
|||
} else {
|
||||
"oo2core_win64"
|
||||
};
|
||||
println!("cargo:rustc-link-lib=static={lib_name}");
|
||||
println!("cargo:rustc-link-lib=static={}", lib_name);
|
||||
} else {
|
||||
println!("cargo:rustc-link-lib=static=oo2corelinux64");
|
||||
println!("cargo:rustc-link-lib=stdc++");
|
||||
|
|
|
@ -10,16 +10,20 @@ byteorder = { workspace = true }
|
|||
color-eyre = { workspace = true }
|
||||
csv-async = { workspace = true }
|
||||
fastrand = { workspace = true }
|
||||
flate2 = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
futures-util = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
luajit2-sys = { workspace = true }
|
||||
nanorand = { workspace = true }
|
||||
num-derive = { workspace = true }
|
||||
num-traits = { workspace = true }
|
||||
oodle = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
pin-project-lite = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_sjson = { workspace = true }
|
||||
strum = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
|
|
@ -42,6 +42,26 @@ impl<T: FromBinary> FromBinary for Vec<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn flags_from_bits<T: bitflags::Flags>(bits: T::Bits) -> T
|
||||
where
|
||||
<T as bitflags::Flags>::Bits: std::fmt::Binary,
|
||||
{
|
||||
if let Some(flags) = T::from_bits(bits) {
|
||||
flags
|
||||
} else {
|
||||
let unknown = bits & !T::all().bits();
|
||||
|
||||
tracing::warn!(
|
||||
"Unknown bits found for '{}': known = {:0b}, unknown = {:0b}",
|
||||
std::any::type_name::<T>(),
|
||||
T::all().bits(),
|
||||
unknown
|
||||
);
|
||||
|
||||
T::from_bits_truncate(bits)
|
||||
}
|
||||
}
|
||||
|
||||
pub mod sync {
|
||||
use std::ffi::CStr;
|
||||
use std::io::{self, Read, Seek, SeekFrom, Write};
|
||||
|
@ -128,9 +148,11 @@ pub mod sync {
|
|||
ReadBytesExt::read_u8(self)
|
||||
}
|
||||
|
||||
make_read!(read_u16, read_u16_le, u16);
|
||||
make_read!(read_u32, read_u32_le, u32);
|
||||
make_read!(read_u64, read_u64_le, u64);
|
||||
|
||||
make_skip!(skip_u16, read_u16, u16);
|
||||
make_skip!(skip_u32, read_u32, u32);
|
||||
|
||||
// Implementation based on https://en.wikipedia.com/wiki/LEB128
|
||||
|
@ -247,7 +269,7 @@ pub mod sync {
|
|||
fn read_string_len(mut r: impl Read, len: usize) -> Result<String> {
|
||||
let mut buf = vec![0; len];
|
||||
r.read_exact(&mut buf)
|
||||
.wrap_err_with(|| format!("Failed to read {len} bytes"))?;
|
||||
.wrap_err_with(|| format!("Failed to read {} bytes", len))?;
|
||||
|
||||
let res = match CStr::from_bytes_until_nul(&buf) {
|
||||
Ok(s) => {
|
||||
|
@ -259,6 +281,6 @@ pub mod sync {
|
|||
|
||||
res.wrap_err("Invalid binary for UTF8 string")
|
||||
.with_section(|| format!("{}", String::from_utf8_lossy(&buf)).header("ASCI:"))
|
||||
.with_section(|| format!("{buf:x?}").header("Bytes:"))
|
||||
.with_section(|| format!("{:x?}", buf).header("Bytes:"))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -310,6 +310,9 @@ impl BundleFile {
|
|||
) -> Result<Self> {
|
||||
match file_type {
|
||||
BundleFileType::Lua => lua::compile(name, sjson).wrap_err("Failed to compile Lua file"),
|
||||
BundleFileType::Texture => texture::compile(name, sjson, root)
|
||||
.await
|
||||
.wrap_err("Failed to compile Texture file"),
|
||||
BundleFileType::Unknown(_) => {
|
||||
eyre::bail!("Unknown file type. Cannot compile from SJSON");
|
||||
}
|
||||
|
@ -417,6 +420,7 @@ impl BundleFile {
|
|||
let res = match file_type {
|
||||
BundleFileType::Lua => lua::decompile(ctx, data).await,
|
||||
BundleFileType::Package => package::decompile(ctx, name.clone(), data),
|
||||
BundleFileType::Texture => texture::decompile(ctx, name.clone(), variant).await,
|
||||
_ => {
|
||||
tracing::debug!("Can't decompile, unknown file type");
|
||||
Ok(vec![UserFile::with_name(data.to_vec(), name.clone())])
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::path::PathBuf;
|
|||
use std::process::Command;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::murmur::{Dictionary, HashGroup, IdString64, Murmur32, Murmur64};
|
||||
use crate::murmur::{Dictionary, HashGroup, IdString32, IdString64, Murmur32, Murmur64};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CmdLine {
|
||||
|
@ -87,17 +87,17 @@ impl Context {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> String
|
||||
pub fn lookup_hash_short<M>(&self, hash: M, group: HashGroup) -> IdString32
|
||||
where
|
||||
M: Into<Murmur32>,
|
||||
{
|
||||
let hash = hash.into();
|
||||
if let Some(s) = self.lookup.lookup_short(hash, group) {
|
||||
tracing::debug!(%hash, string = s, "Murmur32 lookup successful");
|
||||
s.to_owned()
|
||||
s.to_string().into()
|
||||
} else {
|
||||
tracing::debug!(%hash, "Murmur32 lookup failed");
|
||||
format!("{hash:08X}")
|
||||
hash.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
pub mod lua;
|
||||
pub mod package;
|
||||
pub mod strings;
|
||||
pub mod texture;
|
||||
|
|
|
@ -5,7 +5,7 @@ use color_eyre::{Report, Result};
|
|||
|
||||
use crate::binary::sync::ReadExt;
|
||||
use crate::bundle::file::{BundleFileVariant, UserFile};
|
||||
use crate::murmur::HashGroup;
|
||||
use crate::murmur::{HashGroup, IdString32};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, serde::Serialize)]
|
||||
#[serde(untagged)]
|
||||
|
@ -26,7 +26,7 @@ impl Language {
|
|||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
pub struct Strings(HashMap<String, HashMap<Language, String>>);
|
||||
pub struct Strings(HashMap<IdString32, HashMap<Language, String>>);
|
||||
|
||||
#[inline(always)]
|
||||
fn read_string<R>(r: R) -> Result<String>
|
||||
|
@ -46,7 +46,7 @@ where
|
|||
impl Strings {
|
||||
#[tracing::instrument(skip_all, fields(languages = variants.len()))]
|
||||
pub fn from_variants(ctx: &crate::Context, variants: &[BundleFileVariant]) -> Result<Self> {
|
||||
let mut map: HashMap<String, HashMap<Language, String>> = HashMap::new();
|
||||
let mut map: HashMap<IdString32, HashMap<Language, String>> = HashMap::new();
|
||||
|
||||
for (i, variant) in variants.iter().enumerate() {
|
||||
let _span = tracing::trace_span!("variant {}", i);
|
||||
|
|
741
lib/sdk/src/filetype/texture.rs
Normal file
741
lib/sdk/src/filetype/texture.rs
Normal file
|
@ -0,0 +1,741 @@
|
|||
use std::io::{Cursor, Read, Seek, SeekFrom, Write as _};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use bitflags::bitflags;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::{eyre, SectionExt};
|
||||
use color_eyre::{Help, Result};
|
||||
use flate2::read::ZlibDecoder;
|
||||
use oodle::{OodleLZ_CheckCRC, OodleLZ_FuzzSafe};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::fs;
|
||||
|
||||
use crate::binary::sync::{ReadExt, WriteExt};
|
||||
use crate::bundle::file::UserFile;
|
||||
use crate::murmur::{HashGroup, IdString32, IdString64};
|
||||
use crate::{binary, BundleFile, BundleFileType, BundleFileVariant};
|
||||
|
||||
mod dds;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinition {
|
||||
common: TextureDefinitionPlatform,
|
||||
// Stingray supports per-platform sections here, where you can create overrides with the same
|
||||
// values as in `common`. But since we only support PC, we don't need to implement
|
||||
// that.
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinitionPlatform {
|
||||
input: TextureDefinitionInput,
|
||||
output: TextureDefinitionOutput,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinitionInput {
|
||||
filename: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct TextureDefinitionOutput {
|
||||
category: String,
|
||||
}
|
||||
|
||||
bitflags! {
|
||||
#[derive(Clone, Copy, Debug, Default)]
|
||||
struct TextureFlags: u32 {
|
||||
const STREAMABLE = 0b0000_0001;
|
||||
const UNKNOWN = 1 << 1;
|
||||
const SRGB = 1 << 8;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default)]
|
||||
struct TextureHeaderMipInfo {
|
||||
offset: usize,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
struct TextureHeader {
|
||||
flags: TextureFlags,
|
||||
n_streamable_mipmaps: usize,
|
||||
width: usize,
|
||||
height: usize,
|
||||
mip_infos: [TextureHeaderMipInfo; 16],
|
||||
meta_size: usize,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for TextureHeader {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("TextureHeader")
|
||||
.field("flags", &self.flags)
|
||||
.field("n_streamable_mipmaps", &self.n_streamable_mipmaps)
|
||||
.field("width", &self.width)
|
||||
.field("height", &self.height)
|
||||
.field("mip_infos", &{
|
||||
let mut s = self
|
||||
.mip_infos
|
||||
.iter()
|
||||
.fold(String::from("["), |mut s, info| {
|
||||
s.push_str(&format!("{}/{}, ", info.offset, info.size));
|
||||
s
|
||||
});
|
||||
s.push(']');
|
||||
s
|
||||
})
|
||||
.field("meta_size", &self.meta_size)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl TextureHeader {
|
||||
#[tracing::instrument(skip(r))]
|
||||
fn from_binary(mut r: impl ReadExt) -> Result<Self> {
|
||||
let flags = r.read_u32().map(binary::flags_from_bits)?;
|
||||
let n_streamable_mipmaps = r.read_u32()? as usize;
|
||||
let width = r.read_u32()? as usize;
|
||||
let height = r.read_u32()? as usize;
|
||||
|
||||
let mut mip_infos = [TextureHeaderMipInfo::default(); 16];
|
||||
|
||||
for info in mip_infos.iter_mut() {
|
||||
info.offset = r.read_u32()? as usize;
|
||||
info.size = r.read_u32()? as usize;
|
||||
}
|
||||
|
||||
let meta_size = r.read_u32()? as usize;
|
||||
|
||||
Ok(Self {
|
||||
flags,
|
||||
n_streamable_mipmaps,
|
||||
width,
|
||||
height,
|
||||
mip_infos,
|
||||
meta_size,
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(w))]
|
||||
fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
|
||||
eyre::ensure!(
|
||||
self.flags.is_empty() && self.n_streamable_mipmaps == 0,
|
||||
"Only textures are supported where `flags == 0` and `n_streamable_mipmaps == 0`."
|
||||
);
|
||||
|
||||
w.write_u32(self.flags.bits())?;
|
||||
w.write_u32(self.n_streamable_mipmaps as u32)?;
|
||||
w.write_u32(self.width as u32)?;
|
||||
w.write_u32(self.height as u32)?;
|
||||
|
||||
for info in self.mip_infos {
|
||||
w.write_u32(info.offset as u32)?;
|
||||
w.write_u32(info.size as u32)?;
|
||||
}
|
||||
|
||||
// TODO: For now we write `0` here, until the meta section is figured out
|
||||
w.write_u32(0)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Texture {
|
||||
header: TextureHeader,
|
||||
data: Vec<u8>,
|
||||
stream: Option<Vec<u8>>,
|
||||
category: IdString32,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Texture {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut out = f.debug_struct("Texture");
|
||||
out.field("header", &self.header);
|
||||
|
||||
if self.data.len() <= 5 {
|
||||
out.field("data", &format!("{:x?}", &self.data));
|
||||
} else {
|
||||
out.field(
|
||||
"data",
|
||||
&format!("{:x?}.. ({} bytes)", &self.data[..5], &self.data.len()),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(stream) = self.stream.as_ref() {
|
||||
if stream.len() <= 5 {
|
||||
out.field("stream", &format!("{:x?}", &stream));
|
||||
} else {
|
||||
out.field(
|
||||
"stream",
|
||||
&format!("{:x?}.. ({} bytes)", &stream[..5], &stream.len()),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
out.field("stream", &"None");
|
||||
}
|
||||
|
||||
out.field("category", &self.category).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Texture {
|
||||
#[tracing::instrument(skip(data, chunks))]
|
||||
fn decompress_stream_data(mut data: impl Read, chunks: impl AsRef<[usize]>) -> Result<Vec<u8>> {
|
||||
const RAW_SIZE: usize = 0x10000;
|
||||
|
||||
let chunks = chunks.as_ref();
|
||||
|
||||
let max_size = chunks.iter().max().copied().unwrap_or(RAW_SIZE);
|
||||
let mut read_buf = vec![0; max_size];
|
||||
|
||||
let mut stream_raw = Vec::with_capacity(chunks.iter().sum());
|
||||
let mut last = 0;
|
||||
|
||||
for offset_next in chunks {
|
||||
let size = offset_next - last;
|
||||
|
||||
let span = tracing::info_span!(
|
||||
"stream chunk",
|
||||
num_chunks = chunks.len(),
|
||||
chunk_size_comp = size,
|
||||
offset = last
|
||||
);
|
||||
let _enter = span.enter();
|
||||
|
||||
let buf = &mut read_buf[0..size];
|
||||
data.read_exact(buf)
|
||||
.wrap_err("Failed to read chunk from stream file")?;
|
||||
|
||||
let raw = oodle::decompress(buf, RAW_SIZE, OodleLZ_FuzzSafe::No, OodleLZ_CheckCRC::No)
|
||||
.wrap_err("Failed to decompress stream chunk")?;
|
||||
eyre::ensure!(
|
||||
raw.len() == RAW_SIZE,
|
||||
"Invalid chunk length after decompression"
|
||||
);
|
||||
|
||||
stream_raw.extend_from_slice(&raw);
|
||||
|
||||
last = *offset_next;
|
||||
}
|
||||
Ok(stream_raw)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(data), fields(data_len = data.as_ref().len()))]
|
||||
fn reorder_stream_mipmap(
|
||||
data: impl AsRef<[u8]>,
|
||||
bits_per_block: usize,
|
||||
bytes_per_block: usize,
|
||||
block_size: usize,
|
||||
pitch: usize,
|
||||
) -> Result<Vec<u8>> {
|
||||
const CHUNK_SIZE: usize = 0x10000;
|
||||
let data = data.as_ref();
|
||||
|
||||
let mut out = Vec::with_capacity(data.len());
|
||||
let mut window = vec![0u8; pitch * 64];
|
||||
|
||||
let row_size = bits_per_block * block_size;
|
||||
tracing::Span::current().record("row_size", row_size);
|
||||
|
||||
eyre::ensure!(
|
||||
data.len() % CHUNK_SIZE == 0,
|
||||
"Stream data does not divide evenly into chunks"
|
||||
);
|
||||
|
||||
for (i, chunk) in data.chunks_exact(CHUNK_SIZE).enumerate() {
|
||||
let chunk_x = (i % bytes_per_block) * row_size;
|
||||
|
||||
let span = tracing::trace_span!("chunk", i, chunk_x = chunk_x);
|
||||
let _guard = span.enter();
|
||||
|
||||
if i > 0 && i % bytes_per_block == 0 {
|
||||
out.extend_from_slice(&window);
|
||||
}
|
||||
|
||||
for (j, row) in chunk.chunks_exact(row_size).enumerate() {
|
||||
let start = chunk_x + j * pitch;
|
||||
let end = start + row_size;
|
||||
tracing::trace!("{i}/{j} at {}:{}", start, end);
|
||||
window[start..end].copy_from_slice(row);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
#[tracing::instrument(
|
||||
"Texture::from_binary",
|
||||
skip(ctx, r, stream_r),
|
||||
fields(
|
||||
compression_type = tracing::field::Empty,
|
||||
compressed_size = tracing::field::Empty,
|
||||
uncompressed_size = tracing::field::Empty,
|
||||
)
|
||||
)]
|
||||
fn from_binary(
|
||||
ctx: &crate::Context,
|
||||
mut r: impl Read + Seek,
|
||||
mut stream_r: Option<impl Read>,
|
||||
) -> Result<Self> {
|
||||
let compression_type = r.read_u32()?;
|
||||
let compressed_size = r.read_u32()? as usize;
|
||||
let uncompressed_size = r.read_u32()? as usize;
|
||||
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("compression_type", compression_type);
|
||||
span.record("compressed_size", compressed_size);
|
||||
span.record("uncompressed_size", uncompressed_size);
|
||||
}
|
||||
|
||||
let mut comp_buf = vec![0; compressed_size];
|
||||
r.read_exact(&mut comp_buf)?;
|
||||
|
||||
let out_buf = match compression_type {
|
||||
// Uncompressed
|
||||
// This one never seems to contain the additional `TextureHeader` metadata,
|
||||
// so we return early in this branch.
|
||||
0 => {
|
||||
eyre::ensure!(
|
||||
compressed_size == 0 && uncompressed_size == 0,
|
||||
"Cannot handle texture with compression_type == 0, but buffer sizes > 0"
|
||||
);
|
||||
tracing::trace!("Found raw texture");
|
||||
|
||||
let pos = r.stream_position()?;
|
||||
let end = {
|
||||
r.seek(SeekFrom::End(0))?;
|
||||
let end = r.stream_position()?;
|
||||
r.seek(SeekFrom::Start(pos))?;
|
||||
end
|
||||
};
|
||||
|
||||
// Reads until the last u32.
|
||||
let mut data = vec![0u8; (end - pos - 4) as usize];
|
||||
r.read_exact(&mut data)?;
|
||||
|
||||
let category = r.read_u32().map(IdString32::from)?;
|
||||
|
||||
return Ok(Self {
|
||||
header: TextureHeader::default(),
|
||||
data,
|
||||
stream: None,
|
||||
category,
|
||||
});
|
||||
}
|
||||
1 => oodle::decompress(
|
||||
comp_buf,
|
||||
uncompressed_size,
|
||||
OodleLZ_FuzzSafe::No,
|
||||
OodleLZ_CheckCRC::No,
|
||||
)?,
|
||||
2 => {
|
||||
let mut decoder = ZlibDecoder::new(comp_buf.as_slice());
|
||||
let mut buf = Vec::with_capacity(uncompressed_size);
|
||||
|
||||
decoder.read_to_end(&mut buf)?;
|
||||
buf
|
||||
}
|
||||
_ => eyre::bail!(
|
||||
"Unknown compression type for texture '{}'",
|
||||
compression_type
|
||||
),
|
||||
};
|
||||
|
||||
eyre::ensure!(
|
||||
out_buf.len() == uncompressed_size,
|
||||
"Length of decompressed buffer did not match expected value. Expected {}, got {}",
|
||||
uncompressed_size,
|
||||
out_buf.len()
|
||||
);
|
||||
|
||||
// No idea what this number is supposed to mean.
|
||||
// Even the game engine just skips this one.
|
||||
r.skip_u32(0x43)?;
|
||||
|
||||
let header = TextureHeader::from_binary(&mut r)?;
|
||||
|
||||
eyre::ensure!(
|
||||
header.meta_size == 0 || stream_r.is_some(),
|
||||
"Compression chunks and stream file don't match up. meta_size = {}, has_stream = {}",
|
||||
header.meta_size,
|
||||
stream_r.is_some()
|
||||
);
|
||||
|
||||
let stream = if let Some(stream_r) = stream_r.as_mut() {
|
||||
// Number of compression chunks in the stream file
|
||||
let num_chunks = r.read_u32()? as usize;
|
||||
r.skip_u16(0)?;
|
||||
|
||||
{
|
||||
let num_chunks_1 = r.read_u16()? as usize;
|
||||
|
||||
eyre::ensure!(
|
||||
num_chunks == num_chunks_1,
|
||||
"Chunk numbers don't match. first = {}, second = {}",
|
||||
num_chunks,
|
||||
num_chunks_1
|
||||
);
|
||||
}
|
||||
|
||||
let mut chunks = Vec::with_capacity(num_chunks);
|
||||
|
||||
for _ in 0..num_chunks {
|
||||
chunks.push(r.read_u32()? as usize);
|
||||
}
|
||||
|
||||
let stream_raw = Self::decompress_stream_data(stream_r, chunks)
|
||||
.wrap_err("Failed to decompress stream data")?;
|
||||
|
||||
Some(stream_raw)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let category = ctx.lookup_hash_short(r.read_u32()?, HashGroup::TextureCategory);
|
||||
|
||||
Ok(Self {
|
||||
category,
|
||||
header,
|
||||
data: out_buf,
|
||||
stream,
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(w))]
|
||||
fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
|
||||
let compression_type = 1;
|
||||
w.write_u32(compression_type)?;
|
||||
|
||||
let comp_buf = oodle::compress(&self.data).wrap_err("Failed to compress DDS data")?;
|
||||
|
||||
w.write_u32(comp_buf.len() as u32)?;
|
||||
w.write_u32(self.data.len() as u32)?;
|
||||
w.write_all(&comp_buf)?;
|
||||
|
||||
// Unknown field, which the engine seems to ignore.
|
||||
// All game files have the same value here, so we just mirror that.
|
||||
w.write_u32(0x43)?;
|
||||
|
||||
self.header.to_binary(&mut w)?;
|
||||
|
||||
w.write_u32(self.category.to_murmur32().into())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
fn to_sjson(&self, filename: String) -> Result<String> {
|
||||
let texture = TextureDefinition {
|
||||
common: TextureDefinitionPlatform {
|
||||
input: TextureDefinitionInput { filename },
|
||||
output: TextureDefinitionOutput {
|
||||
category: self.category.display().to_string(),
|
||||
},
|
||||
},
|
||||
};
|
||||
serde_sjson::to_string(&texture).wrap_err("Failed to serialize texture definition")
|
||||
}
|
||||
|
||||
#[tracing::instrument(fields(
|
||||
dds_header = tracing::field::Empty,
|
||||
dx10_header = tracing::field::Empty,
|
||||
image_format = tracing::field::Empty,
|
||||
))]
|
||||
fn create_dds_user_file(&self, name: String) -> Result<UserFile> {
|
||||
let mut data = Cursor::new(&self.data);
|
||||
let mut dds_header =
|
||||
dds::DDSHeader::from_binary(&mut data).wrap_err("Failed to read DDS header")?;
|
||||
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("dds_header", format!("{:?}", dds_header));
|
||||
}
|
||||
|
||||
if !dds_header.pixel_format.flags.contains(dds::DDPF::FOURCC) {
|
||||
tracing::debug!("Found DDS without FourCC. Dumping raw data");
|
||||
return Ok(UserFile::with_name(self.data.clone(), name));
|
||||
}
|
||||
|
||||
// eyre::ensure!(
|
||||
// dds_header.pixel_format.four_cc == dds::FourCC::DX10,
|
||||
// "Only DX10 textures are currently supported. FourCC == {}",
|
||||
// dds_header.pixel_format.four_cc,
|
||||
// );
|
||||
|
||||
let dx10_header =
|
||||
dds::Dx10Header::from_binary(&mut data).wrap_err("Failed to read DX10 header")?;
|
||||
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("dx10_header", format!("{:?}", dx10_header));
|
||||
}
|
||||
|
||||
// match dx10_header.dxgi_format {
|
||||
// DXGIFormat::BC1_UNORM
|
||||
// | DXGIFormat::BC3_UNORM
|
||||
// | DXGIFormat::BC4_UNORM
|
||||
// | DXGIFormat::BC5_UNORM
|
||||
// | DXGIFormat::BC6H_UF16
|
||||
// | DXGIFormat::BC7_UNORM => {}
|
||||
// _ => {
|
||||
// eyre::bail!(
|
||||
// "Unsupported DXGI format: {} (0x{:0X})",
|
||||
// dx10_header.dxgi_format,
|
||||
// dx10_header.dxgi_format.to_u32().unwrap_or_default()
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
|
||||
let stingray_image_format = dds::stripped_format_from_header(&dds_header, &dx10_header)?;
|
||||
{
|
||||
let span = tracing::Span::current();
|
||||
span.record("image_format", format!("{:?}", stingray_image_format));
|
||||
}
|
||||
|
||||
// eyre::ensure!(
|
||||
// stingray_image_format.image_type == ImageType::Image2D,
|
||||
// "Unsupported image type: {}",
|
||||
// stingray_image_format.image_type,
|
||||
// );
|
||||
|
||||
let block_size = 4 * dds_header.pitch_or_linear_size / dds_header.width;
|
||||
let bits_per_block: usize = match block_size {
|
||||
8 => 128,
|
||||
16 => 64,
|
||||
block_size => eyre::bail!("Unsupported block size {}", block_size),
|
||||
};
|
||||
|
||||
let pitch = self.header.width / 4 * block_size;
|
||||
let bytes_per_block = self.header.width / bits_per_block / 4;
|
||||
|
||||
tracing::debug!(
|
||||
"block_size = {} | pitch = {} | bits_per_block = {} | bytes_per_block = {}",
|
||||
block_size,
|
||||
pitch,
|
||||
bits_per_block,
|
||||
bytes_per_block
|
||||
);
|
||||
|
||||
let mut out_data = Cursor::new(Vec::with_capacity(self.data.len()));
|
||||
|
||||
// Currently, we only extract the largest mipmap,
|
||||
// so we need to set the dimensions accordingly, and remove the
|
||||
// flag.
|
||||
dds_header.width = self.header.width;
|
||||
dds_header.height = self.header.height;
|
||||
dds_header.mipmap_count = 0;
|
||||
dds_header.flags &= !dds::DDSD::MIPMAPCOUNT;
|
||||
|
||||
dds_header
|
||||
.to_binary(&mut out_data)
|
||||
.wrap_err("Failed to write DDS header")?;
|
||||
|
||||
dx10_header
|
||||
.to_binary(&mut out_data)
|
||||
.wrap_err("Failed to write DX10 header")?;
|
||||
|
||||
// If there is stream data, we build the mipmap data from it.
|
||||
// If not, we take whatever is left in the bundle file.
|
||||
if let Some(stream) = &self.stream {
|
||||
let data = Self::reorder_stream_mipmap(
|
||||
stream,
|
||||
bits_per_block,
|
||||
bytes_per_block,
|
||||
block_size,
|
||||
pitch,
|
||||
)
|
||||
.wrap_err("Failed to reorder stream chunks")?;
|
||||
|
||||
out_data
|
||||
.write_all(&data)
|
||||
.wrap_err("Failed to write streamed mipmap data")?;
|
||||
} else {
|
||||
let (_, remaining) = data.split();
|
||||
out_data
|
||||
.write_all(remaining)
|
||||
.wrap_err("Failed to write texture data")?;
|
||||
};
|
||||
|
||||
Ok(UserFile::with_name(out_data.into_inner(), name))
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
fn to_user_files(&self, name: String) -> Result<Vec<UserFile>> {
|
||||
let mut files = Vec::with_capacity(2);
|
||||
|
||||
{
|
||||
let data = self.to_sjson(name.clone())?.as_bytes().to_vec();
|
||||
let name = PathBuf::from(&name)
|
||||
.with_extension("texture")
|
||||
.display()
|
||||
.to_string();
|
||||
files.push(UserFile::with_name(data, name));
|
||||
}
|
||||
|
||||
// For debugging purposes, also extract the raw files
|
||||
if cfg!(debug_assertions) {
|
||||
if let Some(stream) = &self.stream {
|
||||
let stream_name = PathBuf::from(&name).with_extension("stream");
|
||||
files.push(UserFile::with_name(
|
||||
stream.clone(),
|
||||
stream_name.display().to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let name = PathBuf::from(&name)
|
||||
.with_extension("raw.dds")
|
||||
.display()
|
||||
.to_string();
|
||||
files.push(UserFile::with_name(self.data.clone(), name));
|
||||
}
|
||||
|
||||
match self
|
||||
.create_dds_user_file(name)
|
||||
.wrap_err("Failed to create DDS file")
|
||||
{
|
||||
Ok(dds) => files.push(dds),
|
||||
Err(err) => {
|
||||
if cfg!(debug_assertions) {
|
||||
tracing::error!(
|
||||
"{:?}",
|
||||
err.with_section(|| {
|
||||
"Running in debug mode, continuing to produce raw files".header("Note:")
|
||||
})
|
||||
);
|
||||
} else {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(files)
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(ctx, data, stream_data), fields(data_len = data.as_ref().len()))]
|
||||
pub(crate) async fn decompile_data(
|
||||
ctx: &crate::Context,
|
||||
name: String,
|
||||
data: impl AsRef<[u8]>,
|
||||
stream_data: Option<impl AsRef<[u8]>>,
|
||||
) -> Result<Vec<UserFile>> {
|
||||
let mut r = Cursor::new(data);
|
||||
let mut stream_r = stream_data.map(Cursor::new);
|
||||
|
||||
let texture = Texture::from_binary(ctx, &mut r, stream_r.as_mut())?;
|
||||
texture
|
||||
.to_user_files(name)
|
||||
.wrap_err("Failed to build user files")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(ctx))]
|
||||
pub(crate) async fn decompile(
|
||||
ctx: &crate::Context,
|
||||
name: String,
|
||||
variant: &BundleFileVariant,
|
||||
) -> Result<Vec<UserFile>> {
|
||||
let data_file = variant.data_file_name().map(|name| match &ctx.game_dir {
|
||||
Some(dir) => dir.join("bundle").join(name),
|
||||
None => PathBuf::from("bundle").join(name),
|
||||
});
|
||||
|
||||
if variant.external() {
|
||||
let Some(path) = data_file else {
|
||||
eyre::bail!("File is marked external but has no data file name");
|
||||
};
|
||||
|
||||
tracing::debug!(
|
||||
"Decompiling texture from external file '{}'",
|
||||
path.display()
|
||||
);
|
||||
|
||||
let data = fs::read(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
|
||||
.with_suggestion(|| {
|
||||
"Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
|
||||
})?;
|
||||
|
||||
decompile_data(ctx, name, data, None::<&[u8]>).await
|
||||
} else {
|
||||
tracing::debug!("Decompiling texture from bundle data");
|
||||
|
||||
let stream_data = match data_file {
|
||||
Some(path) => {
|
||||
let data = fs::read(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read data file '{}'", path.display()))
|
||||
.with_suggestion(|| {
|
||||
"Provide a game directory in the config file or make sure the `data` directory is next to the provided bundle."
|
||||
})?;
|
||||
Some(data)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
decompile_data(ctx, name, variant.data(), stream_data).await
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(sjson, name), fields(sjson_len = sjson.as_ref().len(), name = %name.display()))]
|
||||
pub async fn compile(
|
||||
name: IdString64,
|
||||
sjson: impl AsRef<str>,
|
||||
root: impl AsRef<Path> + std::fmt::Debug,
|
||||
) -> Result<BundleFile> {
|
||||
let definitions: TextureDefinition = serde_sjson::from_str(sjson.as_ref())
|
||||
.wrap_err("Failed to deserialize SJSON")
|
||||
.with_section(|| sjson.as_ref().to_string().header("SJSON:"))?;
|
||||
|
||||
let dds = {
|
||||
let path = root.as_ref().join(definitions.common.input.filename);
|
||||
fs::read(&path)
|
||||
.await
|
||||
.wrap_err_with(|| format!("Failed to read DDS file '{}'", path.display()))?
|
||||
};
|
||||
|
||||
let (width, height) = {
|
||||
let mut r = Cursor::new(&dds);
|
||||
|
||||
let magic = r.read_u32()?;
|
||||
eyre::ensure!(
|
||||
magic == 0x20534444,
|
||||
"Invalid magic bytes for DDS. Expected 0x20534444, got {:08x}",
|
||||
magic
|
||||
);
|
||||
|
||||
r.seek(SeekFrom::Current(5))?;
|
||||
|
||||
let width = r.read_u32()? as usize;
|
||||
let height = r.read_u32()? as usize;
|
||||
|
||||
(width, height)
|
||||
};
|
||||
|
||||
let mut w = Cursor::new(Vec::new());
|
||||
|
||||
let texture = Texture {
|
||||
header: TextureHeader {
|
||||
// As long as we can't handle mipmaps, these two need be `0`
|
||||
flags: TextureFlags::empty(),
|
||||
n_streamable_mipmaps: 0,
|
||||
width,
|
||||
height,
|
||||
mip_infos: [TextureHeaderMipInfo::default(); 16],
|
||||
meta_size: 0,
|
||||
},
|
||||
data: dds,
|
||||
stream: None,
|
||||
category: IdString32::String(definitions.common.output.category),
|
||||
};
|
||||
texture.to_binary(&mut w)?;
|
||||
|
||||
let mut variant = BundleFileVariant::new();
|
||||
variant.set_data(w.into_inner());
|
||||
|
||||
let mut file = BundleFile::new(name, BundleFileType::Texture);
|
||||
file.add_variant(variant);
|
||||
|
||||
Ok(file)
|
||||
}
|
529
lib/sdk/src/filetype/texture/dds.rs
Normal file
529
lib/sdk/src/filetype/texture/dds.rs
Normal file
|
@ -0,0 +1,529 @@
|
|||
use std::io::SeekFrom;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use color_eyre::eyre::Context as _;
|
||||
use color_eyre::eyre::{self, OptionExt as _};
|
||||
use color_eyre::Result;
|
||||
use num_derive::{FromPrimitive, ToPrimitive};
|
||||
use num_traits::{FromPrimitive as _, ToPrimitive as _};
|
||||
|
||||
use crate::binary;
|
||||
use crate::binary::sync::{ReadExt, WriteExt};
|
||||
|
||||
const MAGIC_DDS: u32 = 0x20534444;
|
||||
|
||||
bitflags! {
    /// `dwFlags` of the DDS header: indicates which header members contain
    /// valid data.
    #[derive(Clone, Copy, Debug)]
    pub struct DDSD: u32 {
        /// Required
        const CAPS = 0x1;
        /// Required
        const HEIGHT = 0x2;
        /// Required
        const WIDTH = 0x4;
        /// Pitch for an uncompressed texture
        const PITCH = 0x8;
        /// Required
        const PIXELFORMAT = 0x1000;
        /// Required in a mipmapped texture
        const MIPMAPCOUNT = 0x20000;
        /// Pitch for a compressed texture
        const LINEARSIZE = 0x80000;
        /// Required in a depth texture
        const DEPTH = 0x800000;
    }

    /// `dwCaps` of the DDS header: complexity of the surface.
    #[derive(Clone, Copy, Debug)]
    pub struct DDSCAPS: u32 {
        /// Surface has more than one layer (mipmaps, cubemap faces, volume).
        const COMPLEX = 0x8;
        /// Surface is used as a mipmap.
        const MIPMAP = 0x400000;
        /// Required
        const TEXTURE = 0x1000;
    }

    /// `dwCaps2` of the DDS header: additional detail about the surface
    /// (cubemap faces, volume texture).
    #[derive(Clone, Copy, Debug)]
    pub struct DDSCAPS2: u32 {
        const CUBEMAP = 0x200;
        const CUBEMAP_POSITIVEX = 0x400;
        const CUBEMAP_NEGATIVEX = 0x800;
        const CUBEMAP_POSITIVEY = 0x1000;
        const CUBEMAP_NEGATIVEY = 0x2000;
        const CUBEMAP_POSITIVEZ = 0x4000;
        const CUBEMAP_NEGATIVEZ = 0x8000;
        const VOLUME = 0x200000;

        /// Convenience mask: all six cubemap faces.
        const CUBEMAP_ALLFACES = Self::CUBEMAP_POSITIVEX.bits()
            | Self::CUBEMAP_NEGATIVEX.bits()
            | Self::CUBEMAP_POSITIVEY.bits()
            | Self::CUBEMAP_NEGATIVEY.bits()
            | Self::CUBEMAP_POSITIVEZ.bits()
            | Self::CUBEMAP_NEGATIVEZ.bits();
    }

    /// `dwFlags` of `DDS_PIXELFORMAT`: which pixel-format members are valid.
    #[derive(Clone, Copy, Debug)]
    pub struct DDPF: u32 {
        const ALPHAPIXELS = 0x1;
        const ALPHA = 0x2;
        /// `dwFourCC` contains a compressed-format code.
        const FOURCC = 0x4;
        const RGB = 0x40;
        const YUV = 0x200;
        const LUMINANCE = 0x20000;
    }

    /// `miscFlag` of the DX10 extension header.
    #[derive(Clone, Copy, Debug)]
    pub struct DdsResourceMiscFlags: u32 {
        /// The resource is a cubemap.
        const TEXTURECUBE = 0x4;
    }
}
|
||||
|
||||
/// `resourceDimension` of the DX10 extension header: identifies the type of
/// resource (buffer, 1D/2D/3D texture).
#[derive(Clone, Copy, Debug, PartialEq, Eq, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum D3D10ResourceDimension {
    Unknown = 0,
    Buffer = 1,
    Texture1D = 2,
    Texture2D = 3,
    Texture3D = 4,
}
|
||||
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
#[allow(non_camel_case_types)]
|
||||
#[derive(Clone, Copy, Debug, strum::Display, FromPrimitive, ToPrimitive)]
|
||||
#[repr(u32)]
|
||||
pub enum DXGIFormat {
|
||||
UNKNOWN = 0,
|
||||
R32G32B32A32_TYPELESS = 1,
|
||||
R32G32B32A32_FLOAT = 2,
|
||||
R32G32B32A32_UINT = 3,
|
||||
R32G32B32A32_SINT = 4,
|
||||
R32G32B32_TYPELESS = 5,
|
||||
R32G32B32_FLOAT = 6,
|
||||
R32G32B32_UINT = 7,
|
||||
R32G32B32_SINT = 8,
|
||||
R16G16B16A16_TYPELESS = 9,
|
||||
R16G16B16A16_FLOAT = 10,
|
||||
R16G16B16A16_UNORM = 11,
|
||||
R16G16B16A16_UINT = 12,
|
||||
R16G16B16A16_SNORM = 13,
|
||||
R16G16B16A16_SINT = 14,
|
||||
R32G32_TYPELESS = 15,
|
||||
R32G32_FLOAT = 16,
|
||||
R32G32_UINT = 17,
|
||||
R32G32_SINT = 18,
|
||||
R32G8X24_TYPELESS = 19,
|
||||
D32_FLOAT_S8X24_UINT = 20,
|
||||
R32_FLOAT_X8X24_TYPELESS = 21,
|
||||
X32_TYPELESS_G8X24_UINT = 22,
|
||||
R10G10B10A2_TYPELESS = 23,
|
||||
R10G10B10A2_UNORM = 24,
|
||||
R10G10B10A2_UINT = 25,
|
||||
R11G11B10_FLOAT = 26,
|
||||
R8G8B8A8_TYPELESS = 27,
|
||||
R8G8B8A8_UNORM = 28,
|
||||
R8G8B8A8_UNORM_SRGB = 29,
|
||||
R8G8B8A8_UINT = 30,
|
||||
R8G8B8A8_SNORM = 31,
|
||||
R8G8B8A8_SINT = 32,
|
||||
R16G16_TYPELESS = 33,
|
||||
R16G16_FLOAT = 34,
|
||||
R16G16_UNORM = 35,
|
||||
R16G16_UINT = 36,
|
||||
R16G16_SNORM = 37,
|
||||
R16G16_SINT = 38,
|
||||
R32_TYPELESS = 39,
|
||||
D32_FLOAT = 40,
|
||||
R32_FLOAT = 41,
|
||||
R32_UINT = 42,
|
||||
R32_SINT = 43,
|
||||
R24G8_TYPELESS = 44,
|
||||
D24_UNORM_S8_UINT = 45,
|
||||
R24_UNORM_X8_TYPELESS = 46,
|
||||
X24_TYPELESS_G8_UINT = 47,
|
||||
R8G8_TYPELESS = 48,
|
||||
R8G8_UNORM = 49,
|
||||
R8G8_UINT = 50,
|
||||
R8G8_SNORM = 51,
|
||||
R8G8_SINT = 52,
|
||||
R16_TYPELESS = 53,
|
||||
R16_FLOAT = 54,
|
||||
D16_UNORM = 55,
|
||||
R16_UNORM = 56,
|
||||
R16_UINT = 57,
|
||||
R16_SNORM = 58,
|
||||
R16_SINT = 59,
|
||||
R8_TYPELESS = 60,
|
||||
R8_UNORM = 61,
|
||||
R8_UINT = 62,
|
||||
R8_SNORM = 63,
|
||||
R8_SINT = 64,
|
||||
A8_UNORM = 65,
|
||||
R1_UNORM = 66,
|
||||
R9G9B9E5_SHAREDEXP = 67,
|
||||
R8G8_B8G8_UNORM = 68,
|
||||
G8R8_G8B8_UNORM = 69,
|
||||
BC1_TYPELESS = 70,
|
||||
BC1_UNORM = 71,
|
||||
BC1_UNORM_SRGB = 72,
|
||||
BC2_TYPELESS = 73,
|
||||
BC2_UNORM = 74,
|
||||
BC2_UNORM_SRGB = 75,
|
||||
BC3_TYPELESS = 76,
|
||||
BC3_UNORM = 77,
|
||||
BC3_UNORM_SRGB = 78,
|
||||
BC4_TYPELESS = 79,
|
||||
BC4_UNORM = 80,
|
||||
BC4_SNORM = 81,
|
||||
BC5_TYPELESS = 82,
|
||||
BC5_UNORM = 83,
|
||||
BC5_SNORM = 84,
|
||||
B5G6R5_UNORM = 85,
|
||||
B5G5R5A1_UNORM = 86,
|
||||
B8G8R8A8_UNORM = 87,
|
||||
B8G8R8X8_UNORM = 88,
|
||||
R10G10B10_XR_BIAS_A2_UNORM = 89,
|
||||
B8G8R8A8_TYPELESS = 90,
|
||||
B8G8R8A8_UNORM_SRGB = 91,
|
||||
B8G8R8X8_TYPELESS = 92,
|
||||
B8G8R8X8_UNORM_SRGB = 93,
|
||||
BC6H_TYPELESS = 94,
|
||||
BC6H_UF16 = 95,
|
||||
BC6H_SF16 = 96,
|
||||
BC7_TYPELESS = 97,
|
||||
BC7_UNORM = 98,
|
||||
BC7_UNORM_SRGB = 99,
|
||||
AYUV = 100,
|
||||
Y410 = 101,
|
||||
Y416 = 102,
|
||||
NV12 = 103,
|
||||
P010 = 104,
|
||||
P016 = 105,
|
||||
OPAQUE = 106,
|
||||
YUY2 = 107,
|
||||
Y210 = 108,
|
||||
Y216 = 109,
|
||||
NV11 = 110,
|
||||
AI44 = 111,
|
||||
IA44 = 112,
|
||||
P8 = 113,
|
||||
A8P8 = 114,
|
||||
B4G4R4A4_UNORM = 115,
|
||||
P208 = 130,
|
||||
V208 = 131,
|
||||
V408 = 132,
|
||||
SAMPLER_FEEDBACK_MIN_MIP_OPAQUE,
|
||||
SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE,
|
||||
}
|
||||
|
||||
/// The `DDS_HEADER_DXT10` extension header, present when the pixel format's
/// FourCC is "DX10".
#[derive(Clone, Copy, Debug)]
pub struct Dx10Header {
    /// Resource data formats, including fully-typed and typeless formats.
    /// See https://learn.microsoft.com/en-us/windows/win32/api/dxgiformat/ne-dxgiformat-dxgi_format
    pub dxgi_format: DXGIFormat,
    /// Buffer / 1D / 2D / 3D texture.
    pub resource_dimension: D3D10ResourceDimension,
    /// Less common resource options, e.g. cubemaps (`TEXTURECUBE`).
    pub misc_flag: DdsResourceMiscFlags,
    /// Number of elements in a texture array (stored as u32 on disk).
    pub array_size: usize,
    /// Alpha mode and other flags; kept raw (uninterpreted) here.
    pub misc_flags2: u32,
}
|
||||
|
||||
impl Dx10Header {
|
||||
#[tracing::instrument("Dx10Header::from_binary", skip(r))]
|
||||
pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
|
||||
let dxgi_format = r
|
||||
.read_u32()
|
||||
.map(|val| DXGIFormat::from_u32(val).unwrap_or(DXGIFormat::UNKNOWN))?;
|
||||
let resource_dimension = r.read_u32().map(|val| {
|
||||
D3D10ResourceDimension::from_u32(val).unwrap_or(D3D10ResourceDimension::Unknown)
|
||||
})?;
|
||||
let misc_flag = r.read_u32().map(binary::flags_from_bits)?;
|
||||
let array_size = r.read_u32()? as usize;
|
||||
let misc_flags2 = r.read_u32()?;
|
||||
|
||||
Ok(Self {
|
||||
dxgi_format,
|
||||
resource_dimension,
|
||||
misc_flag,
|
||||
array_size,
|
||||
misc_flags2,
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument("Dx10Header::to_binary", skip(w))]
|
||||
pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
|
||||
w.write_u32(
|
||||
self.dxgi_format
|
||||
.to_u32()
|
||||
.ok_or_eyre("DXGIFormat should fit in a u32")?,
|
||||
)?;
|
||||
w.write_u32(
|
||||
self.resource_dimension
|
||||
.to_u32()
|
||||
.ok_or_eyre("DXGIFormat should fit in a u32")?,
|
||||
)?;
|
||||
w.write_u32(self.misc_flag.bits())?;
|
||||
w.write_u32(self.array_size as u32)?;
|
||||
w.write_u32(self.misc_flags2)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// FourCC codes found in `DDS_PIXELFORMAT::dwFourCC`, interpreted as a
/// little-endian u32 (e.g. the bytes "DXT1" are 0x31545844).
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display, FromPrimitive, ToPrimitive)]
#[repr(u32)]
pub enum FourCC {
    /// Sentinel for "no FourCC present" (the `DDPF::FOURCC` flag is unset).
    /// Not a real on-disk value.
    Empty = u32::MAX,
    DXT1 = 0x31545844,
    // NOTE(review): 0x33545844 decodes to the bytes "DXT3", not "DXT2"
    // (which would be 0x32545844) — confirm whether the name or the value
    // is intended here.
    DXT2 = 0x33545844,
    DXT5 = 0x35545844,
    // NOTE(review): 0x31495441 / 0x32495441 decode to "ATI1" / "ATI2";
    // the variant names spell "AXI" — presumably a typo, verify against
    // callers before renaming.
    AXI1 = 0x31495441,
    AXI2 = 0x32495441,
    /// Indicates that a `DDS_HEADER_DXT10` extension header follows.
    DX10 = 0x30315844,
    D3D_A16B16G16R16 = 0x24,
    D3D_R16F = 0x6F,
    D3D_G16R16F = 0x70,
    D3D_A16B16G16R16F = 0x71,
    D3D_R32F = 0x72,
    D3D_G32R32F = 0x73,
    D3D_A32B32G32R32F = 0x74,
}
|
||||
|
||||
/// The `DDS_PIXELFORMAT` structure (32 bytes on disk).
#[derive(Clone, Copy, Debug)]
pub struct DDSPixelFormat {
    /// Which of the following members carry valid data.
    pub flags: DDPF,
    /// Compression code; only meaningful when `flags` contains `DDPF::FOURCC`,
    /// otherwise [`FourCC::Empty`].
    pub four_cc: FourCC,
    /// Bits per pixel for RGB/YUV/luminance formats.
    pub rgb_bit_count: u32,
    pub r_bit_mask: u32,
    pub g_bit_mask: u32,
    pub b_bit_mask: u32,
    pub a_bit_mask: u32,
}
|
||||
|
||||
impl DDSPixelFormat {
|
||||
#[tracing::instrument("DDSPixelFormat::from_binary", skip(r))]
|
||||
pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
|
||||
let size = r.read_u32()? as usize;
|
||||
eyre::ensure!(
|
||||
size == 32,
|
||||
"Invalid structure size. Got 0X{:0X}, expected 0x20",
|
||||
size
|
||||
);
|
||||
|
||||
let flags: DDPF = r.read_u32().map(binary::flags_from_bits)?;
|
||||
|
||||
let four_cc = if flags.contains(DDPF::FOURCC) {
|
||||
r.read_u32().and_then(|bytes| {
|
||||
FourCC::from_u32(bytes).ok_or_eyre(format!("Unknown FourCC value: {:08X}", bytes))
|
||||
})?
|
||||
} else {
|
||||
r.skip_u32(0)?;
|
||||
FourCC::Empty
|
||||
};
|
||||
|
||||
let rgb_bit_count = r.read_u32()?;
|
||||
let r_bit_mask = r.read_u32()?;
|
||||
let g_bit_mask = r.read_u32()?;
|
||||
let b_bit_mask = r.read_u32()?;
|
||||
let a_bit_mask = r.read_u32()?;
|
||||
|
||||
Ok(Self {
|
||||
flags,
|
||||
four_cc,
|
||||
rgb_bit_count,
|
||||
r_bit_mask,
|
||||
g_bit_mask,
|
||||
b_bit_mask,
|
||||
a_bit_mask,
|
||||
})
|
||||
}
|
||||
|
||||
#[tracing::instrument("DDSPixelFormat::to_binary", skip(w))]
|
||||
pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
|
||||
// Structure size
|
||||
w.write_u32(32)?;
|
||||
|
||||
w.write_u32(self.flags.bits())?;
|
||||
w.write_u32(self.four_cc.to_u32().unwrap_or_default())?;
|
||||
w.write_u32(self.rgb_bit_count)?;
|
||||
w.write_u32(self.r_bit_mask)?;
|
||||
w.write_u32(self.g_bit_mask)?;
|
||||
w.write_u32(self.b_bit_mask)?;
|
||||
w.write_u32(self.a_bit_mask)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// The main `DDS_HEADER` structure (124 bytes on disk, preceded by the
/// "DDS " magic).
#[derive(Clone, Copy, Debug)]
pub struct DDSHeader {
    /// Flags to indicate which members contain valid data.
    pub flags: DDSD,
    pub height: usize,
    pub width: usize,
    /// Pitch (uncompressed) or total top-level size (compressed), depending
    /// on `DDSD::PITCH` / `DDSD::LINEARSIZE`.
    pub pitch_or_linear_size: usize,
    /// Depth of a volume texture; see `DDSD::DEPTH`.
    pub depth: usize,
    pub mipmap_count: usize,
    pub pixel_format: DDSPixelFormat,
    pub caps: DDSCAPS,
    pub caps_2: DDSCAPS2,
}
|
||||
|
||||
impl DDSHeader {
    /// Reads the "DDS " magic followed by the 124-byte `DDS_HEADER`.
    ///
    /// Reserved fields (11 u32s after `mipmap_count`, 3 u32s after `caps_2`)
    /// are skipped on read and written as zeros by `to_binary`.
    #[tracing::instrument("DDSHeader::from_binary", skip(r))]
    pub fn from_binary(mut r: impl ReadExt) -> Result<Self> {
        r.skip_u32(MAGIC_DDS).wrap_err("Invalid magic bytes")?;

        let size = r.read_u32()?;
        eyre::ensure!(
            size == 124,
            "Invalid structure size. Got 0x{:0X}, expected 0x7C",
            size
        );

        let flags = r.read_u32().map(binary::flags_from_bits)?;
        let height = r.read_u32()? as usize;
        let width = r.read_u32()? as usize;
        let pitch_or_linear_size = r.read_u32()? as usize;
        let depth = r.read_u32()? as usize;
        let mipmap_count = r.read_u32()? as usize;

        // Skip reserved bytes
        r.seek(SeekFrom::Current(11 * 4))?;

        let pixel_format = DDSPixelFormat::from_binary(&mut r)?;
        let caps = r.read_u32().map(binary::flags_from_bits)?;
        let caps_2 = r.read_u32().map(binary::flags_from_bits)?;

        // Skip unused and reserved bytes
        r.seek(SeekFrom::Current(3 * 4))?;

        Ok(Self {
            flags,
            height,
            width,
            pitch_or_linear_size,
            depth,
            mipmap_count,
            pixel_format,
            caps,
            caps_2,
        })
    }

    /// Writes the magic and header with the exact field order and reserved
    /// padding that `from_binary` expects.
    #[tracing::instrument("DDSHeader::to_binary", skip(w))]
    pub fn to_binary(&self, mut w: impl WriteExt) -> Result<()> {
        w.write_u32(MAGIC_DDS)?;

        // Structure size in bytes
        w.write_u32(124)?;
        w.write_u32(self.flags.bits())?;
        w.write_u32(self.height as u32)?;
        w.write_u32(self.width as u32)?;
        w.write_u32(self.pitch_or_linear_size as u32)?;
        w.write_u32(self.depth as u32)?;
        w.write_u32(self.mipmap_count as u32)?;

        // Reserved bytes, zeroed.
        w.write_all(&[0u8; 11 * 4])?;

        self.pixel_format.to_binary(&mut w)?;
        w.write_u32(self.caps.bits())?;
        w.write_u32(self.caps_2.bits())?;

        // Unused/reserved trailing bytes, zeroed.
        w.write_all(&[0u8; 3 * 4])?;

        Ok(())
    }
}
|
||||
|
||||
/// Image type as classified by the engine's `stingray::ImageFormat`.
// NOTE(review): discriminant values presumably mirror the engine's own
// enumeration — confirm before relying on the numeric values.
#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display)]
#[repr(u32)]
pub enum ImageType {
    Image2D = 0,
    Image3D = 1,
    ImageCube = 2,
    Unknown = 3,
    Image2dArray = 4,
    ImagecubeArray = 5,
}
|
||||
|
||||
/// A stripped version of `ImageType` that only contains just the data needed
/// to read a DDS image stream.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub struct StrippedImageFormat {
    pub image_type: ImageType,
    pub width: usize,
    pub height: usize,
    /// Array elements, cubemap faces, or volume depth, depending on
    /// `image_type`.
    pub layers: usize,
    /// Always at least 1, even for non-mipmapped images.
    pub mip_levels: usize,
}
|
||||
|
||||
// This is a stripped down version of the logic that the engine implements to fill
|
||||
// `stingray::ImageFormat`. With the `type` field we need to distinguish between `IMAGE3D`
|
||||
// and everything else, and we need the various dimensions filled to calculate the chunks.
|
||||
pub fn stripped_format_from_header(
|
||||
dds_header: &DDSHeader,
|
||||
dx10_header: &Dx10Header,
|
||||
) -> Result<StrippedImageFormat> {
|
||||
let mut image_format = StrippedImageFormat {
|
||||
image_type: ImageType::Unknown,
|
||||
width: dds_header.width,
|
||||
height: dds_header.height,
|
||||
layers: 0,
|
||||
mip_levels: 0,
|
||||
};
|
||||
|
||||
if dds_header.mipmap_count > 0 {
|
||||
image_format.mip_levels = dds_header.mipmap_count;
|
||||
} else {
|
||||
image_format.mip_levels = 1;
|
||||
}
|
||||
|
||||
// INFO: These next two sections are conditional in the engine code,
|
||||
// based on a lot of stuff in "fourcc" and other fields. But it might
|
||||
// actually be fine to just do it like this, as this seems universal
|
||||
// to DDS.
|
||||
// Will have to check how it plays out with actual assets.
|
||||
|
||||
if dds_header.caps_2.contains(DDSCAPS2::CUBEMAP) {
|
||||
image_format.image_type = ImageType::ImageCube;
|
||||
image_format.layers = 6;
|
||||
} else if dds_header.caps_2.contains(DDSCAPS2::VOLUME) {
|
||||
image_format.image_type = ImageType::Image3D;
|
||||
image_format.layers = dds_header.depth;
|
||||
} else {
|
||||
image_format.image_type = ImageType::Image2D;
|
||||
image_format.layers = 1;
|
||||
}
|
||||
|
||||
if dx10_header.resource_dimension == D3D10ResourceDimension::Texture2D {
|
||||
if dx10_header
|
||||
.misc_flag
|
||||
.contains(DdsResourceMiscFlags::TEXTURECUBE)
|
||||
{
|
||||
image_format.image_type = ImageType::ImageCube;
|
||||
if dx10_header.array_size > 1 {
|
||||
image_format.layers = dx10_header.array_size;
|
||||
} else {
|
||||
image_format.layers = 6;
|
||||
}
|
||||
} else {
|
||||
image_format.image_type = ImageType::Image2D;
|
||||
image_format.layers = dx10_header.array_size;
|
||||
}
|
||||
} else if dx10_header.resource_dimension == D3D10ResourceDimension::Texture3D {
|
||||
image_format.image_type = ImageType::Image3D;
|
||||
image_format.layers = dds_header.depth;
|
||||
}
|
||||
|
||||
if dx10_header.array_size > 1 {
|
||||
match image_format.image_type {
|
||||
ImageType::Image2D => image_format.image_type = ImageType::Image2dArray,
|
||||
ImageType::ImageCube => image_format.image_type = ImageType::ImagecubeArray,
|
||||
ImageType::Image3D => {
|
||||
eyre::bail!("3D-Arrays are not a supported image format")
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(image_format)
|
||||
}
|
|
@ -1,3 +1,4 @@
|
|||
#![feature(cursor_split)]
|
||||
#![feature(test)]
|
||||
|
||||
mod binary;
|
||||
|
|
|
@ -12,12 +12,19 @@ pub enum HashGroup {
|
|||
Filename,
|
||||
Filetype,
|
||||
Strings,
|
||||
TextureCategory,
|
||||
Other,
|
||||
}
|
||||
|
||||
impl HashGroup {
|
||||
pub fn all() -> [Self; 3] {
|
||||
[Self::Filename, Self::Filetype, Self::Other]
|
||||
pub fn all() -> [Self; 5] {
|
||||
[
|
||||
Self::Filename,
|
||||
Self::Filetype,
|
||||
Self::Strings,
|
||||
Self::TextureCategory,
|
||||
Self::Other,
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -27,6 +34,7 @@ impl std::fmt::Display for HashGroup {
|
|||
HashGroup::Filename => write!(f, "filename"),
|
||||
HashGroup::Filetype => write!(f, "filetype"),
|
||||
HashGroup::Strings => write!(f, "strings"),
|
||||
HashGroup::TextureCategory => write!(f, "texture-category"),
|
||||
HashGroup::Other => write!(f, "other"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,7 +50,7 @@ impl fmt::LowerHex for Murmur64 {
|
|||
|
||||
impl fmt::Display for Murmur64 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{self:016X}")
|
||||
write!(f, "{:016X}", self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -158,7 +158,7 @@ impl fmt::LowerHex for Murmur32 {
|
|||
|
||||
impl fmt::Display for Murmur32 {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{self:08X}")
|
||||
write!(f, "{:08X}", self)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue