mirror of
https://github.com/redstrate/Physis.git
synced 2025-04-21 12:17:45 +00:00
Add beginnings of skeleton parsing support
Now we support TexTools skel files, alongside the usual Havok packfiles. Neither format is fully supported (yet), but I'm exploring libraries to complete both. The dependencies are now commented to describe their usage and future plans.
This commit is contained in:
parent
8b6b6b036c
commit
1c72bc6da5
3 changed files with 201 additions and 1 deletions
45
Cargo.lock
generated
45
Cargo.lock
generated
|
@ -269,6 +269,30 @@ dependencies = [
|
|||
"crunchy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hard-xml"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3477ce594ff6d821c38fc3f8d28744b9bac0340c94b152ebb0f8a1fd5b740f54"
|
||||
dependencies = [
|
||||
"hard-xml-derive",
|
||||
"jetscii",
|
||||
"lazy_static",
|
||||
"memchr",
|
||||
"xmlparser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hard-xml-derive"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3aa4585e2b133d2479ff3f03febd76972234cc04b40cdb374fab11a7f7b797ca"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
|
@ -309,6 +333,12 @@ version = "1.0.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
|
||||
|
||||
[[package]]
|
||||
name = "jetscii"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "47f142fe24a9c9944451e8349de0a56af5f3e7226dc46f3ed4d4ecc0b85af75e"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.58"
|
||||
|
@ -350,6 +380,12 @@ dependencies = [
|
|||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.6.5"
|
||||
|
@ -417,8 +453,11 @@ dependencies = [
|
|||
"crc",
|
||||
"criterion",
|
||||
"half 2.1.0",
|
||||
"hard-xml",
|
||||
"libz-sys",
|
||||
"paste",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -747,3 +786,9 @@ name = "winapi-x86_64-pc-windows-gnu"
|
|||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "xmlparser"
|
||||
version = "0.13.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "114ba2b24d2167ef6d67d7d04c8cc86522b87f490025f39f0303b7db5bf5e3d8"
|
||||
|
|
18
Cargo.toml
18
Cargo.toml
|
@ -22,9 +22,25 @@ criterion = { git = "https://github.com/bheisler/criterion.rs", branch="version-
|
|||
retail_game_testing = []
|
||||
|
||||
[dependencies]
# used for jamcrc implementation, should eventually move away from it
crc = "3.0.0"

# amazing binary parsing/writing library
binrw = "0.9.2"

# used for zlib compression in sqpack files
libz-sys = { version = "1.1.8", default-features = false }

# nice to have features rust is lacking at the moment
bitfield-struct = "0.1.7"
paste = "1.0.7"

# needed for half-float support which FFXIV uses in its model data
# (declared only once: a duplicate `half` key is invalid TOML and Cargo
# rejects the manifest with a "duplicate key" error)
half = "2.1.0"

# needed for havok xml parsing
hard-xml = "1.13.0"

# needed for textools skel parsing
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
|
139
src/skeleton.rs
Normal file
139
src/skeleton.rs
Normal file
|
@ -0,0 +1,139 @@
|
|||
use crate::gamedata::MemoryBuffer;
|
||||
use hard_xml::XmlRead;
|
||||
|
||||
/// A single bone in a [`Skeleton`].
#[derive(Debug)]
pub struct Bone {
    /// Human-readable bone name (e.g. as read from a TexTools skel entry).
    name: String,
    /// Index of this bone's parent within `Skeleton::bones`.
    // NOTE(review): 0 doubles as both "child of bone 0" and the untouched
    // default for root bones in `from_skel` — consider `Option<usize>` to
    // make root bones unambiguous.
    parent_index: usize,

    // Local transform components. Coordinate space / parent-relativity is not
    // established by the visible code (from_skel leaves them zeroed) —
    // TODO confirm once pose_matrix decomposition is implemented.
    position: [f32; 3],
    rotation: [f32; 4],
    scale: [f32; 3]
}
|
||||
|
||||
/// A parsed skeleton: a flat list of bones.
///
/// `Bone::parent_index` values index into `bones`.
#[derive(Debug)]
pub struct Skeleton {
    bones : Vec<Bone>
}
|
||||
|
||||
impl Skeleton {
|
||||
/// Parses a Havok XML packfile generated by the Havok SDK.
|
||||
pub fn from_packfile(buffer : &MemoryBuffer) -> Option<Skeleton> {
|
||||
#[derive(XmlRead, Debug)]
|
||||
#[xml(tag = "hkpackfile")]
|
||||
struct HkPackfile {
|
||||
#[xml(child = "hksection")]
|
||||
sections: Vec<HkSection>,
|
||||
#[xml(attr = "toplevelobject")]
|
||||
top_level_object : String
|
||||
}
|
||||
|
||||
#[derive(XmlRead, Debug)]
|
||||
#[xml(tag = "hksection")]
|
||||
struct HkSection {
|
||||
#[xml(attr = "name")]
|
||||
name: String,
|
||||
|
||||
#[xml(child = "hkobject")]
|
||||
objects: Vec<HkObject>
|
||||
}
|
||||
|
||||
#[derive(XmlRead, Debug)]
|
||||
#[xml(tag = "hkobject")]
|
||||
struct HkObject {
|
||||
#[xml(attr = "name")]
|
||||
name: Option<String>,
|
||||
|
||||
#[xml(attr = "class")]
|
||||
class: Option<String>,
|
||||
|
||||
#[xml(child = "hkparam")]
|
||||
params: Vec<HkParam>
|
||||
}
|
||||
|
||||
#[derive(XmlRead, Debug)]
|
||||
#[xml(tag = "hkparam")]
|
||||
struct HkParam {
|
||||
#[xml(attr = "name")]
|
||||
name : String,
|
||||
|
||||
#[xml(attr = "className")]
|
||||
class_name : Option<String>,
|
||||
|
||||
#[xml(attr = "variant")]
|
||||
variant : Option<String>,
|
||||
|
||||
#[xml(child = "hkobject")]
|
||||
objects : Vec<HkObject>,
|
||||
|
||||
#[xml(text)]
|
||||
content : String
|
||||
}
|
||||
|
||||
let pak = HkPackfile::from_str(&mut std::str::from_utf8(&buffer).unwrap())
|
||||
.expect("Failed to parse sidecar file!");
|
||||
|
||||
// find the root level object
|
||||
let root_level_object = pak.sections[0].objects.iter()
|
||||
.find(|s| s.name.as_ref() == Some(&pak.top_level_object))
|
||||
.expect("Cannot locate root level object.");
|
||||
|
||||
println!("{:#?}", root_level_object);
|
||||
|
||||
println!("{:#?}", pak);
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Parses the TexTools skeleton format, as a nice alternative to packfiles.
|
||||
pub fn from_skel(buffer : &MemoryBuffer) -> Option<Skeleton> {
|
||||
let mut string_repr = String::from_utf8(buffer.to_vec()).unwrap();
|
||||
|
||||
// for some reason, textools does NOT write valid JSON.
|
||||
// let's begin by surrounding all of their json object blocks with an array, which is a valid
|
||||
// JSON root.
|
||||
string_repr.insert(0, '[');
|
||||
string_repr.push(']');
|
||||
|
||||
// then we turn all of newlines into commas, except of course for the last one!
|
||||
string_repr = string_repr.replacen("\n", ",", string_repr.matches("\n").count() - 1);
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "PascalCase")]
|
||||
struct BoneObject {
|
||||
bone_name : String,
|
||||
bone_number : i32,
|
||||
bone_parent : i32,
|
||||
pose_matrix : [f32; 16]
|
||||
}
|
||||
|
||||
let json_bones : Vec<BoneObject> = serde_json::from_str(&string_repr).unwrap();
|
||||
|
||||
let mut skeleton = Skeleton {
|
||||
bones: vec![]
|
||||
};
|
||||
|
||||
for bone in &json_bones {
|
||||
skeleton.bones.push(Bone {
|
||||
name: bone.bone_name.clone(),
|
||||
parent_index: 0,
|
||||
position: [0.0; 3],
|
||||
rotation: [0.0; 4],
|
||||
scale: [0.0; 3]
|
||||
});
|
||||
}
|
||||
|
||||
// assign parenting
|
||||
for bone in &json_bones {
|
||||
if bone.bone_parent != -1 {
|
||||
let mut new_bone = &mut skeleton.bones[bone.bone_number as usize];
|
||||
|
||||
new_bone.parent_index = bone.bone_parent as usize;
|
||||
}
|
||||
}
|
||||
|
||||
Some(skeleton)
|
||||
}
|
||||
}
|
Loading…
Add table
Reference in a new issue