Mirror of https://github.com/redstrate/Physis.git
Add support for reading binary SKLB and PBD
This removes the dependency on the Havok SDK and the need to obtain decompiled skeleton files from TexTools or some other place. Code courtesy of FFXIVTools. The other two ways of reading skeletons (SKEL and Packfile) are removed, which gets rid of two dependencies.
parent 3243132a3f
commit 07582775cd
16 changed files with 1736 additions and 125 deletions
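For orientation, here is a minimal sketch (not part of this commit) of how the new pieces fit together. The helper function is hypothetical; how the SKLB container itself is parsed is not shown in this excerpt, so the Havok binary tag-file payload is assumed to have been extracted already, and the paths are crate-internal because `mod havok` stays private in this commit.

use crate::havok::{HavokAnimationContainer, HavokBinaryTagFileReader};

// Hypothetical crate-internal helper: list a skeleton's bone names from an
// already-extracted Havok binary tag-file payload.
fn bone_names(havok_payload: &[u8]) -> Vec<String> {
    // Parses the binary tag file (0xCAB00D1E / 0xD011FACE signature) into a root object.
    let root = HavokBinaryTagFileReader::read(havok_payload);

    // The skeleton data lives in the root object's "hkaAnimationContainer" variant.
    let container = HavokAnimationContainer::new(root.find_object_by_type("hkaAnimationContainer"));

    // Assumes at least one skeleton is present, as is expected for FFXIV skeleton files.
    container.skeletons[0].bone_names.clone()
}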
Cargo.toml
@@ -44,7 +44,7 @@ game_install = []
 # enables support for extracting visual data, such as models, textures, materials, etc
 # this enables a whole bunch of dependencies!
 # tip: can be safely turned off for launchers and other tools that simply need to extract the bare minimum of data
-visual_data = ["dep:half", "dep:hard-xml", "dep:serde_json", "dep:serde", "dep:glam", "dep:bitflags", "dep:texpresso"]
+visual_data = ["dep:half", "dep:glam", "dep:bitflags", "dep:texpresso"]
 
 # testing only features
 retail_game_testing = []
@@ -66,13 +66,6 @@ paste = "1"
 # needed for half-float support which FFXIV uses in it's model data
 half = { version = "2", optional = true }
 
-# needed for havok xml parsing
-hard-xml = { version = "1", optional = true }
-
-# needed for textools skel parsing
-serde_json = { version = "1", optional = true }
-serde = { version = "1", optional = true, features = ["derive"] }
-
 # needed for deconstructing skeleton pose matrices
 glam = { version = "0.24.1", optional = true }
 
9 LICENSES/MIT.txt Normal file
@@ -0,0 +1,9 @@
MIT License

Copyright (c) <year> <copyright holders>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
9 src/havok/animation.rs Normal file
@@ -0,0 +1,9 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

use crate::havok::transform::HavokTransform;

pub trait HavokAnimation {
    fn duration(&self) -> f32;
    fn sample(&self, time: f32) -> Vec<HavokTransform>;
}
53 src/havok/animation_binding.rs Normal file
@@ -0,0 +1,53 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

use core::cell::RefCell;
use std::sync::Arc;
use crate::havok::HavokAnimation;
use crate::havok::object::HavokObject;
use crate::havok::spline_compressed_animation::HavokSplineCompressedAnimation;

#[repr(u8)]
pub enum HavokAnimationBlendHint {
    Normal = 0,
    Additive = 1,
}

impl HavokAnimationBlendHint {
    pub fn from_raw(raw: u8) -> Self {
        match raw {
            0 => Self::Normal,
            1 => Self::Additive,
            _ => panic!(),
        }
    }
}

pub struct HavokAnimationBinding {
    pub transform_track_to_bone_indices: Vec<u16>,
    pub blend_hint: HavokAnimationBlendHint,
    pub animation: Box<dyn HavokAnimation>,
}

impl HavokAnimationBinding {
    pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
        let root = object.borrow();

        let raw_transform_track_to_bone_indices = root.get("transformTrackToBoneIndices").as_array();
        let transform_track_to_bone_indices = raw_transform_track_to_bone_indices.iter().map(|x| x.as_int() as u16).collect::<Vec<_>>();

        let blend_hint = HavokAnimationBlendHint::from_raw(root.get("blendHint").as_int() as u8);

        let raw_animation = root.get("animation").as_object();
        let animation = match &*raw_animation.borrow().object_type.name {
            "hkaSplineCompressedAnimation" => Box::new(HavokSplineCompressedAnimation::new(raw_animation.clone())),
            _ => panic!(),
        };

        Self {
            transform_track_to_bone_indices,
            blend_hint,
            animation,
        }
    }
}
27 src/havok/animation_container.rs Normal file
@@ -0,0 +1,27 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

use std::cell::RefCell;
use std::sync::Arc;
use crate::havok::animation_binding::HavokAnimationBinding;
use crate::havok::object::HavokObject;
use crate::havok::skeleton::HavokSkeleton;

pub struct HavokAnimationContainer {
    pub skeletons: Vec<HavokSkeleton>,
    pub bindings: Vec<HavokAnimationBinding>,
}

impl HavokAnimationContainer {
    pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
        let root = object.borrow();

        let raw_skeletons = root.get("skeletons").as_array();
        let skeletons = raw_skeletons.iter().map(|x| HavokSkeleton::new(x.as_object())).collect::<Vec<_>>();

        let raw_bindings = root.get("bindings").as_array();
        let bindings = raw_bindings.iter().map(|x| HavokAnimationBinding::new(x.as_object())).collect::<Vec<_>>();

        Self { skeletons, bindings }
    }
}
341 src/havok/binary_tag_file_reader.rs Normal file
@@ -0,0 +1,341 @@
|
||||||
|
// SPDX-FileCopyrightText: 2020 Inseok Lee
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use core::cell::RefCell;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use crate::havok::byte_reader::ByteReader;
|
||||||
|
use crate::havok::object::{HavokInteger, HavokObject, HavokObjectType, HavokObjectTypeMember, HavokRootObject, HavokValue, HavokValueType};
|
||||||
|
use crate::havok::slice_ext::SliceByteOrderExt;
|
||||||
|
|
||||||
|
#[repr(i8)]
|
||||||
|
enum HavokTagType {
|
||||||
|
Eof = -1,
|
||||||
|
Invalid = 0,
|
||||||
|
FileInfo = 1,
|
||||||
|
Type = 2,
|
||||||
|
Object = 3,
|
||||||
|
ObjectRemember = 4,
|
||||||
|
Backref = 5,
|
||||||
|
ObjectNull = 6,
|
||||||
|
FileEnd = 7,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokTagType {
|
||||||
|
pub fn from_raw(raw: u8) -> Self {
|
||||||
|
match raw {
|
||||||
|
255 => HavokTagType::Eof,
|
||||||
|
0 => HavokTagType::Invalid,
|
||||||
|
1 => HavokTagType::FileInfo,
|
||||||
|
2 => HavokTagType::Type,
|
||||||
|
3 => HavokTagType::Object,
|
||||||
|
4 => HavokTagType::ObjectRemember,
|
||||||
|
5 => HavokTagType::Backref,
|
||||||
|
6 => HavokTagType::ObjectNull,
|
||||||
|
7 => HavokTagType::FileEnd,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HavokBinaryTagFileReader<'a> {
|
||||||
|
file_version: u8,
|
||||||
|
remembered_strings: Vec<Arc<str>>,
|
||||||
|
remembered_types: Vec<Arc<HavokObjectType>>,
|
||||||
|
remembered_objects: Vec<Arc<RefCell<HavokObject>>>,
|
||||||
|
objects: Vec<Arc<RefCell<HavokObject>>>,
|
||||||
|
reader: ByteReader<'a>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> HavokBinaryTagFileReader<'a> {
|
||||||
|
pub fn read(data: &'a [u8]) -> HavokRootObject {
|
||||||
|
let mut reader = Self::new(ByteReader::new(data));
|
||||||
|
|
||||||
|
reader.do_read()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new(reader: ByteReader<'a>) -> Self {
|
||||||
|
let file_version = 0;
|
||||||
|
let remembered_strings = vec![Arc::from("string"), Arc::from("")];
|
||||||
|
let remembered_types = vec![Arc::new(HavokObjectType::new(Arc::from("object"), None, Vec::new()))];
|
||||||
|
let remembered_objects = Vec::new();
|
||||||
|
let objects = Vec::new();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
file_version,
|
||||||
|
remembered_strings,
|
||||||
|
remembered_types,
|
||||||
|
remembered_objects,
|
||||||
|
objects,
|
||||||
|
reader,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn do_read(&mut self) -> HavokRootObject {
|
||||||
|
let signature1 = self.reader.read_bytes(4).to_int_le::<u32>();
|
||||||
|
let signature2 = self.reader.read_bytes(4).to_int_le::<u32>();
|
||||||
|
if signature1 != 0xCAB0_0D1E || signature2 != 0xD011_FACE {
|
||||||
|
panic!()
|
||||||
|
}
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let tag_type = HavokTagType::from_raw(self.read_packed_int() as u8);
|
||||||
|
match tag_type {
|
||||||
|
HavokTagType::FileInfo => {
|
||||||
|
self.file_version = self.read_packed_int() as u8;
|
||||||
|
assert!(self.file_version == 3, "Unimplemented version");
|
||||||
|
self.remembered_objects
|
||||||
|
.push(Arc::new(RefCell::new(HavokObject::new(self.remembered_types[0].clone(), HashMap::new()))))
|
||||||
|
}
|
||||||
|
HavokTagType::Type => {
|
||||||
|
let object_type = self.read_type();
|
||||||
|
self.remembered_types.push(Arc::new(object_type));
|
||||||
|
}
|
||||||
|
HavokTagType::Backref => panic!(),
|
||||||
|
HavokTagType::ObjectRemember => {
|
||||||
|
let object = Arc::new(RefCell::new(self.read_object()));
|
||||||
|
|
||||||
|
self.remembered_objects.push(object.clone());
|
||||||
|
self.objects.push(object);
|
||||||
|
}
|
||||||
|
HavokTagType::FileEnd => {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fill object references
|
||||||
|
for object in &self.objects {
|
||||||
|
self.fill_object_reference(&mut object.borrow_mut());
|
||||||
|
}
|
||||||
|
|
||||||
|
HavokRootObject::new(self.remembered_objects[1].clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_object(&mut self) -> HavokObject {
|
||||||
|
let object_type_index = self.read_packed_int();
|
||||||
|
let object_type = self.remembered_types[object_type_index as usize].clone();
|
||||||
|
|
||||||
|
let members = object_type.members();
|
||||||
|
let data_existence = self.read_bit_field(members.len());
|
||||||
|
|
||||||
|
let data = members
|
||||||
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(index, member)| {
|
||||||
|
let value = if data_existence[index] {
|
||||||
|
self.read_object_member_value(member)
|
||||||
|
} else {
|
||||||
|
Self::default_value(member.type_)
|
||||||
|
};
|
||||||
|
(index, value)
|
||||||
|
})
|
||||||
|
.collect::<HashMap<_, _>>();
|
||||||
|
|
||||||
|
HavokObject::new(object_type.clone(), data)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_object_member_value(&mut self, member: &HavokObjectTypeMember) -> HavokValue {
|
||||||
|
if member.type_.is_array() {
|
||||||
|
let array_len = self.read_packed_int();
|
||||||
|
if member.type_.base_type() == HavokValueType::OBJECT && member.class_name.is_none() {
|
||||||
|
panic!()
|
||||||
|
}
|
||||||
|
|
||||||
|
HavokValue::Array(self.read_array(member, array_len as usize))
|
||||||
|
} else {
|
||||||
|
match member.type_ {
|
||||||
|
HavokValueType::BYTE => HavokValue::Integer(self.reader.read() as i32),
|
||||||
|
HavokValueType::INT => HavokValue::Integer(self.read_packed_int()),
|
||||||
|
HavokValueType::REAL => HavokValue::Real(self.reader.read_f32_le()),
|
||||||
|
HavokValueType::STRING => HavokValue::String(self.read_string()),
|
||||||
|
HavokValueType::OBJECT => HavokValue::ObjectReference(self.read_packed_int() as usize),
|
||||||
|
_ => panic!("unimplemented {}", member.type_.bits()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_array(&mut self, member: &HavokObjectTypeMember, array_len: usize) -> Vec<HavokValue> {
|
||||||
|
let base_type = member.type_.base_type();
|
||||||
|
match base_type {
|
||||||
|
HavokValueType::STRING => (0..array_len).map(|_| HavokValue::String(self.read_string())).collect::<Vec<_>>(),
|
||||||
|
HavokValueType::STRUCT => {
|
||||||
|
let target_type = self.find_type(member.class_name.as_ref().unwrap());
|
||||||
|
let data_existence = self.read_bit_field(target_type.member_count());
|
||||||
|
|
||||||
|
let mut result_objects = Vec::new();
|
||||||
|
for _ in 0..array_len {
|
||||||
|
let object = Arc::new(RefCell::new(HavokObject::new(target_type.clone(), HashMap::new())));
|
||||||
|
|
||||||
|
result_objects.push(object.clone());
|
||||||
|
self.objects.push(object);
|
||||||
|
}
|
||||||
|
|
||||||
|
// struct of array
|
||||||
|
for (member_index, member) in target_type.members().into_iter().enumerate() {
|
||||||
|
if data_existence[member_index] {
|
||||||
|
if member.type_.is_tuple() {
|
||||||
|
panic!()
|
||||||
|
} else {
|
||||||
|
let data = self.read_array(member, array_len);
|
||||||
|
for (index, item) in data.into_iter().enumerate() {
|
||||||
|
result_objects[index].borrow_mut().set(member_index, item);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result_objects.into_iter().map(HavokValue::Object).collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
HavokValueType::OBJECT => (0..array_len)
|
||||||
|
.map(|_| {
|
||||||
|
let object_index = self.read_packed_int();
|
||||||
|
|
||||||
|
HavokValue::ObjectReference(object_index as usize)
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
HavokValueType::BYTE => (0..array_len)
|
||||||
|
.map(|_| HavokValue::Integer(self.reader.read() as HavokInteger))
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
HavokValueType::INT => {
|
||||||
|
if self.file_version >= 3 {
|
||||||
|
self.read_packed_int(); // type?
|
||||||
|
}
|
||||||
|
(0..array_len).map(|_| HavokValue::Integer(self.read_packed_int())).collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
HavokValueType::REAL => (0..array_len).map(|_| HavokValue::Real(self.reader.read_f32_le())).collect::<Vec<_>>(),
|
||||||
|
HavokValueType::VEC4 | HavokValueType::VEC8 | HavokValueType::VEC12 | HavokValueType::VEC16 => {
|
||||||
|
let vec_size = member.type_.base_type().vec_size() as usize;
|
||||||
|
(0..array_len)
|
||||||
|
.map(|_| HavokValue::Vec((0..vec_size).map(|_| self.reader.read_f32_le()).collect::<Vec<_>>()))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
_ => panic!("unimplemented {} {}", member.type_.bits(), member.type_.base_type().bits()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_type(&mut self) -> HavokObjectType {
|
||||||
|
let name = self.read_string();
|
||||||
|
let _version = self.read_packed_int();
|
||||||
|
let parent = self.read_packed_int();
|
||||||
|
let member_count = self.read_packed_int();
|
||||||
|
|
||||||
|
let parent = self.remembered_types[parent as usize].clone();
|
||||||
|
let members = (0..member_count)
|
||||||
|
.map(|_| {
|
||||||
|
let member_name = self.read_string();
|
||||||
|
let type_ = HavokValueType::from_bits(self.read_packed_int() as u32).unwrap();
|
||||||
|
|
||||||
|
let tuple_size = if type_.is_tuple() { self.read_packed_int() } else { 0 };
|
||||||
|
let type_name = if type_.base_type() == HavokValueType::OBJECT || type_.base_type() == HavokValueType::STRUCT {
|
||||||
|
Some(self.read_string())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
HavokObjectTypeMember::new(member_name, type_, tuple_size as u32, type_name)
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
HavokObjectType::new(name, Some(parent), members)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_string(&mut self) -> Arc<str> {
|
||||||
|
let length = self.read_packed_int();
|
||||||
|
if length < 0 {
|
||||||
|
return self.remembered_strings[-length as usize].clone();
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = Arc::from(std::str::from_utf8(self.reader.read_bytes(length as usize)).unwrap().to_owned());
|
||||||
|
self.remembered_strings.push(Arc::clone(&result));
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_bit_field(&mut self, count: usize) -> Vec<bool> {
|
||||||
|
let bytes_to_read = ((count + 7) & 0xffff_fff8) / 8;
|
||||||
|
let bytes = self.reader.read_bytes(bytes_to_read);
|
||||||
|
|
||||||
|
let mut result = Vec::with_capacity(count);
|
||||||
|
for byte in bytes {
|
||||||
|
let mut byte = *byte;
|
||||||
|
for _ in 0..8 {
|
||||||
|
result.push((byte & 1) == 1);
|
||||||
|
byte >>= 1;
|
||||||
|
|
||||||
|
if result.len() == count {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_packed_int(&mut self) -> HavokInteger {
|
||||||
|
let mut byte = self.reader.read();
|
||||||
|
|
||||||
|
let mut result = ((byte & 0x7f) >> 1) as u32;
|
||||||
|
let neg = byte & 1;
|
||||||
|
|
||||||
|
let mut shift = 6;
|
||||||
|
while byte & 0x80 != 0 {
|
||||||
|
byte = self.reader.read();
|
||||||
|
|
||||||
|
result |= ((byte as u32) & 0xffff_ff7f) << shift;
|
||||||
|
shift += 7;
|
||||||
|
}
|
||||||
|
if neg == 1 {
|
||||||
|
-(result as HavokInteger)
|
||||||
|
} else {
|
||||||
|
result as HavokInteger
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_type(&self, type_name: &str) -> Arc<HavokObjectType> {
|
||||||
|
self.remembered_types.iter().find(|&x| &*x.name == type_name).unwrap().clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fill_object_reference(&self, object: &mut HavokObject) {
|
||||||
|
let mut values_to_update = Vec::new();
|
||||||
|
for (index, mut value) in object.members_mut() {
|
||||||
|
match &mut value {
|
||||||
|
HavokValue::ObjectReference(x) => {
|
||||||
|
let object_ref = &self.remembered_objects[*x];
|
||||||
|
values_to_update.push((*index, HavokValue::Object(object_ref.clone())));
|
||||||
|
}
|
||||||
|
HavokValue::Array(x) => {
|
||||||
|
x.iter_mut().for_each(|item| {
|
||||||
|
if let HavokValue::ObjectReference(x) = item {
|
||||||
|
let object_ref = &self.remembered_objects[*x];
|
||||||
|
|
||||||
|
*item = HavokValue::Object(object_ref.clone())
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (index, value) in values_to_update {
|
||||||
|
object.set(index, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_value(type_: HavokValueType) -> HavokValue {
|
||||||
|
if type_.is_vec() {
|
||||||
|
HavokValue::Array((0..type_.vec_size()).map(|_| Self::default_value(type_.base_type())).collect::<Vec<_>>())
|
||||||
|
} else if type_.is_array() || type_.is_tuple() {
|
||||||
|
HavokValue::Array(Vec::new())
|
||||||
|
} else {
|
||||||
|
match type_ {
|
||||||
|
HavokValueType::EMPTY => HavokValue::Integer(HavokInteger::default()),
|
||||||
|
HavokValueType::BYTE => HavokValue::Integer(HavokInteger::default()),
|
||||||
|
HavokValueType::INT => HavokValue::Integer(HavokInteger::default()),
|
||||||
|
HavokValueType::OBJECT => HavokValue::ObjectReference(0),
|
||||||
|
_ => panic!("unimplemented {}", type_.bits()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
74 src/havok/byte_reader.rs Normal file
@@ -0,0 +1,74 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

use core::mem::size_of;

#[derive(Clone)]
pub struct ByteReader<'a> {
    data: &'a [u8],
    cursor: usize,
}

impl<'a> ByteReader<'a> {
    pub fn new(data: &'a [u8]) -> Self {
        Self { data, cursor: 0 }
    }

    pub fn read(&mut self) -> u8 {
        let result = self.data[self.cursor];
        self.cursor += 1;

        result
    }

    pub fn read_u16_le(&mut self) -> u16 {
        let result = u16::from_le_bytes([self.data[self.cursor], self.data[self.cursor + 1]]);
        self.cursor += size_of::<u16>();

        result
    }

    pub fn read_f32_le(&mut self) -> f32 {
        let result = f32::from_le_bytes([
            self.data[self.cursor],
            self.data[self.cursor + 1],
            self.data[self.cursor + 2],
            self.data[self.cursor + 3],
        ]);
        self.cursor += size_of::<f32>();

        result
    }

    pub fn read_bytes(&mut self, size: usize) -> &[u8] {
        let result = &self.data[self.cursor..self.cursor + size];
        self.cursor += size;

        result
    }

    pub fn align(&mut self, align: usize) {
        self.cursor = Self::round_up(self.cursor, align)
    }

    fn round_up(num_to_round: usize, multiple: usize) -> usize {
        if multiple == 0 {
            return num_to_round;
        }

        let remainder = num_to_round % multiple;
        if remainder == 0 {
            num_to_round
        } else {
            num_to_round + multiple - remainder
        }
    }

    pub fn raw(&self) -> &[u8] {
        &self.data[self.cursor..]
    }

    pub fn seek(&mut self, offset: usize) {
        self.cursor += offset;
    }
}
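To make the cursor semantics above concrete, here is a small illustrative test (not part of the commit, assumed to sit alongside this module); the byte values are arbitrary stand-ins, not a real Havok payload.

#[test]
fn byte_reader_cursor_semantics() {
    let mut reader = ByteReader::new(&[0x1E, 0x0D, 0xB0, 0xCA, 0x00, 0x00]);

    assert_eq!(reader.read_u16_le(), 0x0D1E); // two bytes consumed, little-endian
    reader.align(4);                          // cursor rounds up from 2 to 4
    assert_eq!(reader.raw(), &[0x00, 0x00]);  // everything not yet read
    reader.seek(1);                           // relative skip, not an absolute position
    assert_eq!(reader.read(), 0x00);
}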
19 src/havok/mod.rs Normal file
@@ -0,0 +1,19 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

extern crate alloc;

mod animation;
mod animation_binding;
mod animation_container;
mod binary_tag_file_reader;
mod byte_reader;
mod object;
mod skeleton;
mod spline_compressed_animation;
mod transform;
mod slice_ext;

pub use animation::HavokAnimation;
pub use animation_container::HavokAnimationContainer;
pub use binary_tag_file_reader::HavokBinaryTagFileReader;
228 src/havok/object.rs Normal file
@@ -0,0 +1,228 @@
|
||||||
|
// SPDX-FileCopyrightText: 2020 Inseok Lee
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use core::cell::RefCell;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use bitflags::bitflags;
|
||||||
|
|
||||||
|
bitflags! {
|
||||||
|
pub struct HavokValueType: u32 {
|
||||||
|
const EMPTY = 0;
|
||||||
|
const BYTE = 1;
|
||||||
|
const INT = 2;
|
||||||
|
const REAL = 3;
|
||||||
|
const VEC4 = 4;
|
||||||
|
const VEC8 = 5;
|
||||||
|
const VEC12 = 6;
|
||||||
|
const VEC16 = 7;
|
||||||
|
const OBJECT = 8;
|
||||||
|
const STRUCT = 9;
|
||||||
|
const STRING = 10;
|
||||||
|
|
||||||
|
const ARRAY = 0x10;
|
||||||
|
const ARRAYBYTE = Self::ARRAY.bits | Self::BYTE.bits;
|
||||||
|
const ARRAYINT = Self::ARRAY.bits | Self::INT.bits;
|
||||||
|
const ARRAYREAL = Self::ARRAY.bits | Self::REAL.bits;
|
||||||
|
const ARRAYVEC4 = Self::ARRAY.bits | Self::VEC4.bits;
|
||||||
|
const ARRAYVEC8 = Self::ARRAY.bits | Self::VEC8.bits;
|
||||||
|
const ARRAYVEC12 = Self::ARRAY.bits | Self::VEC12.bits;
|
||||||
|
const ARRAYVEC16 = Self::ARRAY.bits | Self::VEC16.bits;
|
||||||
|
const ARRAYOBJECT = Self::ARRAY.bits | Self::OBJECT.bits;
|
||||||
|
const ARRAYSTRUCT = Self::ARRAY.bits | Self::STRUCT.bits;
|
||||||
|
const ARRAYSTRING = Self::ARRAY.bits | Self::STRING.bits;
|
||||||
|
|
||||||
|
const TUPLE = 0x20;
|
||||||
|
const TUPLEBYTE = Self::TUPLE.bits | Self::BYTE.bits;
|
||||||
|
const TUPLEINT = Self::TUPLE.bits | Self::INT.bits;
|
||||||
|
const TUPLEREAL = Self::TUPLE.bits | Self::REAL.bits;
|
||||||
|
const TUPLEVEC4 = Self::TUPLE.bits | Self::VEC4.bits;
|
||||||
|
const TUPLEVEC8 = Self::TUPLE.bits | Self::VEC8.bits;
|
||||||
|
const TUPLEVEC12 = Self::TUPLE.bits | Self::VEC12.bits;
|
||||||
|
const TUPLEVEC16 = Self::TUPLE.bits | Self::VEC16.bits;
|
||||||
|
const TUPLEOBJECT = Self::TUPLE.bits | Self::OBJECT.bits;
|
||||||
|
const TUPLESTRUCT = Self::TUPLE.bits | Self::STRUCT.bits;
|
||||||
|
const TUPLESTRING = Self::TUPLE.bits | Self::STRING.bits;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokValueType {
|
||||||
|
pub fn is_tuple(self) -> bool {
|
||||||
|
(self.bits & HavokValueType::TUPLE.bits) != 0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_array(self) -> bool {
|
||||||
|
(self.bits & HavokValueType::ARRAY.bits) != 0
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn base_type(self) -> HavokValueType {
|
||||||
|
HavokValueType::from_bits(self.bits & 0x0f).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_vec(self) -> bool {
|
||||||
|
let base_type = self.base_type();
|
||||||
|
base_type == HavokValueType::VEC4
|
||||||
|
|| base_type == HavokValueType::VEC8
|
||||||
|
|| base_type == HavokValueType::VEC12
|
||||||
|
|| base_type == HavokValueType::VEC16
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn vec_size(self) -> u8 {
|
||||||
|
match self.base_type() {
|
||||||
|
HavokValueType::VEC4 => 4,
|
||||||
|
HavokValueType::VEC8 => 8,
|
||||||
|
HavokValueType::VEC12 => 12,
|
||||||
|
HavokValueType::VEC16 => 16,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type HavokInteger = i32;
|
||||||
|
pub type HavokReal = f32;
|
||||||
|
|
||||||
|
pub enum HavokValue {
|
||||||
|
Integer(HavokInteger),
|
||||||
|
Real(HavokReal),
|
||||||
|
String(Arc<str>),
|
||||||
|
Vec(Vec<HavokReal>),
|
||||||
|
Array(Vec<HavokValue>),
|
||||||
|
Object(Arc<RefCell<HavokObject>>),
|
||||||
|
|
||||||
|
ObjectReference(usize),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokValue {
|
||||||
|
pub fn as_int(&self) -> HavokInteger {
|
||||||
|
match self {
|
||||||
|
Self::Integer(x) => *x,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_object(&self) -> Arc<RefCell<HavokObject>> {
|
||||||
|
match self {
|
||||||
|
Self::Object(x) => x.clone(),
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_array(&self) -> &Vec<HavokValue> {
|
||||||
|
match self {
|
||||||
|
Self::Array(x) => x,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_string(&self) -> &str {
|
||||||
|
match self {
|
||||||
|
Self::String(x) => x,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_vec(&self) -> &Vec<HavokReal> {
|
||||||
|
match self {
|
||||||
|
Self::Vec(x) => x,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_real(&self) -> HavokReal {
|
||||||
|
match self {
|
||||||
|
Self::Real(x) => *x,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HavokRootObject {
|
||||||
|
object: Arc<RefCell<HavokObject>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokRootObject {
|
||||||
|
pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
|
||||||
|
Self { object }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn find_object_by_type(&self, type_name: &'static str) -> Arc<RefCell<HavokObject>> {
|
||||||
|
let root_obj = self.object.borrow();
|
||||||
|
let named_variants = root_obj.get("namedVariants");
|
||||||
|
|
||||||
|
for variant in named_variants.as_array() {
|
||||||
|
let variant_obj = variant.as_object();
|
||||||
|
if variant_obj.borrow().get("className").as_string() == type_name {
|
||||||
|
return variant_obj.borrow().get("variant").as_object();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
unreachable!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HavokObjectTypeMember {
|
||||||
|
pub name: Arc<str>,
|
||||||
|
pub type_: HavokValueType,
|
||||||
|
pub tuple_size: u32,
|
||||||
|
pub class_name: Option<Arc<str>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokObjectTypeMember {
|
||||||
|
pub fn new(name: Arc<str>, type_: HavokValueType, tuple_size: u32, type_name: Option<Arc<str>>) -> Self {
|
||||||
|
Self {
|
||||||
|
name,
|
||||||
|
type_,
|
||||||
|
tuple_size,
|
||||||
|
class_name: type_name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HavokObjectType {
|
||||||
|
pub name: Arc<str>,
|
||||||
|
parent: Option<Arc<HavokObjectType>>,
|
||||||
|
members: Vec<HavokObjectTypeMember>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokObjectType {
|
||||||
|
pub fn new(name: Arc<str>, parent: Option<Arc<HavokObjectType>>, members: Vec<HavokObjectTypeMember>) -> Self {
|
||||||
|
Self { name, parent, members }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn members(&self) -> Vec<&HavokObjectTypeMember> {
|
||||||
|
if let Some(x) = &self.parent {
|
||||||
|
x.members().into_iter().chain(self.members.iter()).collect::<Vec<_>>()
|
||||||
|
} else {
|
||||||
|
self.members.iter().collect::<Vec<_>>()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn member_count(&self) -> usize {
|
||||||
|
(if let Some(x) = &self.parent { x.members.len() } else { 0 }) + self.members.len()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HavokObject {
|
||||||
|
pub object_type: Arc<HavokObjectType>,
|
||||||
|
data: HashMap<usize, HavokValue>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokObject {
|
||||||
|
pub fn new(object_type: Arc<HavokObjectType>, data: HashMap<usize, HavokValue>) -> Self {
|
||||||
|
Self { object_type, data }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set(&mut self, index: usize, value: HavokValue) {
|
||||||
|
self.data.insert(index, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get(&self, member_name: &str) -> &HavokValue {
|
||||||
|
let member_index = self.object_type.members().iter().position(|&x| &*x.name == member_name).unwrap();
|
||||||
|
|
||||||
|
self.data.get(&member_index).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn members_mut(&mut self) -> impl Iterator<Item = (&usize, &mut HavokValue)> {
|
||||||
|
self.data.iter_mut()
|
||||||
|
}
|
||||||
|
}
|
42 src/havok/skeleton.rs Normal file
@@ -0,0 +1,42 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

use core::cell::RefCell;
use std::sync::Arc;
use crate::havok::object::HavokObject;
use crate::havok::transform::HavokTransform;

#[derive(Debug)]
pub struct HavokSkeleton {
    pub bone_names: Vec<String>,
    pub parent_indices: Vec<usize>,
    pub reference_pose: Vec<HavokTransform>,
}

impl HavokSkeleton {
    pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
        let root = object.borrow();
        let bones = root.get("bones").as_array();
        let bone_names = bones
            .iter()
            .map(|x| {
                let bone = x.as_object();
                let bone_obj = bone.borrow();

                bone_obj.get("name").as_string().to_owned()
            })
            .collect::<Vec<_>>();

        let raw_parent_indices = root.get("parentIndices").as_array();
        let parent_indices = raw_parent_indices.iter().map(|x| x.as_int() as usize).collect::<Vec<_>>();

        let raw_reference_pose = root.get("referencePose").as_array();
        let reference_pose = raw_reference_pose.iter().map(|x| HavokTransform::new(x.as_vec())).collect::<Vec<_>>();

        Self {
            bone_names,
            parent_indices,
            reference_pose,
        }
    }
}
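The three vectors above are parallel arrays indexed by bone. As an illustration, the hypothetical helper below (written as if it sat next to this module) walks `parent_indices` to recover a bone's path to the root; Havok typically marks a root's parent with -1, which the `as usize` cast in the constructor turns into an out-of-range value, so the walk stops there.

fn bone_path(skeleton: &HavokSkeleton, mut index: usize) -> Vec<String> {
    let mut path = Vec::new();

    // Stop once the parent index is no longer a valid bone index (the wrapped -1 of a root bone).
    while index < skeleton.bone_names.len() {
        path.push(skeleton.bone_names[index].clone());
        index = skeleton.parent_indices[index];
    }

    path
}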
122 src/havok/slice_ext.rs Normal file
@@ -0,0 +1,122 @@
|
||||||
|
// SPDX-FileCopyrightText: 2020 Inseok Lee
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use core::convert::TryInto;
|
||||||
|
|
||||||
|
pub trait SliceByteOrderExt {
|
||||||
|
fn to_int_be<T>(&self) -> T
|
||||||
|
where
|
||||||
|
T: Integer;
|
||||||
|
|
||||||
|
fn to_int_le<T>(&self) -> T
|
||||||
|
where
|
||||||
|
T: Integer;
|
||||||
|
|
||||||
|
fn to_float_be<T>(&self) -> T
|
||||||
|
where
|
||||||
|
T: Float;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SliceByteOrderExt for &[u8] {
|
||||||
|
fn to_int_be<T>(&self) -> T
|
||||||
|
where
|
||||||
|
T: Integer,
|
||||||
|
{
|
||||||
|
let sliced = &self[..core::mem::size_of::<T>()];
|
||||||
|
|
||||||
|
T::from_be_bytes(sliced)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_int_le<T>(&self) -> T
|
||||||
|
where
|
||||||
|
T: Integer,
|
||||||
|
{
|
||||||
|
let sliced = &self[..core::mem::size_of::<T>()];
|
||||||
|
|
||||||
|
T::from_le_bytes(sliced)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_float_be<T>(&self) -> T
|
||||||
|
where
|
||||||
|
T: Float,
|
||||||
|
{
|
||||||
|
let sliced = &self[..core::mem::size_of::<T>()];
|
||||||
|
|
||||||
|
T::from_be_bytes(sliced)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait Integer {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self;
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Integer for u32 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_le_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Integer for i32 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_le_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Integer for u16 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_le_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Integer for i16 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_le_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Integer for u8 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_le_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Integer for i8 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_le_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_le_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait Float {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Float for f32 {
|
||||||
|
fn from_be_bytes(bytes: &[u8]) -> Self {
|
||||||
|
Self::from_be_bytes(bytes.try_into().unwrap())
|
||||||
|
}
|
||||||
|
}
|
595 src/havok/spline_compressed_animation.rs Normal file
@@ -0,0 +1,595 @@
|
||||||
|
// SPDX-FileCopyrightText: 2020 Inseok Lee
|
||||||
|
// SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
use core::{cell::RefCell, cmp};
|
||||||
|
use std::f32;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use crate::havok::byte_reader::ByteReader;
|
||||||
|
use crate::havok::HavokAnimation;
|
||||||
|
use crate::havok::object::HavokObject;
|
||||||
|
use crate::havok::transform::HavokTransform;
|
||||||
|
|
||||||
|
#[repr(u8)]
|
||||||
|
#[allow(clippy::upper_case_acronyms)]
|
||||||
|
enum RotationQuantization {
|
||||||
|
POLAR32 = 0,
|
||||||
|
THREECOMP40 = 1,
|
||||||
|
THREECOMP48 = 2,
|
||||||
|
THREECOMP24 = 3,
|
||||||
|
STRAIGHT16 = 4,
|
||||||
|
UNCOMPRESSED = 5,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RotationQuantization {
|
||||||
|
pub fn from_raw(raw: u8) -> Self {
|
||||||
|
match raw {
|
||||||
|
0 => Self::POLAR32,
|
||||||
|
1 => Self::THREECOMP40,
|
||||||
|
2 => Self::THREECOMP48,
|
||||||
|
3 => Self::THREECOMP24,
|
||||||
|
4 => Self::STRAIGHT16,
|
||||||
|
5 => Self::UNCOMPRESSED,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn align(&self) -> usize {
|
||||||
|
match self {
|
||||||
|
Self::POLAR32 => 4,
|
||||||
|
Self::THREECOMP40 => 1,
|
||||||
|
Self::THREECOMP48 => 2,
|
||||||
|
Self::THREECOMP24 => 1,
|
||||||
|
Self::STRAIGHT16 => 2,
|
||||||
|
Self::UNCOMPRESSED => 4,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bytes_per_quaternion(&self) -> usize {
|
||||||
|
match self {
|
||||||
|
Self::POLAR32 => 4,
|
||||||
|
Self::THREECOMP40 => 5,
|
||||||
|
Self::THREECOMP48 => 6,
|
||||||
|
Self::THREECOMP24 => 3,
|
||||||
|
Self::STRAIGHT16 => 2,
|
||||||
|
Self::UNCOMPRESSED => 16,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(u8)]
|
||||||
|
#[allow(clippy::upper_case_acronyms)]
|
||||||
|
enum ScalarQuantization {
|
||||||
|
BITS8 = 0,
|
||||||
|
BITS16 = 1,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarQuantization {
|
||||||
|
pub fn from_raw(raw: u8) -> Self {
|
||||||
|
match raw {
|
||||||
|
0 => Self::BITS8,
|
||||||
|
1 => Self::BITS16,
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bytes_per_component(&self) -> usize {
|
||||||
|
match self {
|
||||||
|
Self::BITS8 => 1,
|
||||||
|
Self::BITS16 => 2,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct HavokSplineCompressedAnimation {
|
||||||
|
duration: f32,
|
||||||
|
number_of_transform_tracks: usize,
|
||||||
|
num_frames: usize,
|
||||||
|
num_blocks: usize,
|
||||||
|
max_frames_per_block: usize,
|
||||||
|
mask_and_quantization_size: u32,
|
||||||
|
block_inverse_duration: f32,
|
||||||
|
frame_duration: f32,
|
||||||
|
block_offsets: Vec<u32>,
|
||||||
|
data: Vec<u8>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HavokSplineCompressedAnimation {
|
||||||
|
pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
|
||||||
|
let root = object.borrow();
|
||||||
|
|
||||||
|
let duration = root.get("duration").as_real();
|
||||||
|
let number_of_transform_tracks = root.get("numberOfTransformTracks").as_int() as usize;
|
||||||
|
let num_frames = root.get("numFrames").as_int() as usize;
|
||||||
|
let num_blocks = root.get("numBlocks").as_int() as usize;
|
||||||
|
let max_frames_per_block = root.get("maxFramesPerBlock").as_int() as usize;
|
||||||
|
let mask_and_quantization_size = root.get("maskAndQuantizationSize").as_int() as u32;
|
||||||
|
let block_inverse_duration = root.get("blockInverseDuration").as_real();
|
||||||
|
let frame_duration = root.get("frameDuration").as_real();
|
||||||
|
|
||||||
|
let raw_block_offsets = root.get("blockOffsets").as_array();
|
||||||
|
let block_offsets = raw_block_offsets.iter().map(|x| x.as_int() as u32).collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let raw_data = root.get("data").as_array();
|
||||||
|
let data = raw_data.iter().map(|x| x.as_int() as u8).collect::<Vec<_>>();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
duration,
|
||||||
|
number_of_transform_tracks,
|
||||||
|
num_frames,
|
||||||
|
num_blocks,
|
||||||
|
max_frames_per_block,
|
||||||
|
mask_and_quantization_size,
|
||||||
|
block_inverse_duration,
|
||||||
|
frame_duration,
|
||||||
|
block_offsets,
|
||||||
|
data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_block_and_time(&self, frame: usize, delta: f32) -> (usize, f32, u8) {
|
||||||
|
let mut block_out = frame / (self.max_frames_per_block - 1);
|
||||||
|
|
||||||
|
block_out = cmp::max(block_out, 0);
|
||||||
|
block_out = cmp::min(block_out, self.num_blocks - 1);
|
||||||
|
|
||||||
|
let first_frame_of_block = block_out * (self.max_frames_per_block - 1);
|
||||||
|
let real_frame = (frame - first_frame_of_block) as f32 + delta;
|
||||||
|
let block_time_out = real_frame * self.frame_duration;
|
||||||
|
|
||||||
|
let quantized_time_out = ((block_time_out * self.block_inverse_duration) * (self.max_frames_per_block as f32 - 1.)) as u8;
|
||||||
|
|
||||||
|
(block_out, block_time_out, quantized_time_out)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
fn find_span(n: usize, p: usize, u: u8, U: &[u8]) -> usize {
|
||||||
|
if u >= U[n + 1] {
|
||||||
|
return n;
|
||||||
|
}
|
||||||
|
if u <= U[0] {
|
||||||
|
return p;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut low = p;
|
||||||
|
let mut high = n + 1;
|
||||||
|
let mut mid = (low + high) / 2;
|
||||||
|
while u < U[mid] || u >= U[mid + 1] {
|
||||||
|
if u < U[mid] {
|
||||||
|
high = mid;
|
||||||
|
} else {
|
||||||
|
low = mid;
|
||||||
|
}
|
||||||
|
mid = (low + high) / 2;
|
||||||
|
}
|
||||||
|
mid
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_knots(data: &mut ByteReader, u: u8, frame_duration: f32) -> (usize, usize, Vec<f32>, usize) {
|
||||||
|
let n = data.read_u16_le() as usize;
|
||||||
|
let p = data.read() as usize;
|
||||||
|
let raw = data.raw();
|
||||||
|
let span = Self::find_span(n, p, u, raw);
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut U = vec![0.; 2 * p];
|
||||||
|
|
||||||
|
for i in 0..2 * p {
|
||||||
|
let item = raw[i + 1] as usize + span - p;
|
||||||
|
U[i] = (item as f32) * frame_duration;
|
||||||
|
}
|
||||||
|
|
||||||
|
data.seek(n + p + 2);
|
||||||
|
|
||||||
|
(n, p, U, span)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_signed_quaternion_32(data: &[u8]) -> [f32; 4] {
|
||||||
|
let input = u32::from_le_bytes([data[0], data[1], data[2], data[3]]);
|
||||||
|
|
||||||
|
let low = (input & 0x3FFFF) as f32;
|
||||||
|
let high = ((input & 0xFFC0000) >> 18) as f32 / 1023.0;
|
||||||
|
|
||||||
|
let value = 1. - high * high;
|
||||||
|
|
||||||
|
let a = f32::sqrt(low);
|
||||||
|
let b = low - a * a;
|
||||||
|
let c = if a == 0. { f32::MAX } else { 1. / (a + a) };
|
||||||
|
|
||||||
|
let theta = a / 511.0 * (f32::consts::PI / 2.);
|
||||||
|
let phi = b * c * (f32::consts::PI / 2.);
|
||||||
|
|
||||||
|
// spherical coordinate to cartesian coordinate
|
||||||
|
let mut result = [f32::sin(theta) * f32::cos(phi), f32::sin(theta) * f32::sin(phi), f32::cos(theta), 1.];
|
||||||
|
for item in result.iter_mut() {
|
||||||
|
*item *= f32::sqrt(1. - value * value);
|
||||||
|
}
|
||||||
|
result[3] = value;
|
||||||
|
|
||||||
|
let mask = input >> 28;
|
||||||
|
for (i, item) in result.iter_mut().enumerate() {
|
||||||
|
if mask & (1 << i) != 0 {
|
||||||
|
*item = -*item;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_signed_quaternion_40(data: &[u8]) -> [f32; 4] {
|
||||||
|
let permute = [256, 513, 1027, 0];
|
||||||
|
let data_mask_and = [4095, 4095, 4095, 0];
|
||||||
|
let data_mask_or = [0, 0, 0, 2047];
|
||||||
|
let data = [
|
||||||
|
u32::from_le_bytes([data[0], data[1], data[2], data[3]]),
|
||||||
|
u32::from_le_bytes([data[4], data[5], data[6], data[7]]),
|
||||||
|
];
|
||||||
|
|
||||||
|
let mut buf = [0u32; 4];
|
||||||
|
unsafe {
|
||||||
|
let m = core::slice::from_raw_parts(permute.as_ptr() as *const u8, permute.len() * core::mem::size_of::<u32>());
|
||||||
|
let a = core::slice::from_raw_parts(data.as_ptr() as *const u8, data.len() * core::mem::size_of::<u32>());
|
||||||
|
let r = core::slice::from_raw_parts_mut(buf.as_mut_ptr() as *mut u8, buf.len() * core::mem::size_of::<u32>());
|
||||||
|
for i in 0..16 {
|
||||||
|
r[i] = a[m[i] as usize];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mask = 2;
|
||||||
|
for (i, item) in buf.iter_mut().enumerate() {
|
||||||
|
if mask & (1 << i) != 0 {
|
||||||
|
*item >>= 4;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i, item) in buf.iter_mut().enumerate() {
|
||||||
|
*item = (*item & data_mask_and[i]) | data_mask_or[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut result = [0., 0., 0., 1.];
|
||||||
|
for (i, &item) in buf.iter().enumerate() {
|
||||||
|
result[i] = ((item as f32) - 2047.0) / 2895.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
let length_square = result.iter().map(|x| x * x).sum::<f32>();
|
||||||
|
let mut remaining = f32::sqrt(1. - length_square);
|
||||||
|
if (data[1] & 64) == 64 {
|
||||||
|
remaining = -remaining;
|
||||||
|
}
|
||||||
|
|
||||||
|
match data[1] & 48 {
|
||||||
|
0 => [remaining, result[0], result[1], result[2]],
|
||||||
|
16 => [result[0], remaining, result[1], result[2]],
|
||||||
|
32 => [result[0], result[1], remaining, result[2]],
|
||||||
|
48 => [result[0], result[1], result[2], remaining],
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_signed_quaternion_48(data: &[u8]) -> [f32; 4] {
|
||||||
|
let data = [
|
||||||
|
u16::from_le_bytes([data[0], data[1]]),
|
||||||
|
u16::from_le_bytes([data[2], data[3]]),
|
||||||
|
u16::from_le_bytes([data[4], data[5]]),
|
||||||
|
];
|
||||||
|
|
||||||
|
let item1 = data[0] & 0x7FFF;
|
||||||
|
let item2 = data[1] & 0x7FFF;
|
||||||
|
let item3 = data[2] & 0x7FFF;
|
||||||
|
let missing_index = (((data[1] & 0x8000) >> 14) | ((data[0] & 0x8000) >> 15)) as usize;
|
||||||
|
let mut vals = [0x3fff, 0x3fff, 0x3fff, 0x3fff];
|
||||||
|
|
||||||
|
let mut index = usize::from(missing_index == 0);
|
||||||
|
vals[index] = item1;
|
||||||
|
index += 1 + (usize::from(missing_index == index + 1));
|
||||||
|
vals[index] = item2;
|
||||||
|
index += 1 + (usize::from(missing_index == index + 1));
|
||||||
|
vals[index] = item3;
|
||||||
|
|
||||||
|
let mut result = [0., 0., 0., 1.];
|
||||||
|
for (i, &item) in vals.iter().enumerate() {
|
||||||
|
result[i] = ((item as f32) - 16383.0) / 23169.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
let length_square = result.iter().map(|x| x * x).sum::<f32>();
|
||||||
|
let mut remaining = f32::sqrt(1. - length_square);
|
||||||
|
if data[2] & 0x8000 != 0 {
|
||||||
|
remaining = -remaining;
|
||||||
|
}
|
||||||
|
|
||||||
|
result[missing_index] = remaining;
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_quaternion(quantization: &RotationQuantization, data: &[u8]) -> [f32; 4] {
|
||||||
|
match quantization {
|
||||||
|
RotationQuantization::POLAR32 => Self::unpack_signed_quaternion_32(data),
|
||||||
|
RotationQuantization::THREECOMP40 => Self::unpack_signed_quaternion_40(data),
|
||||||
|
RotationQuantization::THREECOMP48 => Self::unpack_signed_quaternion_48(data),
|
||||||
|
_ => panic!(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_packed_quaternions(quantization: RotationQuantization, data: &mut ByteReader, n: usize, p: usize, span: usize) -> Vec<[f32; 4]> {
|
||||||
|
data.align(quantization.align());
|
||||||
|
let bytes_per_quaternion = quantization.bytes_per_quaternion();
|
||||||
|
|
||||||
|
let mut result = Vec::new();
|
||||||
|
for i in 0..(p + 1) {
|
||||||
|
result.push(Self::unpack_quaternion(
|
||||||
|
&quantization,
|
||||||
|
&data.raw()[bytes_per_quaternion * (i + span - p)..],
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
data.seek(bytes_per_quaternion * (n + 1));
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_vec_8(min_p: [f32; 4], max_p: [f32; 4], vals: &[u8]) -> [f32; 4] {
|
||||||
|
let mut result = [0., 0., 0., 1.];
|
||||||
|
for i in 0..4 {
|
||||||
|
result[i] = ((vals[i] as f32) / 255.) * (max_p[i] - min_p[i]) + min_p[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_vec_16(min_p: [f32; 4], max_p: [f32; 4], vals: &[u16]) -> [f32; 4] {
|
||||||
|
let mut result = [0., 0., 0., 1.];
|
||||||
|
for i in 0..4 {
|
||||||
|
result[i] = ((vals[i] as f32) / 65535.) * (max_p[i] - min_p[i]) + min_p[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
fn recompose(stat_mask: u8, dyn_mask: u8, S: [f32; 4], I: [f32; 4], in_out: &mut [f32; 4]) {
|
||||||
|
for i in 0..4 {
|
||||||
|
if stat_mask & (1 << i) != 0 {
|
||||||
|
in_out[i] = S[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for i in 0..4 {
|
||||||
|
if dyn_mask & (1 << i) != 0 {
|
||||||
|
in_out[i] = I[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
fn evaluate(time: f32, p: usize, U: &[f32], P: &[[f32; 4]]) -> [f32; 4] {
|
||||||
|
if p > 3 {
|
||||||
|
panic!()
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut result = [0., 0., 0., 0.];
|
||||||
|
if p == 1 {
|
||||||
|
let t = (time - U[0]) / (U[1] - U[0]);
|
||||||
|
|
||||||
|
for (i, item) in result.iter_mut().enumerate() {
|
||||||
|
*item = P[0][i] + t * (P[1][i] - P[0][i]);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// evaluate interpolation.
|
||||||
|
let p_minus_1 = p - 1;
|
||||||
|
let mut values = [1.; 16];
|
||||||
|
let mut low = [0.; 16];
|
||||||
|
let mut high = [0.; 16];
|
||||||
|
|
||||||
|
for i in 1..(p + 1) {
|
||||||
|
high[4 * i] = time - U[p_minus_1 + 1 - i];
|
||||||
|
low[4 * i] = U[i + p_minus_1] - time;
|
||||||
|
let mut val = 0.;
|
||||||
|
for j in 0..i {
|
||||||
|
let a = low[4 * (j + 1)] + high[4 * (i - j)];
|
||||||
|
let b = values[4 * j] / a;
|
||||||
|
let c = low[4 * (j + 1)] * b;
|
||||||
|
values[4 * j] = val + c;
|
||||||
|
val = high[4 * (i - j)] * b;
|
||||||
|
}
|
||||||
|
values[4 * i] = val;
|
||||||
|
}
|
||||||
|
for i in 0..(p + 1) {
|
||||||
|
for (j, item) in result.iter_mut().enumerate() {
|
||||||
|
*item += values[4 * i] * P[i][j];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn compute_packed_nurbs_offsets<'a>(base: &'a [u8], p: &[u32], o2: usize, o3: u32) -> &'a [u8] {
|
||||||
|
let offset = (p[o2] + (o3 & 0x7fff_ffff)) as usize;
|
||||||
|
|
||||||
|
&base[offset..]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unpack_quantization_types(packed_quantization_types: u8) -> (ScalarQuantization, RotationQuantization, ScalarQuantization) {
|
||||||
|
let translation = ScalarQuantization::from_raw(packed_quantization_types & 0x03);
|
||||||
|
let rotation = RotationQuantization::from_raw((packed_quantization_types >> 2) & 0x0F);
|
||||||
|
let scale = ScalarQuantization::from_raw((packed_quantization_types >> 6) & 0x03);
|
||||||
|
|
||||||
|
(translation, rotation, scale)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sample_translation(&self, quantization: ScalarQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
|
||||||
|
let result = if mask != 0 {
|
||||||
|
Self::read_nurbs_curve(quantization, data, quantized_time, self.frame_duration, time, mask, [0., 0., 0., 0.])
|
||||||
|
} else {
|
||||||
|
[0., 0., 0., 0.]
|
||||||
|
};
|
||||||
|
|
||||||
|
data.align(4);
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sample_rotation(&self, quantization: RotationQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
|
||||||
|
let result = Self::read_nurbs_quaternion(quantization, data, quantized_time, self.frame_duration, time, mask);
|
||||||
|
|
||||||
|
data.align(4);
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sample_scale(&self, quantization: ScalarQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
|
||||||
|
let result = if mask != 0 {
|
||||||
|
Self::read_nurbs_curve(quantization, data, quantized_time, self.frame_duration, time, mask, [1., 1., 1., 1.])
|
||||||
|
} else {
|
||||||
|
[1., 1., 1., 1.]
|
||||||
|
};
|
||||||
|
|
||||||
|
data.align(4);
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
fn read_nurbs_curve(
|
||||||
|
quantization: ScalarQuantization,
|
||||||
|
data: &mut ByteReader,
|
||||||
|
quantized_time: u8,
|
||||||
|
frame_duration: f32,
|
||||||
|
u: f32,
|
||||||
|
mask: u8,
|
||||||
|
I: [f32; 4],
|
||||||
|
) -> [f32; 4] {
|
        let mut max_p = [0., 0., 0., 1.];
        let mut min_p = [0., 0., 0., 1.];
        let mut S = [0., 0., 0., 1.];

        let (n, p, U, span) = if mask & 0xf0 != 0 {
            Self::read_knots(data, quantized_time, frame_duration)
        } else {
            (0, 0, vec![0.; 10], 0)
        };
        data.align(4);

        for i in 0..3 {
            if (1 << i) & mask != 0 {
                S[i] = data.read_f32_le();
            } else if (1 << (i + 4)) & mask != 0 {
                min_p[i] = data.read_f32_le();
                max_p[i] = data.read_f32_le();
            }
        }

        let stat_mask = mask & 0x0f;
        let dyn_mask = (!mask >> 4) & (!mask & 0x0f);

        if mask & 0xf0 != 0 {
            let bytes_per_component = quantization.bytes_per_component();
            data.align(2);

            let sizes = [0, 1, 1, 2, 1, 2, 2, 3];
            let size = sizes[((mask >> 4) & 7) as usize];
            let mut new_data = data.clone();
            new_data.seek(bytes_per_component * size * (span - p));

            let mut P = [[0., 0., 0., 1.]; 4];
            for pv in P.iter_mut().take(p + 1) {
                match quantization {
                    ScalarQuantization::BITS8 => {
                        let mut vals = [0; 4];
                        for (j, item) in vals.iter_mut().enumerate().take(3) {
                            if (1 << (j + 4)) & mask != 0 {
                                *item = new_data.read();
                            }
                        }

                        *pv = Self::unpack_vec_8(min_p, max_p, &vals);
                    }
                    ScalarQuantization::BITS16 => {
                        let mut vals = [0; 4];
                        for (j, item) in vals.iter_mut().enumerate().take(3) {
                            if (1 << (j + 4)) & mask != 0 {
                                *item = new_data.read_u16_le();
                            }
                        }

                        *pv = Self::unpack_vec_16(min_p, max_p, &vals);
                    }
                }

                Self::recompose(stat_mask, dyn_mask, S, I, pv);
            }

            let result = Self::evaluate(u, p, &U, &P);

            data.seek(bytes_per_component * size * (n + 1));

            result
        } else {
            let mut result = I;
            Self::recompose(stat_mask, dyn_mask, S, I, &mut result);

            result
        }
    }

    #[allow(non_snake_case)]
    fn read_nurbs_quaternion(
        quantization: RotationQuantization,
        data: &mut ByteReader,
        quantized_time: u8,
        frame_duration: f32,
        u: f32,
        mask: u8,
    ) -> [f32; 4] {
        if mask & 0xf0 != 0 {
            let (n, p, U, span) = Self::read_knots(data, quantized_time, frame_duration);
            let P = Self::read_packed_quaternions(quantization, data, n, p, span);
            Self::evaluate(u, p, &U, &P)
        } else if mask & 0x0f != 0 {
            data.align(quantization.align());
            let result = Self::unpack_quaternion(&quantization, data.raw());
            data.seek(quantization.bytes_per_quaternion());

            result
        } else {
            [0., 0., 0., 1.]
        }
    }
}

impl HavokAnimation for HavokSplineCompressedAnimation {
    fn sample(&self, time: f32) -> Vec<HavokTransform> {
        let frame_float = ((time / 1000.) / self.duration) * (self.num_frames as f32 - 1.);
        let frame = frame_float as usize;
        let delta = frame_float - frame as f32;

        let (block, block_time, quantized_time) = self.get_block_and_time(frame, delta);

        let mut data = ByteReader::new(Self::compute_packed_nurbs_offsets(
            &self.data,
            &self.block_offsets,
            block,
            self.mask_and_quantization_size,
        ));
        let mut mask = ByteReader::new(Self::compute_packed_nurbs_offsets(&self.data, &self.block_offsets, block, 0x8000_0000));

        let mut result = Vec::with_capacity(self.number_of_transform_tracks);
        for _ in 0..self.number_of_transform_tracks {
            let packed_quantization_types = mask.read();

            let (translation_type, rotation_type, scale_type) = Self::unpack_quantization_types(packed_quantization_types);

            let translation = self.sample_translation(translation_type, block_time, quantized_time, mask.read(), &mut data);
            let rotation = self.sample_rotation(rotation_type, block_time, quantized_time, mask.read(), &mut data);
            let scale = self.sample_scale(scale_type, block_time, quantized_time, mask.read(), &mut data);

            result.push(HavokTransform::from_trs(translation, rotation, scale));
        }

        result
    }

    fn duration(&self) -> f32 {
        self.duration
    }
}
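The HavokAnimation trait above is the whole sampling surface: sample takes a time in milliseconds (it is divided by 1000 before being normalized against duration) and returns one HavokTransform per transform track. As a hedged illustration only (the helper name, the 30 FPS step, and the way the animation value is obtained are assumptions, not part of this commit), a caller inside the crate could step through an animation like this:

    // Hypothetical helper: walk any HavokAnimation implementor (such as the
    // HavokSplineCompressedAnimation above) at roughly 30 FPS and print the
    // first track's translation.
    fn dump_first_track(animation: &dyn HavokAnimation) {
        let duration_ms = animation.duration() * 1000.0;
        let step_ms = 1000.0 / 30.0;
        let mut t = 0.0;
        while t < duration_ms {
            let transforms = animation.sample(t);
            if let Some(first) = transforms.first() {
                println!("t = {:7.1} ms, translation = {:?}", t, first.translation);
            }
            t += step_ms;
        }
    }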

29
src/havok/transform.rs
Normal file

@@ -0,0 +1,29 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT

use crate::havok::object::HavokReal;

#[derive(Debug)]
pub struct HavokTransform {
    pub translation: [f32; 4],
    pub rotation: [f32; 4],
    pub scale: [f32; 4],
}

impl HavokTransform {
    pub fn new(vec: &[HavokReal]) -> Self {
        Self {
            translation: [vec[0], vec[1], vec[2], vec[3]],
            rotation: [vec[4], vec[5], vec[6], vec[7]],
            scale: [vec[8], vec[9], vec[10], vec[11]],
        }
    }

    pub fn from_trs(translation: [f32; 4], rotation: [f32; 4], scale: [f32; 4]) -> Self {
        Self {
            translation,
            rotation,
            scale,
        }
    }
}
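HavokTransform::new assumes a flat 12-value layout: four components of translation, then a four-component rotation quaternion, then four of scale (and HavokReal is evidently an f32 alias here, since the values are stored directly into [f32; 4] fields). A small illustrative sketch with made-up values, not taken from the commit:

    // Identity-like transform packed the way HavokTransform::new expects it.
    let flat: [f32; 12] = [
        0.0, 0.0, 0.0, 0.0, // translation xyzw (w unused here)
        0.0, 0.0, 0.0, 1.0, // rotation as an identity quaternion
        1.0, 1.0, 1.0, 1.0, // scale xyzw
    ];
    let transform = HavokTransform::new(&flat);
    assert_eq!(transform.rotation, [0.0, 0.0, 0.0, 1.0]);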

src/lib.rs

@@ -91,6 +91,13 @@ pub mod chardat;
/// Reading and writing the plaintext config files (CFG) used by the game to store most of it's configuration.
pub mod cfg;

#[cfg(feature = "visual_data")]
mod havok;

/// Reading bone deform matrices.
#[cfg(feature = "visual_data")]
pub mod pbd;

mod crc;
mod sha1;

140
src/pbd.rs
Normal file

@@ -0,0 +1,140 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later

use std::io::{Cursor, Seek, SeekFrom};

use binrw::{BinRead, BinReaderExt, BinWrite};
use binrw::binrw;

use crate::gamedata::MemoryBuffer;

#[binrw]
#[derive(Debug)]
#[brw(little)]
struct PreBoneDeformerItem {
    body_id: u16,
    link_index: u16,
    #[br(pad_after = 4)]
    data_offset: u32
}

#[binrw]
#[derive(Debug)]
#[brw(little)]
struct PreBoneDeformerLink {
    #[br(pad_after = 4)]
    next_index: i16,
    next_item_index: u16,
}

#[binrw]
#[derive(Debug)]
#[brw(little)]
struct PreBoneDeformerHeader {
    count: u32,

    #[br(count = count)]
    items: Vec<PreBoneDeformerItem>,

    #[br(count = count)]
    links: Vec<PreBoneDeformerLink>,

    #[br(ignore)]
    raw_data: Vec<u8>
}

pub struct PreBoneDeformer {
    header: PreBoneDeformerHeader,
}

#[derive(Debug)]
pub struct PreBoneDeformBone {
    pub name: String,
    pub deform: [f32; 12]
}

#[derive(Debug)]
pub struct PreBoneDeformMatrices {
    pub bones: Vec<PreBoneDeformBone>
}

impl PreBoneDeformer {
    pub fn from_existing(buffer: &MemoryBuffer) -> Option<PreBoneDeformer> {
        let mut cursor = Cursor::new(buffer);
        let mut header = PreBoneDeformerHeader::read(&mut cursor).ok()?;

        header.raw_data = buffer.clone();

        Some(PreBoneDeformer {
            header
        })
    }

    pub fn get_deform_matrices(&self, from_body_id: u16, to_body_id: u16) -> Option<PreBoneDeformMatrices> {
        if from_body_id == to_body_id {
            return None;
        }

        let mut item = self.header.items.iter().find(|x| x.body_id == from_body_id)?;
        let mut next = &self.header.links[item.link_index as usize];

        if next.next_index == -1 {
            return None;
        }

        let mut bones = vec![];

        let mut cursor = Cursor::new(&self.header.raw_data);

        loop {
            cursor.seek(SeekFrom::Start(item.data_offset as u64));
            let bone_name_count = cursor.read_le::<u32>().unwrap() as usize;

            let string_offsets_base = item.data_offset as usize + core::mem::size_of::<u32>();

            cursor.seek(SeekFrom::Start(string_offsets_base as u64));
            let mut strings_offset = vec![];
            for i in 0..bone_name_count {
                strings_offset.push(cursor.read_le::<u16>().unwrap());
            }

            let matrices_base = string_offsets_base + (bone_name_count + bone_name_count % 2) * 2;
            cursor.seek(SeekFrom::Start(matrices_base as u64));

            let mut matrices = vec![];
            for i in 0..bone_name_count {
                matrices.push(cursor.read_le::<[f32; 12]>().unwrap());
            }

            for i in 0..bone_name_count {
                let string_offset = item.data_offset as usize + strings_offset[i] as usize;

                let mut string = String::new();

                cursor.seek(SeekFrom::Start(string_offset as u64));
                let mut next_char = cursor.read_le::<u8>().unwrap() as char;
                while next_char != '\0' {
                    string.push(next_char);
                    next_char = cursor.read_le::<u8>().unwrap() as char;
                }

                let matrix = matrices[i];
                bones.push(PreBoneDeformBone {
                    name: string,
                    deform: matrix
                });
            }

            next = &self.header.links[next.next_index as usize];
            item = &self.header.items[next.next_item_index as usize];

            if item.body_id == to_body_id {
                break;
            }
        }

        Some(PreBoneDeformMatrices {
            bones
        })
    }
}
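PreBoneDeformer is a two-step API: parse the whole .pbd buffer once with from_existing, then walk the deformer chain between two body IDs with get_deform_matrices, which yields a bone name plus a 12-float deform matrix per entry. A hedged usage sketch, where the body IDs are placeholders and fetching the .pbd buffer out of the game archives is handled elsewhere in the crate:

    // Hypothetical caller: `pbd_buffer` holds the raw contents of the game's
    // pre-bone deformer file.
    fn print_deform_chain(pbd_buffer: &MemoryBuffer) -> Option<()> {
        let pbd = PreBoneDeformer::from_existing(pbd_buffer)?;

        // Placeholder IDs; real values come from the source and target models.
        let matrices = pbd.get_deform_matrices(101, 201)?;
        for bone in &matrices.bones {
            println!("{} -> {:?}", bone.name, bone.deform);
        }
        Some(())
    }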

157
src/skeleton.rs

@@ -5,34 +5,50 @@
#![allow(clippy::needless_late_init)]
#![allow(clippy::upper_case_acronyms)]

use binrw::{binread};
use std::io::{Cursor, SeekFrom};

use binrw::{binread, BinRead};
use binrw::helpers::until_eof;
use glam::Mat4;
use hard_xml::XmlRead;

use crate::gamedata::MemoryBuffer;
use crate::havok::{HavokAnimationContainer, HavokBinaryTagFileReader};

#[binread]
#[br(little)]
struct SklbV1 {
    unk_offset: i16,
    havok_offset: i16
    unk_offset: u16,
    havok_offset: u16,
    body_id: u32,
    mapper_body_id1: u32,
    mapper_body_id2: u32,
    mapper_body_id3: u32,
}

#[binread]
#[br(little)]
struct SklbV2 {
    unk_offset: i32,
    havok_offset: i32
    unk_offset: u32,
    havok_offset: u32,
    unk: u32,
    body_id: u32,
    mapper_body_id1: u32,
    mapper_body_id2: u32,
    mapper_body_id3: u32
}

#[binread]
#[br(magic = 0x736B6C62i32)]
#[br(little)]
struct SKLB {
    version_one: i16,
    version_two: i16,
    havok_offset: i32,

    #[br(count = havok_offset)]
    raw_header: Vec<u8>,

    version: u32,

    #[br(if(version == 0x3132_3030u32))]
    sklb_v1: Option<SklbV1>,

    #[br(if(version == 0x3133_3030u32 || version == 0x3133_3031u32))]
    sklb_v2: Option<SklbV2>,

    #[br(seek_before(SeekFrom::Start(if (version == 0x3132_3030u32) { sklb_v1.as_ref().unwrap().havok_offset as u64 } else { sklb_v2.as_ref().unwrap().havok_offset as u64 })))]
    #[br(parse_with = until_eof)]
    raw_data: Vec<u8>
}
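The magic and version constants in the new header look opaque, but because the header is read little-endian they are just the ASCII the file stores: 0x736B6C62 decodes to "blks", 0x3132_3030 to "0021", and 0x3133_3030 / 0x3133_3031 to "0031" / "1031". A standalone snippet (not part of the diff) to verify the byte decoding:

    // Decode a little-endian u32 constant back into the ASCII it represents
    // on disk, matching the #[br(little)] read order.
    fn ascii(v: u32) -> String {
        v.to_le_bytes().iter().map(|b| *b as char).collect()
    }

    fn main() {
        assert_eq!(ascii(0x736B6C62), "blks");
        assert_eq!(ascii(0x3132_3030), "0021");
        assert_eq!(ascii(0x3133_3030), "0031");
        assert_eq!(ascii(0x3133_3031), "1031");
    }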
@@ -53,119 +69,26 @@ pub struct Skeleton {
}

impl Skeleton {
    /// Parses a Havok XML packfile generated by the Havok SDK.
    pub fn from_packfile(buffer: &MemoryBuffer) -> Option<Skeleton> {
        #[derive(XmlRead, Debug)]
        #[xml(tag = "hkpackfile")]
        struct HkPackfile {
            #[xml(child = "hksection")]
            sections: Vec<HkSection>,
            #[xml(attr = "toplevelobject")]
            top_level_object: String,
        }

        #[derive(XmlRead, Debug)]
        #[xml(tag = "hksection")]
        #[allow(dead_code)]
        struct HkSection {
            #[xml(attr = "name")]
            name: String,

            #[xml(child = "hkobject")]
            objects: Vec<HkObject>,
        }

        #[derive(XmlRead, Debug)]
        #[xml(tag = "hkobject")]
        #[allow(dead_code)]
        struct HkObject {
            #[xml(attr = "name")]
            name: Option<String>,

            #[xml(attr = "class")]
            class: Option<String>,

            #[xml(child = "hkparam")]
            params: Vec<HkParam>,
        }

        #[derive(XmlRead, Debug)]
        #[xml(tag = "hkparam")]
        #[allow(dead_code)]
        struct HkParam {
            #[xml(attr = "name")]
            name: String,

            #[xml(attr = "className")]
            class_name: Option<String>,

            #[xml(attr = "variant")]
            variant: Option<String>,

            #[xml(child = "hkobject")]
            objects: Vec<HkObject>,

            #[xml(text)]
            content: String,
        }

        let pak = HkPackfile::from_str(std::str::from_utf8(buffer).unwrap())
            .expect("Failed to parse sidecar file!");

        // find the root level object
        let root_level_object = pak.sections[0]
            .objects
            .iter()
            .find(|s| s.name.as_ref() == Some(&pak.top_level_object))
            .expect("Cannot locate root level object.");

        println!("{:#?}", root_level_object);

        println!("{:#?}", pak);

        None
    }

    /// Parses the TexTools skeleton format, as a nice alternative to packfiles.
    pub fn from_skel(buffer: &MemoryBuffer) -> Option<Skeleton> {
        let mut string_repr = String::from_utf8(buffer.to_vec()).unwrap();

        // for some reason, textools does NOT write valid JSON.
        // let's begin by surrounding all of their json object blocks with an array, which is a valid
        // JSON root.
        string_repr.insert(0, '[');
        string_repr.push(']');

        // then we turn all of newlines into commas, except of course for the last one!
        string_repr = string_repr.replacen('\n', ",", string_repr.matches('\n').count() - 1);

        use serde::Deserialize;

        #[derive(Debug, Deserialize)]
        #[serde(rename_all = "PascalCase")]
        #[allow(dead_code)]
        struct BoneObject {
            bone_name: String,
            bone_number: i32,
            bone_parent: i32,
            pose_matrix: [f32; 16],
        }

        let json_bones: Vec<BoneObject> = serde_json::from_str(&string_repr).unwrap();

    pub fn from_existing(buffer: &MemoryBuffer) -> Option<Skeleton> {
        let mut cursor = Cursor::new(buffer);

        let sklb = SKLB::read(&mut cursor).unwrap();

        let root = HavokBinaryTagFileReader::read(&sklb.raw_data);
        let raw_animation_container = root.find_object_by_type("hkaAnimationContainer");
        let animation_container = HavokAnimationContainer::new(raw_animation_container);

        let havok_skeleton = &animation_container.skeletons[0];

        let mut skeleton = Skeleton { bones: vec![] };

        for bone in &json_bones {
            let pose_matrix = Mat4::from_cols_array(&bone.pose_matrix);

            let (scale, rotation, translation) = pose_matrix.to_scale_rotation_translation();

        for (index, bone) in havok_skeleton.bone_names.iter().enumerate() {
            skeleton.bones.push(Bone {
                name: bone.bone_name.clone(),
                parent_index: bone.bone_parent,
                position: translation.to_array(),
                rotation: rotation.to_array(),
                scale: scale.to_array(),
                name: bone.clone(),
                parent_index: havok_skeleton.parent_indices[index] as i32,
                position: [havok_skeleton.reference_pose[index].translation[0], havok_skeleton.reference_pose[index].translation[1], havok_skeleton.reference_pose[index].translation[2]],
                rotation: havok_skeleton.reference_pose[index].rotation,
                scale: [havok_skeleton.reference_pose[index].scale[0], havok_skeleton.reference_pose[index].scale[1], havok_skeleton.reference_pose[index].scale[2]],
            });
        }
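With the packfile and TexTools paths removed, Skeleton::from_existing is the single entry point: it parses the binary SKLB header, jumps to the embedded Havok binary tag file, and flattens the first skeleton in the hkaAnimationContainer into plain Bone values. A hedged usage sketch (the on-disk path is a placeholder, MemoryBuffer is assumed to be the crate's byte-buffer alias, and in practice the buffer comes out of the game's archives rather than a loose file):

    // Hypothetical caller: load a .sklb that was already extracted to disk.
    fn load_skeleton_from_disk(path: &str) -> Option<Skeleton> {
        let buffer: MemoryBuffer = std::fs::read(path).ok()?;
        let skeleton = Skeleton::from_existing(&buffer)?;

        for bone in &skeleton.bones {
            println!("{} (parent index {})", bone.name, bone.parent_index);
        }
        Some(skeleton)
    }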