mirror of https://github.com/redstrate/Physis.git synced 2025-04-19 17:36:50 +00:00

Run cargo fmt

Joshua Goins 2024-04-20 13:18:03 -04:00
parent 6e50f03cd9
commit d5b3b8a468
43 changed files with 1160 additions and 758 deletions
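
For context, `cargo fmt` runs rustfmt over every crate in the workspace and rewrites code to the default style; `cargo fmt --check` does the same comparison without writing, which is how CI catches unformatted files. The hunks below are mechanical: no behavior changes, only wrapping and punctuation. A minimal sketch of the two rewrites that dominate this diff, with hypothetical names (`Thing::make` is not a Physis API):

    struct Thing;

    impl Thing {
        fn make(_version: u32, _path: &str) -> Option<Thing> {
            Some(Thing)
        }
    }

    fn main() {
        // A call that overflows rustfmt's 100-column default gets one
        // argument per line, a trailing comma, and a dropped-down `.unwrap()`:
        let _t = Thing::make(
            4,
            format!("{}/game", "SquareEnix/FINAL FANTASY XIV").as_str(),
        )
        .unwrap();

        // Multi-line match arms and struct literals gain trailing commas:
        let _label = match 2 {
            1 => "one",
            _ => "other", // rustfmt adds this trailing comma
        };
    }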

View file

@ -8,7 +8,4 @@ fn bench_calculate_hash() {
IndexFile::calculate_hash("exd/root.exl");
}
brunch::benches!(
Bench::new("hash c alc")
.run(bench_calculate_hash),
);
brunch::benches!(Bench::new("hash c alc").run(bench_calculate_hash),);

View file

@ -8,13 +8,20 @@ use physis::common::Platform;
fn reload_repos() {
let game_dir = env::var("FFXIV_GAME_DIR").unwrap();
physis::gamedata::GameData::from_existing(Platform::Win32, format!("{}/game", game_dir).as_str()).unwrap();
physis::gamedata::GameData::from_existing(
Platform::Win32,
format!("{}/game", game_dir).as_str(),
)
.unwrap();
}
fn fetch_data() {
let game_dir = env::var("FFXIV_GAME_DIR").unwrap();
let mut gamedata =
physis::gamedata::GameData::from_existing(Platform::Win32, format!("{}/game", game_dir).as_str()).unwrap();
let mut gamedata = physis::gamedata::GameData::from_existing(
Platform::Win32,
format!("{}/game", game_dir).as_str(),
)
.unwrap();
gamedata.extract("exd/root.exl");
}

View file

@ -5,4 +5,3 @@ fn main() {
#[cfg(feature = "game_install")]
println!("cargo::rustc-link-lib=unshield");
}

View file

@ -171,9 +171,17 @@ mod tests {
fn test_encrypt_decrypt() {
let blowfish = Blowfish::new(b"test_case");
let expected_encrypted = [63, 149, 97, 229, 5, 35, 46, 128, 194, 107, 69, 132, 85, 202, 2, 126];
let expected_encrypted = [
63, 149, 97, 229, 5, 35, 46, 128, 194, 107, 69, 132, 85, 202, 2, 126,
];
assert_eq!(blowfish.encrypt(b"hello, world!").unwrap(), expected_encrypted);
assert_eq!(String::from_utf8(blowfish.decrypt(&expected_encrypted).unwrap()).unwrap(), "hello, world!\0\0\0");
assert_eq!(
blowfish.encrypt(b"hello, world!").unwrap(),
expected_encrypted
);
assert_eq!(
String::from_utf8(blowfish.decrypt(&expected_encrypted).unwrap()).unwrap(),
"hello, world!\0\0\0"
);
}
}
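
Beyond the formatting, this test documents the Blowfish API surface. A compact usage sketch, assuming the type is exported as `physis::blowfish::Blowfish` (both `encrypt` and `decrypt` return `Option`, as the `unwrap` calls above imply):

    use physis::blowfish::Blowfish;

    fn roundtrip() {
        let cipher = Blowfish::new(b"test_case");
        // Blowfish works on 8-byte blocks, so the plaintext is padded with
        // NULs; that is why the decrypted string ends in "\0\0\0".
        let encrypted = cipher.encrypt(b"hello, world!").unwrap();
        let decrypted = cipher.decrypt(&encrypted).unwrap();
        assert_eq!(String::from_utf8(decrypted).unwrap(), "hello, world!\0\0\0");
    }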

View file

@ -1,9 +1,9 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use crate::{ByteBuffer, ByteSpan};
use std::collections::HashMap;
use std::io::{BufRead, BufReader, BufWriter, Cursor, Write};
use crate::{ByteBuffer, ByteSpan};
/// Represents a collection of keys, mapped to their values.
#[derive(Debug)]
@ -26,7 +26,7 @@ impl ConfigFile {
pub fn from_existing(buffer: ByteSpan) -> Option<ConfigFile> {
let mut cfg = ConfigFile {
categories: Vec::new(),
settings: HashMap::new()
settings: HashMap::new(),
};
let cursor = Cursor::new(buffer);
@ -41,10 +41,17 @@ impl ConfigFile {
let name = &line[1..line.len() - 1];
current_category = Some(String::from(name));
cfg.categories.push(String::from(name));
} else if let (Some(category), Some((key, value))) = (&current_category, line.split_once('\t')) {
} else if let (Some(category), Some((key, value))) =
(&current_category, line.split_once('\t'))
{
// Key-value pair
cfg.settings.entry(category.clone()).or_insert_with(|| ConfigMap{ keys: Vec::new() });
cfg.settings.get_mut(category)?.keys.push((key.to_string(), value.to_string()));
cfg.settings
.entry(category.clone())
.or_insert_with(|| ConfigMap { keys: Vec::new() });
cfg.settings
.get_mut(category)?
.keys
.push((key.to_string(), value.to_string()));
}
}
}
@ -61,11 +68,15 @@ impl ConfigFile {
let mut writer = BufWriter::new(cursor);
for category in &self.categories {
writer.write_all(format!("\r\n<{}>\r\n", category).as_ref()).ok()?;
writer
.write_all(format!("\r\n<{}>\r\n", category).as_ref())
.ok()?;
if self.settings.contains_key(category) {
for key in &self.settings[category].keys {
writer.write_all(format!("{}\t{}\r\n", key.0, key.1).as_ref()).ok()?;
writer
.write_all(format!("{}\t{}\r\n", key.0, key.1).as_ref())
.ok()?;
}
}
}
@ -73,7 +84,6 @@ impl ConfigFile {
writer.write_all(b"\0").ok()?;
}
Some(buffer)
}
@ -107,7 +117,6 @@ impl ConfigFile {
}
}
#[cfg(test)]
mod tests {
use std::fs::read;
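
Formatting aside, these hunks show the whole `ConfigFile` lifecycle: `from_existing` parses `<Category>` headers and tab-separated key/value pairs, and `write_to_buffer` re-serializes them with CRLF line endings. A hedged round-trip sketch, assuming the module is exported as `physis::cfg` (both methods return `Option`, so parse or write failures surface as `None`):

    use physis::cfg::ConfigFile;

    fn roundtrip(buffer: &[u8]) -> Option<Vec<u8>> {
        let cfg = ConfigFile::from_existing(buffer)?;
        // ... inspect or mutate cfg.categories / cfg.settings here ...
        cfg.write_to_buffer()
    }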

View file

@ -3,10 +3,10 @@
use std::io::{BufWriter, Cursor};
use binrw::{BinRead, BinWrite};
use binrw::binrw;
use crate::{ByteBuffer, ByteSpan};
use crate::common_file_operations::{read_bool_from, write_bool_as};
use crate::{ByteBuffer, ByteSpan};
use binrw::binrw;
use binrw::{BinRead, BinWrite};
use crate::race::{Gender, Race, Subrace};
@ -20,7 +20,7 @@ fn convert_dat_race(x: u8) -> Race {
6 => Race::AuRa,
7 => Race::Hrothgar,
8 => Race::Viera,
_ => Race::Hyur
_ => Race::Hyur,
}
}
@ -33,7 +33,7 @@ fn convert_race_dat(race: &Race) -> u8 {
Race::Roegadyn => 5,
Race::AuRa => 6,
Race::Hrothgar => 7,
Race::Viera => 8
Race::Viera => 8,
}
}
@ -41,7 +41,7 @@ fn convert_dat_gender(x: u8) -> Gender {
match x {
0 => Gender::Male,
1 => Gender::Female,
_ => Gender::Male
_ => Gender::Male,
}
}
@ -62,7 +62,7 @@ fn convert_dat_subrace(x: u8) -> Subrace {
6 => Subrace::Dunesfolk,
7 => Subrace::Seeker,
8 => Subrace::Keeper,
9 => Subrace:: SeaWolf,
9 => Subrace::SeaWolf,
10 => Subrace::Hellsguard,
11 => Subrace::Raen,
12 => Subrace::Xaela,
@ -70,7 +70,7 @@ fn convert_dat_subrace(x: u8) -> Subrace {
14 => Subrace::Lost,
15 => Subrace::Rava,
16 => Subrace::Veena,
_ => Subrace::Midlander
_ => Subrace::Midlander,
}
}
@ -84,14 +84,14 @@ fn convert_subrace_dat(subrace: &Subrace) -> u8 {
Subrace::Dunesfolk => 6,
Subrace::Seeker => 7,
Subrace::Keeper => 8,
Subrace:: SeaWolf => 9,
Subrace::SeaWolf => 9,
Subrace::Hellsguard => 10,
Subrace::Raen => 11,
Subrace::Xaela => 12,
Subrace::Hellion => 13,
Subrace::Lost => 14,
Subrace::Rava => 15,
Subrace::Veena => 16
Subrace::Veena => 16,
}
}
@ -101,7 +101,8 @@ fn convert_subrace_dat(subrace: &Subrace) -> u8 {
#[repr(C)]
#[br(magic = 0x2013FF14u32)]
#[derive(Debug)]
pub struct CharacterData { // version 4
pub struct CharacterData {
// version 4
/// The version of the character data, the only supported version right now is 4.
pub version: u32,
@ -197,7 +198,7 @@ pub struct CharacterData { // version 4
/// The timestamp when the preset was created.
#[br(pad_before = 1)]
pub timestamp: [u8; 4]
pub timestamp: [u8; 4],
}
impl CharacterData {

View file

@ -3,9 +3,9 @@
use std::io::{Cursor, Seek, SeekFrom};
use binrw::BinRead;
use binrw::binrw;
use crate::ByteSpan;
use binrw::binrw;
use binrw::BinRead;
#[binrw]
#[br(little)]
@ -44,13 +44,13 @@ pub struct RacialScalingParameters {
/// Maximum bust size on the Y-axis
pub bust_max_y: f32,
/// Maximum bust size on the Z-axis
pub bust_max_z: f32
pub bust_max_z: f32,
}
#[derive(Debug)]
pub struct CMP {
/// The racial scaling parameters
pub parameters: Vec<RacialScalingParameters>
pub parameters: Vec<RacialScalingParameters>,
}
impl CMP {
@ -69,9 +69,7 @@ impl CMP {
parameters.push(RacialScalingParameters::read(&mut cursor).ok()?);
}
Some(CMP {
parameters
})
Some(CMP { parameters })
}
}
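
As with the other parsers in this commit, `CMP::from_existing` returns `None` on failure. A short consumer sketch (the `from_existing` signature sits outside this hunk, so the `&[u8]` parameter is an assumption based on the sibling file types):

    use physis::cmp::CMP;

    fn print_bust_limits(buffer: &[u8]) -> Option<()> {
        let cmp = CMP::from_existing(buffer)?;
        for params in &cmp.parameters {
            // Each RacialScalingParameters entry carries per-race scaling limits.
            println!("bust max Y/Z: {} / {}", params.bust_max_y, params.bust_max_z);
        }
        Some(())
    }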

View file

@ -6,7 +6,11 @@ pub(crate) fn read_bool_from<T: std::convert::From<u8> + std::cmp::PartialEq>(x:
}
pub(crate) fn write_bool_as<T: std::convert::From<u8>>(x: &bool) -> T {
if *x { T::from(1u8) } else { T::from(0u8) }
if *x {
T::from(1u8)
} else {
T::from(0u8)
}
}
#[cfg(test)]
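
These two helpers exist so binrw fields can store booleans as on-disk integers. A hypothetical sketch of how they are typically wired in (the struct below is illustrative, not from Physis; `read_bool_from` is assumed to test equality against `T::from(1u8)`, mirroring `write_bool_as` above):

    use binrw::binrw;
    use crate::common_file_operations::{read_bool_from, write_bool_as};

    #[binrw]
    struct Flags {
        // The on-disk byte becomes a bool on read and a 0/1 byte on write.
        #[br(map = read_bool_from::<u8>)]
        #[bw(map = write_bool_as::<u8>)]
        enabled: bool,
    }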

View file

@ -1,18 +1,18 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use std::io::{Cursor, Read, Seek, SeekFrom};
use std::io::Write;
use std::io::{Cursor, Read, Seek, SeekFrom};
use binrw::{BinReaderExt, binrw};
use crate::ByteBuffer;
use binrw::BinRead;
use binrw::BinWrite;
use crate::ByteBuffer;
use binrw::{binrw, BinReaderExt};
use crate::common_file_operations::read_bool_from;
#[cfg(feature = "visual_data")]
use crate::model::ModelFileHeader;
use crate::sqpack::read_data_block;
use crate::common_file_operations::read_bool_from;
#[binrw]
#[brw(repr = i32)]
@ -46,13 +46,23 @@ struct TextureLodBlock {
block_count: u32,
}
pub trait AnyNumberType<'a>: BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static {}
pub trait AnyNumberType<'a>:
BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static
{
}
impl<'a, T> AnyNumberType<'a> for T where T: BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static {}
impl<'a, T> AnyNumberType<'a> for T where
T: BinRead<Args<'a> = ()>
+ BinWrite<Args<'a> = ()>
+ std::ops::AddAssign
+ Copy
+ Default
+ 'static
{
}
#[derive(BinRead, BinWrite)]
pub struct ModelMemorySizes<T: for <'a> AnyNumberType<'a>>
{
pub struct ModelMemorySizes<T: for<'a> AnyNumberType<'a>> {
pub stack_size: T,
pub runtime_size: T,
@ -61,8 +71,7 @@ pub struct ModelMemorySizes<T: for <'a> AnyNumberType<'a>>
pub index_buffer_size: [T; 3],
}
impl<T: for<'a> AnyNumberType<'a>> ModelMemorySizes<T>
{
impl<T: for<'a> AnyNumberType<'a>> ModelMemorySizes<T> {
pub fn total(&self) -> T {
let mut total: T = T::default();
@ -214,7 +223,7 @@ impl DatFile {
{
panic!("Tried to extract a model without the visual_data feature enabled!")
}
},
}
FileType::Texture => self.read_texture_file(offset, &file_info),
}
}
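
The `AnyNumberType` rewrite above is rustfmt reflowing Rust's trait-alias idiom: an empty trait plus a blanket impl, so a long bound list is spelled once and reused everywhere. A self-contained illustration of the same idiom with hypothetical names:

    // One named bound standing in for the full list.
    trait Number: std::ops::AddAssign + Copy + Default + 'static {}

    // Blanket impl: anything satisfying the bounds is a Number for free.
    impl<T> Number for T where T: std::ops::AddAssign + Copy + Default + 'static {}

    // Mirrors ModelMemorySizes::total above: sum any Number values.
    fn total<N: Number>(values: &[N]) -> N {
        let mut sum = N::default();
        for v in values {
            sum += *v;
        }
        sum
    }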

View file

@ -4,9 +4,9 @@
use std::collections::HashMap;
use std::io::{Cursor, Seek, SeekFrom};
use binrw::{BinRead, BinReaderExt};
use binrw::binrw;
use crate::ByteSpan;
use binrw::binrw;
use binrw::{BinRead, BinReaderExt};
// Based off of https://github.com/Lotlab/ffxiv-vulgar-words-reader/
// Credit goes to Jim Kirisame for documenting this format
@ -19,7 +19,7 @@ pub struct EntryItem {
flag: u32,
sibling: u32,
child: u32,
offset: u32
offset: u32,
}
#[binrw]
@ -64,7 +64,7 @@ struct DictionaryHeader {
pub struct Dictionary {
header: DictionaryHeader,
pub words: Vec<String>
pub words: Vec<String>,
}
impl Dictionary {
@ -113,7 +113,7 @@ impl Dictionary {
let mut dict = Dictionary {
header: dict,
words: Vec::new()
words: Vec::new(),
};
// TODO: lol
@ -158,7 +158,7 @@ impl Dictionary {
(((*new_val as u32) << 8) + lower) as i32
} else {
0
}
};
}
fn dump_dict_node(&self, vec: &mut Vec<String>, entry_id: i32, prev: String) {

View file

@ -1,7 +1,7 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use crate::race::{Gender, get_race_id, Race, Subrace};
use crate::race::{get_race_id, Gender, Race, Subrace};
#[repr(u8)]
#[derive(Debug, PartialEq, Eq)]
@ -100,7 +100,7 @@ pub enum CharacterCategory {
Hair,
Face,
Tail,
Ear
Ear,
}
pub fn get_character_category_path(category: CharacterCategory) -> &'static str {
@ -109,7 +109,7 @@ pub fn get_character_category_path(category: CharacterCategory) -> &'static str
CharacterCategory::Hair => "hair",
CharacterCategory::Face => "face",
CharacterCategory::Tail => "tail",
CharacterCategory::Ear => "zear"
CharacterCategory::Ear => "zear",
}
}
@ -119,7 +119,7 @@ pub fn get_character_category_abbreviation(category: CharacterCategory) -> &'sta
CharacterCategory::Hair => "hir",
CharacterCategory::Face => "fac",
CharacterCategory::Tail => "til",
CharacterCategory::Ear => "ear"
CharacterCategory::Ear => "ear",
}
}
@ -129,7 +129,7 @@ pub fn get_character_category_prefix(category: CharacterCategory) -> &'static st
CharacterCategory::Hair => "h",
CharacterCategory::Face => "f",
CharacterCategory::Tail => "t",
CharacterCategory::Ear => "e"
CharacterCategory::Ear => "e",
}
}
@ -139,7 +139,7 @@ pub fn build_character_path(
body_ver: i32,
race: Race,
subrace: Subrace,
gender: Gender
gender: Gender,
) -> String {
let category_path = get_character_category_path(category);
let race_id = get_race_id(race, subrace, gender).unwrap();
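
The tail of this hunk shows `build_character_path` assembling a game path from the category tables above plus `get_race_id`. A hedged call sketch (the first parameter's type is cut off above and assumed to be `CharacterCategory`; the body version `1` is an arbitrary example, and imports are elided):

    use physis::race::{Gender, Race, Subrace};

    fn example_path() -> String {
        // e.g. the hair category resolves through "hair"/"hir"/"h" above.
        build_character_path(CharacterCategory::Hair, 1, Race::Hyur, Subrace::Midlander, Gender::Male)
    }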

View file

@ -3,8 +3,8 @@
use std::io::{Cursor, Seek, SeekFrom};
use binrw::{BinRead, Endian};
use binrw::binrw;
use binrw::{BinRead, Endian};
use crate::common::Language;
use crate::exh::{ColumnDataType, ExcelColumnDefinition, ExcelDataPagination, EXH};
@ -81,7 +81,7 @@ impl EXD {
let row_header = ExcelDataRowHeader::read(&mut cursor).ok()?;
let header_offset = offset.offset + 6;// std::mem::size_of::<ExcelDataRowHeader>() as u32;
let header_offset = offset.offset + 6; // std::mem::size_of::<ExcelDataRowHeader>() as u32;
let mut read_row = |row_offset: u32| -> Option<ExcelRow> {
let mut subrow = ExcelRow {
@ -93,9 +93,9 @@ impl EXD {
.seek(SeekFrom::Start((row_offset + column.offset as u32).into()))
.ok()?;
subrow
.data
.push(Self::read_column(&mut cursor, exh, row_offset, column).unwrap());
subrow.data.push(
Self::read_column(&mut cursor, exh, row_offset, column).unwrap(),
);
}
Some(subrow)
@ -118,13 +118,8 @@ impl EXD {
Some(exd)
}
fn read_data_raw<Z: BinRead<Args<'static> = ()>>(cursor: &mut Cursor<ByteSpan>) -> Option<Z>
{
Z::read_options(
cursor,
Endian::Big,
(),
).ok()
fn read_data_raw<Z: BinRead<Args<'static> = ()>>(cursor: &mut Cursor<ByteSpan>) -> Option<Z> {
Z::read_options(cursor, Endian::Big, ()).ok()
}
fn read_column(
@ -214,9 +209,9 @@ impl EXD {
#[cfg(test)]
mod tests {
use crate::exh::EXHHeader;
use std::fs::read;
use std::path::PathBuf;
use crate::exh::EXHHeader;
use super::*;

View file

@ -5,8 +5,8 @@
use std::io::Cursor;
use binrw::BinRead;
use binrw::binrw;
use binrw::BinRead;
use crate::common::Language;
use crate::ByteSpan;
@ -107,4 +107,3 @@ mod tests {
EXH::from_existing(&read(d).unwrap());
}
}

View file

@ -1,8 +1,8 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use std::io::{BufRead, BufReader, BufWriter, Cursor, Write};
use crate::{ByteBuffer, ByteSpan};
use std::io::{BufRead, BufReader, BufWriter, Cursor, Write};
/// Represents an Excel List.
pub struct EXL {
@ -47,7 +47,9 @@ impl EXL {
let cursor = Cursor::new(&mut buffer);
let mut writer = BufWriter::new(cursor);
writer.write_all(format!("EXLT,{}", self.version).as_ref()).ok()?;
writer
.write_all(format!("EXLT,{}", self.version).as_ref())
.ok()?;
for (key, value) in &self.entries {
writer.write_all(format!("\n{key},{value}").as_ref()).ok()?;
@ -120,7 +122,8 @@ mod tests {
let exl = read(d).unwrap();
let mut out = std::io::stdout();
out.write_all(&existing_exl.write_to_buffer().unwrap()).unwrap();
out.write_all(&existing_exl.write_to_buffer().unwrap())
.unwrap();
out.flush().unwrap();
assert_eq!(existing_exl.write_to_buffer().unwrap(), exl);

View file

@ -5,9 +5,9 @@ use std::fs::read;
use std::io::Cursor;
use std::path::Path;
use binrw::{BinRead, BinWrite};
use binrw::binrw;
use crate::{ByteBuffer, ByteSpan};
use binrw::binrw;
use binrw::{BinRead, BinWrite};
use crate::sha1::Sha1;
@ -135,10 +135,7 @@ mod tests {
d3.push("resources/tests");
d3.push("test.exl");
let testing_fiin = FileInfo::new(&[
d2.to_str().unwrap(),
d3.to_str().unwrap()
]).unwrap();
let testing_fiin = FileInfo::new(&[d2.to_str().unwrap(), d3.to_str().unwrap()]).unwrap();
assert_eq!(*valid_fiin, testing_fiin.write_to_buffer().unwrap());
}

View file

@ -8,15 +8,15 @@ use std::path::PathBuf;
use tracing::{debug, warn};
use crate::common::{Language, Platform, read_version};
use crate::common::{read_version, Language, Platform};
use crate::dat::DatFile;
use crate::exd::EXD;
use crate::exh::EXH;
use crate::exl::EXL;
use crate::index::{Index2File, IndexFile, IndexHashBitfield};
use crate::ByteBuffer;
use crate::patch::{apply_patch, PatchError};
use crate::repository::{Category, Repository, string_to_category};
use crate::repository::{string_to_category, Category, Repository};
use crate::ByteBuffer;
/// Framework for operating on game data.
pub struct GameData {
@ -27,7 +27,7 @@ pub struct GameData {
pub repositories: Vec<Repository>,
index_files: HashMap<String, IndexFile>,
index2_files: HashMap<String, Index2File>
index2_files: HashMap<String, Index2File>,
}
fn is_valid(path: &str) -> bool {
@ -79,7 +79,7 @@ impl GameData {
game_directory: String::from(directory),
repositories: vec![],
index_files: HashMap::new(),
index2_files: HashMap::new()
index2_files: HashMap::new(),
};
data.reload_repositories(platform);
Some(data)
@ -97,7 +97,9 @@ impl GameData {
let mut d = PathBuf::from(self.game_directory.as_str());
// add initial ffxiv directory
if let Some(base_repository) = Repository::from_existing_base(platform.clone(), d.to_str().unwrap()) {
if let Some(base_repository) =
Repository::from_existing_base(platform.clone(), d.to_str().unwrap())
{
self.repositories.push(base_repository);
}
@ -105,15 +107,18 @@ impl GameData {
d.push("sqpack");
if let Ok(repository_paths) = fs::read_dir(d.as_path()) {
let repository_paths : ReadDir = repository_paths;
let repository_paths: ReadDir = repository_paths;
let repository_paths : Vec<DirEntry> = repository_paths
let repository_paths: Vec<DirEntry> = repository_paths
.filter_map(Result::ok)
.filter(|s| s.file_type().unwrap().is_dir())
.collect();
for repository_path in repository_paths {
if let Some(expansion_repository) = Repository::from_existing_expansion(platform.clone(), repository_path.path().to_str().unwrap()) {
if let Some(expansion_repository) = Repository::from_existing_expansion(
platform.clone(),
repository_path.path().to_str().unwrap(),
) {
self.repositories.push(expansion_repository);
}
}
@ -183,7 +188,7 @@ impl GameData {
/// file.write(data.as_slice()).unwrap();
/// ```
pub fn extract(&mut self, path: &str) -> Option<ByteBuffer> {
debug!(file=path, "Extracting file");
debug!(file = path, "Extracting file");
let slice = self.find_entry(path);
match slice {
@ -235,7 +240,10 @@ impl GameData {
.iter()
.collect();
(index_path.into_os_string().into_string().unwrap(), index2_path.into_os_string().into_string().unwrap())
(
index_path.into_os_string().into_string().unwrap(),
index2_path.into_os_string().into_string().unwrap(),
)
}
/// Read an excel sheet by name (e.g. "Achievement")
@ -396,7 +404,8 @@ impl GameData {
if !self.index2_files.contains_key(filenames.1) {
if let Some(index_file) = Index2File::from_existing(filenames.1) {
self.index2_files.insert(filenames.1.to_string(), index_file);
self.index2_files
.insert(filenames.1.to_string(), index_file);
}
}
}
@ -411,7 +420,11 @@ impl GameData {
fn find_entry(&mut self, path: &str) -> Option<IndexHashBitfield> {
let index_path = self.get_index_filenames(path);
debug!("Trying index files {index_path}, {index2_path}", index_path=index_path.0, index2_path=index_path.1);
debug!(
"Trying index files {index_path}, {index2_path}",
index_path = index_path.0,
index2_path = index_path.1
);
self.cache_index_file((&index_path.0, &index_path.1));
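
Combined with the benchmark at the top of this commit, gamedata.rs gives the end-to-end flow: construct `GameData` against the game's `game/` directory (which also walks `sqpack/` for expansion repositories), then `extract` by SqPack path. A condensed sketch with simplified error handling:

    use physis::common::Platform;
    use physis::gamedata::GameData;

    fn dump_root_exl(game_dir: &str) -> Option<Vec<u8>> {
        let mut data = GameData::from_existing(Platform::Win32, &format!("{}/game", game_dir))?;
        // Hashes the path, consults the cached index/index2 files, then
        // reads the entry out of the matching .dat file.
        data.extract("exd/root.exl")
    }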

View file

@ -1,11 +1,11 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT
use core::cell::RefCell;
use std::sync::Arc;
use crate::havok::HavokAnimation;
use crate::havok::object::HavokObject;
use crate::havok::spline_compressed_animation::HavokSplineCompressedAnimation;
use crate::havok::HavokAnimation;
use core::cell::RefCell;
use std::sync::Arc;
#[repr(u8)]
pub enum HavokAnimationBlendHint {
@ -33,14 +33,20 @@ impl HavokAnimationBinding {
pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
let root = object.borrow();
let raw_transform_track_to_bone_indices = root.get("transformTrackToBoneIndices").as_array();
let transform_track_to_bone_indices = raw_transform_track_to_bone_indices.iter().map(|x| x.as_int() as u16).collect::<Vec<_>>();
let raw_transform_track_to_bone_indices =
root.get("transformTrackToBoneIndices").as_array();
let transform_track_to_bone_indices = raw_transform_track_to_bone_indices
.iter()
.map(|x| x.as_int() as u16)
.collect::<Vec<_>>();
let blend_hint = HavokAnimationBlendHint::from_raw(root.get("blendHint").as_int() as u8);
let raw_animation = root.get("animation").as_object();
let animation = match &*raw_animation.borrow().object_type.name {
"hkaSplineCompressedAnimation" => Box::new(HavokSplineCompressedAnimation::new(raw_animation.clone())),
"hkaSplineCompressedAnimation" => {
Box::new(HavokSplineCompressedAnimation::new(raw_animation.clone()))
}
_ => panic!(),
};

View file

@ -1,11 +1,11 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT
use std::cell::RefCell;
use std::sync::Arc;
use crate::havok::animation_binding::HavokAnimationBinding;
use crate::havok::object::HavokObject;
use crate::havok::skeleton::HavokSkeleton;
use std::cell::RefCell;
use std::sync::Arc;
pub struct HavokAnimationContainer {
pub skeletons: Vec<HavokSkeleton>,
@ -17,11 +17,20 @@ impl HavokAnimationContainer {
let root = object.borrow();
let raw_skeletons = root.get("skeletons").as_array();
let skeletons = raw_skeletons.iter().map(|x| HavokSkeleton::new(x.as_object())).collect::<Vec<_>>();
let skeletons = raw_skeletons
.iter()
.map(|x| HavokSkeleton::new(x.as_object()))
.collect::<Vec<_>>();
let raw_bindings = root.get("bindings").as_array();
let bindings = raw_bindings.iter().map(|x| HavokAnimationBinding::new(x.as_object())).collect::<Vec<_>>();
let bindings = raw_bindings
.iter()
.map(|x| HavokAnimationBinding::new(x.as_object()))
.collect::<Vec<_>>();
Self { skeletons, bindings }
Self {
skeletons,
bindings,
}
}
}

View file

@ -3,12 +3,15 @@
#![allow(clippy::arc_with_non_send_sync)]
use crate::havok::byte_reader::ByteReader;
use crate::havok::object::{
HavokInteger, HavokObject, HavokObjectType, HavokObjectTypeMember, HavokRootObject, HavokValue,
HavokValueType,
};
use crate::havok::slice_ext::SliceByteOrderExt;
use core::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;
use crate::havok::byte_reader::ByteReader;
use crate::havok::object::{HavokInteger, HavokObject, HavokObjectType, HavokObjectTypeMember, HavokRootObject, HavokValue, HavokValueType};
use crate::havok::slice_ext::SliceByteOrderExt;
#[repr(i8)]
enum HavokTagType {
@ -59,7 +62,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
fn new(reader: ByteReader<'a>) -> Self {
let file_version = 0;
let remembered_strings = vec![Arc::from("string"), Arc::from("")];
let remembered_types = vec![Arc::new(HavokObjectType::new(Arc::from("object"), None, Vec::new()))];
let remembered_types = vec![Arc::new(HavokObjectType::new(
Arc::from("object"),
None,
Vec::new(),
))];
let remembered_objects = Vec::new();
let objects = Vec::new();
@ -87,7 +94,10 @@ impl<'a> HavokBinaryTagFileReader<'a> {
self.file_version = self.read_packed_int() as u8;
assert_eq!(self.file_version, 3, "Unimplemented version");
self.remembered_objects
.push(Arc::new(RefCell::new(HavokObject::new(self.remembered_types[0].clone(), HashMap::new()))))
.push(Arc::new(RefCell::new(HavokObject::new(
self.remembered_types[0].clone(),
HashMap::new(),
))))
}
HavokTagType::Type => {
let object_type = self.read_type();
@ -152,7 +162,9 @@ impl<'a> HavokBinaryTagFileReader<'a> {
HavokValueType::INT => HavokValue::Integer(self.read_packed_int()),
HavokValueType::REAL => HavokValue::Real(self.reader.read_f32_le()),
HavokValueType::STRING => HavokValue::String(self.read_string()),
HavokValueType::OBJECT => HavokValue::ObjectReference(self.read_packed_int() as usize),
HavokValueType::OBJECT => {
HavokValue::ObjectReference(self.read_packed_int() as usize)
}
_ => panic!("unimplemented {}", member.type_.bits()),
}
}
@ -161,14 +173,19 @@ impl<'a> HavokBinaryTagFileReader<'a> {
fn read_array(&mut self, member: &HavokObjectTypeMember, array_len: usize) -> Vec<HavokValue> {
let base_type = member.type_.base_type();
match base_type {
HavokValueType::STRING => (0..array_len).map(|_| HavokValue::String(self.read_string())).collect::<Vec<_>>(),
HavokValueType::STRING => (0..array_len)
.map(|_| HavokValue::String(self.read_string()))
.collect::<Vec<_>>(),
HavokValueType::STRUCT => {
let target_type = self.find_type(member.class_name.as_ref().unwrap());
let data_existence = self.read_bit_field(target_type.member_count());
let mut result_objects = Vec::new();
for _ in 0..array_len {
let object = Arc::new(RefCell::new(HavokObject::new(target_type.clone(), HashMap::new())));
let object = Arc::new(RefCell::new(HavokObject::new(
target_type.clone(),
HashMap::new(),
)));
result_objects.push(object.clone());
self.objects.push(object);
@ -188,7 +205,10 @@ impl<'a> HavokBinaryTagFileReader<'a> {
}
}
result_objects.into_iter().map(HavokValue::Object).collect::<Vec<_>>()
result_objects
.into_iter()
.map(HavokValue::Object)
.collect::<Vec<_>>()
}
HavokValueType::OBJECT => (0..array_len)
.map(|_| {
@ -204,16 +224,33 @@ impl<'a> HavokBinaryTagFileReader<'a> {
if self.file_version >= 3 {
self.read_packed_int(); // type?
}
(0..array_len).map(|_| HavokValue::Integer(self.read_packed_int())).collect::<Vec<_>>()
}
HavokValueType::REAL => (0..array_len).map(|_| HavokValue::Real(self.reader.read_f32_le())).collect::<Vec<_>>(),
HavokValueType::VEC4 | HavokValueType::VEC8 | HavokValueType::VEC12 | HavokValueType::VEC16 => {
let vec_size = member.type_.base_type().vec_size() as usize;
(0..array_len)
.map(|_| HavokValue::Vec((0..vec_size).map(|_| self.reader.read_f32_le()).collect::<Vec<_>>()))
.map(|_| HavokValue::Integer(self.read_packed_int()))
.collect::<Vec<_>>()
}
_ => panic!("unimplemented {} {}", member.type_.bits(), member.type_.base_type().bits()),
HavokValueType::REAL => (0..array_len)
.map(|_| HavokValue::Real(self.reader.read_f32_le()))
.collect::<Vec<_>>(),
HavokValueType::VEC4
| HavokValueType::VEC8
| HavokValueType::VEC12
| HavokValueType::VEC16 => {
let vec_size = member.type_.base_type().vec_size() as usize;
(0..array_len)
.map(|_| {
HavokValue::Vec(
(0..vec_size)
.map(|_| self.reader.read_f32_le())
.collect::<Vec<_>>(),
)
})
.collect::<Vec<_>>()
}
_ => panic!(
"unimplemented {} {}",
member.type_.bits(),
member.type_.base_type().bits()
),
}
}
@ -229,8 +266,14 @@ impl<'a> HavokBinaryTagFileReader<'a> {
let member_name = self.read_string();
let type_ = HavokValueType::from_bits(self.read_packed_int() as u32).unwrap();
let tuple_size = if type_.is_tuple() { self.read_packed_int() } else { 0 };
let type_name = if type_.base_type() == HavokValueType::OBJECT || type_.base_type() == HavokValueType::STRUCT {
let tuple_size = if type_.is_tuple() {
self.read_packed_int()
} else {
0
};
let type_name = if type_.base_type() == HavokValueType::OBJECT
|| type_.base_type() == HavokValueType::STRUCT
{
Some(self.read_string())
} else {
None
@ -249,7 +292,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
return self.remembered_strings[-length as usize].clone();
}
let result = Arc::from(std::str::from_utf8(self.reader.read_bytes(length as usize)).unwrap().to_owned());
let result = Arc::from(
std::str::from_utf8(self.reader.read_bytes(length as usize))
.unwrap()
.to_owned(),
);
self.remembered_strings.push(Arc::clone(&result));
result
@ -296,7 +343,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
}
fn find_type(&self, type_name: &str) -> Arc<HavokObjectType> {
self.remembered_types.iter().find(|&x| &*x.name == type_name).unwrap().clone()
self.remembered_types
.iter()
.find(|&x| &*x.name == type_name)
.unwrap()
.clone()
}
fn fill_object_reference(&self, object: &mut HavokObject) {
@ -327,7 +378,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
fn default_value(type_: HavokValueType) -> HavokValue {
if type_.is_vec() {
HavokValue::Array((0..type_.vec_size()).map(|_| Self::default_value(type_.base_type())).collect::<Vec<_>>())
HavokValue::Array(
(0..type_.vec_size())
.map(|_| Self::default_value(type_.base_type()))
.collect::<Vec<_>>(),
)
} else if type_.is_array() || type_.is_tuple() {
HavokValue::Array(Vec::new())
} else {

View file

@ -10,9 +10,9 @@ mod binary_tag_file_reader;
mod byte_reader;
mod object;
mod skeleton;
mod slice_ext;
mod spline_compressed_animation;
mod transform;
mod slice_ext;
pub use animation::HavokAnimation;
pub use animation_container::HavokAnimationContainer;

View file

@ -170,7 +170,12 @@ pub struct HavokObjectTypeMember {
}
impl HavokObjectTypeMember {
pub fn new(name: Arc<str>, type_: HavokValueType, tuple_size: u32, type_name: Option<Arc<str>>) -> Self {
pub fn new(
name: Arc<str>,
type_: HavokValueType,
tuple_size: u32,
type_name: Option<Arc<str>>,
) -> Self {
Self {
name,
type_,
@ -187,20 +192,35 @@ pub struct HavokObjectType {
}
impl HavokObjectType {
pub fn new(name: Arc<str>, parent: Option<Arc<HavokObjectType>>, members: Vec<HavokObjectTypeMember>) -> Self {
Self { name, parent, members }
pub fn new(
name: Arc<str>,
parent: Option<Arc<HavokObjectType>>,
members: Vec<HavokObjectTypeMember>,
) -> Self {
Self {
name,
parent,
members,
}
}
pub fn members(&self) -> Vec<&HavokObjectTypeMember> {
if let Some(x) = &self.parent {
x.members().into_iter().chain(self.members.iter()).collect::<Vec<_>>()
x.members()
.into_iter()
.chain(self.members.iter())
.collect::<Vec<_>>()
} else {
self.members.iter().collect::<Vec<_>>()
}
}
pub fn member_count(&self) -> usize {
(if let Some(x) = &self.parent { x.members.len() } else { 0 }) + self.members.len()
(if let Some(x) = &self.parent {
x.members.len()
} else {
0
}) + self.members.len()
}
}
@ -219,7 +239,12 @@ impl HavokObject {
}
pub fn get(&self, member_name: &str) -> &HavokValue {
let member_index = self.object_type.members().iter().position(|&x| &*x.name == member_name).unwrap();
let member_index = self
.object_type
.members()
.iter()
.position(|&x| &*x.name == member_name)
.unwrap();
self.data.get(&member_index).unwrap()
}

View file

@ -1,10 +1,10 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT
use core::cell::RefCell;
use std::sync::Arc;
use crate::havok::object::HavokObject;
use crate::havok::transform::HavokTransform;
use core::cell::RefCell;
use std::sync::Arc;
#[derive(Debug)]
pub struct HavokSkeleton {
@ -28,10 +28,16 @@ impl HavokSkeleton {
.collect::<Vec<_>>();
let raw_parent_indices = root.get("parentIndices").as_array();
let parent_indices = raw_parent_indices.iter().map(|x| x.as_int() as usize).collect::<Vec<_>>();
let parent_indices = raw_parent_indices
.iter()
.map(|x| x.as_int() as usize)
.collect::<Vec<_>>();
let raw_reference_pose = root.get("referencePose").as_array();
let reference_pose = raw_reference_pose.iter().map(|x| HavokTransform::new(x.as_vec())).collect::<Vec<_>>();
let reference_pose = raw_reference_pose
.iter()
.map(|x| HavokTransform::new(x.as_vec()))
.collect::<Vec<_>>();
Self {
bone_names,

View file

@ -1,13 +1,13 @@
// SPDX-FileCopyrightText: 2020 Inseok Lee
// SPDX-License-Identifier: MIT
use crate::havok::byte_reader::ByteReader;
use crate::havok::object::HavokObject;
use crate::havok::transform::HavokTransform;
use crate::havok::HavokAnimation;
use core::{cell::RefCell, cmp};
use std::f32;
use std::sync::Arc;
use crate::havok::byte_reader::ByteReader;
use crate::havok::HavokAnimation;
use crate::havok::object::HavokObject;
use crate::havok::transform::HavokTransform;
#[repr(u8)]
#[allow(clippy::upper_case_acronyms)]
@ -107,10 +107,16 @@ impl HavokSplineCompressedAnimation {
let frame_duration = root.get("frameDuration").as_real();
let raw_block_offsets = root.get("blockOffsets").as_array();
let block_offsets = raw_block_offsets.iter().map(|x| x.as_int() as u32).collect::<Vec<_>>();
let block_offsets = raw_block_offsets
.iter()
.map(|x| x.as_int() as u32)
.collect::<Vec<_>>();
let raw_data = root.get("data").as_array();
let data = raw_data.iter().map(|x| x.as_int() as u8).collect::<Vec<_>>();
let data = raw_data
.iter()
.map(|x| x.as_int() as u8)
.collect::<Vec<_>>();
Self {
duration,
@ -136,7 +142,8 @@ impl HavokSplineCompressedAnimation {
let real_frame = (frame - first_frame_of_block) as f32 + delta;
let block_time_out = real_frame * self.frame_duration;
let quantized_time_out = ((block_time_out * self.block_inverse_duration) * (self.max_frames_per_block as f32 - 1.)) as u8;
let quantized_time_out = ((block_time_out * self.block_inverse_duration)
* (self.max_frames_per_block as f32 - 1.)) as u8;
(block_out, block_time_out, quantized_time_out)
}
@ -164,7 +171,11 @@ impl HavokSplineCompressedAnimation {
mid
}
fn read_knots(data: &mut ByteReader, u: u8, frame_duration: f32) -> (usize, usize, Vec<f32>, usize) {
fn read_knots(
data: &mut ByteReader,
u: u8,
frame_duration: f32,
) -> (usize, usize, Vec<f32>, usize) {
let n = data.read_u16_le() as usize;
let p = data.read() as usize;
let raw = data.raw();
@ -199,7 +210,12 @@ impl HavokSplineCompressedAnimation {
let phi = b * c * (f32::consts::PI / 2.);
// spherical coordinate to cartesian coordinate
let mut result = [f32::sin(theta) * f32::cos(phi), f32::sin(theta) * f32::sin(phi), f32::cos(theta), 1.];
let mut result = [
f32::sin(theta) * f32::cos(phi),
f32::sin(theta) * f32::sin(phi),
f32::cos(theta),
1.,
];
for item in result.iter_mut() {
*item *= f32::sqrt(1. - value * value);
}
@ -226,9 +242,18 @@ impl HavokSplineCompressedAnimation {
let mut buf = [0u32; 4];
unsafe {
let m = core::slice::from_raw_parts(permute.as_ptr() as *const u8, permute.len() * core::mem::size_of::<u32>());
let a = core::slice::from_raw_parts(data.as_ptr() as *const u8, data.len() * core::mem::size_of::<u32>());
let r = core::slice::from_raw_parts_mut(buf.as_mut_ptr() as *mut u8, buf.len() * core::mem::size_of::<u32>());
let m = core::slice::from_raw_parts(
permute.as_ptr() as *const u8,
permute.len() * core::mem::size_of::<u32>(),
);
let a = core::slice::from_raw_parts(
data.as_ptr() as *const u8,
data.len() * core::mem::size_of::<u32>(),
);
let r = core::slice::from_raw_parts_mut(
buf.as_mut_ptr() as *mut u8,
buf.len() * core::mem::size_of::<u32>(),
);
for i in 0..16 {
r[i] = a[m[i] as usize];
}
@ -310,7 +335,13 @@ impl HavokSplineCompressedAnimation {
}
}
fn read_packed_quaternions(quantization: RotationQuantization, data: &mut ByteReader, n: usize, p: usize, span: usize) -> Vec<[f32; 4]> {
fn read_packed_quaternions(
quantization: RotationQuantization,
data: &mut ByteReader,
n: usize,
p: usize,
span: usize,
) -> Vec<[f32; 4]> {
data.align(quantization.align());
let bytes_per_quaternion = quantization.bytes_per_quaternion();
@ -409,7 +440,9 @@ impl HavokSplineCompressedAnimation {
&base[offset..]
}
fn unpack_quantization_types(packed_quantization_types: u8) -> (ScalarQuantization, RotationQuantization, ScalarQuantization) {
fn unpack_quantization_types(
packed_quantization_types: u8,
) -> (ScalarQuantization, RotationQuantization, ScalarQuantization) {
let translation = ScalarQuantization::from_raw(packed_quantization_types & 0x03);
let rotation = RotationQuantization::from_raw((packed_quantization_types >> 2) & 0x0F);
let scale = ScalarQuantization::from_raw((packed_quantization_types >> 6) & 0x03);
@ -417,9 +450,24 @@ impl HavokSplineCompressedAnimation {
(translation, rotation, scale)
}
fn sample_translation(&self, quantization: ScalarQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
fn sample_translation(
&self,
quantization: ScalarQuantization,
time: f32,
quantized_time: u8,
mask: u8,
data: &mut ByteReader,
) -> [f32; 4] {
let result = if mask != 0 {
Self::read_nurbs_curve(quantization, data, quantized_time, self.frame_duration, time, mask, [0., 0., 0., 0.])
Self::read_nurbs_curve(
quantization,
data,
quantized_time,
self.frame_duration,
time,
mask,
[0., 0., 0., 0.],
)
} else {
[0., 0., 0., 0.]
};
@ -429,17 +477,46 @@ impl HavokSplineCompressedAnimation {
result
}
fn sample_rotation(&self, quantization: RotationQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
let result = Self::read_nurbs_quaternion(quantization, data, quantized_time, self.frame_duration, time, mask);
fn sample_rotation(
&self,
quantization: RotationQuantization,
time: f32,
quantized_time: u8,
mask: u8,
data: &mut ByteReader,
) -> [f32; 4] {
let result = Self::read_nurbs_quaternion(
quantization,
data,
quantized_time,
self.frame_duration,
time,
mask,
);
data.align(4);
result
}
fn sample_scale(&self, quantization: ScalarQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
fn sample_scale(
&self,
quantization: ScalarQuantization,
time: f32,
quantized_time: u8,
mask: u8,
data: &mut ByteReader,
) -> [f32; 4] {
let result = if mask != 0 {
Self::read_nurbs_curve(quantization, data, quantized_time, self.frame_duration, time, mask, [1., 1., 1., 1.])
Self::read_nurbs_curve(
quantization,
data,
quantized_time,
self.frame_duration,
time,
mask,
[1., 1., 1., 1.],
)
} else {
[1., 1., 1., 1.]
};
@ -575,17 +652,41 @@ impl HavokAnimation for HavokSplineCompressedAnimation {
block,
self.mask_and_quantization_size,
));
let mut mask = ByteReader::new(Self::compute_packed_nurbs_offsets(&self.data, &self.block_offsets, block, 0x8000_0000));
let mut mask = ByteReader::new(Self::compute_packed_nurbs_offsets(
&self.data,
&self.block_offsets,
block,
0x8000_0000,
));
let mut result = Vec::with_capacity(self.number_of_transform_tracks);
for _ in 0..self.number_of_transform_tracks {
let packed_quantization_types = mask.read();
let (translation_type, rotation_type, scale_type) = Self::unpack_quantization_types(packed_quantization_types);
let (translation_type, rotation_type, scale_type) =
Self::unpack_quantization_types(packed_quantization_types);
let translation = self.sample_translation(translation_type, block_time, quantized_time, mask.read(), &mut data);
let rotation = self.sample_rotation(rotation_type, block_time, quantized_time, mask.read(), &mut data);
let scale = self.sample_scale(scale_type, block_time, quantized_time, mask.read(), &mut data);
let translation = self.sample_translation(
translation_type,
block_time,
quantized_time,
mask.read(),
&mut data,
);
let rotation = self.sample_rotation(
rotation_type,
block_time,
quantized_time,
mask.read(),
&mut data,
);
let scale = self.sample_scale(
scale_type,
block_time,
quantized_time,
mask.read(),
&mut data,
);
result.push(HavokTransform::from_trs(translation, rotation, scale));
}
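
One piece of real math hides inside the reformatting above: the quaternion helper rebuilds a unit quaternion's vector part from packed spherical angles. Writing w for the quantized scalar (`value` in the code), the snippet that fills `result` computes, as a sketch:

    \begin{aligned}
    (x,\,y,\,z) &= \sqrt{1-w^{2}}\,\bigl(\sin\theta\cos\phi,\;\sin\theta\sin\phi,\;\cos\theta\bigr),\\
    x^{2}+y^{2}+z^{2}+w^{2} &= (1-w^{2})\bigl(\sin^{2}\theta\cos^{2}\phi+\sin^{2}\theta\sin^{2}\phi+\cos^{2}\theta\bigr)+w^{2} = 1,
    \end{aligned}

so the decoded quaternion is unit-length by construction.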

View file

@ -5,11 +5,11 @@
use std::io::SeekFrom;
use binrw::BinRead;
use binrw::binrw;
use modular_bitfield::prelude::*;
use crate::common::Platform;
use crate::crc::Jamcrc;
use binrw::binrw;
use binrw::BinRead;
use modular_bitfield::prelude::*;
#[binrw]
#[br(magic = b"SqPack")]

View file

@ -3,9 +3,9 @@
use std::io::{Cursor, Seek, SeekFrom};
use binrw::{BinRead, binread, BinReaderExt};
use binrw::binrw;
use crate::ByteSpan;
use binrw::binrw;
use binrw::{binread, BinRead, BinReaderExt};
// From https://github.com/NotAdam/Lumina/tree/40dab50183eb7ddc28344378baccc2d63ae71d35/src/Lumina/Data/Parsing/Layer
@ -13,8 +13,7 @@ use crate::ByteSpan;
#[binrw]
#[repr(i32)]
#[derive(Debug, PartialEq)]
enum LayerEntryType
{
enum LayerEntryType {
#[brw(magic = 0x0i32)]
AssetNone,
#[brw(magic = 0x1i32)]
@ -29,7 +28,7 @@ enum LayerEntryType
render_shadow_enabled: u8,
render_light_shadow_enabled: u8,
padding: u8,
render_model_clip_range: f32
render_model_clip_range: f32,
},
#[brw(magic = 0x2i32)]
Attribute,
@ -120,8 +119,7 @@ enum LayerEntryType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum DoorState
{
enum DoorState {
Auto = 0x1,
Open = 0x2,
Closed = 0x3,
@ -130,8 +128,7 @@ enum DoorState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum RotationState
{
enum RotationState {
Rounding = 0x1,
Stopped = 0x2,
}
@ -139,8 +136,7 @@ enum RotationState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TransformState
{
enum TransformState {
Play = 0x0,
Stop = 0x1,
Replay = 0x2,
@ -150,8 +146,7 @@ enum TransformState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum ColourState
{
enum ColourState {
Play = 0x0,
Stop = 0x1,
Replay = 0x2,
@ -161,8 +156,7 @@ enum ColourState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TriggerBoxShape
{
enum TriggerBoxShape {
Box = 0x1,
Sphere = 0x2,
Cylinder = 0x3,
@ -174,8 +168,7 @@ enum TriggerBoxShape
#[binrw]
#[brw(repr = i32)]
#[derive(Debug, PartialEq)]
enum ModelCollisionType
{
enum ModelCollisionType {
None = 0x0,
Replace = 0x1,
Box = 0x2,
@ -184,8 +177,7 @@ enum ModelCollisionType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum LightType
{
enum LightType {
None = 0x0,
Directional = 0x1,
Point = 0x2,
@ -198,8 +190,7 @@ enum LightType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum PointLightType
{
enum PointLightType {
Sphere = 0x0,
Hemisphere = 0x1,
}
@ -207,8 +198,7 @@ enum PointLightType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum PositionMarkerType
{
enum PositionMarkerType {
DebugZonePop = 0x1,
DebugJump = 0x2,
NaviMesh = 0x3,
@ -218,8 +208,7 @@ enum PositionMarkerType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum EnvSetShape
{
enum EnvSetShape {
Ellipsoid = 0x1,
Cuboid = 0x2,
Cylinder = 0x3,
@ -228,8 +217,7 @@ enum EnvSetShape
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum HelperObjectType
{
enum HelperObjectType {
ProxyActor = 0x0,
NullObject = 0x1,
}
@ -237,8 +225,7 @@ enum HelperObjectType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TargetType
{
enum TargetType {
None = 0x0,
ENPCInstanceID = 0x1,
Player = 0x2,
@ -257,8 +244,7 @@ enum TargetType
#[binread]
#[derive(Debug, PartialEq)]
enum PopType
{
enum PopType {
#[br(magic = 0x1u8)]
PC = 0x1,
#[br(magic = 0x2u8)]
@ -270,16 +256,14 @@ enum PopType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum ExitType
{
enum ExitType {
ZoneLine = 0x1,
}
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum RangeType
{
enum RangeType {
Type01 = 0x1,
Type02 = 0x2,
Type03 = 0x3,
@ -292,8 +276,7 @@ enum RangeType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum LineStyle
{
enum LineStyle {
Red = 0x1,
Blue = 0x2,
}
@ -301,8 +284,7 @@ enum LineStyle
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum GimmickType
{
enum GimmickType {
Fishing = 0x1,
Content = 0x2,
Room = 0x3,
@ -311,8 +293,7 @@ enum GimmickType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TargetMarkerType
{
enum TargetMarkerType {
UiTarget = 0x0,
UiNameplate = 0x1,
LookAt = 0x2,
@ -324,8 +305,7 @@ enum TargetMarkerType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum ObjectType
{
enum ObjectType {
ObjectChair = 0x0,
ObjectBed = 0x1,
}
@ -333,8 +313,7 @@ enum ObjectType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum CharacterSize
{
enum CharacterSize {
DefaultSize = 0x0,
VerySmall = 0x1,
Small = 0x2,
@ -346,8 +325,7 @@ enum CharacterSize
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum DrawHeadParts
{
enum DrawHeadParts {
Default = 0x0,
ForceOn = 0x1,
ForceOff = 0x2,
@ -356,8 +334,7 @@ enum DrawHeadParts
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum RotationType
{
enum RotationType {
NoRotate = 0x0,
AllAxis = 0x1,
YAxisOnly = 0x2,
@ -366,8 +343,7 @@ enum RotationType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum MovePathMode
{
enum MovePathMode {
None = 0x0,
SharedGroupAction = 0x1,
Timeline = 0x2,
@ -376,8 +352,7 @@ enum MovePathMode
#[binrw]
#[brw(repr = i32)]
#[derive(Debug, PartialEq)]
enum LayerSetReferencedType
{
enum LayerSetReferencedType {
All = 0x0,
Include = 0x1,
Exclude = 0x2,
@ -387,8 +362,7 @@ enum LayerSetReferencedType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum SoundEffectType
{
enum SoundEffectType {
Point = 0x3,
PointDir = 0x4,
Line = 0x5,
@ -439,7 +413,7 @@ struct LayerHeader {
struct LayerSetReferencedList {
referenced_type: LayerSetReferencedType,
layer_sets: i32,
layer_set_count: i32
layer_set_count: i32,
}
#[binread]
@ -450,7 +424,7 @@ struct LgbHeader {
#[br(count = 4)]
file_id: Vec<u8>,
file_size: i32,
total_chunk_count: i32
total_chunk_count: i32,
}
#[binread]
@ -464,7 +438,7 @@ struct LayerChunk {
layer_group_id: i32,
name_offset: u32,
layer_offset: i32,
layer_count: i32
layer_count: i32,
}
#[binread]
@ -474,13 +448,11 @@ struct LayerChunk {
struct InstanceObject {
asset_type: LayerEntryType,
instance_id: u32,
name_offset: u32
name_offset: u32,
}
#[derive(Debug)]
pub struct Layer {
}
pub struct Layer {}
impl Layer {
/// Reads an existing PBD file
@ -497,29 +469,45 @@ impl Layer {
let old_pos = cursor.position();
let mut layer_offsets = vec![0i32; chunk_header.layer_count as usize];
for i in 0.. chunk_header.layer_count {
for i in 0..chunk_header.layer_count {
layer_offsets[i as usize] = cursor.read_le::<i32>().unwrap();
}
for i in 0.. chunk_header.layer_count {
cursor.seek(SeekFrom::Start(old_pos + layer_offsets[i as usize] as u64)).unwrap();
for i in 0..chunk_header.layer_count {
cursor
.seek(SeekFrom::Start(old_pos + layer_offsets[i as usize] as u64))
.unwrap();
let old_pos = cursor.position();
let header = LayerHeader::read(&mut cursor).unwrap();
cursor.seek(SeekFrom::Start(old_pos + header.instance_object_offset as u64)).unwrap();
cursor
.seek(SeekFrom::Start(
old_pos + header.instance_object_offset as u64,
))
.unwrap();
let mut instance_offsets = vec![0i32; header.instance_object_count as usize];
for i in 0..header.instance_object_count {
instance_offsets[i as usize] = cursor.read_le::<i32>().unwrap();
}
cursor.seek(SeekFrom::Start(old_pos + header.layer_set_referenced_list_offset as u64)).unwrap();
cursor
.seek(SeekFrom::Start(
old_pos + header.layer_set_referenced_list_offset as u64,
))
.unwrap();
LayerSetReferencedList::read(&mut cursor).unwrap();
for i in 0..header.instance_object_count {
cursor.seek(SeekFrom::Start(old_pos + header.instance_object_offset as u64 + instance_offsets[i as usize] as u64)).unwrap();
cursor
.seek(SeekFrom::Start(
old_pos
+ header.instance_object_offset as u64
+ instance_offsets[i as usize] as u64,
))
.unwrap();
let instance_object = InstanceObject::read(&mut cursor).unwrap();
println!("{:#?}", instance_object);
@ -549,4 +537,3 @@ mod tests {
Layer::from_existing(&read(d).unwrap());
}
}
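
The enums in this file rely on two binrw features that survive the reformat unchanged: `#[brw(repr = u8)]` reads and writes a C-like enum as its integer representation, while per-variant `#[brw(magic = ...)]` (as on `LayerEntryType`) matches a literal tag before parsing the payload. A minimal, self-contained example using `DoorState`, abridged to the variants shown above:

    use binrw::{binrw, BinRead};
    use std::io::Cursor;

    #[binrw]
    #[brw(repr = u8)]
    #[derive(Debug, PartialEq)]
    enum DoorState {
        Auto = 0x1,
        Open = 0x2,
        Closed = 0x3,
    }

    fn parse_door_state() {
        // A single 0x2 byte decodes to DoorState::Open; unknown values error.
        let mut cursor = Cursor::new([0x2u8]);
        assert_eq!(DoorState::read_le(&mut cursor).unwrap(), DoorState::Open);
    }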

View file

@ -6,7 +6,7 @@
extern crate core;
/// Represents a continuous block of memory which is not owned, and comes either from an in-memory location or from a file.
pub type ByteSpan<'a> = &'a[u8];
pub type ByteSpan<'a> = &'a [u8];
/// Represents a continuous block of memory which is owned.
pub type ByteBuffer = Vec<u8>;

View file

@ -3,9 +3,9 @@
use std::io::{Cursor, Seek, SeekFrom};
use binrw::BinRead;
use binrw::binrw;
use crate::ByteSpan;
use binrw::binrw;
use binrw::BinRead;
#[binrw]
#[allow(dead_code)]
@ -143,9 +143,8 @@ impl ChatLog {
};
// TODO: handle the coloring properly, in some way
entry.message = String::from_utf8_lossy(
&buffer[cursor.position() as usize..next_offset],
)
entry.message =
String::from_utf8_lossy(&buffer[cursor.position() as usize..next_offset])
.to_string();
entries.push(entry);

View file

@ -6,13 +6,16 @@
use std::io::{Cursor, Seek, SeekFrom};
use std::mem::size_of;
use binrw::{binrw, BinWrite, BinWriterExt};
use binrw::BinRead;
use binrw::BinReaderExt;
use binrw::{binrw, BinWrite, BinWriterExt};
use crate::{ByteBuffer, ByteSpan};
use crate::common_file_operations::{read_bool_from, write_bool_as};
use crate::model_vertex_declarations::{vertex_element_parser, VERTEX_ELEMENT_SIZE, vertex_element_writer, VertexDeclaration, VertexType, VertexUsage};
use crate::model_vertex_declarations::{
vertex_element_parser, vertex_element_writer, VertexDeclaration, VertexType, VertexUsage,
VERTEX_ELEMENT_SIZE,
};
use crate::{ByteBuffer, ByteSpan};
pub const NUM_VERTICES: u32 = 17;
@ -126,7 +129,7 @@ pub struct ModelHeader {
unknown7: u16,
unknown8: u16,
#[brw(pad_after = 6)]
unknown9: u16
unknown9: u16,
}
#[binrw]
@ -221,7 +224,7 @@ struct BoneTableV2 {
// align to 4 bytes
// TODO: use br align_to?
#[br(if(bone_count % 2 == 0))]
padding: u16
padding: u16,
}
#[binrw]
@ -243,7 +246,7 @@ struct TerrainShadowMesh {
submesh_index: u16,
submesh_count: u16,
vertex_buffer_stride: u8,
padding: u8
padding: u8,
}
#[binrw]
@ -253,7 +256,7 @@ struct TerrainShadowSubmesh {
index_offset: u32,
index_count: u32,
unknown1: u16,
unknown2: u16
unknown2: u16,
}
#[binrw]
@ -262,7 +265,7 @@ struct TerrainShadowSubmesh {
struct ShapeStruct {
string_offset: u32,
shape_mesh_start_index: [u16; 3],
shape_mesh_count: [u16; 3]
shape_mesh_count: [u16; 3],
}
#[binrw]
@ -271,7 +274,7 @@ struct ShapeStruct {
struct ShapeMesh {
mesh_index_offset: u32,
shape_value_count: u32,
shape_value_offset: u32
shape_value_offset: u32,
}
#[binrw]
@ -279,7 +282,7 @@ struct ShapeMesh {
#[allow(dead_code)]
struct ShapeValue {
base_indices_index: u16,
replacing_vertex_index: u16
replacing_vertex_index: u16,
}
#[binrw]
@ -405,7 +408,7 @@ impl Default for Vertex {
#[repr(C)]
pub struct NewShapeValue {
pub base_index: u32,
pub replacing_vertex: Vertex
pub replacing_vertex: Vertex,
}
#[derive(Clone, Copy)]
@ -413,13 +416,13 @@ pub struct NewShapeValue {
pub struct SubMesh {
submesh_index: usize,
pub index_count: u32,
pub index_offset: u32
pub index_offset: u32,
}
#[derive(Clone)]
pub struct Shape {
pub name: String,
pub morphed_vertices: Vec<Vertex>
pub morphed_vertices: Vec<Vertex>,
}
/// Corresponds to a "Mesh" in an LOD
@ -430,7 +433,7 @@ pub struct Part {
pub indices: Vec<u16>,
pub material_index: u16,
pub submeshes: Vec<SubMesh>,
pub shapes: Vec<Shape>
pub shapes: Vec<Shape>,
}
#[derive(Clone)]
@ -453,7 +456,11 @@ impl MDL {
let mut cursor = Cursor::new(buffer);
let model_file_header = ModelFileHeader::read(&mut cursor).ok()?;
let model = ModelData::read_args(&mut cursor, binrw::args! { file_header: &model_file_header }).ok()?;
let model = ModelData::read_args(
&mut cursor,
binrw::args! { file_header: &model_file_header },
)
.ok()?;
let mut affected_bone_names = vec![];
@ -517,26 +524,32 @@ impl MDL {
.ok()?;
match element.vertex_usage {
VertexUsage::Position => {
match element.vertex_type {
VertexUsage::Position => match element.vertex_type {
VertexType::Single4 => {
vertices[k as usize].position.clone_from_slice(&MDL::read_single4(&mut cursor).unwrap()[0..3]);
vertices[k as usize].position.clone_from_slice(
&MDL::read_single4(&mut cursor).unwrap()[0..3],
);
}
VertexType::Half4 => {
vertices[k as usize].position.clone_from_slice(&MDL::read_half4(&mut cursor).unwrap()[0..3]);
vertices[k as usize].position.clone_from_slice(
&MDL::read_half4(&mut cursor).unwrap()[0..3],
);
}
VertexType::Single3 => {
vertices[k as usize].position = MDL::read_single3(&mut cursor).unwrap();
vertices[k as usize].position =
MDL::read_single3(&mut cursor).unwrap();
}
_ => {
panic!("Unexpected vertex type for position: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for position: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::BlendWeights => {
match element.vertex_type {
},
VertexUsage::BlendWeights => match element.vertex_type {
VertexType::ByteFloat4 => {
vertices[k as usize].bone_weight = MDL::read_byte_float4(&mut cursor).unwrap();
vertices[k as usize].bone_weight =
MDL::read_byte_float4(&mut cursor).unwrap();
}
VertexType::Byte4 => {
let bytes = MDL::read_byte4(&mut cursor).unwrap();
@ -544,7 +557,7 @@ impl MDL {
f32::from(bytes[0]),
f32::from(bytes[1]),
f32::from(bytes[2]),
f32::from(bytes[3])
f32::from(bytes[3]),
];
}
VertexType::UnsignedShort4 => {
@ -553,18 +566,20 @@ impl MDL {
f32::from(bytes[0]),
f32::from(bytes[1]),
f32::from(bytes[2]),
f32::from(bytes[3])
f32::from(bytes[3]),
];
}
_ => {
panic!("Unexpected vertex type for blendweight: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for blendweight: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::BlendIndices => {
match element.vertex_type {
},
VertexUsage::BlendIndices => match element.vertex_type {
VertexType::Byte4 => {
vertices[k as usize].bone_id = MDL::read_byte4(&mut cursor).unwrap();
vertices[k as usize].bone_id =
MDL::read_byte4(&mut cursor).unwrap();
}
VertexType::UnsignedShort4 => {
let shorts = MDL::read_unsigned_short4(&mut cursor).unwrap();
@ -572,29 +587,34 @@ impl MDL {
shorts[0] as u8,
shorts[1] as u8,
shorts[2] as u8,
shorts[3] as u8
shorts[3] as u8,
];
}
_ => {
panic!("Unexpected vertex type for blendindice: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for blendindice: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::Normal => {
match element.vertex_type {
},
VertexUsage::Normal => match element.vertex_type {
VertexType::Half4 => {
vertices[k as usize].normal.clone_from_slice(&MDL::read_half4(&mut cursor).unwrap()[0..3]);
vertices[k as usize].normal.clone_from_slice(
&MDL::read_half4(&mut cursor).unwrap()[0..3],
);
}
VertexType::Single3 => {
vertices[k as usize].normal = MDL::read_single3(&mut cursor).unwrap();
vertices[k as usize].normal =
MDL::read_single3(&mut cursor).unwrap();
}
_ => {
panic!("Unexpected vertex type for normal: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for normal: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::UV => {
match element.vertex_type {
},
VertexUsage::UV => match element.vertex_type {
VertexType::ByteFloat4 => {
let combined = MDL::read_byte_float4(&mut cursor).unwrap();
@ -619,39 +639,48 @@ impl MDL {
vertices[k as usize].uv0.clone_from_slice(&combined[0..2]);
}
_ => {
panic!("Unexpected vertex type for uv: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for uv: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::BiTangent => {
match element.vertex_type {
},
VertexUsage::BiTangent => match element.vertex_type {
VertexType::ByteFloat4 => {
vertices[k as usize].bitangent = MDL::read_tangent(&mut cursor).unwrap();
vertices[k as usize].bitangent =
MDL::read_tangent(&mut cursor).unwrap();
}
_ => {
panic!("Unexpected vertex type for bitangent: {:#?}", element.vertex_type);
}
}
panic!(
"Unexpected vertex type for bitangent: {:#?}",
element.vertex_type
);
}
},
VertexUsage::Tangent => {
match element.vertex_type {
// Used for... terrain..?
VertexType::ByteFloat4 => {}
_ => {
panic!("Unexpected vertex type for tangent: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for tangent: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::Color => {
match element.vertex_type {
VertexUsage::Color => match element.vertex_type {
VertexType::ByteFloat4 => {
vertices[k as usize].color = MDL::read_byte_float4(&mut cursor).unwrap();
vertices[k as usize].color =
MDL::read_byte_float4(&mut cursor).unwrap();
}
_ => {
panic!("Unexpected vertex type for color: {:#?}", element.vertex_type);
}
}
panic!(
"Unexpected vertex type for color: {:#?}",
element.vertex_type
);
}
},
}
}
}
@ -671,12 +700,17 @@ impl MDL {
indices.push(cursor.read_le::<u16>().ok()?);
}
let mut submeshes: Vec<SubMesh> = Vec::with_capacity(model.meshes[j as usize].submesh_count as usize);
let mut submeshes: Vec<SubMesh> =
Vec::with_capacity(model.meshes[j as usize].submesh_count as usize);
for i in 0..model.meshes[j as usize].submesh_count {
submeshes.push(SubMesh {
submesh_index: model.meshes[j as usize].submesh_index as usize + i as usize,
index_count: model.submeshes[model.meshes[j as usize].submesh_index as usize + i as usize].index_count,
index_offset: model.submeshes[model.meshes[j as usize].submesh_index as usize + i as usize].index_offset,
index_count: model.submeshes
[model.meshes[j as usize].submesh_index as usize + i as usize]
.index_count,
index_offset: model.submeshes
[model.meshes[j as usize].submesh_index as usize + i as usize]
.index_offset,
});
}
@ -684,23 +718,45 @@ impl MDL {
for shape in &model.shapes {
// Adapted from https://github.com/xivdev/Penumbra/blob/master/Penumbra/Import/Models/Export/MeshExporter.cs
let affected_shape_mesh: Vec<&ShapeMesh> = model.shape_meshes.iter()
let affected_shape_mesh: Vec<&ShapeMesh> = model
.shape_meshes
.iter()
.skip(shape.shape_mesh_start_index[i as usize] as usize)
.take(shape.shape_mesh_count[i as usize] as usize)
.filter(|shape_mesh| shape_mesh.mesh_index_offset == model.meshes[j as usize].start_index).collect();
.filter(|shape_mesh| {
shape_mesh.mesh_index_offset == model.meshes[j as usize].start_index
})
.collect();
let shape_values: Vec<&ShapeValue> = affected_shape_mesh.iter()
.flat_map(|shape_mesh| model.shape_values.iter().skip(shape_mesh.shape_value_offset as usize).take(shape_mesh.shape_value_count as usize))
.filter(|shape_value| shape_value.base_indices_index >= model.meshes[j as usize].start_index as u16 && shape_value.base_indices_index < (model.meshes[j as usize].start_index + model.meshes[j as usize].index_count) as u16)
let shape_values: Vec<&ShapeValue> = affected_shape_mesh
.iter()
.flat_map(|shape_mesh| {
model
.shape_values
.iter()
.skip(shape_mesh.shape_value_offset as usize)
.take(shape_mesh.shape_value_count as usize)
})
.filter(|shape_value| {
shape_value.base_indices_index
>= model.meshes[j as usize].start_index as u16
&& shape_value.base_indices_index
< (model.meshes[j as usize].start_index
+ model.meshes[j as usize].index_count)
as u16
})
.collect();
let mut morphed_vertices = vec![Vertex::default(); vertices.len()];
if !shape_values.is_empty() {
for shape_value in shape_values {
let old_vertex = vertices[indices[shape_value.base_indices_index as usize] as usize];
let new_vertex = vertices[shape_value.replacing_vertex_index as usize - model.meshes[j as usize].start_index as usize];
let vertex = &mut morphed_vertices[indices[shape_value.base_indices_index as usize] as usize];
let old_vertex =
vertices[indices[shape_value.base_indices_index as usize] as usize];
let new_vertex = vertices[shape_value.replacing_vertex_index as usize
- model.meshes[j as usize].start_index as usize];
let vertex = &mut morphed_vertices
[indices[shape_value.base_indices_index as usize] as usize];
vertex.position[0] = new_vertex.position[0] - old_vertex.position[0];
vertex.position[1] = new_vertex.position[1] - old_vertex.position[1];
@ -719,12 +775,19 @@ impl MDL {
shapes.push(Shape {
name: string,
morphed_vertices
morphed_vertices,
});
}
}
parts.push(Part { mesh_index: j, vertices, indices, material_index, submeshes, shapes });
parts.push(Part {
mesh_index: j,
vertices,
indices,
material_index,
submeshes,
shapes,
});
}
lods.push(Lod { parts });
@@ -735,11 +798,18 @@ impl MDL {
model_data: model,
lods,
affected_bone_names,
material_names
material_names,
})
}
pub fn replace_vertices(&mut self, lod_index: usize, part_index: usize, vertices: &[Vertex], indices: &[u16], submeshes: &[SubMesh]) {
pub fn replace_vertices(
&mut self,
lod_index: usize,
part_index: usize,
vertices: &[Vertex],
indices: &[u16],
submeshes: &[SubMesh],
) {
let part = &mut self.lods[lod_index].parts[part_index];
part.vertices = Vec::from(vertices);
@@ -747,8 +817,10 @@ impl MDL {
for (i, submesh) in part.submeshes.iter().enumerate() {
if i < submeshes.len() {
self.model_data.submeshes[submesh.submesh_index].index_offset = submeshes[i].index_offset;
self.model_data.submeshes[submesh.submesh_index].index_count = submeshes[i].index_count;
self.model_data.submeshes[submesh.submesh_index].index_offset =
submeshes[i].index_offset;
self.model_data.submeshes[submesh.submesh_index].index_count =
submeshes[i].index_count;
}
}
@@ -773,26 +845,38 @@ impl MDL {
self.update_headers();
}
pub fn add_shape_mesh(&mut self, lod_index: usize, shape_index: usize, shape_mesh_index: usize, part_index: usize, shape_values: &[NewShapeValue]) {
pub fn add_shape_mesh(
&mut self,
lod_index: usize,
shape_index: usize,
shape_mesh_index: usize,
part_index: usize,
shape_values: &[NewShapeValue],
) {
let part = &mut self.lods[lod_index].parts[part_index];
// TODO: this is assuming they are added in order
if shape_mesh_index == 0 {
self.model_data.shapes[shape_index].shape_mesh_start_index[lod_index] = self.model_data.shape_meshes.len() as u16;
self.model_data.shapes[shape_index].shape_mesh_start_index[lod_index] =
self.model_data.shape_meshes.len() as u16;
}
self.model_data.shape_meshes.push(ShapeMesh {
mesh_index_offset: self.model_data.meshes[part.mesh_index as usize].start_index,
shape_value_count: shape_values.len() as u32,
shape_value_offset: self.model_data.shape_values.len() as u32
shape_value_offset: self.model_data.shape_values.len() as u32,
});
for shape_value in shape_values {
part.vertices.push(shape_value.replacing_vertex);
self.model_data.shape_values.push(ShapeValue {
base_indices_index: self.model_data.meshes[part.mesh_index as usize].start_index as u16 + shape_value.base_index as u16,
replacing_vertex_index: self.model_data.meshes[part.mesh_index as usize].start_index as u16 + (part.vertices.len() - 1) as u16
base_indices_index: self.model_data.meshes[part.mesh_index as usize].start_index
as u16
+ shape_value.base_index as u16,
replacing_vertex_index: self.model_data.meshes[part.mesh_index as usize].start_index
as u16
+ (part.vertices.len() - 1) as u16,
})
}
@@ -807,15 +891,18 @@ impl MDL {
let mut vertex_offset = 0;
for j in self.model_data.lods[i as usize].mesh_index
..self.model_data.lods[i as usize].mesh_index + self.model_data.lods[i as usize].mesh_count
..self.model_data.lods[i as usize].mesh_index
+ self.model_data.lods[i as usize].mesh_count
{
let mesh = &mut self.model_data.meshes[j as usize];
mesh.start_index = self.model_data.submeshes[mesh.submesh_index as usize].index_offset;
mesh.start_index =
self.model_data.submeshes[mesh.submesh_index as usize].index_offset;
for i in 0..mesh.vertex_stream_count as usize {
mesh.vertex_buffer_offsets[i] = vertex_offset;
vertex_offset += mesh.vertex_count as u32 * mesh.vertex_buffer_strides[i] as u32;
vertex_offset +=
mesh.vertex_count as u32 * mesh.vertex_buffer_strides[i] as u32;
}
}
}
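For reference, the stream-offset layout that the loop above computes can be sketched as a standalone helper — a hypothetical illustration, not part of the commit: stream i begins where stream i-1 ends, i.e. at the running sum of vertex_count * stride over the earlier streams.

// Sketch of the per-stream vertex buffer offsets, mirroring the loop above.
fn stream_offsets(vertex_count: u32, strides: &[u8]) -> Vec<u32> {
    let mut offsets = Vec::with_capacity(strides.len());
    let mut running = 0u32;
    for &stride in strides {
        offsets.push(running);
        running += vertex_count * u32::from(stride);
    }
    offsets
}
// e.g. vertex_count = 100, strides = [16, 8] -> offsets [0, 1600]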
@@ -825,15 +912,14 @@ impl MDL {
let mut total_index_buffer_size = 0;
// still slightly off?
for j in lod.mesh_index
..lod.mesh_index + lod.mesh_count
{
for j in lod.mesh_index..lod.mesh_index + lod.mesh_count {
let vertex_count = self.model_data.meshes[j as usize].vertex_count;
let index_count = self.model_data.meshes[j as usize].index_count;
let mut total_vertex_stride: u32 = 0;
for i in 0..self.model_data.meshes[j as usize].vertex_stream_count as usize {
total_vertex_stride += self.model_data.meshes[j as usize].vertex_buffer_strides[i] as u32;
total_vertex_stride +=
self.model_data.meshes[j as usize].vertex_buffer_strides[i] as u32;
}
total_vertex_buffer_size += vertex_count as u32 * total_vertex_stride;
@@ -912,17 +998,20 @@ impl MDL {
for (l, lod) in self.lods.iter().enumerate() {
for part in lod.parts.iter() {
let declaration = &self.model_data.header.vertex_declarations[part.mesh_index as usize];
let declaration =
&self.model_data.header.vertex_declarations[part.mesh_index as usize];
for (k, vert) in part.vertices.iter().enumerate() {
for element in &declaration.elements {
cursor
.seek(SeekFrom::Start(
(self.model_data.lods[l].vertex_data_offset
+ self.model_data.meshes[part.mesh_index as usize].vertex_buffer_offsets
+ self.model_data.meshes[part.mesh_index as usize]
.vertex_buffer_offsets
[element.stream as usize]
+ element.offset as u32
+ self.model_data.meshes[part.mesh_index as usize].vertex_buffer_strides
+ self.model_data.meshes[part.mesh_index as usize]
.vertex_buffer_strides
[element.stream as usize]
as u32
* k as u32) as u64,
@@ -930,79 +1019,96 @@ impl MDL {
.ok()?;
match element.vertex_usage {
VertexUsage::Position => {
match element.vertex_type {
VertexUsage::Position => match element.vertex_type {
VertexType::Half4 => {
MDL::write_half4(&mut cursor, &MDL::pad_slice(&vert.position, 1.0)).ok()?;
MDL::write_half4(
&mut cursor,
&MDL::pad_slice(&vert.position, 1.0),
)
.ok()?;
}
VertexType::Single3 => {
MDL::write_single3(&mut cursor, &vert.position).ok()?;
}
_ => {
panic!("Unexpected vertex type for position: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for position: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::BlendWeights => {
match element.vertex_type {
},
VertexUsage::BlendWeights => match element.vertex_type {
VertexType::ByteFloat4 => {
MDL::write_byte_float4(&mut cursor, &vert.bone_weight).ok()?;
MDL::write_byte_float4(&mut cursor, &vert.bone_weight)
.ok()?;
}
_ => {
panic!("Unexpected vertex type for blendweight: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for blendweight: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::BlendIndices => {
match element.vertex_type {
},
VertexUsage::BlendIndices => match element.vertex_type {
VertexType::Byte4 => {
MDL::write_byte4(&mut cursor, &vert.bone_id).ok()?;
}
_ => {
panic!("Unexpected vertex type for blendindice: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for blendindice: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::Normal => {
match element.vertex_type {
},
VertexUsage::Normal => match element.vertex_type {
VertexType::Half4 => {
MDL::write_half4(&mut cursor, &MDL::pad_slice(&vert.normal, 0.0)).ok()?;
MDL::write_half4(
&mut cursor,
&MDL::pad_slice(&vert.normal, 0.0),
)
.ok()?;
}
VertexType::Single3 => {
MDL::write_single3(&mut cursor, &vert.normal).ok()?;
}
_ => {
panic!("Unexpected vertex type for normal: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for normal: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::UV => {
match element.vertex_type {
},
VertexUsage::UV => match element.vertex_type {
VertexType::Half4 => {
let combined = [vert.uv0[0], vert.uv0[1], vert.uv1[0], vert.uv1[1]];
let combined =
[vert.uv0[0], vert.uv0[1], vert.uv1[0], vert.uv1[1]];
MDL::write_half4(&mut cursor, &combined).ok()?;
}
VertexType::Single4 => {
let combined = [vert.uv0[0], vert.uv0[1], vert.uv1[0], vert.uv1[1]];
let combined =
[vert.uv0[0], vert.uv0[1], vert.uv1[0], vert.uv1[1]];
MDL::write_single4(&mut cursor, &combined).ok()?;
}
_ => {
panic!("Unexpected vertex type for uv: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for uv: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::BiTangent => {
match element.vertex_type {
},
VertexUsage::BiTangent => match element.vertex_type {
VertexType::ByteFloat4 => {
MDL::write_tangent(&mut cursor, &vert.bitangent).ok()?;
}
_ => {
panic!("Unexpected vertex type for bitangent: {:#?}", element.vertex_type);
}
}
panic!(
"Unexpected vertex type for bitangent: {:#?}",
element.vertex_type
);
}
},
VertexUsage::Tangent => {
#[allow(clippy::match_single_binding)] // TODO
match element.vertex_type {
@@ -1010,20 +1116,24 @@ impl MDL {
MDL::write_tangent(&mut cursor, &vert.binormal).ok()?;
}*/
_ => {
panic!("Unexpected vertex type for tangent: {:#?}", element.vertex_type);
panic!(
"Unexpected vertex type for tangent: {:#?}",
element.vertex_type
);
}
}
}
VertexUsage::Color => {
match element.vertex_type {
VertexUsage::Color => match element.vertex_type {
VertexType::ByteFloat4 => {
MDL::write_byte_float4(&mut cursor, &vert.color).ok()?;
}
_ => {
panic!("Unexpected vertex type for color: {:#?}", element.vertex_type);
}
}
panic!(
"Unexpected vertex type for color: {:#?}",
element.vertex_type
);
}
},
}
}
}
@@ -1031,8 +1141,8 @@ impl MDL {
cursor
.seek(SeekFrom::Start(
(self.file_header.index_offsets[l]
+ (self.model_data.meshes[part.mesh_index as usize].start_index * size_of::<u16>() as u32))
as u64,
+ (self.model_data.meshes[part.mesh_index as usize].start_index
* size_of::<u16>() as u32)) as u64,
))
.ok()?;
@@ -1166,7 +1276,13 @@ mod tests {
for l in 0..old_mdl.lods.len() {
for p in 0..old_mdl.lods[l].parts.len() {
mdl.replace_vertices(l, p, &old_mdl.lods[l].parts[p].vertices, &old_mdl.lods[l].parts[p].indices, &old_mdl.lods[l].parts[p].submeshes);
mdl.replace_vertices(
l,
p,
&old_mdl.lods[l].parts[p].vertices,
&old_mdl.lods[l].parts[p].indices,
&old_mdl.lods[l].parts[p].submeshes,
);
}
}
@@ -1186,9 +1302,15 @@ mod tests {
// file header
assert_eq!(mdl.file_header.version, 16777221);
assert_eq!(mdl.file_header.stack_size, 816);
assert_eq!(mdl.file_header.stack_size, mdl.file_header.calculate_stack_size());
assert_eq!(
mdl.file_header.stack_size,
mdl.file_header.calculate_stack_size()
);
assert_eq!(mdl.file_header.runtime_size, 12544);
assert_eq!(mdl.file_header.runtime_size, mdl.model_data.calculate_runtime_size());
assert_eq!(
mdl.file_header.runtime_size,
mdl.model_data.calculate_runtime_size()
);
assert_eq!(mdl.file_header.vertex_declaration_count, 6);
assert_eq!(mdl.file_header.material_count, 2);
assert_eq!(mdl.file_header.lod_count, 3);

View file

@@ -1,11 +1,11 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use std::io::Cursor;
use crate::model::MDL;
use crate::ByteSpan;
use binrw::{BinReaderExt, BinResult, BinWriterExt};
use half::f16;
use crate::ByteSpan;
use crate::model::MDL;
use std::io::Cursor;
/// Maximum value of byte, used to divide and multiply floats in that space [0.0..1.0] to [0..255]
const MAX_BYTE_FLOAT: f32 = u8::MAX as f32;
@@ -16,16 +16,20 @@ impl MDL {
(f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
(f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
(f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
(f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT)
(f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
])
}
pub(crate) fn write_byte_float4<T: BinWriterExt>(cursor: &mut T, vec: &[f32; 4]) -> BinResult<()> {
pub(crate) fn write_byte_float4<T: BinWriterExt>(
cursor: &mut T,
vec: &[f32; 4],
) -> BinResult<()> {
cursor.write_le::<[u8; 4]>(&[
(vec[0] * MAX_BYTE_FLOAT).round() as u8,
(vec[1] * MAX_BYTE_FLOAT).round() as u8,
(vec[2] * MAX_BYTE_FLOAT).round() as u8,
(vec[3] * MAX_BYTE_FLOAT).round() as u8])
(vec[3] * MAX_BYTE_FLOAT).round() as u8,
])
}
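As a side note, the read/write pair above is a plain fixed-point mapping between [0.0, 1.0] and [0, 255]; a minimal round-trip sketch of that mapping (illustrative names, not part of the diff):

// Sketch of the unsigned byte-float mapping used by read/write_byte_float4.
fn byte_to_float(b: u8) -> f32 {
    f32::from(b) / 255.0
}
fn float_to_byte(f: f32) -> u8 {
    (f * 255.0).round() as u8
}
// float_to_byte(byte_to_float(b)) == b for every byte value.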
pub(crate) fn read_tangent(cursor: &mut Cursor<ByteSpan>) -> Option<[f32; 4]> {
@@ -33,7 +37,11 @@ impl MDL {
(f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0),
(f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0),
(f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0),
if (f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0) == 1.0 { 1.0 } else { -1.0 }
if (f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0) == 1.0 {
1.0
} else {
-1.0
},
])
}
@@ -42,7 +50,8 @@ impl MDL {
((vec[0] + 1.0) * (MAX_BYTE_FLOAT / 2.0)).round() as u8,
((vec[1] + 1.0) * (MAX_BYTE_FLOAT / 2.0)).round() as u8,
((vec[2] + 1.0) * (MAX_BYTE_FLOAT / 2.0)).round() as u8,
if vec[3] > 0.0 { 255 } else { 0 }]) // SqEx uses 0 as -1, not 1
if vec[3] > 0.0 { 255 } else { 0 },
]) // SqEx uses 0 as -1, not 1
}
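The tangent encoding above maps each component from [-1.0, 1.0] into a byte, with the w component collapsed to a sign flag (0 stands for -1, 255 for +1, per the comment). A hedged sketch of that round trip, with illustrative names:

// Sketch of the signed tangent mapping: [-1.0, 1.0] <-> [0, 255].
fn encode_tangent_component(f: f32) -> u8 {
    ((f + 1.0) * (255.0 / 2.0)).round() as u8
}
fn decode_tangent_component(b: u8) -> f32 {
    f32::from(b) * 2.0 / 255.0 - 1.0
}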
pub(crate) fn read_half4(cursor: &mut Cursor<ByteSpan>) -> Option<[f32; 4]> {
@@ -50,7 +59,7 @@ impl MDL {
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32()
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
])
}
@@ -59,13 +68,14 @@ impl MDL {
f16::from_f32(vec[0]).to_bits(),
f16::from_f32(vec[1]).to_bits(),
f16::from_f32(vec[2]).to_bits(),
f16::from_f32(vec[3]).to_bits()])
f16::from_f32(vec[3]).to_bits(),
])
}
pub(crate) fn read_half2(cursor: &mut Cursor<ByteSpan>) -> Option<[f32; 2]> {
Some([
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32()
f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
])
}
@@ -73,7 +83,8 @@ impl MDL {
pub(crate) fn write_half2<T: BinWriterExt>(cursor: &mut T, vec: &[f32; 2]) -> BinResult<()> {
cursor.write_le::<[u16; 2]>(&[
f16::from_f32(vec[0]).to_bits(),
f16::from_f32(vec[1]).to_bits()])
f16::from_f32(vec[1]).to_bits(),
])
}
pub(crate) fn read_byte4(cursor: &mut Cursor<ByteSpan>) -> BinResult<[u8; 4]> {
@@ -113,15 +124,17 @@ impl MDL {
#[cfg(test)]
mod tests {
use std::io::Cursor;
use crate::model::MDL;
use std::io::Cursor;
macro_rules! assert_delta {
($x:expr, $y:expr, $d:expr) => {
for i in 0..4 {
if !($x[i] - $y[i] < $d || $y[i] - $x[i] < $d) { panic!(); }
if !($x[i] - $y[i] < $d || $y[i] - $x[i] < $d) {
panic!();
}
}
};
}
#[test]

View file

@@ -1,9 +1,9 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use std::io::SeekFrom;
use binrw::{BinRead, BinResult, binrw, BinWrite};
use crate::model::NUM_VERTICES;
use binrw::{binrw, BinRead, BinResult, BinWrite};
use std::io::SeekFrom;
/// Marker for end of stream (0xFF)
const END_OF_STREAM: u8 = 0xFF;
@@ -46,7 +46,7 @@ pub enum VertexType {
/// 2 16-bit unsigned integers
UnsignedShort2 = 16,
/// 4 16-bit unsigned integers
UnsignedShort4 = 17
UnsignedShort4 = 17,
}
/// What the vertex stream is used for.
@@ -90,10 +90,7 @@ pub struct VertexDeclaration {
#[binrw::parser(reader, endian)]
pub(crate) fn vertex_element_parser(count: u16) -> BinResult<Vec<VertexDeclaration>> {
let mut vertex_declarations: Vec<VertexDeclaration> =
vec![
VertexDeclaration { elements: vec![] };
count.into()
];
vec![VertexDeclaration { elements: vec![] }; count.into()];
for declaration in &mut vertex_declarations {
let mut element = VertexElement::read_options(reader, endian, ())?;
@@ -115,9 +112,7 @@ pub(crate) fn vertex_element_parser(count: u16) -> BinResult<Vec<VertexDeclarati
}
#[binrw::writer(writer, endian)]
pub(crate) fn vertex_element_writer(
declarations: &Vec<VertexDeclaration>,
) -> BinResult<()> {
pub(crate) fn vertex_element_writer(declarations: &Vec<VertexDeclaration>) -> BinResult<()> {
// write vertex declarations
for declaration in declarations {
for element in &declaration.elements {
@@ -129,8 +124,9 @@ pub(crate) fn vertex_element_writer(
offset: 0,
vertex_type: VertexType::Single1,
vertex_usage: VertexUsage::Position,
usage_index: 0
}.write_options(writer, endian, ())?;
usage_index: 0,
}
.write_options(writer, endian, ())?;
let to_seek = (NUM_VERTICES as usize - 1 - declaration.elements.len()) * 8;
writer.seek(SeekFrom::Current(to_seek as i64))?;
@@ -138,4 +134,3 @@ pub(crate) fn vertex_element_writer(
Ok(())
}

View file

@@ -5,8 +5,8 @@
use std::io::Cursor;
use binrw::{BinRead, binrw};
use crate::ByteSpan;
use binrw::{binrw, BinRead};
#[binrw]
#[derive(Debug)]
@@ -79,8 +79,7 @@ struct Constant {
// from https://github.com/NotAdam/Lumina/blob/master/src/Lumina/Data/Parsing/MtrlStructs.cs
#[binrw]
#[derive(Debug)]
enum TextureUsage
{
enum TextureUsage {
#[brw(magic = 0x88408C04u32)]
Sampler,
#[brw(magic = 0x213CB439u32)]
@@ -123,7 +122,7 @@ enum TextureUsage
SamplerWhitecapMap,
#[brw(magic = 0x565f8fd8u32)]
UnknownDawntrail1
UnknownDawntrail1,
}
#[binrw]
@@ -183,7 +182,7 @@ struct MaterialData {
pub struct Material {
pub shader_package_name: String,
pub texture_paths: Vec<String>,
pub shader_keys: Vec<ShaderKey>
pub shader_keys: Vec<ShaderKey>,
}
impl Material {
@@ -224,7 +223,7 @@ impl Material {
Some(Material {
shader_package_name,
texture_paths,
shader_keys: mat_data.shader_keys
shader_keys: mat_data.shader_keys,
})
}
}

View file

@@ -12,8 +12,8 @@ use binrw::BinRead;
use tracing::{debug, warn};
use crate::common::{get_platform_string, Platform, Region};
use crate::sqpack::read_data_block_patch;
use crate::common_file_operations::read_bool_from;
use crate::sqpack::read_data_block_patch;
#[binread]
#[derive(Debug)]
@@ -148,7 +148,7 @@ enum SqpkOperation {
#[br(magic = b'T')]
TargetInfo(SqpkTargetInfo),
#[br(magic = b'I')]
Index(SqpkIndex)
Index(SqpkIndex),
}
#[derive(BinRead, PartialEq, Debug)]
@@ -301,7 +301,7 @@ struct SqpkIndex {
block_offset: u32,
#[br(pad_after = 8)] // data?
block_number: u32
block_number: u32,
}
#[derive(BinRead, PartialEq, Debug)]
@@ -463,8 +463,11 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
let (left, _) = filename.rsplit_once('/').unwrap();
fs::create_dir_all(left)?;
let mut new_file =
OpenOptions::new().write(true).create(true).truncate(false).open(filename)?;
let mut new_file = OpenOptions::new()
.write(true)
.create(true)
.truncate(false)
.open(filename)?;
new_file.seek(SeekFrom::Start(add.block_offset as u64))?;
@@ -480,8 +483,11 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
delete.file_id,
);
let new_file =
OpenOptions::new().write(true).create(true).truncate(false).open(filename)?;
let new_file = OpenOptions::new()
.write(true)
.create(true)
.truncate(false)
.open(filename)?;
write_empty_file_block_at(
&new_file,
@@ -500,8 +506,11 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
let (left, _) = filename.rsplit_once('/').unwrap();
fs::create_dir_all(left)?;
let new_file =
OpenOptions::new().write(true).create(true).truncate(false).open(filename)?;
let new_file = OpenOptions::new()
.write(true)
.create(true)
.truncate(false)
.open(filename)?;
write_empty_file_block_at(
&new_file,

View file

@@ -3,9 +3,9 @@
use std::io::{Cursor, Seek, SeekFrom};
use binrw::{BinRead, BinReaderExt};
use binrw::binrw;
use crate::ByteSpan;
use binrw::binrw;
use binrw::{BinRead, BinReaderExt};
#[binrw]
#[derive(Debug)]
@@ -14,7 +14,7 @@ struct PreBoneDeformerItem {
body_id: u16,
link_index: u16,
#[br(pad_after = 4)]
data_offset: u32
data_offset: u32,
}
#[binrw]
@@ -39,7 +39,7 @@ struct PreBoneDeformerHeader {
links: Vec<PreBoneDeformerLink>,
#[br(ignore)]
raw_data: Vec<u8>
raw_data: Vec<u8>,
}
pub struct PreBoneDeformer {
@@ -51,13 +51,13 @@ pub struct PreBoneDeformBone {
/// Name of the affected bone
pub name: String,
/// The deform matrix
pub deform: [f32; 12]
pub deform: [f32; 12],
}
#[derive(Debug)]
pub struct PreBoneDeformMatrices {
/// The prebone deform bones
pub bones: Vec<PreBoneDeformBone>
pub bones: Vec<PreBoneDeformBone>,
}
impl PreBoneDeformer {
@@ -68,18 +68,24 @@ impl PreBoneDeformer {
header.raw_data = buffer.to_vec();
Some(PreBoneDeformer {
header
})
Some(PreBoneDeformer { header })
}
/// Calculates the deform matrices between two races
pub fn get_deform_matrices(&self, from_body_id: u16, to_body_id: u16) -> Option<PreBoneDeformMatrices> {
pub fn get_deform_matrices(
&self,
from_body_id: u16,
to_body_id: u16,
) -> Option<PreBoneDeformMatrices> {
if from_body_id == to_body_id {
return None;
}
let mut item = self.header.items.iter().find(|x| x.body_id == from_body_id)?;
let mut item = self
.header
.items
.iter()
.find(|x| x.body_id == from_body_id)?;
let mut next = &self.header.links[item.link_index as usize];
if next.next_index == -1 {
@@ -96,7 +102,9 @@ impl PreBoneDeformer {
let string_offsets_base = item.data_offset as usize + core::mem::size_of::<u32>();
cursor.seek(SeekFrom::Start(string_offsets_base as u64)).ok()?;
cursor
.seek(SeekFrom::Start(string_offsets_base as u64))
.ok()?;
let mut strings_offset = vec![];
for _ in 0..bone_name_count {
strings_offset.push(cursor.read_le::<u16>().unwrap());
@@ -125,7 +133,7 @@ impl PreBoneDeformer {
let matrix = matrices[i];
bones.push(PreBoneDeformBone {
name: string,
deform: matrix
deform: matrix,
});
}
@@ -137,9 +145,7 @@ impl PreBoneDeformer {
}
}
Some(PreBoneDeformMatrices {
bones
})
Some(PreBoneDeformMatrices { bones })
}
}
@@ -160,4 +166,3 @@ mod tests {
PreBoneDeformer::from_existing(&read(d).unwrap());
}
}

View file

@@ -51,65 +51,47 @@ pub enum Race {
pub fn get_race_id(race: Race, subrace: Subrace, gender: Gender) -> Option<i32> {
// TODO: should we check for invalid subraces like the Hyur branch does?
match race {
Race::Hyur => {
match subrace {
Subrace::Midlander => {
match gender {
Race::Hyur => match subrace {
Subrace::Midlander => match gender {
Gender::Male => Some(101),
Gender::Female => Some(201)
}
}
Subrace::Highlander => {
match gender {
Gender::Female => Some(201),
},
Subrace::Highlander => match gender {
Gender::Male => Some(301),
Gender::Female => Some(401)
}
}
_ => None
}
}
Race::Elezen => {
match gender {
Gender::Female => Some(401),
},
_ => None,
},
Race::Elezen => match gender {
Gender::Male => Some(501),
Gender::Female => Some(601)
}
}
Race::Lalafell => {
match gender {
Gender::Female => Some(601),
},
Race::Lalafell => match gender {
Gender::Male => Some(501),
Gender::Female => Some(601)
}
}
Race::Miqote => {
match gender {
Gender::Female => Some(601),
},
Race::Miqote => match gender {
Gender::Male => Some(701),
Gender::Female => Some(801)
}
}
Race::Roegadyn => {
match gender {
Gender::Female => Some(801),
},
Race::Roegadyn => match gender {
Gender::Male => Some(901),
Gender::Female => Some(1001)
}
}
Race::AuRa => {
match gender {
Gender::Female => Some(1001),
},
Race::AuRa => match gender {
Gender::Male => Some(1301),
Gender::Female => Some(1401)
}
}
Gender::Female => Some(1401),
},
Race::Hrothgar => {
match gender {
Gender::Male => Some(1501),
Gender::Female => Some(1601) // TODO: is this accurate as of dawntrail?
Gender::Female => Some(1601), // TODO: is this accurate as of dawntrail?
}
}
Race::Viera => {
match gender {
Race::Viera => match gender {
Gender::Male => Some(1701),
Gender::Female => Some(1801)
}
}
Gender::Female => Some(1801),
},
}
}
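Going by the match arms shown above, a usage sketch for get_race_id — the values are read directly off the arms, and the snippet assumes the physis race types are in scope:

// Usage sketch (values taken from the match above).
fn race_id_examples() {
    assert_eq!(get_race_id(Race::Hyur, Subrace::Midlander, Gender::Male), Some(101));
    assert_eq!(get_race_id(Race::Hyur, Subrace::Midlander, Gender::Female), Some(201));
    // A Hyur with a non-Hyur subrace falls through to None.
    assert_eq!(get_race_id(Race::Hyur, Subrace::Raen, Gender::Male), None);
}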
@@ -131,7 +113,7 @@ pub fn get_supported_subraces(race: Race) -> [Subrace; 2] {
Race::Roegadyn => [Subrace::SeaWolf, Subrace::Hellsguard],
Race::AuRa => [Subrace::Raen, Subrace::Xaela],
Race::Hrothgar => [Subrace::Hellion, Subrace::Lost],
Race::Viera => [Subrace::Raen, Subrace::Veena]
Race::Viera => [Subrace::Raen, Subrace::Veena],
}
}

View file

@@ -5,7 +5,7 @@ use std::cmp::Ordering;
use std::cmp::Ordering::{Greater, Less};
use std::path::{Path, PathBuf};
use crate::common::{get_platform_string, Platform, read_version};
use crate::common::{get_platform_string, read_version, Platform};
use crate::repository::RepositoryType::{Base, Expansion};
/// The type of repository, discerning game data from expansion data.
@@ -182,10 +182,7 @@ impl Repository {
/// Calculate an index2 filename for a specific category, like _"0a0000.win32.index"_.
pub fn index2_filename(&self, category: Category) -> String {
format!(
"{}2",
self.index_filename(category)
)
format!("{}2", self.index_filename(category))
}
/// Calculate a dat filename given a category and a data file id, returns something like _"0a0000.win32.dat0"_.
@@ -210,8 +207,8 @@ impl Repository {
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use crate::common::Platform;
use std::path::PathBuf;
use super::*;
@@ -248,7 +245,10 @@ mod tests {
assert_eq!(repo.index_filename(Category::Music), "0c0000.win32.index");
assert_eq!(repo.index2_filename(Category::Music), "0c0000.win32.index2");
assert_eq!(repo.dat_filename(Category::GameScript, 1), "0b0000.win32.dat1");
assert_eq!(
repo.dat_filename(Category::GameScript, 1),
"0b0000.win32.dat1"
);
}
// TODO: We need to check if these console filenames are actually correct
@@ -263,7 +263,10 @@ mod tests {
assert_eq!(repo.index_filename(Category::Music), "0c0000.ps3.index");
assert_eq!(repo.index2_filename(Category::Music), "0c0000.ps3.index2");
assert_eq!(repo.dat_filename(Category::GameScript, 1), "0b0000.ps3.dat1");
assert_eq!(
repo.dat_filename(Category::GameScript, 1),
"0b0000.ps3.dat1"
);
}
#[test]
@@ -277,6 +280,9 @@ mod tests {
assert_eq!(repo.index_filename(Category::Music), "0c0000.ps4.index");
assert_eq!(repo.index2_filename(Category::Music), "0c0000.ps4.index2");
assert_eq!(repo.dat_filename(Category::GameScript, 1), "0b0000.ps4.dat1");
assert_eq!(
repo.dat_filename(Category::GameScript, 1),
"0b0000.ps4.dat1"
);
}
}

View file

@@ -3,8 +3,8 @@
use std::io::{Cursor, SeekFrom};
use binrw::{BinRead, binread};
use crate::ByteSpan;
use binrw::{binread, BinRead};
#[binread]
#[br(little, import {
@@ -24,7 +24,7 @@ pub struct ResourceParameter {
#[br(seek_before = SeekFrom::Start(strings_offset as u64 + local_string_offset as u64))]
#[br(count = string_length, map = | x: Vec<u8> | String::from_utf8(x).unwrap().trim_matches(char::from(0)).to_string())]
#[br(restore_position)]
pub name: String
pub name: String,
}
#[binread]
@@ -55,7 +55,7 @@ pub struct Shader {
#[br(seek_before = SeekFrom::Start(shader_data_offset as u64 + data_offset as u64))]
#[br(count = data_size)]
#[br(restore_position)]
pub bytecode: Vec<u8>
pub bytecode: Vec<u8>,
}
#[binread]
@@ -64,7 +64,7 @@ pub struct Shader {
pub struct MaterialParameter {
id: u32,
byte_offset: u16,
byte_size: u16
byte_size: u16,
}
#[binread]
@@ -72,7 +72,7 @@ pub struct MaterialParameter {
#[allow(unused)]
pub struct Key {
id: u32,
default_value: u32
default_value: u32,
}
#[binread]
@@ -82,7 +82,7 @@ pub struct Key {
pub struct Pass {
id: u32,
vertex_shader: u32,
pixel_shader: u32
pixel_shader: u32,
}
#[binread]
@@ -90,7 +90,7 @@ pub struct Pass {
#[allow(unused)]
pub struct NodeAlias {
selector: u32,
node: u32
node: u32,
}
#[binread]
@@ -115,7 +115,7 @@ pub struct Node {
#[br(count = subview_key_count)]
pub subview_keys: Vec<u32>,
#[br(count = pass_count, err_context("system_key_count = {}", material_key_count))]
pub passes: Vec<Pass>
pub passes: Vec<Pass>,
}
#[binread]
@@ -183,7 +183,7 @@ pub struct ShaderPackage {
node_selectors: Vec<(u32, u32)>,
#[br(count = node_alias_count)]
node_aliases: Vec<NodeAlias>
node_aliases: Vec<NodeAlias>,
}
impl ShaderPackage {
@@ -215,11 +215,26 @@ impl ShaderPackage {
None
}
pub fn build_selector_from_all_keys(system_keys: &[u32], scene_keys: &[u32], material_keys: &[u32], subview_keys: &[u32]) -> u32 {
Self::build_selector_from_keys(Self::build_selector(system_keys), Self::build_selector(scene_keys), Self::build_selector(material_keys), Self::build_selector(subview_keys))
pub fn build_selector_from_all_keys(
system_keys: &[u32],
scene_keys: &[u32],
material_keys: &[u32],
subview_keys: &[u32],
) -> u32 {
Self::build_selector_from_keys(
Self::build_selector(system_keys),
Self::build_selector(scene_keys),
Self::build_selector(material_keys),
Self::build_selector(subview_keys),
)
}
pub fn build_selector_from_keys(system_key: u32, scene_key: u32, material_key: u32, subview_key: u32) -> u32 {
pub fn build_selector_from_keys(
system_key: u32,
scene_key: u32,
material_key: u32,
subview_key: u32,
) -> u32 {
Self::build_selector(&[system_key, scene_key, material_key, subview_key])
}
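For context, the two helpers above simply compose: a caller can collapse per-category key lists into a single selector in one call. A usage sketch, assuming ShaderPackage is in scope; the wrapper name is hypothetical:

// Usage sketch: one selector from the four key groups.
fn selector_for(system: &[u32], scene: &[u32], material: &[u32], subview: &[u32]) -> u32 {
    ShaderPackage::build_selector_from_all_keys(system, scene, material, subview)
}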

View file

@@ -5,9 +5,9 @@
#![allow(clippy::needless_late_init)]
#![allow(clippy::upper_case_acronyms)]
use std::io::{Cursor, SeekFrom};
use binrw::{binread, BinRead};
use binrw::helpers::until_eof;
use binrw::{binread, BinRead};
use std::io::{Cursor, SeekFrom};
use crate::havok::{HavokAnimationContainer, HavokBinaryTagFileReader};
use crate::ByteSpan;
@@ -32,7 +32,7 @@ struct SklbV2 {
body_id: u32,
mapper_body_id1: u32,
mapper_body_id2: u32,
mapper_body_id3: u32
mapper_body_id3: u32,
}
#[binread]
@@ -49,7 +49,7 @@ struct SKLB {
#[br(seek_before(SeekFrom::Start(if (version == 0x3132_3030u32) { sklb_v1.as_ref().unwrap().havok_offset as u64 } else { sklb_v2.as_ref().unwrap().havok_offset as u64 })))]
#[br(parse_with = until_eof)]
raw_data: Vec<u8>
raw_data: Vec<u8>,
}
#[derive(Debug)]
@@ -92,9 +92,17 @@ impl Skeleton {
skeleton.bones.push(Bone {
name: bone.clone(),
parent_index: havok_skeleton.parent_indices[index] as i32,
position: [havok_skeleton.reference_pose[index].translation[0], havok_skeleton.reference_pose[index].translation[1], havok_skeleton.reference_pose[index].translation[2]],
position: [
havok_skeleton.reference_pose[index].translation[0],
havok_skeleton.reference_pose[index].translation[1],
havok_skeleton.reference_pose[index].translation[2],
],
rotation: havok_skeleton.reference_pose[index].rotation,
scale: [havok_skeleton.reference_pose[index].scale[0], havok_skeleton.reference_pose[index].scale[1], havok_skeleton.reference_pose[index].scale[2]],
scale: [
havok_skeleton.reference_pose[index].scale[0],
havok_skeleton.reference_pose[index].scale[1],
havok_skeleton.reference_pose[index].scale[2],
],
});
}

View file

@@ -3,16 +3,16 @@
use std::io::Cursor;
use binrw::BinRead;
use binrw::binrw;
use crate::ByteSpan;
use binrw::binrw;
use binrw::BinRead;
#[binrw]
#[derive(Debug, Clone, Copy)]
#[brw(little)]
struct PlatePosition {
x: i16,
y: i16
y: i16,
}
#[binrw]
@@ -30,18 +30,18 @@ struct TerrainHeader {
padding: Vec<u8>,
#[br(count = plate_count)]
positions: Vec<PlatePosition>
positions: Vec<PlatePosition>,
}
#[derive(Debug)]
pub struct PlateModel {
pub position: (f32, f32),
pub filename: String
pub filename: String,
}
#[derive(Debug)]
pub struct Terrain {
pub plates: Vec<PlateModel>
pub plates: Vec<PlateModel>,
}
impl Terrain {
@@ -54,15 +54,15 @@ impl Terrain {
for i in 0..header.plate_count {
plates.push(PlateModel {
position: (header.plate_size as f32 * (header.positions[i as usize].x as f32 + 0.5),
header.plate_size as f32 * (header.positions[i as usize].y as f32 + 0.5)),
filename: format!("{:04}.mdl", i)
position: (
header.plate_size as f32 * (header.positions[i as usize].x as f32 + 0.5),
header.plate_size as f32 * (header.positions[i as usize].y as f32 + 0.5),
),
filename: format!("{:04}.mdl", i),
})
}
Some(Terrain {
plates
})
Some(Terrain { plates })
}
}
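The plate placement arithmetic above centers each plate on its grid cell: world position is plate_size * (grid coordinate + 0.5) on each axis. A worked sketch with assumed values (names and numbers are illustrative, not from the file):

// Sketch of the plate-center computation used above.
fn plate_center(plate_size: u32, x: i16, y: i16) -> (f32, f32) {
    (
        plate_size as f32 * (x as f32 + 0.5),
        plate_size as f32 * (y as f32 + 0.5),
    )
}
// e.g. plate_size = 128, (x, y) = (-1, 0) -> (-64.0, 64.0)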

View file

@@ -5,11 +5,11 @@
use std::io::{Cursor, Read, Seek, SeekFrom};
use binrw::BinRead;
use crate::ByteSpan;
use binrw::binrw;
use binrw::BinRead;
use bitflags::bitflags;
use texture2ddecoder::{decode_bc1, decode_bc3, decode_bc5};
use crate::ByteSpan;
// Attributes and Format are adapted from Lumina (https://github.com/NotAdam/Lumina/blob/master/src/Lumina/Data/Files/TexFile.cs)
bitflags! {
@@ -94,7 +94,7 @@ impl Texture {
let mut src = vec![0u8; buffer.len() - std::mem::size_of::<TexHeader>()];
cursor.read_exact(src.as_mut_slice()).ok()?;
let mut dst : Vec<u8>;
let mut dst: Vec<u8>;
match header.format {
TextureFormat::B4G4R4A4 => {
@@ -107,7 +107,7 @@ impl Texture {
let short: u16 = ((src[offset] as u16) << 8) | src[offset + 1] as u16;
let src_b = short & 0xF;
let src_g= (short >> 4) & 0xF;
let src_g = (short >> 4) & 0xF;
let src_r = (short >> 8) & 0xF;
let src_a = (short >> 12) & 0xF;
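The B4G4R4A4 branch above pulls four 4-bit channels out of each 16-bit texel; the scaling applied downstream is cut off by the hunk boundary, so here is a hedged sketch of one common way to widen the channels to 8 bits (nibble repetition, so 0xF maps to 0xFF):

// Sketch: unpack one B4G4R4A4 texel into 8-bit RGBA channels.
fn unpack_b4g4r4a4(texel: u16) -> [u8; 4] {
    let widen = |v: u16| -> u8 { ((v << 4) | v) as u8 };
    [
        widen((texel >> 8) & 0xF),  // r
        widen((texel >> 4) & 0xF),  // g
        widen(texel & 0xF),         // b
        widen((texel >> 12) & 0xF), // a
    ]
}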
@@ -124,13 +124,28 @@ impl Texture {
dst = src; // TODO: not correct, of course
}
TextureFormat::BC1 => {
dst = Texture::decode(&src, header.width as usize, header.height as usize, decode_bc1);
dst = Texture::decode(
&src,
header.width as usize,
header.height as usize,
decode_bc1,
);
}
TextureFormat::BC3 => {
dst = Texture::decode(&src, header.width as usize, header.height as usize, decode_bc3);
dst = Texture::decode(
&src,
header.width as usize,
header.height as usize,
decode_bc3,
);
}
TextureFormat::BC5 => {
dst = Texture::decode(&src, header.width as usize, header.height as usize, decode_bc5);
dst = Texture::decode(
&src,
header.width as usize,
header.height as usize,
decode_bc5,
);
}
}
@@ -143,13 +158,7 @@ impl Texture {
fn decode(src: &[u8], width: usize, height: usize, decode_func: DecodeFunction) -> Vec<u8> {
let mut image: Vec<u32> = vec![0; width * height];
decode_func(
src,
width,
height,
&mut image,
)
.unwrap();
decode_func(src, width, height, &mut image).unwrap();
image
.iter()

View file

@@ -1,17 +1,17 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use hmac_sha512::Hash;
use physis::patch::apply_patch;
use std::env;
use std::fs::{read, read_dir};
use std::process::Command;
use hmac_sha512::Hash;
use physis::patch::apply_patch;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use physis::common::Platform;
use physis::fiin::FileInfo;
use physis::index;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
#[test]
#[cfg_attr(not(feature = "retail_game_testing"), ignore)]
@@ -28,8 +28,11 @@ fn test_index_read() {
fn test_gamedata_extract() {
let game_dir = env::var("FFXIV_GAME_DIR").unwrap();
let mut gamedata =
physis::gamedata::GameData::from_existing(Platform::Win32, format!("{}/game", game_dir).as_str()).unwrap();
let mut gamedata = physis::gamedata::GameData::from_existing(
Platform::Win32,
format!("{}/game", game_dir).as_str(),
)
.unwrap();
assert!(gamedata.extract("exd/root.exl").is_some());
}
@@ -70,22 +73,31 @@ fn make_temp_install_dir(name: &str) -> String {
// Shamelessly taken from https://stackoverflow.com/a/76820878
fn recurse(path: impl AsRef<Path>) -> Vec<PathBuf> {
let Ok(entries) = read_dir(path) else { return vec![] };
entries.flatten().flat_map(|entry| {
let Ok(meta) = entry.metadata() else { return vec![] };
if meta.is_dir() { return recurse(entry.path()); }
if meta.is_file() { return vec![entry.path()]; }
let Ok(entries) = read_dir(path) else {
return vec![];
};
entries
.flatten()
.flat_map(|entry| {
let Ok(meta) = entry.metadata() else {
return vec![];
};
if meta.is_dir() {
return recurse(entry.path());
}
if meta.is_file() {
return vec![entry.path()];
}
vec![]
}).collect()
})
.collect()
}
#[cfg(feature = "patch_testing")]
fn fill_dir_hash(game_dir: &str) -> HashMap<String, [u8; 64]> {
let mut file_hashes: HashMap<String, [u8; 64]> = HashMap::new();
recurse(game_dir)
.into_iter()
.for_each(|x| {
recurse(game_dir).into_iter().for_each(|x| {
let path = x.as_path();
let file = std::fs::read(path).unwrap();