Mirror of https://github.com/redstrate/Physis.git

Run cargo fmt

Joshua Goins 2024-04-20 13:18:03 -04:00
parent 6e50f03cd9
commit d5b3b8a468
43 changed files with 1160 additions and 758 deletions
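Every hunk below is mechanical rustfmt output; no behavior changes. As a minimal sketch, assuming the stock cargo subcommands rather than anything recorded in this commit, a change like this is produced and then verified with:

    cargo fmt             # rewrite all workspace sources in place using rustfmt's defaults
    cargo fmt -- --check  # print the pending formatting diff and exit non-zero (handy for CI)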

View file

@@ -8,7 +8,4 @@ fn bench_calculate_hash() {
     IndexFile::calculate_hash("exd/root.exl");
 }

-brunch::benches!(
-    Bench::new("hash c alc")
-        .run(bench_calculate_hash),
-);
+brunch::benches!(Bench::new("hash c alc").run(bench_calculate_hash),);

View file

@@ -8,13 +8,20 @@ use physis::common::Platform;
 fn reload_repos() {
     let game_dir = env::var("FFXIV_GAME_DIR").unwrap();

-    physis::gamedata::GameData::from_existing(Platform::Win32, format!("{}/game", game_dir).as_str()).unwrap();
+    physis::gamedata::GameData::from_existing(
+        Platform::Win32,
+        format!("{}/game", game_dir).as_str(),
+    )
+    .unwrap();
 }

 fn fetch_data() {
     let game_dir = env::var("FFXIV_GAME_DIR").unwrap();
-    let mut gamedata =
-        physis::gamedata::GameData::from_existing(Platform::Win32, format!("{}/game", game_dir).as_str()).unwrap();
+    let mut gamedata = physis::gamedata::GameData::from_existing(
+        Platform::Win32,
+        format!("{}/game", game_dir).as_str(),
+    )
+    .unwrap();

     gamedata.extract("exd/root.exl");
 }

View file

@@ -5,4 +5,3 @@ fn main() {
     #[cfg(feature = "game_install")]
     println!("cargo::rustc-link-lib=unshield");
 }
-

View file

@@ -171,9 +171,17 @@ mod tests {
     fn test_encrypt_decrypt() {
         let blowfish = Blowfish::new(b"test_case");

-        let expected_encrypted = [63, 149, 97, 229, 5, 35, 46, 128, 194, 107, 69, 132, 85, 202, 2, 126];
+        let expected_encrypted = [
+            63, 149, 97, 229, 5, 35, 46, 128, 194, 107, 69, 132, 85, 202, 2, 126,
+        ];

-        assert_eq!(blowfish.encrypt(b"hello, world!").unwrap(), expected_encrypted);
-        assert_eq!(String::from_utf8(blowfish.decrypt(&expected_encrypted).unwrap()).unwrap(), "hello, world!\0\0\0");
+        assert_eq!(
+            blowfish.encrypt(b"hello, world!").unwrap(),
+            expected_encrypted
+        );
+        assert_eq!(
+            String::from_utf8(blowfish.decrypt(&expected_encrypted).unwrap()).unwrap(),
+            "hello, world!\0\0\0"
+        );
     }
 }

View file

@@ -1,9 +1,9 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later

+use crate::{ByteBuffer, ByteSpan};
 use std::collections::HashMap;
 use std::io::{BufRead, BufReader, BufWriter, Cursor, Write};
-use crate::{ByteBuffer, ByteSpan};

 /// Represents a collection of keys, mapped to their values.
 #[derive(Debug)]
@@ -26,14 +26,14 @@ impl ConfigFile {
     pub fn from_existing(buffer: ByteSpan) -> Option<ConfigFile> {
         let mut cfg = ConfigFile {
             categories: Vec::new(),
-            settings: HashMap::new()
+            settings: HashMap::new(),
         };

         let cursor = Cursor::new(buffer);
         let reader = BufReader::new(cursor);

         let mut current_category: Option<String> = None;

         for line in reader.lines().map_while(Result::ok) {
             if !line.is_empty() && line != "\0" {
                 if line.contains('<') || line.contains('>') {
@@ -41,10 +41,17 @@ impl ConfigFile {
                     let name = &line[1..line.len() - 1];
                     current_category = Some(String::from(name));
                     cfg.categories.push(String::from(name));
-                } else if let (Some(category), Some((key, value))) = (&current_category, line.split_once('\t')) {
+                } else if let (Some(category), Some((key, value))) =
+                    (&current_category, line.split_once('\t'))
+                {
                     // Key-value pair
-                    cfg.settings.entry(category.clone()).or_insert_with(|| ConfigMap{ keys: Vec::new() });
-                    cfg.settings.get_mut(category)?.keys.push((key.to_string(), value.to_string()));
+                    cfg.settings
+                        .entry(category.clone())
+                        .or_insert_with(|| ConfigMap { keys: Vec::new() });
+                    cfg.settings
+                        .get_mut(category)?
+                        .keys
+                        .push((key.to_string(), value.to_string()));
                 }
             }
         }
@@ -61,11 +68,15 @@ impl ConfigFile {
         let mut writer = BufWriter::new(cursor);

         for category in &self.categories {
-            writer.write_all(format!("\r\n<{}>\r\n", category).as_ref()).ok()?;
+            writer
+                .write_all(format!("\r\n<{}>\r\n", category).as_ref())
+                .ok()?;

             if self.settings.contains_key(category) {
                 for key in &self.settings[category].keys {
-                    writer.write_all(format!("{}\t{}\r\n", key.0, key.1).as_ref()).ok()?;
+                    writer
+                        .write_all(format!("{}\t{}\r\n", key.0, key.1).as_ref())
+                        .ok()?;
                 }
             }
         }
@@ -73,7 +84,6 @@ impl ConfigFile {
             writer.write_all(b"\0").ok()?;
         }
-
         Some(buffer)
     }
@@ -107,7 +117,6 @@ impl ConfigFile {
         }
     }
 }
-
 #[cfg(test)]
 mod tests {
     use std::fs::read;

View file

@@ -3,10 +3,10 @@

 use std::io::{BufWriter, Cursor};

-use binrw::{BinRead, BinWrite};
-use binrw::binrw;
-use crate::{ByteBuffer, ByteSpan};
 use crate::common_file_operations::{read_bool_from, write_bool_as};
+use crate::{ByteBuffer, ByteSpan};
+use binrw::binrw;
+use binrw::{BinRead, BinWrite};

 use crate::race::{Gender, Race, Subrace};
@@ -20,7 +20,7 @@ fn convert_dat_race(x: u8) -> Race {
         6 => Race::AuRa,
         7 => Race::Hrothgar,
         8 => Race::Viera,
-        _ => Race::Hyur
+        _ => Race::Hyur,
     }
 }
@@ -33,7 +33,7 @@ fn convert_race_dat(race: &Race) -> u8 {
         Race::Roegadyn => 5,
         Race::AuRa => 6,
         Race::Hrothgar => 7,
-        Race::Viera => 8
+        Race::Viera => 8,
     }
 }
@@ -41,7 +41,7 @@ fn convert_dat_gender(x: u8) -> Gender {
     match x {
         0 => Gender::Male,
         1 => Gender::Female,
-        _ => Gender::Male
+        _ => Gender::Male,
     }
 }
@@ -62,7 +62,7 @@ fn convert_dat_subrace(x: u8) -> Subrace {
         6 => Subrace::Dunesfolk,
         7 => Subrace::Seeker,
         8 => Subrace::Keeper,
-        9 => Subrace:: SeaWolf,
+        9 => Subrace::SeaWolf,
         10 => Subrace::Hellsguard,
         11 => Subrace::Raen,
         12 => Subrace::Xaela,
@@ -70,7 +70,7 @@ fn convert_dat_subrace(x: u8) -> Subrace {
         14 => Subrace::Lost,
         15 => Subrace::Rava,
         16 => Subrace::Veena,
-        _ => Subrace::Midlander
+        _ => Subrace::Midlander,
     }
 }
@@ -84,14 +84,14 @@ fn convert_subrace_dat(subrace: &Subrace) -> u8 {
         Subrace::Dunesfolk => 6,
         Subrace::Seeker => 7,
         Subrace::Keeper => 8,
-        Subrace:: SeaWolf => 9,
+        Subrace::SeaWolf => 9,
         Subrace::Hellsguard => 10,
         Subrace::Raen => 11,
         Subrace::Xaela => 12,
         Subrace::Hellion => 13,
         Subrace::Lost => 14,
         Subrace::Rava => 15,
-        Subrace::Veena => 16
+        Subrace::Veena => 16,
     }
 }
@@ -101,7 +101,8 @@ fn convert_subrace_dat(subrace: &Subrace) -> u8 {
 #[repr(C)]
 #[br(magic = 0x2013FF14u32)]
 #[derive(Debug)]
-pub struct CharacterData { // version 4
+pub struct CharacterData {
+    // version 4
     /// The version of the character data, the only supported version right now is 4.
     pub version: u32,
@@ -197,7 +198,7 @@ pub struct CharacterData { // version 4
     /// The timestamp when the preset was created.
     #[br(pad_before = 1)]
-    pub timestamp: [u8; 4]
+    pub timestamp: [u8; 4],
 }

 impl CharacterData {
@@ -229,7 +230,7 @@ mod tests {
     use std::path::PathBuf;

     use super::*;

     #[test]
     fn test_invalid() {
         let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));

View file

@@ -3,9 +3,9 @@

 use std::io::{Cursor, Seek, SeekFrom};

-use binrw::BinRead;
-use binrw::binrw;
 use crate::ByteSpan;
+use binrw::binrw;
+use binrw::BinRead;

 #[binrw]
 #[br(little)]
@@ -44,13 +44,13 @@ pub struct RacialScalingParameters {
     /// Maximum bust size on the Y-axis
     pub bust_max_y: f32,
     /// Maximum bust size on the Z-axis
-    pub bust_max_z: f32
+    pub bust_max_z: f32,
 }

 #[derive(Debug)]
 pub struct CMP {
     /// The racial scaling parameters
-    pub parameters: Vec<RacialScalingParameters>
+    pub parameters: Vec<RacialScalingParameters>,
 }

 impl CMP {
@@ -69,9 +69,7 @@ impl CMP {
             parameters.push(RacialScalingParameters::read(&mut cursor).ok()?);
         }

-        Some(CMP {
-            parameters
-        })
+        Some(CMP { parameters })
     }
 }

View file

@@ -74,4 +74,4 @@ pub fn get_platform_string(id: &Platform) -> &'static str
         Platform::PS3 => "ps3",
         Platform::PS4 => "ps4", // TODO: confirm if this "ps4" is correct
     }
-}
\ No newline at end of file
+}

View file

@@ -6,7 +6,11 @@ pub(crate) fn read_bool_from<T: std::convert::From<u8> + std::cmp::PartialEq>(x:
 }

 pub(crate) fn write_bool_as<T: std::convert::From<u8>>(x: &bool) -> T {
-    if *x { T::from(1u8) } else { T::from(0u8) }
+    if *x {
+        T::from(1u8)
+    } else {
+        T::from(0u8)
+    }
 }

 #[cfg(test)]
@@ -28,4 +32,4 @@ mod tests {
         assert_eq!(write_bool_as::<u8>(&false), DATA[0]);
         assert_eq!(write_bool_as::<u8>(&true), DATA[1]);
     }
-}
\ No newline at end of file
+}

View file

@@ -1,18 +1,18 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later

-use std::io::{Cursor, Read, Seek, SeekFrom};
 use std::io::Write;
+use std::io::{Cursor, Read, Seek, SeekFrom};

-use binrw::{BinReaderExt, binrw};
+use crate::ByteBuffer;
 use binrw::BinRead;
 use binrw::BinWrite;
-use crate::ByteBuffer;
+use binrw::{binrw, BinReaderExt};
+use crate::common_file_operations::read_bool_from;

 #[cfg(feature = "visual_data")]
 use crate::model::ModelFileHeader;
 use crate::sqpack::read_data_block;
-use crate::common_file_operations::read_bool_from;

 #[binrw]
 #[brw(repr = i32)]
@@ -46,13 +46,23 @@ struct TextureLodBlock {
     block_count: u32,
 }

-pub trait AnyNumberType<'a>: BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static {}
+pub trait AnyNumberType<'a>:
+    BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static
+{
+}

-impl<'a, T> AnyNumberType<'a> for T where T: BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static {}
+impl<'a, T> AnyNumberType<'a> for T where
+    T: BinRead<Args<'a> = ()>
+        + BinWrite<Args<'a> = ()>
+        + std::ops::AddAssign
+        + Copy
+        + Default
+        + 'static
+{
+}

 #[derive(BinRead, BinWrite)]
-pub struct ModelMemorySizes<T: for <'a> AnyNumberType<'a>>
-{
+pub struct ModelMemorySizes<T: for<'a> AnyNumberType<'a>> {
     pub stack_size: T,
     pub runtime_size: T,
@@ -61,8 +71,7 @@ pub struct ModelMemorySizes<T: for <'a> AnyNumberType<'a>>
     pub index_buffer_size: [T; 3],
 }

-impl<T: for<'a> AnyNumberType<'a>> ModelMemorySizes<T>
-{
+impl<T: for<'a> AnyNumberType<'a>> ModelMemorySizes<T> {
     pub fn total(&self) -> T {
         let mut total: T = T::default();
@@ -214,7 +223,7 @@ impl DatFile {
                 {
                     panic!("Tried to extract a model without the visual_data feature enabled!")
                 }
-            },
+            }
             FileType::Texture => self.read_texture_file(offset, &file_info),
         }
     }

View file

@@ -4,9 +4,9 @@
 use std::collections::HashMap;
 use std::io::{Cursor, Seek, SeekFrom};

-use binrw::{BinRead, BinReaderExt};
-use binrw::binrw;
 use crate::ByteSpan;
+use binrw::binrw;
+use binrw::{BinRead, BinReaderExt};

 // Based off of https://github.com/Lotlab/ffxiv-vulgar-words-reader/
 // Credit goes to Jim Kirisame for documenting this format
@@ -19,7 +19,7 @@ pub struct EntryItem {
     flag: u32,
     sibling: u32,
     child: u32,
-    offset: u32
+    offset: u32,
 }

 #[binrw]
@@ -64,7 +64,7 @@ struct DictionaryHeader {
 pub struct Dictionary {
     header: DictionaryHeader,

-    pub words: Vec<String>
+    pub words: Vec<String>,
 }

 impl Dictionary {
@@ -113,7 +113,7 @@ impl Dictionary {
         let mut dict = Dictionary {
             header: dict,

-            words: Vec::new()
+            words: Vec::new(),
         };

         // TODO: lol
@@ -158,7 +158,7 @@ impl Dictionary {
             (((*new_val as u32) << 8) + lower) as i32
         } else {
             0
-        }
+        };
     }

     fn dump_dict_node(&self, vec: &mut Vec<String>, entry_id: i32, prev: String) {
@@ -238,4 +238,4 @@ mod tests {
         // Feeding it invalid data should not panic
         Dictionary::from_existing(&read(d).unwrap());
     }
-}
\ No newline at end of file
+}

View file

@@ -1,7 +1,7 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later

-use crate::race::{Gender, get_race_id, Race, Subrace};
+use crate::race::{get_race_id, Gender, Race, Subrace};

 #[repr(u8)]
 #[derive(Debug, PartialEq, Eq)]
@@ -100,7 +100,7 @@ pub enum CharacterCategory {
     Hair,
     Face,
     Tail,
-    Ear
+    Ear,
 }

 pub fn get_character_category_path(category: CharacterCategory) -> &'static str {
@@ -109,7 +109,7 @@ pub fn get_character_category_path(category: CharacterCategory) -> &'static str
         CharacterCategory::Hair => "hair",
         CharacterCategory::Face => "face",
         CharacterCategory::Tail => "tail",
-        CharacterCategory::Ear => "zear"
+        CharacterCategory::Ear => "zear",
     }
 }
@@ -119,7 +119,7 @@ pub fn get_character_category_abbreviation(category: CharacterCategory) -> &'sta
         CharacterCategory::Hair => "hir",
         CharacterCategory::Face => "fac",
         CharacterCategory::Tail => "til",
-        CharacterCategory::Ear => "ear"
+        CharacterCategory::Ear => "ear",
     }
 }
@@ -129,7 +129,7 @@ pub fn get_character_category_prefix(category: CharacterCategory) -> &'static st
         CharacterCategory::Hair => "h",
         CharacterCategory::Face => "f",
         CharacterCategory::Tail => "t",
-        CharacterCategory::Ear => "e"
+        CharacterCategory::Ear => "e",
     }
 }
@@ -139,7 +139,7 @@ pub fn build_character_path(
     body_ver: i32,
     race: Race,
     subrace: Subrace,
-    gender: Gender
+    gender: Gender,
 ) -> String {
     let category_path = get_character_category_path(category);
     let race_id = get_race_id(race, subrace, gender).unwrap();

View file

@@ -3,8 +3,8 @@

 use std::io::{Cursor, Seek, SeekFrom};

-use binrw::{BinRead, Endian};
 use binrw::binrw;
+use binrw::{BinRead, Endian};

 use crate::common::Language;
 use crate::exh::{ColumnDataType, ExcelColumnDefinition, ExcelDataPagination, EXH};
@@ -81,7 +81,7 @@ impl EXD {
             let row_header = ExcelDataRowHeader::read(&mut cursor).ok()?;

-            let header_offset = offset.offset + 6;// std::mem::size_of::<ExcelDataRowHeader>() as u32;
+            let header_offset = offset.offset + 6; // std::mem::size_of::<ExcelDataRowHeader>() as u32;

             let mut read_row = |row_offset: u32| -> Option<ExcelRow> {
                 let mut subrow = ExcelRow {
@@ -93,9 +93,9 @@ impl EXD {
                         .seek(SeekFrom::Start((row_offset + column.offset as u32).into()))
                         .ok()?;

-                    subrow
-                        .data
-                        .push(Self::read_column(&mut cursor, exh, row_offset, column).unwrap());
+                    subrow.data.push(
+                        Self::read_column(&mut cursor, exh, row_offset, column).unwrap(),
+                    );
                 }

                 Some(subrow)
@@ -117,14 +117,9 @@ impl EXD {
         Some(exd)
     }

-    fn read_data_raw<Z: BinRead<Args<'static> = ()>>(cursor: &mut Cursor<ByteSpan>) -> Option<Z>
-    {
-        Z::read_options(
-            cursor,
-            Endian::Big,
-            (),
-        ).ok()
+    fn read_data_raw<Z: BinRead<Args<'static> = ()>>(cursor: &mut Cursor<ByteSpan>) -> Option<Z> {
+        Z::read_options(cursor, Endian::Big, ()).ok()
     }

     fn read_column(
@@ -214,9 +209,9 @@ impl EXD {
 #[cfg(test)]
 mod tests {
+    use crate::exh::EXHHeader;
     use std::fs::read;
     use std::path::PathBuf;
-    use crate::exh::EXHHeader;

     use super::*;

View file

@@ -20,7 +20,7 @@ fn find_needle(installer_file: &[u8], needle: &str) -> Option<String> {
     let mut position = installer_file
         .windows(bytes.len())
         .position(|window| window == bytes)?;

     let parse_char_at_position = |position: usize| {
         let upper = installer_file[position];
         let lower = installer_file[position + 1];
@@ -60,4 +60,4 @@ pub fn extract_frontier_url(launcher_path: &str) -> Option<String> {
     }

     None
-}
\ No newline at end of file
+}

View file

@@ -5,8 +5,8 @@

 use std::io::Cursor;

-use binrw::BinRead;
 use binrw::binrw;
+use binrw::BinRead;

 use crate::common::Language;
 use crate::ByteSpan;
@@ -107,4 +107,3 @@ mod tests {
         EXH::from_existing(&read(d).unwrap());
     }
 }
-

View file

@@ -1,8 +1,8 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later

-use std::io::{BufRead, BufReader, BufWriter, Cursor, Write};
 use crate::{ByteBuffer, ByteSpan};
+use std::io::{BufRead, BufReader, BufWriter, Cursor, Write};

 /// Represents an Excel List.
 pub struct EXL {
@@ -47,7 +47,9 @@ impl EXL {
         let cursor = Cursor::new(&mut buffer);
         let mut writer = BufWriter::new(cursor);

-        writer.write_all(format!("EXLT,{}", self.version).as_ref()).ok()?;
+        writer
+            .write_all(format!("EXLT,{}", self.version).as_ref())
+            .ok()?;

         for (key, value) in &self.entries {
             writer.write_all(format!("\n{key},{value}").as_ref()).ok()?;
@@ -120,12 +122,13 @@ mod tests {
         let exl = read(d).unwrap();

         let mut out = std::io::stdout();
-        out.write_all(&existing_exl.write_to_buffer().unwrap()).unwrap();
+        out.write_all(&existing_exl.write_to_buffer().unwrap())
+            .unwrap();
         out.flush().unwrap();

         assert_eq!(existing_exl.write_to_buffer().unwrap(), exl);
     }

     #[test]
     fn test_invalid() {
         let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));

View file

@@ -5,9 +5,9 @@ use std::fs::read;
 use std::io::Cursor;
 use std::path::Path;

-use binrw::{BinRead, BinWrite};
-use binrw::binrw;
 use crate::{ByteBuffer, ByteSpan};
+use binrw::binrw;
+use binrw::{BinRead, BinWrite};

 use crate::sha1::Sha1;
@@ -74,7 +74,7 @@ impl FileInfo {
     /// Creates a new FileInfo structure from a list of filenames. These filenames must be present in
     /// the current working directory in order to be read properly, since it also generates SHA1
     /// hashes.
-    /// 
+    ///
     /// These paths are converted to just their filenames.
     ///
     /// The new FileInfo structure can then be serialized back into retail-compatible form.
@@ -135,14 +135,11 @@ mod tests {
         d3.push("resources/tests");
         d3.push("test.exl");

-        let testing_fiin = FileInfo::new(&[
-            d2.to_str().unwrap(),
-            d3.to_str().unwrap()
-        ]).unwrap();
+        let testing_fiin = FileInfo::new(&[d2.to_str().unwrap(), d3.to_str().unwrap()]).unwrap();

         assert_eq!(*valid_fiin, testing_fiin.write_to_buffer().unwrap());
     }

     #[test]
     fn test_invalid() {
         let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));

View file

@@ -8,15 +8,15 @@ use std::path::PathBuf;

 use tracing::{debug, warn};

-use crate::common::{Language, Platform, read_version};
+use crate::common::{read_version, Language, Platform};
 use crate::dat::DatFile;
 use crate::exd::EXD;
 use crate::exh::EXH;
 use crate::exl::EXL;
 use crate::index::{Index2File, IndexFile, IndexHashBitfield};
+use crate::ByteBuffer;
 use crate::patch::{apply_patch, PatchError};
-use crate::repository::{Category, Repository, string_to_category};
-use crate::ByteBuffer;
+use crate::repository::{string_to_category, Category, Repository};

 /// Framework for operating on game data.
 pub struct GameData {
@@ -27,7 +27,7 @@ pub struct GameData {
     pub repositories: Vec<Repository>,

     index_files: HashMap<String, IndexFile>,
-    index2_files: HashMap<String, Index2File>
+    index2_files: HashMap<String, Index2File>,
 }

 fn is_valid(path: &str) -> bool {
@@ -79,7 +79,7 @@ impl GameData {
                     game_directory: String::from(directory),
                     repositories: vec![],
                     index_files: HashMap::new(),
-                    index2_files: HashMap::new()
+                    index2_files: HashMap::new(),
                 };
                 data.reload_repositories(platform);
                 Some(data)
@@ -90,14 +90,16 @@ impl GameData {
             }
         }
     }

     fn reload_repositories(&mut self, platform: Platform) {
         self.repositories.clear();

         let mut d = PathBuf::from(self.game_directory.as_str());

         // add initial ffxiv directory
-        if let Some(base_repository) = Repository::from_existing_base(platform.clone(), d.to_str().unwrap()) {
+        if let Some(base_repository) =
+            Repository::from_existing_base(platform.clone(), d.to_str().unwrap())
+        {
             self.repositories.push(base_repository);
         }
@@ -105,15 +107,18 @@ impl GameData {
         d.push("sqpack");

         if let Ok(repository_paths) = fs::read_dir(d.as_path()) {
-            let repository_paths : ReadDir = repository_paths;
-            let repository_paths : Vec<DirEntry> = repository_paths
+            let repository_paths: ReadDir = repository_paths;
+            let repository_paths: Vec<DirEntry> = repository_paths
                 .filter_map(Result::ok)
                 .filter(|s| s.file_type().unwrap().is_dir())
                 .collect();

             for repository_path in repository_paths {
-                if let Some(expansion_repository) = Repository::from_existing_expansion(platform.clone(), repository_path.path().to_str().unwrap()) {
+                if let Some(expansion_repository) = Repository::from_existing_expansion(
+                    platform.clone(),
+                    repository_path.path().to_str().unwrap(),
+                ) {
                     self.repositories.push(expansion_repository);
                 }
             }
@@ -183,7 +188,7 @@ impl GameData {
     /// file.write(data.as_slice()).unwrap();
     /// ```
     pub fn extract(&mut self, path: &str) -> Option<ByteBuffer> {
-        debug!(file=path, "Extracting file");
+        debug!(file = path, "Extracting file");

         let slice = self.find_entry(path);
         match slice {
@@ -223,8 +228,8 @@ impl GameData {
             &repository.name,
             &repository.index_filename(category),
         ]
-            .iter()
-            .collect();
+        .iter()
+        .collect();

         let index2_path: PathBuf = [
             &self.game_directory,
@@ -232,10 +237,13 @@ impl GameData {
             &repository.name,
             &repository.index2_filename(category),
         ]
-            .iter()
-            .collect();
+        .iter()
+        .collect();

-        (index_path.into_os_string().into_string().unwrap(), index2_path.into_os_string().into_string().unwrap())
+        (
+            index_path.into_os_string().into_string().unwrap(),
+            index2_path.into_os_string().into_string().unwrap(),
+        )
     }

     /// Read an excel sheet by name (e.g. "Achievement")
@@ -387,7 +395,7 @@ impl GameData {
         Ok(())
     }

     fn cache_index_file(&mut self, filenames: (&str, &str)) {
         if !self.index_files.contains_key(filenames.0) {
             if let Some(index_file) = IndexFile::from_existing(filenames.0) {
                 self.index_files.insert(filenames.0.to_string(), index_file);
@@ -396,7 +404,8 @@ impl GameData {

         if !self.index2_files.contains_key(filenames.1) {
             if let Some(index_file) = Index2File::from_existing(filenames.1) {
-                self.index2_files.insert(filenames.1.to_string(), index_file);
+                self.index2_files
+                    .insert(filenames.1.to_string(), index_file);
             }
         }
     }
@@ -411,7 +420,11 @@ impl GameData {
     fn find_entry(&mut self, path: &str) -> Option<IndexHashBitfield> {
         let index_path = self.get_index_filenames(path);

-        debug!("Trying index files {index_path}, {index2_path}", index_path=index_path.0, index2_path=index_path.1);
+        debug!(
+            "Trying index files {index_path}, {index2_path}",
+            index_path = index_path.0,
+            index2_path = index_path.1
+        );

         self.cache_index_file((&index_path.0, &index_path.1));

View file

@@ -1,11 +1,11 @@
 // SPDX-FileCopyrightText: 2020 Inseok Lee
 // SPDX-License-Identifier: MIT

-use core::cell::RefCell;
-use std::sync::Arc;
-use crate::havok::HavokAnimation;
 use crate::havok::object::HavokObject;
 use crate::havok::spline_compressed_animation::HavokSplineCompressedAnimation;
+use crate::havok::HavokAnimation;
+use core::cell::RefCell;
+use std::sync::Arc;

 #[repr(u8)]
 pub enum HavokAnimationBlendHint {
@@ -33,14 +33,20 @@ impl HavokAnimationBinding {
     pub fn new(object: Arc<RefCell<HavokObject>>) -> Self {
         let root = object.borrow();

-        let raw_transform_track_to_bone_indices = root.get("transformTrackToBoneIndices").as_array();
-        let transform_track_to_bone_indices = raw_transform_track_to_bone_indices.iter().map(|x| x.as_int() as u16).collect::<Vec<_>>();
+        let raw_transform_track_to_bone_indices =
+            root.get("transformTrackToBoneIndices").as_array();
+        let transform_track_to_bone_indices = raw_transform_track_to_bone_indices
+            .iter()
+            .map(|x| x.as_int() as u16)
+            .collect::<Vec<_>>();
         let blend_hint = HavokAnimationBlendHint::from_raw(root.get("blendHint").as_int() as u8);

         let raw_animation = root.get("animation").as_object();
         let animation = match &*raw_animation.borrow().object_type.name {
-            "hkaSplineCompressedAnimation" => Box::new(HavokSplineCompressedAnimation::new(raw_animation.clone())),
+            "hkaSplineCompressedAnimation" => {
+                Box::new(HavokSplineCompressedAnimation::new(raw_animation.clone()))
+            }
             _ => panic!(),
         };

View file

@@ -1,11 +1,11 @@
 // SPDX-FileCopyrightText: 2020 Inseok Lee
 // SPDX-License-Identifier: MIT

-use std::cell::RefCell;
-use std::sync::Arc;
 use crate::havok::animation_binding::HavokAnimationBinding;
 use crate::havok::object::HavokObject;
 use crate::havok::skeleton::HavokSkeleton;
+use std::cell::RefCell;
+use std::sync::Arc;

 pub struct HavokAnimationContainer {
     pub skeletons: Vec<HavokSkeleton>,
@@ -17,11 +17,20 @@ impl HavokAnimationContainer {
         let root = object.borrow();

         let raw_skeletons = root.get("skeletons").as_array();
-        let skeletons = raw_skeletons.iter().map(|x| HavokSkeleton::new(x.as_object())).collect::<Vec<_>>();
+        let skeletons = raw_skeletons
+            .iter()
+            .map(|x| HavokSkeleton::new(x.as_object()))
+            .collect::<Vec<_>>();

         let raw_bindings = root.get("bindings").as_array();
-        let bindings = raw_bindings.iter().map(|x| HavokAnimationBinding::new(x.as_object())).collect::<Vec<_>>();
+        let bindings = raw_bindings
+            .iter()
+            .map(|x| HavokAnimationBinding::new(x.as_object()))
+            .collect::<Vec<_>>();

-        Self { skeletons, bindings }
+        Self {
+            skeletons,
+            bindings,
+        }
     }
 }

View file

@@ -3,12 +3,15 @@

 #![allow(clippy::arc_with_non_send_sync)]

+use crate::havok::byte_reader::ByteReader;
+use crate::havok::object::{
+    HavokInteger, HavokObject, HavokObjectType, HavokObjectTypeMember, HavokRootObject, HavokValue,
+    HavokValueType,
+};
+use crate::havok::slice_ext::SliceByteOrderExt;
 use core::cell::RefCell;
 use std::collections::HashMap;
 use std::sync::Arc;
-use crate::havok::byte_reader::ByteReader;
-use crate::havok::object::{HavokInteger, HavokObject, HavokObjectType, HavokObjectTypeMember, HavokRootObject, HavokValue, HavokValueType};
-use crate::havok::slice_ext::SliceByteOrderExt;

 #[repr(i8)]
 enum HavokTagType {
@@ -59,7 +62,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
     fn new(reader: ByteReader<'a>) -> Self {
         let file_version = 0;
         let remembered_strings = vec![Arc::from("string"), Arc::from("")];
-        let remembered_types = vec![Arc::new(HavokObjectType::new(Arc::from("object"), None, Vec::new()))];
+        let remembered_types = vec![Arc::new(HavokObjectType::new(
+            Arc::from("object"),
+            None,
+            Vec::new(),
+        ))];
         let remembered_objects = Vec::new();
         let objects = Vec::new();
@@ -87,7 +94,10 @@ impl<'a> HavokBinaryTagFileReader<'a> {
                 self.file_version = self.read_packed_int() as u8;
                 assert_eq!(self.file_version, 3, "Unimplemented version");
                 self.remembered_objects
-                    .push(Arc::new(RefCell::new(HavokObject::new(self.remembered_types[0].clone(), HashMap::new()))))
+                    .push(Arc::new(RefCell::new(HavokObject::new(
+                        self.remembered_types[0].clone(),
+                        HashMap::new(),
+                    ))))
             }
             HavokTagType::Type => {
                 let object_type = self.read_type();
@@ -152,7 +162,9 @@ impl<'a> HavokBinaryTagFileReader<'a> {
             HavokValueType::INT => HavokValue::Integer(self.read_packed_int()),
             HavokValueType::REAL => HavokValue::Real(self.reader.read_f32_le()),
             HavokValueType::STRING => HavokValue::String(self.read_string()),
-            HavokValueType::OBJECT => HavokValue::ObjectReference(self.read_packed_int() as usize),
+            HavokValueType::OBJECT => {
+                HavokValue::ObjectReference(self.read_packed_int() as usize)
+            }
             _ => panic!("unimplemented {}", member.type_.bits()),
         }
     }
@@ -161,14 +173,19 @@ impl<'a> HavokBinaryTagFileReader<'a> {
     fn read_array(&mut self, member: &HavokObjectTypeMember, array_len: usize) -> Vec<HavokValue> {
         let base_type = member.type_.base_type();
         match base_type {
-            HavokValueType::STRING => (0..array_len).map(|_| HavokValue::String(self.read_string())).collect::<Vec<_>>(),
+            HavokValueType::STRING => (0..array_len)
+                .map(|_| HavokValue::String(self.read_string()))
+                .collect::<Vec<_>>(),
             HavokValueType::STRUCT => {
                 let target_type = self.find_type(member.class_name.as_ref().unwrap());
                 let data_existence = self.read_bit_field(target_type.member_count());

                 let mut result_objects = Vec::new();
                 for _ in 0..array_len {
-                    let object = Arc::new(RefCell::new(HavokObject::new(target_type.clone(), HashMap::new())));
+                    let object = Arc::new(RefCell::new(HavokObject::new(
+                        target_type.clone(),
+                        HashMap::new(),
+                    )));
                     result_objects.push(object.clone());
                     self.objects.push(object);
@@ -188,7 +205,10 @@ impl<'a> HavokBinaryTagFileReader<'a> {
                     }
                 }

-                result_objects.into_iter().map(HavokValue::Object).collect::<Vec<_>>()
+                result_objects
+                    .into_iter()
+                    .map(HavokValue::Object)
+                    .collect::<Vec<_>>()
             }
             HavokValueType::OBJECT => (0..array_len)
                 .map(|_| {
@@ -204,16 +224,33 @@ impl<'a> HavokBinaryTagFileReader<'a> {
                 if self.file_version >= 3 {
                     self.read_packed_int(); // type?
                 }
-                (0..array_len).map(|_| HavokValue::Integer(self.read_packed_int())).collect::<Vec<_>>()
-            }
-            HavokValueType::REAL => (0..array_len).map(|_| HavokValue::Real(self.reader.read_f32_le())).collect::<Vec<_>>(),
-            HavokValueType::VEC4 | HavokValueType::VEC8 | HavokValueType::VEC12 | HavokValueType::VEC16 => {
-                let vec_size = member.type_.base_type().vec_size() as usize;
-                (0..array_len)
-                    .map(|_| HavokValue::Vec((0..vec_size).map(|_| self.reader.read_f32_le()).collect::<Vec<_>>()))
-                    .collect::<Vec<_>>()
-            }
-            _ => panic!("unimplemented {} {}", member.type_.bits(), member.type_.base_type().bits()),
+                (0..array_len)
+                    .map(|_| HavokValue::Integer(self.read_packed_int()))
+                    .collect::<Vec<_>>()
+            }
+            HavokValueType::REAL => (0..array_len)
+                .map(|_| HavokValue::Real(self.reader.read_f32_le()))
+                .collect::<Vec<_>>(),
+            HavokValueType::VEC4
+            | HavokValueType::VEC8
+            | HavokValueType::VEC12
+            | HavokValueType::VEC16 => {
+                let vec_size = member.type_.base_type().vec_size() as usize;
+                (0..array_len)
+                    .map(|_| {
+                        HavokValue::Vec(
+                            (0..vec_size)
+                                .map(|_| self.reader.read_f32_le())
+                                .collect::<Vec<_>>(),
+                        )
+                    })
+                    .collect::<Vec<_>>()
+            }
+            _ => panic!(
+                "unimplemented {} {}",
+                member.type_.bits(),
+                member.type_.base_type().bits()
+            ),
         }
     }
@@ -229,8 +266,14 @@ impl<'a> HavokBinaryTagFileReader<'a> {
         let member_name = self.read_string();
         let type_ = HavokValueType::from_bits(self.read_packed_int() as u32).unwrap();
-        let tuple_size = if type_.is_tuple() { self.read_packed_int() } else { 0 };
-        let type_name = if type_.base_type() == HavokValueType::OBJECT || type_.base_type() == HavokValueType::STRUCT {
+        let tuple_size = if type_.is_tuple() {
+            self.read_packed_int()
+        } else {
+            0
+        };
+        let type_name = if type_.base_type() == HavokValueType::OBJECT
+            || type_.base_type() == HavokValueType::STRUCT
+        {
             Some(self.read_string())
         } else {
             None
@@ -249,7 +292,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
             return self.remembered_strings[-length as usize].clone();
         }

-        let result = Arc::from(std::str::from_utf8(self.reader.read_bytes(length as usize)).unwrap().to_owned());
+        let result = Arc::from(
+            std::str::from_utf8(self.reader.read_bytes(length as usize))
+                .unwrap()
+                .to_owned(),
+        );
         self.remembered_strings.push(Arc::clone(&result));

         result
@@ -296,7 +343,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
     }

     fn find_type(&self, type_name: &str) -> Arc<HavokObjectType> {
-        self.remembered_types.iter().find(|&x| &*x.name == type_name).unwrap().clone()
+        self.remembered_types
+            .iter()
+            .find(|&x| &*x.name == type_name)
+            .unwrap()
+            .clone()
     }

     fn fill_object_reference(&self, object: &mut HavokObject) {
@@ -327,7 +378,11 @@ impl<'a> HavokBinaryTagFileReader<'a> {
     fn default_value(type_: HavokValueType) -> HavokValue {
         if type_.is_vec() {
-            HavokValue::Array((0..type_.vec_size()).map(|_| Self::default_value(type_.base_type())).collect::<Vec<_>>())
+            HavokValue::Array(
+                (0..type_.vec_size())
+                    .map(|_| Self::default_value(type_.base_type()))
+                    .collect::<Vec<_>>(),
+            )
         } else if type_.is_array() || type_.is_tuple() {
             HavokValue::Array(Vec::new())
         } else {

View file

@@ -10,9 +10,9 @@ mod binary_tag_file_reader;
 mod byte_reader;
 mod object;
 mod skeleton;
+mod slice_ext;
 mod spline_compressed_animation;
 mod transform;
-mod slice_ext;

 pub use animation::HavokAnimation;
 pub use animation_container::HavokAnimationContainer;

View file

@@ -170,7 +170,12 @@ pub struct HavokObjectTypeMember {
 }

 impl HavokObjectTypeMember {
-    pub fn new(name: Arc<str>, type_: HavokValueType, tuple_size: u32, type_name: Option<Arc<str>>) -> Self {
+    pub fn new(
+        name: Arc<str>,
+        type_: HavokValueType,
+        tuple_size: u32,
+        type_name: Option<Arc<str>>,
+    ) -> Self {
         Self {
             name,
             type_,
@@ -187,20 +192,35 @@ pub struct HavokObjectType {
 }

 impl HavokObjectType {
-    pub fn new(name: Arc<str>, parent: Option<Arc<HavokObjectType>>, members: Vec<HavokObjectTypeMember>) -> Self {
-        Self { name, parent, members }
+    pub fn new(
+        name: Arc<str>,
+        parent: Option<Arc<HavokObjectType>>,
+        members: Vec<HavokObjectTypeMember>,
+    ) -> Self {
+        Self {
+            name,
+            parent,
+            members,
+        }
     }

     pub fn members(&self) -> Vec<&HavokObjectTypeMember> {
         if let Some(x) = &self.parent {
-            x.members().into_iter().chain(self.members.iter()).collect::<Vec<_>>()
+            x.members()
+                .into_iter()
+                .chain(self.members.iter())
+                .collect::<Vec<_>>()
         } else {
             self.members.iter().collect::<Vec<_>>()
         }
     }

     pub fn member_count(&self) -> usize {
-        (if let Some(x) = &self.parent { x.members.len() } else { 0 }) + self.members.len()
+        (if let Some(x) = &self.parent {
+            x.members.len()
+        } else {
+            0
+        }) + self.members.len()
     }
 }
@@ -219,7 +239,12 @@ impl HavokObject {
     }

     pub fn get(&self, member_name: &str) -> &HavokValue {
-        let member_index = self.object_type.members().iter().position(|&x| &*x.name == member_name).unwrap();
+        let member_index = self
+            .object_type
+            .members()
+            .iter()
+            .position(|&x| &*x.name == member_name)
+            .unwrap();

         self.data.get(&member_index).unwrap()
     }

View file

@@ -1,10 +1,10 @@
 // SPDX-FileCopyrightText: 2020 Inseok Lee
 // SPDX-License-Identifier: MIT

-use core::cell::RefCell;
-use std::sync::Arc;
 use crate::havok::object::HavokObject;
 use crate::havok::transform::HavokTransform;
+use core::cell::RefCell;
+use std::sync::Arc;

 #[derive(Debug)]
 pub struct HavokSkeleton {
@@ -28,10 +28,16 @@ impl HavokSkeleton {
             .collect::<Vec<_>>();

         let raw_parent_indices = root.get("parentIndices").as_array();
-        let parent_indices = raw_parent_indices.iter().map(|x| x.as_int() as usize).collect::<Vec<_>>();
+        let parent_indices = raw_parent_indices
+            .iter()
+            .map(|x| x.as_int() as usize)
+            .collect::<Vec<_>>();

         let raw_reference_pose = root.get("referencePose").as_array();
-        let reference_pose = raw_reference_pose.iter().map(|x| HavokTransform::new(x.as_vec())).collect::<Vec<_>>();
+        let reference_pose = raw_reference_pose
+            .iter()
+            .map(|x| HavokTransform::new(x.as_vec()))
+            .collect::<Vec<_>>();

         Self {
             bone_names,

View file

@@ -5,22 +5,22 @@ use core::convert::TryInto;

 pub trait SliceByteOrderExt {
     fn to_int_be<T>(&self) -> T
-        where
-            T: Integer;
+    where
+        T: Integer;

     fn to_int_le<T>(&self) -> T
-        where
-            T: Integer;
+    where
+        T: Integer;

     fn to_float_be<T>(&self) -> T
-        where
-            T: Float;
+    where
+        T: Float;
 }

 impl SliceByteOrderExt for &[u8] {
     fn to_int_be<T>(&self) -> T
-        where
-            T: Integer,
+    where
+        T: Integer,
     {
         let sliced = &self[..core::mem::size_of::<T>()];
@@ -28,8 +28,8 @@ impl SliceByteOrderExt for &[u8] {
     }

     fn to_int_le<T>(&self) -> T
-        where
-            T: Integer,
+    where
+        T: Integer,
     {
         let sliced = &self[..core::mem::size_of::<T>()];
@@ -37,8 +37,8 @@ impl SliceByteOrderExt for &[u8] {
     }

     fn to_float_be<T>(&self) -> T
-        where
-            T: Float,
+    where
+        T: Float,
     {
         let sliced = &self[..core::mem::size_of::<T>()];
@@ -119,4 +119,4 @@ impl Float for f32 {
     fn from_be_bytes(bytes: &[u8]) -> Self {
         Self::from_be_bytes(bytes.try_into().unwrap())
     }
-}
\ No newline at end of file
+}

View file

@@ -1,13 +1,13 @@
 // SPDX-FileCopyrightText: 2020 Inseok Lee
 // SPDX-License-Identifier: MIT

+use crate::havok::byte_reader::ByteReader;
+use crate::havok::object::HavokObject;
+use crate::havok::transform::HavokTransform;
+use crate::havok::HavokAnimation;
 use core::{cell::RefCell, cmp};
 use std::f32;
 use std::sync::Arc;
-use crate::havok::byte_reader::ByteReader;
-use crate::havok::HavokAnimation;
-use crate::havok::object::HavokObject;
-use crate::havok::transform::HavokTransform;

 #[repr(u8)]
 #[allow(clippy::upper_case_acronyms)]
@@ -107,10 +107,16 @@ impl HavokSplineCompressedAnimation {
         let frame_duration = root.get("frameDuration").as_real();

         let raw_block_offsets = root.get("blockOffsets").as_array();
-        let block_offsets = raw_block_offsets.iter().map(|x| x.as_int() as u32).collect::<Vec<_>>();
+        let block_offsets = raw_block_offsets
+            .iter()
+            .map(|x| x.as_int() as u32)
+            .collect::<Vec<_>>();

         let raw_data = root.get("data").as_array();
-        let data = raw_data.iter().map(|x| x.as_int() as u8).collect::<Vec<_>>();
+        let data = raw_data
+            .iter()
+            .map(|x| x.as_int() as u8)
+            .collect::<Vec<_>>();

         Self {
             duration,
@@ -136,7 +142,8 @@ impl HavokSplineCompressedAnimation {
         let real_frame = (frame - first_frame_of_block) as f32 + delta;
         let block_time_out = real_frame * self.frame_duration;

-        let quantized_time_out = ((block_time_out * self.block_inverse_duration) * (self.max_frames_per_block as f32 - 1.)) as u8;
+        let quantized_time_out = ((block_time_out * self.block_inverse_duration)
+            * (self.max_frames_per_block as f32 - 1.)) as u8;

         (block_out, block_time_out, quantized_time_out)
     }
@@ -164,7 +171,11 @@ impl HavokSplineCompressedAnimation {
         mid
     }

-    fn read_knots(data: &mut ByteReader, u: u8, frame_duration: f32) -> (usize, usize, Vec<f32>, usize) {
+    fn read_knots(
+        data: &mut ByteReader,
+        u: u8,
+        frame_duration: f32,
+    ) -> (usize, usize, Vec<f32>, usize) {
         let n = data.read_u16_le() as usize;
         let p = data.read() as usize;
         let raw = data.raw();
@@ -199,7 +210,12 @@ impl HavokSplineCompressedAnimation {
         let phi = b * c * (f32::consts::PI / 2.);

         // spherical coordinate to cartesian coordinate
-        let mut result = [f32::sin(theta) * f32::cos(phi), f32::sin(theta) * f32::sin(phi), f32::cos(theta), 1.];
+        let mut result = [
+            f32::sin(theta) * f32::cos(phi),
+            f32::sin(theta) * f32::sin(phi),
+            f32::cos(theta),
+            1.,
+        ];
         for item in result.iter_mut() {
             *item *= f32::sqrt(1. - value * value);
         }
@@ -226,9 +242,18 @@ impl HavokSplineCompressedAnimation {
         let mut buf = [0u32; 4];

         unsafe {
-            let m = core::slice::from_raw_parts(permute.as_ptr() as *const u8, permute.len() * core::mem::size_of::<u32>());
-            let a = core::slice::from_raw_parts(data.as_ptr() as *const u8, data.len() * core::mem::size_of::<u32>());
-            let r = core::slice::from_raw_parts_mut(buf.as_mut_ptr() as *mut u8, buf.len() * core::mem::size_of::<u32>());
+            let m = core::slice::from_raw_parts(
+                permute.as_ptr() as *const u8,
+                permute.len() * core::mem::size_of::<u32>(),
+            );
+            let a = core::slice::from_raw_parts(
+                data.as_ptr() as *const u8,
+                data.len() * core::mem::size_of::<u32>(),
+            );
+            let r = core::slice::from_raw_parts_mut(
+                buf.as_mut_ptr() as *mut u8,
+                buf.len() * core::mem::size_of::<u32>(),
+            );

             for i in 0..16 {
                 r[i] = a[m[i] as usize];
             }
@@ -310,7 +335,13 @@ impl HavokSplineCompressedAnimation {
         }
     }

-    fn read_packed_quaternions(quantization: RotationQuantization, data: &mut ByteReader, n: usize, p: usize, span: usize) -> Vec<[f32; 4]> {
+    fn read_packed_quaternions(
+        quantization: RotationQuantization,
+        data: &mut ByteReader,
+        n: usize,
+        p: usize,
+        span: usize,
+    ) -> Vec<[f32; 4]> {
         data.align(quantization.align());

         let bytes_per_quaternion = quantization.bytes_per_quaternion();
@@ -409,7 +440,9 @@ impl HavokSplineCompressedAnimation {
         &base[offset..]
     }

-    fn unpack_quantization_types(packed_quantization_types: u8) -> (ScalarQuantization, RotationQuantization, ScalarQuantization) {
+    fn unpack_quantization_types(
+        packed_quantization_types: u8,
+    ) -> (ScalarQuantization, RotationQuantization, ScalarQuantization) {
         let translation = ScalarQuantization::from_raw(packed_quantization_types & 0x03);
         let rotation = RotationQuantization::from_raw((packed_quantization_types >> 2) & 0x0F);
         let scale = ScalarQuantization::from_raw((packed_quantization_types >> 6) & 0x03);
@@ -417,9 +450,24 @@ impl HavokSplineCompressedAnimation {
         (translation, rotation, scale)
     }

-    fn sample_translation(&self, quantization: ScalarQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
+    fn sample_translation(
+        &self,
+        quantization: ScalarQuantization,
+        time: f32,
+        quantized_time: u8,
+        mask: u8,
+        data: &mut ByteReader,
+    ) -> [f32; 4] {
         let result = if mask != 0 {
-            Self::read_nurbs_curve(quantization, data, quantized_time, self.frame_duration, time, mask, [0., 0., 0., 0.])
+            Self::read_nurbs_curve(
+                quantization,
+                data,
+                quantized_time,
+                self.frame_duration,
+                time,
+                mask,
+                [0., 0., 0., 0.],
+            )
         } else {
             [0., 0., 0., 0.]
         };
@@ -429,17 +477,46 @@ impl HavokSplineCompressedAnimation {
         result
     }

-    fn sample_rotation(&self, quantization: RotationQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
-        let result = Self::read_nurbs_quaternion(quantization, data, quantized_time, self.frame_duration, time, mask);
+    fn sample_rotation(
+        &self,
+        quantization: RotationQuantization,
+        time: f32,
+        quantized_time: u8,
+        mask: u8,
+        data: &mut ByteReader,
+    ) -> [f32; 4] {
+        let result = Self::read_nurbs_quaternion(
+            quantization,
+            data,
+            quantized_time,
+            self.frame_duration,
+            time,
+            mask,
+        );

         data.align(4);

         result
     }

-    fn sample_scale(&self, quantization: ScalarQuantization, time: f32, quantized_time: u8, mask: u8, data: &mut ByteReader) -> [f32; 4] {
+    fn sample_scale(
+        &self,
+        quantization: ScalarQuantization,
+        time: f32,
+        quantized_time: u8,
+        mask: u8,
+        data: &mut ByteReader,
+    ) -> [f32; 4] {
         let result = if mask != 0 {
-            Self::read_nurbs_curve(quantization, data, quantized_time, self.frame_duration, time, mask, [1., 1., 1., 1.])
+            Self::read_nurbs_curve(
+                quantization,
+                data,
+                quantized_time,
+                self.frame_duration,
+                time,
+                mask,
+                [1., 1., 1., 1.],
+            )
         } else {
             [1., 1., 1., 1.]
         };
@@ -575,17 +652,41 @@ impl HavokAnimation for HavokSplineCompressedAnimation {
             block,
             self.mask_and_quantization_size,
         ));
-        let mut mask = ByteReader::new(Self::compute_packed_nurbs_offsets(&self.data, &self.block_offsets, block, 0x8000_0000));
+        let mut mask = ByteReader::new(Self::compute_packed_nurbs_offsets(
+            &self.data,
+            &self.block_offsets,
+            block,
+            0x8000_0000,
+        ));

         let mut result = Vec::with_capacity(self.number_of_transform_tracks);
         for _ in 0..self.number_of_transform_tracks {
             let packed_quantization_types = mask.read();

-            let (translation_type, rotation_type, scale_type) = Self::unpack_quantization_types(packed_quantization_types);
+            let (translation_type, rotation_type, scale_type) =
+                Self::unpack_quantization_types(packed_quantization_types);

-            let translation = self.sample_translation(translation_type, block_time, quantized_time, mask.read(), &mut data);
-            let rotation = self.sample_rotation(rotation_type, block_time, quantized_time, mask.read(), &mut data);
-            let scale = self.sample_scale(scale_type, block_time, quantized_time, mask.read(), &mut data);
+            let translation = self.sample_translation(
+                translation_type,
+                block_time,
+                quantized_time,
+                mask.read(),
+                &mut data,
+            );
+            let rotation = self.sample_rotation(
+                rotation_type,
+                block_time,
+                quantized_time,
+                mask.read(),
+                &mut data,
+            );
+            let scale = self.sample_scale(
+                scale_type,
+                block_time,
+                quantized_time,
+                mask.read(),
+                &mut data,
+            );

             result.push(HavokTransform::from_trs(translation, rotation, scale));
         }

View file

@@ -5,11 +5,11 @@

 use std::io::SeekFrom;

-use binrw::BinRead;
-use binrw::binrw;
-use modular_bitfield::prelude::*;
 use crate::common::Platform;
 use crate::crc::Jamcrc;
+use binrw::binrw;
+use binrw::BinRead;
+use modular_bitfield::prelude::*;

 #[binrw]
 #[br(magic = b"SqPack")]

View file

@@ -3,9 +3,9 @@
use std::io::{Cursor, Seek, SeekFrom};

use crate::ByteSpan;
use binrw::binrw;
use binrw::{binread, BinRead, BinReaderExt};

// From https://github.com/NotAdam/Lumina/tree/40dab50183eb7ddc28344378baccc2d63ae71d35/src/Lumina/Data/Parsing/Layer
@@ -13,8 +13,7 @@ use crate::ByteSpan;
#[binrw]
#[repr(i32)]
#[derive(Debug, PartialEq)]
enum LayerEntryType {
    #[brw(magic = 0x0i32)]
    AssetNone,
    #[brw(magic = 0x1i32)]
@@ -29,7 +28,7 @@ enum LayerEntryType
        render_shadow_enabled: u8,
        render_light_shadow_enabled: u8,
        padding: u8,
        render_model_clip_range: f32,
    },
    #[brw(magic = 0x2i32)]
    Attribute,
@@ -41,16 +40,16 @@ enum LayerEntryType
    PositionMarker,
    #[brw(magic = 0x6i32)]
    SharedGroup,
    Sound = 0x7, // //
    EventNPC = 0x8, // //
    BattleNPC = 0x9, // //
    RoutePath = 0xA,
    Character = 0xB,
    Aetheryte = 0xC, // //
    EnvSet = 0xD, // //
    Gathering = 0xE, // //
    HelperObject = 0xF, //
    Treasure = 0x10, // //
    Clip = 0x11,
    ClipCtrlPoint = 0x12,
    ClipCamera = 0x13,
@@ -73,13 +72,13 @@ enum LayerEntryType
    CutAssetOnlySelectable = 0x24,
    Player = 0x25,
    Monster = 0x26,
    Weapon = 0x27, //
    PopRange = 0x28, // //
    ExitRange = 0x29, // //
    Lvb = 0x2A,
    MapRange = 0x2B, // //
    NaviMeshRange = 0x2C, // //
    EventObject = 0x2D, // //
    DemiHuman = 0x2E,
    EnvLocation = 0x2F, // //
    ControlPoint = 0x30,
@@ -92,21 +91,21 @@ enum LayerEntryType
    ScenarioExd = 0x37,
    ScenarioText = 0x38,
    CollisionBox = 0x39, // //
    DoorRange = 0x3A, //
    LineVFX = 0x3B, // //
    SoundEnvSet = 0x3C,
    CutActionTimeline = 0x3D,
    CharaScene = 0x3E,
    CutAction = 0x3F,
    EquipPreset = 0x40,
    ClientPath = 0x41, // //
    ServerPath = 0x42, // //
    GimmickRange = 0x43, // //
    TargetMarker = 0x44, // //
    ChairMarker = 0x45, // //
    ClickableRange = 0x46, //
    PrefetchRange = 0x47, // //
    FateRange = 0x48, // //
    PartyMember = 0x49,
    KeepRange = 0x4A, //
    SphereCastRange = 0x4B,
@@ -120,8 +119,7 @@ enum LayerEntryType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum DoorState {
    Auto = 0x1,
    Open = 0x2,
    Closed = 0x3,
@@ -130,8 +128,7 @@ enum DoorState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum RotationState {
    Rounding = 0x1,
    Stopped = 0x2,
}
@@ -139,8 +136,7 @@ enum RotationState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TransformState {
    Play = 0x0,
    Stop = 0x1,
    Replay = 0x2,
@@ -150,8 +146,7 @@ enum TransformState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum ColourState {
    Play = 0x0,
    Stop = 0x1,
    Replay = 0x2,
@@ -161,8 +156,7 @@ enum ColourState
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TriggerBoxShape {
    Box = 0x1,
    Sphere = 0x2,
    Cylinder = 0x3,
@@ -174,8 +168,7 @@ enum TriggerBoxShape
#[binrw]
#[brw(repr = i32)]
#[derive(Debug, PartialEq)]
enum ModelCollisionType {
    None = 0x0,
    Replace = 0x1,
    Box = 0x2,
@@ -184,8 +177,7 @@ enum ModelCollisionType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum LightType {
    None = 0x0,
    Directional = 0x1,
    Point = 0x2,
@@ -198,8 +190,7 @@ enum LightType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum PointLightType {
    Sphere = 0x0,
    Hemisphere = 0x1,
}
@@ -207,8 +198,7 @@ enum PointLightType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum PositionMarkerType {
    DebugZonePop = 0x1,
    DebugJump = 0x2,
    NaviMesh = 0x3,
@@ -218,8 +208,7 @@ enum PositionMarkerType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum EnvSetShape {
    Ellipsoid = 0x1,
    Cuboid = 0x2,
    Cylinder = 0x3,
@@ -228,8 +217,7 @@ enum EnvSetShape
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum HelperObjectType {
    ProxyActor = 0x0,
    NullObject = 0x1,
}
@@ -237,8 +225,7 @@ enum HelperObjectType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TargetType {
    None = 0x0,
    ENPCInstanceID = 0x1,
    Player = 0x2,
@@ -257,8 +244,7 @@ enum TargetType
#[binread]
#[derive(Debug, PartialEq)]
enum PopType {
    #[br(magic = 0x1u8)]
    PC = 0x1,
    #[br(magic = 0x2u8)]
@@ -270,16 +256,14 @@ enum PopType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum ExitType {
    ZoneLine = 0x1,
}

#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum RangeType {
    Type01 = 0x1,
    Type02 = 0x2,
    Type03 = 0x3,
@@ -292,8 +276,7 @@ enum RangeType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum LineStyle {
    Red = 0x1,
    Blue = 0x2,
}
@@ -301,8 +284,7 @@ enum LineStyle
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum GimmickType {
    Fishing = 0x1,
    Content = 0x2,
    Room = 0x3,
@@ -311,8 +293,7 @@ enum GimmickType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum TargetMarkerType {
    UiTarget = 0x0,
    UiNameplate = 0x1,
    LookAt = 0x2,
@@ -324,8 +305,7 @@ enum TargetMarkerType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum ObjectType {
    ObjectChair = 0x0,
    ObjectBed = 0x1,
}
@@ -333,8 +313,7 @@ enum ObjectType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum CharacterSize {
    DefaultSize = 0x0,
    VerySmall = 0x1,
    Small = 0x2,
@@ -346,8 +325,7 @@ enum CharacterSize
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum DrawHeadParts {
    Default = 0x0,
    ForceOn = 0x1,
    ForceOff = 0x2,
@@ -356,8 +334,7 @@ enum DrawHeadParts
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum RotationType {
    NoRotate = 0x0,
    AllAxis = 0x1,
    YAxisOnly = 0x2,
@@ -366,8 +343,7 @@ enum RotationType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum MovePathMode {
    None = 0x0,
    SharedGroupAction = 0x1,
    Timeline = 0x2,
@@ -376,8 +352,7 @@ enum MovePathMode
#[binrw]
#[brw(repr = i32)]
#[derive(Debug, PartialEq)]
enum LayerSetReferencedType {
    All = 0x0,
    Include = 0x1,
    Exclude = 0x2,
@@ -387,8 +362,7 @@ enum LayerSetReferencedType
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, PartialEq)]
enum SoundEffectType {
    Point = 0x3,
    PointDir = 0x4,
    Line = 0x5,
@@ -424,7 +398,7 @@ struct LayerHeader {
    is_temporary: u8,
    is_housing: u8,
    version_mask: u16,

    #[br(pad_before = 4)]
    ob_set_referenced_list: i32,
    ob_set_referenced_list_count: i32,
@@ -439,7 +413,7 @@ struct LayerHeader {
struct LayerSetReferencedList {
    referenced_type: LayerSetReferencedType,
    layer_sets: i32,
    layer_set_count: i32,
}

#[binread]
@@ -450,7 +424,7 @@ struct LgbHeader {
    #[br(count = 4)]
    file_id: Vec<u8>,
    file_size: i32,
    total_chunk_count: i32,
}

#[binread]
@@ -464,7 +438,7 @@ struct LayerChunk {
    layer_group_id: i32,
    name_offset: u32,
    layer_offset: i32,
    layer_count: i32,
}

#[binread]
@@ -474,13 +448,11 @@ struct LayerChunk {
struct InstanceObject {
    asset_type: LayerEntryType,
    instance_id: u32,
    name_offset: u32,
}

#[derive(Debug)]
pub struct Layer {}

impl Layer {
    /// Reads an existing PBD file
@@ -491,35 +463,51 @@ impl Layer {
        if file_header.file_size < 0 || file_header.total_chunk_count < 0 {
            return None;
        }

        let chunk_header = LayerChunk::read(&mut cursor).unwrap();

        let old_pos = cursor.position();

        let mut layer_offsets = vec![0i32; chunk_header.layer_count as usize];
        for i in 0..chunk_header.layer_count {
            layer_offsets[i as usize] = cursor.read_le::<i32>().unwrap();
        }

        for i in 0..chunk_header.layer_count {
            cursor
                .seek(SeekFrom::Start(old_pos + layer_offsets[i as usize] as u64))
                .unwrap();

            let old_pos = cursor.position();

            let header = LayerHeader::read(&mut cursor).unwrap();

            cursor
                .seek(SeekFrom::Start(
                    old_pos + header.instance_object_offset as u64,
                ))
                .unwrap();

            let mut instance_offsets = vec![0i32; header.instance_object_count as usize];
            for i in 0..header.instance_object_count {
                instance_offsets[i as usize] = cursor.read_le::<i32>().unwrap();
            }

            cursor
                .seek(SeekFrom::Start(
                    old_pos + header.layer_set_referenced_list_offset as u64,
                ))
                .unwrap();
            LayerSetReferencedList::read(&mut cursor).unwrap();

            for i in 0..header.instance_object_count {
                cursor
                    .seek(SeekFrom::Start(
                        old_pos
                            + header.instance_object_offset as u64
                            + instance_offsets[i as usize] as u64,
                    ))
                    .unwrap();

                let instance_object = InstanceObject::read(&mut cursor).unwrap();
                println!("{:#?}", instance_object);
@@ -549,4 +537,3 @@ mod tests {
        Layer::from_existing(&read(d).unwrap());
    }
}
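Note: the test above doubles as the only usage example for this parser. A minimal calling sketch follows; the path is hypothetical, and from_existing currently just walks the chunks and prints each InstanceObject.

use std::fs::read;

// Hypothetical path; Layer::from_existing takes the raw LGB bytes.
let buffer = read("example.lgb").unwrap();
Layer::from_existing(&buffer);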

View file

@@ -6,7 +6,7 @@
extern crate core;

/// Represents a continuous block of memory which is not owned, and comes either from an in-memory location or from a file.
pub type ByteSpan<'a> = &'a [u8];

/// Represents a continuous block of memory which is owned.
pub type ByteBuffer = Vec<u8>;
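Note: a minimal sketch of how the two aliases pair up in practice; parsers borrow their input as a ByteSpan and hand back owned bytes as a ByteBuffer. The function is illustrative, not part of the crate.

// Borrow the input, return an owned copy of the slice we care about,
// so the caller decides where the source memory lives.
fn first_half(data: ByteSpan) -> ByteBuffer {
    data[..data.len() / 2].to_vec()
}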

View file

@@ -3,9 +3,9 @@
use std::io::{Cursor, Seek, SeekFrom};

use crate::ByteSpan;
use binrw::binrw;
use binrw::BinRead;

#[binrw]
#[allow(dead_code)]
@@ -121,7 +121,7 @@ impl ChatLog {
        if header.content_size as usize > buffer.len() || header.file_size as usize > buffer.len() {
            return None;
        }

        let content_offset = (8 + header.file_size * 4) as u64;

        // beginning of content offset
@@ -143,10 +143,9 @@ impl ChatLog {
            };

            // TODO: handle the coloring properly, in some way
            entry.message =
                String::from_utf8_lossy(&buffer[cursor.position() as usize..next_offset])
                    .to_string();

            entries.push(entry);
        }
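Note: from_utf8_lossy is what keeps this loop robust against malformed log bytes; it never fails, as this self-contained snippet shows.

// Invalid UTF-8 sequences become U+FFFD instead of an error.
let bytes = b"abc\xFFdef";
assert_eq!(String::from_utf8_lossy(&bytes[..]), "abc\u{FFFD}def");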

View file

@@ -6,13 +6,16 @@
use std::io::{Cursor, Seek, SeekFrom};
use std::mem::size_of;

use binrw::BinRead;
use binrw::BinReaderExt;
use binrw::{binrw, BinWrite, BinWriterExt};

use crate::common_file_operations::{read_bool_from, write_bool_as};
use crate::model_vertex_declarations::{
    vertex_element_parser, vertex_element_writer, VertexDeclaration, VertexType, VertexUsage,
    VERTEX_ELEMENT_SIZE,
};
use crate::{ByteBuffer, ByteSpan};

pub const NUM_VERTICES: u32 = 17;
@@ -46,7 +49,7 @@ pub struct ModelFileHeader {
#[binrw]
#[brw(repr = u8)]
#[derive(Debug, Clone, PartialEq)]
enum ModelFlags1 {
    DustOcclusionEnabled = 0x80,
    SnowOcclusionEnabled = 0x40,
@@ -126,7 +129,7 @@ pub struct ModelHeader {
    unknown7: u16,
    unknown8: u16,
    #[brw(pad_after = 6)]
    unknown9: u16,
}

#[binrw]
@@ -221,7 +224,7 @@ struct BoneTableV2 {
    // align to 4 bytes
    // TODO: use br align_to?
    #[br(if(bone_count % 2 == 0))]
    padding: u16,
}

#[binrw]
@@ -243,7 +246,7 @@ struct TerrainShadowMesh {
    submesh_index: u16,
    submesh_count: u16,
    vertex_buffer_stride: u8,
    padding: u8,
}

#[binrw]
@@ -253,7 +256,7 @@ struct TerrainShadowSubmesh {
    index_offset: u32,
    index_count: u32,
    unknown1: u16,
    unknown2: u16,
}

#[binrw]
@@ -262,16 +265,16 @@ struct TerrainShadowSubmesh {
struct ShapeStruct {
    string_offset: u32,
    shape_mesh_start_index: [u16; 3],
    shape_mesh_count: [u16; 3],
}

#[binrw]
#[derive(Debug, Clone, PartialEq)]
#[allow(dead_code)]
struct ShapeMesh {
    mesh_index_offset: u32,
    shape_value_count: u32,
    shape_value_offset: u32,
}

#[binrw]
@@ -279,7 +282,7 @@ struct ShapeMesh {
#[allow(dead_code)]
struct ShapeValue {
    base_indices_index: u16,
    replacing_vertex_index: u16,
}

#[binrw]
@@ -362,7 +365,7 @@ struct ModelData {
}

#[binrw]
#[derive(Debug, Clone, PartialEq)]
#[allow(dead_code)]
struct ElementId {
    element_id: u32,
@@ -371,7 +374,7 @@ struct ElementId {
    rotate: [f32; 3],
}

#[derive(Clone, Copy, PartialEq)]
#[repr(C)]
pub struct Vertex {
    pub position: [f32; 3],
@@ -405,7 +408,7 @@ impl Default for Vertex {
#[repr(C)]
pub struct NewShapeValue {
    pub base_index: u32,
    pub replacing_vertex: Vertex,
}

#[derive(Clone, Copy)]
@@ -413,13 +416,13 @@ pub struct NewShapeValue {
pub struct SubMesh {
    submesh_index: usize,
    pub index_count: u32,
    pub index_offset: u32,
}

#[derive(Clone)]
pub struct Shape {
    pub name: String,
    pub morphed_vertices: Vec<Vertex>,
}

/// Corresponds to a "Mesh" in an LOD
@@ -430,7 +433,7 @@ pub struct Part {
    pub indices: Vec<u16>,
    pub material_index: u16,
    pub submeshes: Vec<SubMesh>,
    pub shapes: Vec<Shape>,
}

#[derive(Clone)]
@@ -453,8 +456,12 @@ impl MDL {
        let mut cursor = Cursor::new(buffer);
        let model_file_header = ModelFileHeader::read(&mut cursor).ok()?;

        let model = ModelData::read_args(
            &mut cursor,
            binrw::args! { file_header: &model_file_header },
        )
        .ok()?;

        let mut affected_bone_names = vec![];

        for offset in &model.bone_name_offsets {
@@ -507,151 +514,173 @@ impl MDL {
            .seek(SeekFrom::Start(
                (model.lods[i as usize].vertex_data_offset
                    + model.meshes[j as usize].vertex_buffer_offsets
                        [element.stream as usize]
                    + element.offset as u32
                    + model.meshes[j as usize].vertex_buffer_strides
                        [element.stream as usize]
                        as u32
                        * k as u32) as u64,
            ))
            .ok()?;

        match element.vertex_usage {
            VertexUsage::Position => match element.vertex_type {
                VertexType::Single4 => {
                    vertices[k as usize].position.clone_from_slice(
                        &MDL::read_single4(&mut cursor).unwrap()[0..3],
                    );
                }
                VertexType::Half4 => {
                    vertices[k as usize].position.clone_from_slice(
                        &MDL::read_half4(&mut cursor).unwrap()[0..3],
                    );
                }
                VertexType::Single3 => {
                    vertices[k as usize].position =
                        MDL::read_single3(&mut cursor).unwrap();
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for position: {:#?}",
                        element.vertex_type
                    );
                }
            },
            VertexUsage::BlendWeights => match element.vertex_type {
                VertexType::ByteFloat4 => {
                    vertices[k as usize].bone_weight =
                        MDL::read_byte_float4(&mut cursor).unwrap();
                }
                VertexType::Byte4 => {
                    let bytes = MDL::read_byte4(&mut cursor).unwrap();
                    vertices[k as usize].bone_weight = [
                        f32::from(bytes[0]),
                        f32::from(bytes[1]),
                        f32::from(bytes[2]),
                        f32::from(bytes[3]),
                    ];
                }
                VertexType::UnsignedShort4 => {
                    let bytes = MDL::read_unsigned_short4(&mut cursor).unwrap();
                    vertices[k as usize].bone_weight = [
                        f32::from(bytes[0]),
                        f32::from(bytes[1]),
                        f32::from(bytes[2]),
                        f32::from(bytes[3]),
                    ];
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for blendweight: {:#?}",
                        element.vertex_type
                    );
                }
            },
            VertexUsage::BlendIndices => match element.vertex_type {
                VertexType::Byte4 => {
                    vertices[k as usize].bone_id =
                        MDL::read_byte4(&mut cursor).unwrap();
                }
                VertexType::UnsignedShort4 => {
                    let shorts = MDL::read_unsigned_short4(&mut cursor).unwrap();
                    vertices[k as usize].bone_id = [
                        shorts[0] as u8,
                        shorts[1] as u8,
                        shorts[2] as u8,
                        shorts[3] as u8,
                    ];
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for blendindice: {:#?}",
                        element.vertex_type
                    );
                }
            },
            VertexUsage::Normal => match element.vertex_type {
                VertexType::Half4 => {
                    vertices[k as usize].normal.clone_from_slice(
                        &MDL::read_half4(&mut cursor).unwrap()[0..3],
                    );
                }
                VertexType::Single3 => {
                    vertices[k as usize].normal =
                        MDL::read_single3(&mut cursor).unwrap();
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for normal: {:#?}",
                        element.vertex_type
                    );
                }
            },
            VertexUsage::UV => match element.vertex_type {
                VertexType::ByteFloat4 => {
                    let combined = MDL::read_byte_float4(&mut cursor).unwrap();

                    vertices[k as usize].uv0.clone_from_slice(&combined[0..2]);
                    vertices[k as usize].uv1.clone_from_slice(&combined[2..4]);
                }
                VertexType::Half4 => {
                    let combined = MDL::read_half4(&mut cursor).unwrap();

                    vertices[k as usize].uv0.clone_from_slice(&combined[0..2]);
                    vertices[k as usize].uv1.clone_from_slice(&combined[2..4]);
                }
                VertexType::Single4 => {
                    let combined = MDL::read_single4(&mut cursor).unwrap();

                    vertices[k as usize].uv0.clone_from_slice(&combined[0..2]);
                    vertices[k as usize].uv1.clone_from_slice(&combined[2..4]);
                }
                VertexType::Half2 => {
                    let combined = MDL::read_half2(&mut cursor).unwrap();

                    vertices[k as usize].uv0.clone_from_slice(&combined[0..2]);
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for uv: {:#?}",
                        element.vertex_type
                    );
                }
            },
            VertexUsage::BiTangent => match element.vertex_type {
                VertexType::ByteFloat4 => {
                    vertices[k as usize].bitangent =
                        MDL::read_tangent(&mut cursor).unwrap();
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for bitangent: {:#?}",
                        element.vertex_type
                    );
                }
            },
            VertexUsage::Tangent => {
                match element.vertex_type {
                    // Used for... terrain..?
                    VertexType::ByteFloat4 => {}
                    _ => {
                        panic!(
                            "Unexpected vertex type for tangent: {:#?}",
                            element.vertex_type
                        );
                    }
                }
            }
            VertexUsage::Color => match element.vertex_type {
                VertexType::ByteFloat4 => {
                    vertices[k as usize].color =
                        MDL::read_byte_float4(&mut cursor).unwrap();
                }
                _ => {
                    panic!(
                        "Unexpected vertex type for color: {:#?}",
                        element.vertex_type
                    );
                }
            },
        }
    }
}
@@ -671,12 +700,17 @@ impl MDL {
        indices.push(cursor.read_le::<u16>().ok()?);
    }

    let mut submeshes: Vec<SubMesh> =
        Vec::with_capacity(model.meshes[j as usize].submesh_count as usize);
    for i in 0..model.meshes[j as usize].submesh_count {
        submeshes.push(SubMesh {
            submesh_index: model.meshes[j as usize].submesh_index as usize + i as usize,
            index_count: model.submeshes
                [model.meshes[j as usize].submesh_index as usize + i as usize]
                .index_count,
            index_offset: model.submeshes
                [model.meshes[j as usize].submesh_index as usize + i as usize]
                .index_offset,
        });
    }
@@ -684,23 +718,45 @@ impl MDL {
    for shape in &model.shapes {
        // Adapted from https://github.com/xivdev/Penumbra/blob/master/Penumbra/Import/Models/Export/MeshExporter.cs
        let affected_shape_mesh: Vec<&ShapeMesh> = model
            .shape_meshes
            .iter()
            .skip(shape.shape_mesh_start_index[i as usize] as usize)
            .take(shape.shape_mesh_count[i as usize] as usize)
            .filter(|shape_mesh| {
                shape_mesh.mesh_index_offset == model.meshes[j as usize].start_index
            })
            .collect();

        let shape_values: Vec<&ShapeValue> = affected_shape_mesh
            .iter()
            .flat_map(|shape_mesh| {
                model
                    .shape_values
                    .iter()
                    .skip(shape_mesh.shape_value_offset as usize)
                    .take(shape_mesh.shape_value_count as usize)
            })
            .filter(|shape_value| {
                shape_value.base_indices_index
                    >= model.meshes[j as usize].start_index as u16
                    && shape_value.base_indices_index
                        < (model.meshes[j as usize].start_index
                            + model.meshes[j as usize].index_count)
                            as u16
            })
            .collect();

        let mut morphed_vertices = vec![Vertex::default(); vertices.len()];

        if !shape_values.is_empty() {
            for shape_value in shape_values {
                let old_vertex =
                    vertices[indices[shape_value.base_indices_index as usize] as usize];
                let new_vertex = vertices[shape_value.replacing_vertex_index as usize
                    - model.meshes[j as usize].start_index as usize];
                let vertex = &mut morphed_vertices
                    [indices[shape_value.base_indices_index as usize] as usize];

                vertex.position[0] = new_vertex.position[0] - old_vertex.position[0];
                vertex.position[1] = new_vertex.position[1] - old_vertex.position[1];
@@ -719,12 +775,19 @@ impl MDL {
            shapes.push(Shape {
                name: string,
                morphed_vertices,
            });
        }
    }

    parts.push(Part {
        mesh_index: j,
        vertices,
        indices,
        material_index,
        submeshes,
        shapes,
    });
}

lods.push(Lod { parts });
@@ -735,11 +798,18 @@ impl MDL {
            model_data: model,
            lods,
            affected_bone_names,
            material_names,
        })
    }

    pub fn replace_vertices(
        &mut self,
        lod_index: usize,
        part_index: usize,
        vertices: &[Vertex],
        indices: &[u16],
        submeshes: &[SubMesh],
    ) {
        let part = &mut self.lods[lod_index].parts[part_index];

        part.vertices = Vec::from(vertices);
@@ -747,8 +817,10 @@ impl MDL {
        for (i, submesh) in part.submeshes.iter().enumerate() {
            if i < submeshes.len() {
                self.model_data.submeshes[submesh.submesh_index].index_offset =
                    submeshes[i].index_offset;
                self.model_data.submeshes[submesh.submesh_index].index_count =
                    submeshes[i].index_count;
            }
        }
@@ -773,26 +845,38 @@ impl MDL {
        self.update_headers();
    }

    pub fn add_shape_mesh(
        &mut self,
        lod_index: usize,
        shape_index: usize,
        shape_mesh_index: usize,
        part_index: usize,
        shape_values: &[NewShapeValue],
    ) {
        let part = &mut self.lods[lod_index].parts[part_index];

        // TODO: this is assuming they are added in order
        if shape_mesh_index == 0 {
            self.model_data.shapes[shape_index].shape_mesh_start_index[lod_index] =
                self.model_data.shape_meshes.len() as u16;
        }

        self.model_data.shape_meshes.push(ShapeMesh {
            mesh_index_offset: self.model_data.meshes[part.mesh_index as usize].start_index,
            shape_value_count: shape_values.len() as u32,
            shape_value_offset: self.model_data.shape_values.len() as u32,
        });

        for shape_value in shape_values {
            part.vertices.push(shape_value.replacing_vertex);

            self.model_data.shape_values.push(ShapeValue {
                base_indices_index: self.model_data.meshes[part.mesh_index as usize].start_index
                    as u16
                    + shape_value.base_index as u16,
                replacing_vertex_index: self.model_data.meshes[part.mesh_index as usize].start_index
                    as u16
                    + (part.vertices.len() - 1) as u16,
            })
        }
@@ -807,15 +891,18 @@ impl MDL {
        let mut vertex_offset = 0;

        for j in self.model_data.lods[i as usize].mesh_index
            ..self.model_data.lods[i as usize].mesh_index
                + self.model_data.lods[i as usize].mesh_count
        {
            let mesh = &mut self.model_data.meshes[j as usize];

            mesh.start_index =
                self.model_data.submeshes[mesh.submesh_index as usize].index_offset;

            for i in 0..mesh.vertex_stream_count as usize {
                mesh.vertex_buffer_offsets[i] = vertex_offset;
                vertex_offset +=
                    mesh.vertex_count as u32 * mesh.vertex_buffer_strides[i] as u32;
            }
        }
    }
@@ -825,15 +912,14 @@ impl MDL {
        let mut total_index_buffer_size = 0;

        // still slightly off?
        for j in lod.mesh_index..lod.mesh_index + lod.mesh_count {
            let vertex_count = self.model_data.meshes[j as usize].vertex_count;
            let index_count = self.model_data.meshes[j as usize].index_count;

            let mut total_vertex_stride: u32 = 0;
            for i in 0..self.model_data.meshes[j as usize].vertex_stream_count as usize {
                total_vertex_stride +=
                    self.model_data.meshes[j as usize].vertex_buffer_strides[i] as u32;
            }

            total_vertex_buffer_size += vertex_count as u32 * total_vertex_stride;
@@ -912,97 +998,117 @@ impl MDL {
        for (l, lod) in self.lods.iter().enumerate() {
            for part in lod.parts.iter() {
                let declaration =
                    &self.model_data.header.vertex_declarations[part.mesh_index as usize];

                for (k, vert) in part.vertices.iter().enumerate() {
                    for element in &declaration.elements {
                        cursor
                            .seek(SeekFrom::Start(
                                (self.model_data.lods[l].vertex_data_offset
                                    + self.model_data.meshes[part.mesh_index as usize]
                                        .vertex_buffer_offsets
                                        [element.stream as usize]
                                    + element.offset as u32
                                    + self.model_data.meshes[part.mesh_index as usize]
                                        .vertex_buffer_strides
                                        [element.stream as usize]
                                        as u32
                                        * k as u32) as u64,
                            ))
                            .ok()?;

                        match element.vertex_usage {
                            VertexUsage::Position => match element.vertex_type {
                                VertexType::Half4 => {
                                    MDL::write_half4(
                                        &mut cursor,
                                        &MDL::pad_slice(&vert.position, 1.0),
                                    )
                                    .ok()?;
                                }
                                VertexType::Single3 => {
                                    MDL::write_single3(&mut cursor, &vert.position).ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for position: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                            VertexUsage::BlendWeights => match element.vertex_type {
                                VertexType::ByteFloat4 => {
                                    MDL::write_byte_float4(&mut cursor, &vert.bone_weight)
                                        .ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for blendweight: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                            VertexUsage::BlendIndices => match element.vertex_type {
                                VertexType::Byte4 => {
                                    MDL::write_byte4(&mut cursor, &vert.bone_id).ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for blendindice: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                            VertexUsage::Normal => match element.vertex_type {
                                VertexType::Half4 => {
                                    MDL::write_half4(
                                        &mut cursor,
                                        &MDL::pad_slice(&vert.normal, 0.0),
                                    )
                                    .ok()?;
                                }
                                VertexType::Single3 => {
                                    MDL::write_single3(&mut cursor, &vert.normal).ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for normal: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                            VertexUsage::UV => match element.vertex_type {
                                VertexType::Half4 => {
                                    let combined =
                                        [vert.uv0[0], vert.uv0[1], vert.uv1[0], vert.uv1[1]];

                                    MDL::write_half4(&mut cursor, &combined).ok()?;
                                }
                                VertexType::Single4 => {
                                    let combined =
                                        [vert.uv0[0], vert.uv0[1], vert.uv1[0], vert.uv1[1]];

                                    MDL::write_single4(&mut cursor, &combined).ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for uv: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                            VertexUsage::BiTangent => match element.vertex_type {
                                VertexType::ByteFloat4 => {
                                    MDL::write_tangent(&mut cursor, &vert.bitangent).ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for bitangent: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                            VertexUsage::Tangent => {
                                #[allow(clippy::match_single_binding)] // TODO
                                match element.vertex_type {
@@ -1010,20 +1116,24 @@ impl MDL {
                                        MDL::write_tangent(&mut cursor, &vert.binormal).ok()?;
                                    }*/
                                    _ => {
                                        panic!(
                                            "Unexpected vertex type for tangent: {:#?}",
                                            element.vertex_type
                                        );
                                    }
                                }
                            }
                            VertexUsage::Color => match element.vertex_type {
                                VertexType::ByteFloat4 => {
                                    MDL::write_byte_float4(&mut cursor, &vert.color).ok()?;
                                }
                                _ => {
                                    panic!(
                                        "Unexpected vertex type for color: {:#?}",
                                        element.vertex_type
                                    );
                                }
                            },
                        }
                    }
                }
@@ -1031,8 +1141,8 @@ impl MDL {
                cursor
                    .seek(SeekFrom::Start(
                        (self.file_header.index_offsets[l]
                            + (self.model_data.meshes[part.mesh_index as usize].start_index
                                * size_of::<u16>() as u32)) as u64,
                    ))
                    .ok()?;
@@ -1166,7 +1276,13 @@ mod tests {
        for l in 0..old_mdl.lods.len() {
            for p in 0..old_mdl.lods[l].parts.len() {
                mdl.replace_vertices(
                    l,
                    p,
                    &old_mdl.lods[l].parts[p].vertices,
                    &old_mdl.lods[l].parts[p].indices,
                    &old_mdl.lods[l].parts[p].submeshes,
                );
            }
        }
@@ -1186,9 +1302,15 @@ mod tests {
        // file header
        assert_eq!(mdl.file_header.version, 16777221);
        assert_eq!(mdl.file_header.stack_size, 816);
        assert_eq!(
            mdl.file_header.stack_size,
            mdl.file_header.calculate_stack_size()
        );
        assert_eq!(mdl.file_header.runtime_size, 12544);
        assert_eq!(
            mdl.file_header.runtime_size,
            mdl.model_data.calculate_runtime_size()
        );
        assert_eq!(mdl.file_header.vertex_declaration_count, 6);
        assert_eq!(mdl.file_header.material_count, 2);
        assert_eq!(mdl.file_header.lod_count, 3);
@@ -1208,4 +1330,4 @@ mod tests {
        // Feeding it invalid data should not panic
        MDL::from_existing(&read(d).unwrap());
    }
}
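Note: the tests above double as usage documentation. A minimal read sketch follows; the path is hypothetical, and from_existing returns None instead of panicking on malformed input.

use std::fs::read;

// Hypothetical path; feeding arbitrary bytes to the parser is safe.
let buffer = read("c0201e0038_top.mdl").unwrap();
if let Some(mdl) = MDL::from_existing(&buffer) {
    println!("lods: {}", mdl.lods.len());
}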

View file

@@ -1,11 +1,11 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later

use crate::model::MDL;
use crate::ByteSpan;
use binrw::{BinReaderExt, BinResult, BinWriterExt};
use half::f16;
use std::io::Cursor;

/// Maximum value of byte, used to divide and multiply floats in that space [0.0..1.0] to [0..255]
const MAX_BYTE_FLOAT: f32 = u8::MAX as f32;
@@ -16,16 +16,20 @@ impl MDL {
            (f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
            (f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
            (f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
            (f32::from(cursor.read_le::<u8>().ok()?) / MAX_BYTE_FLOAT),
        ])
    }

    pub(crate) fn write_byte_float4<T: BinWriterExt>(
        cursor: &mut T,
        vec: &[f32; 4],
    ) -> BinResult<()> {
        cursor.write_le::<[u8; 4]>(&[
            (vec[0] * MAX_BYTE_FLOAT).round() as u8,
            (vec[1] * MAX_BYTE_FLOAT).round() as u8,
            (vec[2] * MAX_BYTE_FLOAT).round() as u8,
            (vec[3] * MAX_BYTE_FLOAT).round() as u8,
        ])
    }

    pub(crate) fn read_tangent(cursor: &mut Cursor<ByteSpan>) -> Option<[f32; 4]> {
@@ -33,7 +37,11 @@ impl MDL {
            (f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0),
            (f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0),
            (f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0),
            if (f32::from(cursor.read_le::<u8>().ok()?) * 2.0 / MAX_BYTE_FLOAT - 1.0) == 1.0 {
                1.0
            } else {
                -1.0
            },
        ])
    }
@@ -42,7 +50,8 @@ impl MDL {
            ((vec[0] + 1.0) * (MAX_BYTE_FLOAT / 2.0)).round() as u8,
            ((vec[1] + 1.0) * (MAX_BYTE_FLOAT / 2.0)).round() as u8,
            ((vec[2] + 1.0) * (MAX_BYTE_FLOAT / 2.0)).round() as u8,
            if vec[3] > 0.0 { 255 } else { 0 },
        ]) // SqEx uses 0 as -1, not 1
    }

    pub(crate) fn read_half4(cursor: &mut Cursor<ByteSpan>) -> Option<[f32; 4]> {
@@ -50,7 +59,7 @@ impl MDL {
            f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
            f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
            f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
            f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
        ])
    }
@@ -59,13 +68,14 @@ impl MDL {
            f16::from_f32(vec[0]).to_bits(),
            f16::from_f32(vec[1]).to_bits(),
            f16::from_f32(vec[2]).to_bits(),
            f16::from_f32(vec[3]).to_bits(),
        ])
    }

    pub(crate) fn read_half2(cursor: &mut Cursor<ByteSpan>) -> Option<[f32; 2]> {
        Some([
            f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
            f16::from_bits(cursor.read_le::<u16>().ok()?).to_f32(),
        ])
    }
@@ -73,7 +83,8 @@ impl MDL {
    pub(crate) fn write_half2<T: BinWriterExt>(cursor: &mut T, vec: &[f32; 2]) -> BinResult<()> {
        cursor.write_le::<[u16; 2]>(&[
            f16::from_f32(vec[0]).to_bits(),
            f16::from_f32(vec[1]).to_bits(),
        ])
    }

    pub(crate) fn read_byte4(cursor: &mut Cursor<ByteSpan>) -> BinResult<[u8; 4]> {
@@ -113,15 +124,17 @@ impl MDL {
#[cfg(test)]
mod tests {
    use crate::model::MDL;
    use std::io::Cursor;

    macro_rules! assert_delta {
        ($x:expr, $y:expr, $d:expr) => {
            for i in 0..4 {
                if !($x[i] - $y[i] < $d || $y[i] - $x[i] < $d) {
                    panic!();
                }
            }
        };
    }

    #[test]
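Note: a worked example of the byte quantization round-trip these helpers implement, and why the assert_delta! macro compares with a tolerance instead of exact equality.

// write_byte_float4 stores 0.3 as (0.3 * 255).round() = 77; reading it back
// yields 77 / 255, roughly 0.30196, so round-trips are only exact to ~1/255.
let stored = (0.3f32 * 255.0).round() as u8;
let recovered = f32::from(stored) / 255.0;
assert!((recovered - 0.3).abs() < 1.0 / 255.0);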

View file

@@ -1,9 +1,9 @@
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later

use crate::model::NUM_VERTICES;
use binrw::{binrw, BinRead, BinResult, BinWrite};
use std::io::SeekFrom;

/// Marker for end of stream (0xFF)
const END_OF_STREAM: u8 = 0xFF;
@@ -46,7 +46,7 @@ pub enum VertexType {
    /// 2 16-bit unsigned integers
    UnsignedShort2 = 16,
    /// 4 16-bit unsigned integers
    UnsignedShort4 = 17,
}

/// What the vertex stream is used for.
@@ -78,7 +78,7 @@ pub struct VertexElement {
    pub usage_index: u8,
}

/// Represents the true size of VertexElement. Always use this value instead of std::mem::size_of.
// 3 extra bytes to account for the padding that doesn't appear in the struct itself
pub const VERTEX_ELEMENT_SIZE: usize = std::mem::size_of::<VertexElement>() + 3;
@@ -90,10 +90,7 @@ pub struct VertexDeclaration {
#[binrw::parser(reader, endian)]
pub(crate) fn vertex_element_parser(count: u16) -> BinResult<Vec<VertexDeclaration>> {
    let mut vertex_declarations: Vec<VertexDeclaration> =
        vec![VertexDeclaration { elements: vec![] }; count.into()];
    for declaration in &mut vertex_declarations {
        let mut element = VertexElement::read_options(reader, endian, ())?;
@@ -115,9 +112,7 @@ pub(crate) fn vertex_element_parser(count: u16) -> BinResult<Vec<VertexDeclarati
}

#[binrw::writer(writer, endian)]
pub(crate) fn vertex_element_writer(declarations: &Vec<VertexDeclaration>) -> BinResult<()> {
    // write vertex declarations
    for declaration in declarations {
        for element in &declaration.elements {
@@ -129,8 +124,9 @@ pub(crate) fn vertex_element_writer(
            offset: 0,
            vertex_type: VertexType::Single1,
            vertex_usage: VertexUsage::Position,
            usage_index: 0,
        }
        .write_options(writer, endian, ())?;

        let to_seek = (NUM_VERTICES as usize - 1 - declaration.elements.len()) * 8;
        writer.seek(SeekFrom::Current(to_seek as i64))?;
@@ -138,4 +134,3 @@ pub(crate) fn vertex_element_writer(
    Ok(())
}
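Note: a simplified, self-contained illustration of the end-of-stream convention the parser above relies on. It assumes eight bytes per element (per VERTEX_ELEMENT_SIZE and the * 8 stride above) and a 0xFF stream byte as the terminator; the helper itself is hypothetical.

const END_OF_STREAM: u8 = 0xFF;

// Read consecutive 8-byte elements until the stream index byte reads 0xFF.
fn read_raw_elements(bytes: &[u8]) -> Vec<[u8; 8]> {
    let mut elements = Vec::new();
    let mut pos = 0;
    while pos + 8 <= bytes.len() && bytes[pos] != END_OF_STREAM {
        let mut element = [0u8; 8];
        element.copy_from_slice(&bytes[pos..pos + 8]);
        elements.push(element);
        pos += 8;
    }
    elements
}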

View file

@@ -5,8 +5,8 @@
use std::io::Cursor;

use crate::ByteSpan;
use binrw::{binrw, BinRead};

#[binrw]
#[derive(Debug)]
@@ -79,8 +79,7 @@ struct Constant {
// from https://github.com/NotAdam/Lumina/blob/master/src/Lumina/Data/Parsing/MtrlStructs.cs
#[binrw]
#[derive(Debug)]
enum TextureUsage {
    #[brw(magic = 0x88408C04u32)]
    Sampler,
    #[brw(magic = 0x213CB439u32)]
@@ -123,7 +122,7 @@ enum TextureUsage
    SamplerWhitecapMap,

    #[brw(magic = 0x565f8fd8u32)]
    UnknownDawntrail1,
}

#[binrw]
@@ -183,14 +182,14 @@ struct MaterialData {
pub struct Material {
    pub shader_package_name: String,
    pub texture_paths: Vec<String>,
    pub shader_keys: Vec<ShaderKey>,
}

impl Material {
    pub fn from_existing(buffer: ByteSpan) -> Option<Material> {
        let mut cursor = Cursor::new(buffer);
        let mat_data = MaterialData::read(&mut cursor).ok()?;

        let mut texture_paths = vec![];

        let mut offset = 0;
@@ -224,7 +223,7 @@ impl Material {
        Some(Material {
            shader_package_name,
            texture_paths,
            shader_keys: mat_data.shader_keys,
        })
    }
}
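Note: a usage sketch for the accessor above. The path is hypothetical; in practice the .mtrl bytes usually come from a game archive extract.

use std::fs::read;

// shader_package_name and texture_paths come straight off the parsed data.
let buffer = read("example.mtrl").unwrap();
if let Some(material) = Material::from_existing(&buffer) {
    println!("shader package: {}", material.shader_package_name);
    for path in &material.texture_paths {
        println!("texture: {}", path);
    }
}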

View file

@@ -12,8 +12,8 @@ use binrw::BinRead;
use tracing::{debug, warn};

use crate::common::{get_platform_string, Platform, Region};
use crate::common_file_operations::read_bool_from;
use crate::sqpack::read_data_block_patch;

#[binread]
#[derive(Debug)]
@@ -148,7 +148,7 @@ enum SqpkOperation {
    #[br(magic = b'T')]
    TargetInfo(SqpkTargetInfo),
    #[br(magic = b'I')]
    Index(SqpkIndex),
}

#[derive(BinRead, PartialEq, Debug)]
@@ -301,7 +301,7 @@ struct SqpkIndex {
    block_offset: u32,

    #[br(pad_after = 8)] // data?
    block_number: u32,
}

#[derive(BinRead, PartialEq, Debug)]
@@ -463,8 +463,11 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
                    let (left, _) = filename.rsplit_once('/').unwrap();
                    fs::create_dir_all(left)?;

                    let mut new_file = OpenOptions::new()
                        .write(true)
                        .create(true)
                        .truncate(false)
                        .open(filename)?;

                    new_file.seek(SeekFrom::Start(add.block_offset as u64))?;
@@ -480,8 +483,11 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
                        delete.file_id,
                    );

                    let new_file = OpenOptions::new()
                        .write(true)
                        .create(true)
                        .truncate(false)
                        .open(filename)?;

                    write_empty_file_block_at(
                        &new_file,
@@ -500,8 +506,11 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
                    let (left, _) = filename.rsplit_once('/').unwrap();
                    fs::create_dir_all(left)?;

                    let new_file = OpenOptions::new()
                        .write(true)
                        .create(true)
                        .truncate(false)
                        .open(filename)?;

                    write_empty_file_block_at(
                        &new_file,

View file

@@ -3,9 +3,9 @@
 
 use std::io::{Cursor, Seek, SeekFrom};
 
-use binrw::{BinRead, BinReaderExt};
-use binrw::binrw;
 use crate::ByteSpan;
+use binrw::binrw;
+use binrw::{BinRead, BinReaderExt};
 
 #[binrw]
 #[derive(Debug)]
@@ -14,7 +14,7 @@ struct PreBoneDeformerItem {
     body_id: u16,
     link_index: u16,
     #[br(pad_after = 4)]
-    data_offset: u32
+    data_offset: u32,
 }
 
 #[binrw]
@@ -39,7 +39,7 @@ struct PreBoneDeformerHeader {
     links: Vec<PreBoneDeformerLink>,
 
     #[br(ignore)]
-    raw_data: Vec<u8>
+    raw_data: Vec<u8>,
 }
 
 pub struct PreBoneDeformer {
@@ -51,13 +51,13 @@ pub struct PreBoneDeformBone {
     /// Name of the affected bone
     pub name: String,
     /// The deform matrix
-    pub deform: [f32; 12]
+    pub deform: [f32; 12],
 }
 
 #[derive(Debug)]
 pub struct PreBoneDeformMatrices {
     /// The prebone deform bones
-    pub bones: Vec<PreBoneDeformBone>
+    pub bones: Vec<PreBoneDeformBone>,
 }
 
 impl PreBoneDeformer {
@@ -68,18 +68,24 @@ impl PreBoneDeformer {
 
         header.raw_data = buffer.to_vec();
 
-        Some(PreBoneDeformer {
-            header
-        })
+        Some(PreBoneDeformer { header })
     }
 
     /// Calculates the deform matrices between two races
-    pub fn get_deform_matrices(&self, from_body_id: u16, to_body_id: u16) -> Option<PreBoneDeformMatrices> {
+    pub fn get_deform_matrices(
+        &self,
+        from_body_id: u16,
+        to_body_id: u16,
+    ) -> Option<PreBoneDeformMatrices> {
         if from_body_id == to_body_id {
             return None;
         }
 
-        let mut item = self.header.items.iter().find(|x| x.body_id == from_body_id)?;
+        let mut item = self
+            .header
+            .items
+            .iter()
+            .find(|x| x.body_id == from_body_id)?;
 
         let mut next = &self.header.links[item.link_index as usize];
 
         if next.next_index == -1 {
@@ -96,7 +102,9 @@ impl PreBoneDeformer {
         let string_offsets_base = item.data_offset as usize + core::mem::size_of::<u32>();
 
-        cursor.seek(SeekFrom::Start(string_offsets_base as u64)).ok()?;
+        cursor
+            .seek(SeekFrom::Start(string_offsets_base as u64))
+            .ok()?;
 
         let mut strings_offset = vec![];
         for _ in 0..bone_name_count {
             strings_offset.push(cursor.read_le::<u16>().unwrap());
@@ -125,7 +133,7 @@ impl PreBoneDeformer {
                 let matrix = matrices[i];
 
                 bones.push(PreBoneDeformBone {
                     name: string,
-                    deform: matrix
+                    deform: matrix,
                 });
             }
@@ -137,9 +145,7 @@ impl PreBoneDeformer {
             }
         }
 
-        Some(PreBoneDeformMatrices {
-            bones
-        })
+        Some(PreBoneDeformMatrices { bones })
     }
 }
@@ -160,4 +166,3 @@ mod tests {
         PreBoneDeformer::from_existing(&read(d).unwrap());
     }
 }
-

View file

@@ -51,65 +51,47 @@ pub enum Race {
 pub fn get_race_id(race: Race, subrace: Subrace, gender: Gender) -> Option<i32> {
     // TODO: should we check for invalid subraces like the Hyur branch does?
     match race {
-        Race::Hyur => {
-            match subrace {
-                Subrace::Midlander => {
-                    match gender {
-                        Gender::Male => Some(101),
-                        Gender::Female => Some(201)
-                    }
-                }
-                Subrace::Highlander => {
-                    match gender {
-                        Gender::Male => Some(301),
-                        Gender::Female => Some(401)
-                    }
-                }
-                _ => None
-            }
-        }
-        Race::Elezen => {
-            match gender {
-                Gender::Male => Some(501),
-                Gender::Female => Some(601)
-            }
-        }
-        Race::Lalafell => {
-            match gender {
-                Gender::Male => Some(501),
-                Gender::Female => Some(601)
-            }
-        }
-        Race::Miqote => {
-            match gender {
-                Gender::Male => Some(701),
-                Gender::Female => Some(801)
-            }
-        }
-        Race::Roegadyn => {
-            match gender {
-                Gender::Male => Some(901),
-                Gender::Female => Some(1001)
-            }
-        }
-        Race::AuRa => {
-            match gender {
-                Gender::Male => Some(1301),
-                Gender::Female => Some(1401)
-            }
-        }
+        Race::Hyur => match subrace {
+            Subrace::Midlander => match gender {
+                Gender::Male => Some(101),
+                Gender::Female => Some(201),
+            },
+            Subrace::Highlander => match gender {
+                Gender::Male => Some(301),
+                Gender::Female => Some(401),
+            },
+            _ => None,
+        },
+        Race::Elezen => match gender {
+            Gender::Male => Some(501),
+            Gender::Female => Some(601),
+        },
+        Race::Lalafell => match gender {
+            Gender::Male => Some(501),
+            Gender::Female => Some(601),
+        },
+        Race::Miqote => match gender {
+            Gender::Male => Some(701),
+            Gender::Female => Some(801),
+        },
+        Race::Roegadyn => match gender {
+            Gender::Male => Some(901),
+            Gender::Female => Some(1001),
+        },
+        Race::AuRa => match gender {
+            Gender::Male => Some(1301),
+            Gender::Female => Some(1401),
+        },
         Race::Hrothgar => {
             match gender {
                 Gender::Male => Some(1501),
-                Gender::Female => Some(1601) // TODO: is this accurate as of dawntrail?
+                Gender::Female => Some(1601), // TODO: is this accurate as of dawntrail?
             }
         }
-        Race::Viera => {
-            match gender {
-                Gender::Male => Some(1701),
-                Gender::Female => Some(1801)
-            }
-        }
+        Race::Viera => match gender {
+            Gender::Male => Some(1701),
+            Gender::Female => Some(1801),
+        },
     }
 }
@@ -131,7 +113,7 @@ pub fn get_supported_subraces(race: Race) -> [Subrace; 2] {
     Race::Roegadyn => [Subrace::SeaWolf, Subrace::Hellsguard],
     Race::AuRa => [Subrace::Raen, Subrace::Xaela],
     Race::Hrothgar => [Subrace::Hellion, Subrace::Lost],
-    Race::Viera => [Subrace::Raen, Subrace::Veena]
+    Race::Viera => [Subrace::Raen, Subrace::Veena],
 }
 }
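
The collapsed arms preserve the same race-ID table as before. As a minimal sanity-check sketch (illustrative only, not part of this commit; the expected values are read directly off the match arms above):

    // Illustrative only: values taken from the match arms shown in the diff.
    assert_eq!(get_race_id(Race::Hyur, Subrace::Midlander, Gender::Male), Some(101));
    assert_eq!(get_race_id(Race::Viera, Subrace::Veena, Gender::Female), Some(1801));
    // A Hyur with a non-Hyur subrace falls through to None.
    assert_eq!(get_race_id(Race::Hyur, Subrace::Raen, Gender::Male), None);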

View file

@@ -5,7 +5,7 @@ use std::cmp::Ordering;
 use std::cmp::Ordering::{Greater, Less};
 use std::path::{Path, PathBuf};
 
-use crate::common::{get_platform_string, Platform, read_version};
+use crate::common::{get_platform_string, read_version, Platform};
 use crate::repository::RepositoryType::{Base, Expansion};
 
 /// The type of repository, discerning game data from expansion data.
@@ -179,13 +179,10 @@ impl Repository {
             get_platform_string(&self.platform)
         )
     }
 
     /// Calculate an index2 filename for a specific category, like _"0a0000.win32.index"_.
     pub fn index2_filename(&self, category: Category) -> String {
-        format!(
-            "{}2",
-            self.index_filename(category)
-        )
+        format!("{}2", self.index_filename(category))
     }
 
     /// Calculate a dat filename given a category and a data file id, returns something like _"0a0000.win32.dat0"_.
@@ -210,8 +207,8 @@ impl Repository {
 #[cfg(test)]
 mod tests {
-    use std::path::PathBuf;
     use crate::common::Platform;
+    use std::path::PathBuf;
 
     use super::*;
@@ -248,7 +245,10 @@ mod tests {
         assert_eq!(repo.index_filename(Category::Music), "0c0000.win32.index");
         assert_eq!(repo.index2_filename(Category::Music), "0c0000.win32.index2");
-        assert_eq!(repo.dat_filename(Category::GameScript, 1), "0b0000.win32.dat1");
+        assert_eq!(
+            repo.dat_filename(Category::GameScript, 1),
+            "0b0000.win32.dat1"
+        );
     }
 
     // TODO: We need to check if these console filenames are actually correct
@@ -263,7 +263,10 @@ mod tests {
         assert_eq!(repo.index_filename(Category::Music), "0c0000.ps3.index");
         assert_eq!(repo.index2_filename(Category::Music), "0c0000.ps3.index2");
-        assert_eq!(repo.dat_filename(Category::GameScript, 1), "0b0000.ps3.dat1");
+        assert_eq!(
+            repo.dat_filename(Category::GameScript, 1),
+            "0b0000.ps3.dat1"
+        );
     }
 
     #[test]
@@ -277,6 +280,9 @@ mod tests {
         assert_eq!(repo.index_filename(Category::Music), "0c0000.ps4.index");
         assert_eq!(repo.index2_filename(Category::Music), "0c0000.ps4.index2");
-        assert_eq!(repo.dat_filename(Category::GameScript, 1), "0b0000.ps4.dat1");
+        assert_eq!(
+            repo.dat_filename(Category::GameScript, 1),
+            "0b0000.ps4.dat1"
+        );
     }
 }

View file

@@ -3,8 +3,8 @@
 
 use std::io::{Cursor, SeekFrom};
 
-use binrw::{BinRead, binread};
 use crate::ByteSpan;
+use binrw::{binread, BinRead};
 
 #[binread]
 #[br(little, import {
@@ -24,7 +24,7 @@ pub struct ResourceParameter {
     #[br(seek_before = SeekFrom::Start(strings_offset as u64 + local_string_offset as u64))]
     #[br(count = string_length, map = | x: Vec<u8> | String::from_utf8(x).unwrap().trim_matches(char::from(0)).to_string())]
     #[br(restore_position)]
-    pub name: String
+    pub name: String,
 }
 
 #[binread]
@@ -55,7 +55,7 @@ pub struct Shader {
     #[br(seek_before = SeekFrom::Start(shader_data_offset as u64 + data_offset as u64))]
     #[br(count = data_size)]
     #[br(restore_position)]
-    pub bytecode: Vec<u8>
+    pub bytecode: Vec<u8>,
 }
 
 #[binread]
@@ -64,7 +64,7 @@ pub struct Shader {
 pub struct MaterialParameter {
     id: u32,
     byte_offset: u16,
-    byte_size: u16
+    byte_size: u16,
 }
 
 #[binread]
@@ -72,7 +72,7 @@ pub struct MaterialParameter {
 #[allow(unused)]
 pub struct Key {
     id: u32,
-    default_value: u32
+    default_value: u32,
 }
 
 #[binread]
@@ -82,7 +82,7 @@ pub struct Key {
 pub struct Pass {
     id: u32,
     vertex_shader: u32,
-    pixel_shader: u32
+    pixel_shader: u32,
 }
 
 #[binread]
@@ -90,7 +90,7 @@ pub struct Pass {
 #[allow(unused)]
 pub struct NodeAlias {
     selector: u32,
-    node: u32
+    node: u32,
 }
 
 #[binread]
@@ -115,7 +115,7 @@ pub struct Node {
     #[br(count = subview_key_count)]
     pub subview_keys: Vec<u32>,
     #[br(count = pass_count, err_context("system_key_count = {}", material_key_count))]
-    pub passes: Vec<Pass>
+    pub passes: Vec<Pass>,
 }
 
 #[binread]
@@ -183,7 +183,7 @@ pub struct ShaderPackage {
     node_selectors: Vec<(u32, u32)>,
     #[br(count = node_alias_count)]
-    node_aliases: Vec<NodeAlias>
+    node_aliases: Vec<NodeAlias>,
 }
 
 impl ShaderPackage {
@@ -215,11 +215,26 @@ impl ShaderPackage {
         None
     }
 
-    pub fn build_selector_from_all_keys(system_keys: &[u32], scene_keys: &[u32], material_keys: &[u32], subview_keys: &[u32]) -> u32 {
-        Self::build_selector_from_keys(Self::build_selector(system_keys), Self::build_selector(scene_keys), Self::build_selector(material_keys), Self::build_selector(subview_keys))
+    pub fn build_selector_from_all_keys(
+        system_keys: &[u32],
+        scene_keys: &[u32],
+        material_keys: &[u32],
+        subview_keys: &[u32],
+    ) -> u32 {
+        Self::build_selector_from_keys(
+            Self::build_selector(system_keys),
+            Self::build_selector(scene_keys),
+            Self::build_selector(material_keys),
+            Self::build_selector(subview_keys),
+        )
    }
 
-    pub fn build_selector_from_keys(system_key: u32, scene_key: u32, material_key: u32, subview_key: u32) -> u32 {
+    pub fn build_selector_from_keys(
+        system_key: u32,
+        scene_key: u32,
+        material_key: u32,
+        subview_key: u32,
+    ) -> u32 {
         Self::build_selector(&[system_key, scene_key, material_key, subview_key])
     }
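
For orientation, the two reflowed helpers compose in layers: `build_selector_from_all_keys` reduces each key category with `build_selector`, then feeds the four per-category selectors through `build_selector_from_keys`. A minimal sketch with made-up key values (a real caller would take these from a parsed ShaderPackage):

    // Hypothetical key sets; real ones come from a parsed ShaderPackage.
    let system_keys = [0x1u32, 0x2];
    let scene_keys = [0x10u32];
    let material_keys = [0x100u32, 0x200];
    let subview_keys = [0x1000u32];

    // Reduces each category, then combines the four per-category selectors.
    let selector = ShaderPackage::build_selector_from_all_keys(
        &system_keys,
        &scene_keys,
        &material_keys,
        &subview_keys,
    );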

View file

@@ -5,9 +5,9 @@
 #![allow(clippy::needless_late_init)]
 #![allow(clippy::upper_case_acronyms)]
 
-use std::io::{Cursor, SeekFrom};
-use binrw::{binread, BinRead};
 use binrw::helpers::until_eof;
+use binrw::{binread, BinRead};
+use std::io::{Cursor, SeekFrom};
 
 use crate::havok::{HavokAnimationContainer, HavokBinaryTagFileReader};
 use crate::ByteSpan;
@@ -32,7 +32,7 @@ struct SklbV2 {
     body_id: u32,
     mapper_body_id1: u32,
     mapper_body_id2: u32,
-    mapper_body_id3: u32
+    mapper_body_id3: u32,
 }
 
 #[binread]
@@ -49,7 +49,7 @@ struct SKLB {
     #[br(seek_before(SeekFrom::Start(if (version == 0x3132_3030u32) { sklb_v1.as_ref().unwrap().havok_offset as u64 } else { sklb_v2.as_ref().unwrap().havok_offset as u64 })))]
     #[br(parse_with = until_eof)]
-    raw_data: Vec<u8>
+    raw_data: Vec<u8>,
 }
 
 #[derive(Debug)]
@@ -92,9 +92,17 @@ impl Skeleton {
             skeleton.bones.push(Bone {
                 name: bone.clone(),
                 parent_index: havok_skeleton.parent_indices[index] as i32,
-                position: [havok_skeleton.reference_pose[index].translation[0], havok_skeleton.reference_pose[index].translation[1], havok_skeleton.reference_pose[index].translation[2]],
+                position: [
+                    havok_skeleton.reference_pose[index].translation[0],
+                    havok_skeleton.reference_pose[index].translation[1],
+                    havok_skeleton.reference_pose[index].translation[2],
+                ],
                 rotation: havok_skeleton.reference_pose[index].rotation,
-                scale: [havok_skeleton.reference_pose[index].scale[0], havok_skeleton.reference_pose[index].scale[1], havok_skeleton.reference_pose[index].scale[2]],
+                scale: [
+                    havok_skeleton.reference_pose[index].scale[0],
+                    havok_skeleton.reference_pose[index].scale[1],
+                    havok_skeleton.reference_pose[index].scale[2],
+                ],
             });
         }

View file

@@ -3,16 +3,16 @@
 
 use std::io::Cursor;
 
-use binrw::BinRead;
-use binrw::binrw;
 use crate::ByteSpan;
+use binrw::binrw;
+use binrw::BinRead;
 
 #[binrw]
 #[derive(Debug, Clone, Copy)]
 #[brw(little)]
 struct PlatePosition {
     x: i16,
-    y: i16
+    y: i16,
 }
 
 #[binrw]
@@ -30,18 +30,18 @@ struct TerrainHeader {
     padding: Vec<u8>,
 
     #[br(count = plate_count)]
-    positions: Vec<PlatePosition>
+    positions: Vec<PlatePosition>,
 }
 
 #[derive(Debug)]
 pub struct PlateModel {
     pub position: (f32, f32),
-    pub filename: String
+    pub filename: String,
 }
 
 #[derive(Debug)]
 pub struct Terrain {
-    pub plates: Vec<PlateModel>
+    pub plates: Vec<PlateModel>,
 }
 
 impl Terrain {
@@ -54,15 +54,15 @@ impl Terrain {
         for i in 0..header.plate_count {
             plates.push(PlateModel {
-                position: (header.plate_size as f32 * (header.positions[i as usize].x as f32 + 0.5),
-                           header.plate_size as f32 * (header.positions[i as usize].y as f32 + 0.5)),
-                filename: format!("{:04}.mdl", i)
+                position: (
+                    header.plate_size as f32 * (header.positions[i as usize].x as f32 + 0.5),
+                    header.plate_size as f32 * (header.positions[i as usize].y as f32 + 0.5),
+                ),
+                filename: format!("{:04}.mdl", i),
             })
         }
 
-        Some(Terrain {
-            plates
-        })
+        Some(Terrain { plates })
     }
 }

View file

@@ -5,11 +5,11 @@
 
 use std::io::{Cursor, Read, Seek, SeekFrom};
 
-use binrw::BinRead;
+use crate::ByteSpan;
 use binrw::binrw;
+use binrw::BinRead;
 use bitflags::bitflags;
 use texture2ddecoder::{decode_bc1, decode_bc3, decode_bc5};
-
-use crate::ByteSpan;
 
 // Attributes and Format are adapted from Lumina (https://github.com/NotAdam/Lumina/blob/master/src/Lumina/Data/Files/TexFile.cs)
 bitflags! {
@@ -94,7 +94,7 @@ impl Texture {
         let mut src = vec![0u8; buffer.len() - std::mem::size_of::<TexHeader>()];
         cursor.read_exact(src.as_mut_slice()).ok()?;
 
-        let mut dst : Vec<u8>;
+        let mut dst: Vec<u8>;
 
         match header.format {
             TextureFormat::B4G4R4A4 => {
@@ -107,7 +107,7 @@ impl Texture {
                     let short: u16 = ((src[offset] as u16) << 8) | src[offset + 1] as u16;
 
                     let src_b = short & 0xF;
-                    let src_g= (short >> 4) & 0xF;
+                    let src_g = (short >> 4) & 0xF;
                     let src_r = (short >> 8) & 0xF;
                     let src_a = (short >> 12) & 0xF;
 
@@ -124,13 +124,28 @@ impl Texture {
                 dst = src; // TODO: not correct, of course
             }
             TextureFormat::BC1 => {
-                dst = Texture::decode(&src, header.width as usize, header.height as usize, decode_bc1);
+                dst = Texture::decode(
+                    &src,
+                    header.width as usize,
+                    header.height as usize,
+                    decode_bc1,
+                );
             }
             TextureFormat::BC3 => {
-                dst = Texture::decode(&src, header.width as usize, header.height as usize, decode_bc3);
+                dst = Texture::decode(
+                    &src,
+                    header.width as usize,
+                    header.height as usize,
+                    decode_bc3,
+                );
            }
             TextureFormat::BC5 => {
-                dst = Texture::decode(&src, header.width as usize, header.height as usize, decode_bc5);
+                dst = Texture::decode(
+                    &src,
+                    header.width as usize,
+                    header.height as usize,
+                    decode_bc5,
+                );
             }
         }
@@ -143,13 +158,7 @@ impl Texture {
     fn decode(src: &[u8], width: usize, height: usize, decode_func: DecodeFunction) -> Vec<u8> {
         let mut image: Vec<u32> = vec![0; width * height];
-        decode_func(
-            src,
-            width,
-            height,
-            &mut image,
-        )
-        .unwrap();
+        decode_func(src, width, height, &mut image).unwrap();
 
         image
             .iter()
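
For context on the now single-line `decode_func` call: the `DecodeFunction` parameter has to line up with the signature of texture2ddecoder's BC decoders. A plausible alias, inferred from the call site above and the `decode_bc1`/`decode_bc3`/`decode_bc5` imports rather than quoted from this commit, would be:

    // Assumption: inferred from `decode_func(src, width, height, &mut image).unwrap()`
    // and the texture2ddecoder decoder signatures; not taken verbatim from this file.
    type DecodeFunction = fn(&[u8], usize, usize, &mut [u32]) -> Result<(), &'static str>;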

View file

@@ -1,17 +1,17 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later
 
+use hmac_sha512::Hash;
+use physis::patch::apply_patch;
 use std::env;
 use std::fs::{read, read_dir};
 use std::process::Command;
 
-use hmac_sha512::Hash;
-use physis::patch::apply_patch;
-use std::collections::HashMap;
-use std::path::{Path, PathBuf};
-
 use physis::common::Platform;
 use physis::fiin::FileInfo;
 use physis::index;
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
 
 #[test]
 #[cfg_attr(not(feature = "retail_game_testing"), ignore)]
@@ -28,9 +28,12 @@ fn test_index_read() {
 fn test_gamedata_extract() {
     let game_dir = env::var("FFXIV_GAME_DIR").unwrap();
 
-    let mut gamedata =
-        physis::gamedata::GameData::from_existing(Platform::Win32, format!("{}/game", game_dir).as_str()).unwrap();
+    let mut gamedata = physis::gamedata::GameData::from_existing(
+        Platform::Win32,
+        format!("{}/game", game_dir).as_str(),
+    )
+    .unwrap();
 
     assert!(gamedata.extract("exd/root.exl").is_some());
 }
@@ -70,34 +73,43 @@ fn make_temp_install_dir(name: &str) -> String {
 // Shamelessly taken from https://stackoverflow.com/a/76820878
 fn recurse(path: impl AsRef<Path>) -> Vec<PathBuf> {
-    let Ok(entries) = read_dir(path) else { return vec![] };
-    entries.flatten().flat_map(|entry| {
-        let Ok(meta) = entry.metadata() else { return vec![] };
-        if meta.is_dir() { return recurse(entry.path()); }
-        if meta.is_file() { return vec![entry.path()]; }
-        vec![]
-    }).collect()
+    let Ok(entries) = read_dir(path) else {
+        return vec![];
+    };
+    entries
+        .flatten()
+        .flat_map(|entry| {
+            let Ok(meta) = entry.metadata() else {
+                return vec![];
+            };
+            if meta.is_dir() {
+                return recurse(entry.path());
+            }
+            if meta.is_file() {
+                return vec![entry.path()];
+            }
+            vec![]
+        })
+        .collect()
 }
 
 #[cfg(feature = "patch_testing")]
 fn fill_dir_hash(game_dir: &str) -> HashMap<String, [u8; 64]> {
     let mut file_hashes: HashMap<String, [u8; 64]> = HashMap::new();
 
-    recurse(game_dir)
-        .into_iter()
-        .for_each(|x| {
-            let path = x.as_path();
-            let file = std::fs::read(path).unwrap();
+    recurse(game_dir).into_iter().for_each(|x| {
+        let path = x.as_path();
+        let file = std::fs::read(path).unwrap();
 
         let mut hash = Hash::new();
         hash.update(&file);
         let sha = hash.finalize();
 
        let mut rel_path = path;
        rel_path = rel_path.strip_prefix(game_dir).unwrap();
 
        file_hashes.insert(rel_path.to_str().unwrap().to_string(), sha);
    });
 
    file_hashes
 }
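
The reflowed `fill_dir_hash` is what the patch tests lean on for before/after comparisons: hash every file in a tree, apply a patch, hash again, and diff the maps. A hedged usage sketch (the directory names here are hypothetical):

    // Hypothetical usage: hash two install trees and report files that differ.
    let before = fill_dir_hash("game_clean");
    let after = fill_dir_hash("game_patched");
    for (path, hash) in &before {
        if after.get(path) != Some(hash) {
            println!("{path} differs after patching");
        }
    }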