Mirror of https://github.com/redstrate/Physis.git

Add DatHeader so we can read gear sets without libxivdat

This is limited to just that one dat type for now, but we can expand it as we
add more parsers.
Joshua Goins 2025-03-11 17:45:00 -04:00
parent ed7618a5fa
commit b61c3383b3
8 changed files with 131 additions and 71 deletions
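For orientation, a hedged sketch of what the new header parsing enables: reading a GEARSET.DAT dump through this crate alone, with no libxivdat dependency. The module path and the file path below are illustrative assumptions, not taken from this commit:

use physis::gearsets::GearSets;

fn main() {
    // Illustrative path; the real GEARSET.DAT lives in the game's user folder.
    let buffer = std::fs::read("GEARSET.DAT").expect("failed to read GEARSET.DAT");

    // from_existing() now validates the DatHeader before XOR-decoding the payload.
    let gearsets = GearSets::from_existing(&buffer).expect("failed to parse gear sets");

    // write_to_buffer() re-encodes the sets and prepends a fresh header.
    let _bytes = gearsets.write_to_buffer().expect("failed to re-encode gear sets");
}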

Binary file not shown.

src/dat.rs (new file, 26 lines)

@@ -0,0 +1,26 @@
// SPDX-FileCopyrightText: 2025 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later

use binrw::binrw;

#[binrw]
#[derive(Debug)]
#[brw(little)]
pub enum DatFileType {
    /// GEARSET.DAT
    #[brw(magic = 0x006d0005u32)]
    Gearset,
}

#[binrw]
#[derive(Debug)]
#[brw(little)]
pub struct DatHeader {
    pub file_type: DatFileType,
    pub max_size: u32,
    #[brw(pad_after = 4)] // empty bytes
    pub content_size: u32,
    #[br(temp)]
    #[bw(calc = 0xFF)]
    end_of_header: u8,
}
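The header is 17 bytes on disk: a u32 magic selecting the file type, u32 max_size, u32 content_size, four bytes of padding, then the 0xFF end-of-header marker (read as a temp field, computed on write). A minimal round-trip sketch, assuming only the definitions above; the 45205 size is the value this commit later uses for gear sets:

use binrw::{BinRead, BinWrite};
use std::io::Cursor;

fn main() {
    let header = DatHeader {
        file_type: DatFileType::Gearset,
        max_size: 45205,
        content_size: 45205,
    };

    // 4 (magic) + 4 (max_size) + 4 (content_size) + 4 (padding) + 1 (0xFF) = 17 bytes
    let mut cursor = Cursor::new(Vec::new());
    header.write_le(&mut cursor).unwrap();
    let bytes = cursor.into_inner();
    assert_eq!(bytes.len(), 17);
    assert_eq!(&bytes[0..4], &0x006d0005u32.to_le_bytes()); // the GEARSET.DAT magic
    assert_eq!(*bytes.last().unwrap(), 0xFF);

    // Reading it back recovers the sizes; end_of_header is consumed as a temp field.
    let parsed = DatHeader::read(&mut Cursor::new(&bytes)).unwrap();
    assert_eq!(parsed.content_size, 45205);
}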

@@ -8,7 +8,6 @@ use std::path::PathBuf;
use tracing::{debug, warn};
use crate::ByteBuffer;
use crate::common::{Language, Platform, read_version};
use crate::exd::EXD;
@@ -16,6 +15,7 @@ use crate::exh::EXH;
use crate::exl::EXL;
use crate::patch::{PatchError, ZiPatch};
use crate::repository::{Category, Repository, string_to_category};
use crate::sqpack::{IndexEntry, SqPackData, SqPackIndex};

/// Framework for operating on game data.
pub struct GameData {

@@ -2,6 +2,7 @@
// SPDX-License-Identifier: GPL-3.0-or-later

use crate::ByteBuffer;
use crate::dat::DatHeader;
use crate::equipment::Slot;
use binrw::NullString;
use binrw::binrw;
@@ -9,6 +10,7 @@ use binrw::{BinRead, BinWrite};
use std::collections::HashMap;
use std::io::BufWriter;
use std::io::Cursor;
use std::io::Read;

// FIXME: unclear what this is
const UNKNOWN_FLAG: u32 = 1_000_000;
@@ -223,8 +225,16 @@ const GEARSET_KEY: u8 = 0x73;
impl GearSets {
    /// Parses existing gearsets data.
    pub fn from_existing(buffer: &[u8]) -> Option<GearSets> {
        let mut cursor = Cursor::new(buffer);
        let header = DatHeader::read(&mut cursor).ok()?;

        let mut buffer = vec![0; header.content_size as usize - 1];
        cursor.read_exact(&mut buffer).ok()?;

        let decoded = buffer.iter().map(|x| *x ^ GEARSET_KEY).collect::<Vec<_>>();
        let mut cursor = Cursor::new(decoded);
        GearSets::read(&mut cursor).ok()
    }
@@ -232,15 +242,33 @@ impl GearSets {
    pub fn write_to_buffer(&self) -> Option<ByteBuffer> {
        let mut buffer = ByteBuffer::new();

        // header
        {
            let mut cursor = Cursor::new(&mut buffer);

            let header = DatHeader {
                file_type: crate::dat::DatFileType::Gearset,
                max_size: 45205,
                content_size: 45205,
            };
            header.write_le(&mut cursor).ok()?;
        }

        // buffer contents encoded
        {
            let mut cursor = Cursor::new(ByteBuffer::new());
            self.write_le(&mut cursor).ok()?;

            buffer.extend_from_slice(
                &cursor
                    .into_inner()
                    .iter()
                    .map(|x| *x ^ GEARSET_KEY)
                    .collect::<Vec<_>>(),
            );
        }

        Some(buffer)
    }
}
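Both directions rely on the same single-byte XOR, and since x ^ 0x73 ^ 0x73 == x, the encode step in write_to_buffer is exactly the inverse of the decode step in from_existing. A standalone illustration, with GEARSET_KEY copied from the diff:

const GEARSET_KEY: u8 = 0x73;

fn xor_with_key(data: &[u8]) -> Vec<u8> {
    data.iter().map(|x| *x ^ GEARSET_KEY).collect()
}

fn main() {
    let plain = b"gear set payload".to_vec();
    let encoded = xor_with_key(&plain);
    assert_ne!(encoded, plain);
    // Applying the same transform again restores the original bytes.
    assert_eq!(xor_with_key(&encoded), plain);
}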

@@ -152,3 +152,6 @@ pub mod existing_dirs;
pub mod patchlist;

mod bcn;

/// Reading the binary .dat files in the user folder (e.g. GEARSET.dat)
pub mod dat;

@@ -46,17 +46,17 @@ struct TextureLodBlock {
}

pub trait AnyNumberType<'a>:
    BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static
{
}

impl<'a, T> AnyNumberType<'a> for T where
    T: BinRead<Args<'a> = ()>
        + BinWrite<Args<'a> = ()>
        + std::ops::AddAssign
        + Copy
        + Default
        + 'static
{
}
@@ -205,8 +205,8 @@ impl SqPackData {
    /// If the block of data is successfully parsed, it returns the file data - otherwise is None.
    pub fn read_from_offset(&mut self, offset: u64) -> Option<ByteBuffer> {
        self.file
            .seek(SeekFrom::Start(offset))
            .expect("Unable to find offset in file.");

        let file_info = FileInfo::read(&mut self.file).ok()?;
@@ -272,10 +272,10 @@ impl SqPackData {
        buffer.seek(SeekFrom::Start(0x44)).ok()?;

        self.file
            .seek(SeekFrom::Start(
                base_offset + (model_file_info.offset.stack_size as u64),
            ))
            .ok()?;

        // read from stack blocks
        let mut read_model_blocks = |offset: u64, size: usize| -> Option<u64> {
@@ -285,15 +285,15 @@ impl SqPackData {
            let last_pos = &self.file.stream_position().ok()?;

            let data =
                read_data_block(&self.file, *last_pos).expect("Unable to read block data.");

            // write to buffer
            buffer.write_all(data.as_slice()).ok()?;

            self.file
                .seek(SeekFrom::Start(
                    last_pos + (compressed_block_sizes[current_block] as u64),
                ))
                .ok()?;

            current_block += 1;
        }
@@ -310,43 +310,43 @@ impl SqPackData {
        )? as u32;

        let mut process_model_data =
            |i: usize,
             size: u32,
             offset: u32,
             offsets: &mut [u32; 3],
             data_sizes: &mut [u32; 3]| {
                if size != 0 {
                    let current_vertex_offset = buffer.position() as u32;
                    if i == 0 || current_vertex_offset != offsets[i - 1] {
                        offsets[i] = current_vertex_offset;
                    } else {
                        offsets[i] = 0;
                    }

                    self.file
                        .seek(SeekFrom::Start(base_offset + (offset as u64)))
                        .ok();

                    for _ in 0..size {
                        let last_pos = self.file.stream_position().unwrap();

                        let data = read_data_block(&self.file, last_pos)
                            .expect("Unable to read raw model block!");

                        buffer
                            .write_all(data.as_slice())
                            .expect("Unable to write to memory buffer!");

                        data_sizes[i] += data.len() as u32;
                        self.file
                            .seek(SeekFrom::Start(
                                last_pos + (compressed_block_sizes[current_block] as u64),
                            ))
                            .expect("Unable to seek properly.");
                        current_block += 1;
                    }
                }
            };

        // process all 3 lods
        for i in 0..3 {
@@ -405,8 +405,8 @@ impl SqPackData {
        let original_pos = self.file.stream_position().ok()?;

        self.file
            .seek(SeekFrom::Start(offset + file_info.size as u64))
            .ok()?;

        let mut header = vec![0u8; texture_file_info.lods[0].compressed_offset as usize];
        self.file.read_exact(&mut header).ok()?;
@@ -418,9 +418,9 @@ impl SqPackData {
        for i in 0..texture_file_info.num_blocks {
            let mut running_block_total = (texture_file_info.lods[i as usize].compressed_offset
                as u64)
                + offset
                + (file_info.size as u64);

            for _ in 0..texture_file_info.lods[i as usize].block_count {
                let original_pos = self.file.stream_position().ok()?;

@@ -87,8 +87,8 @@ impl BinRead for FileEntryData {
        let data = <u32>::read_options(reader, endian, ())?;
        Ok(Self {
            is_synonym: (data & 0b1) == 0b1,
            data_file_id: ((data & 0b1110) >> 1) as u8,
            offset: (data & !0xF) as u64 * 0x08,
        })
    }
}
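As an aside on the packed format above: FileEntryData packs three fields into one u32: bit 0 is the synonym flag, bits 1 through 3 are the data file id, and the remaining bits (low nibble masked off) are an offset counted in 8-byte units. A quick standalone check of that unpacking, using a made-up example value:

fn main() {
    let data: u32 = 0x0000_1235; // hypothetical raw entry, not from the repository
    let is_synonym = (data & 0b1) == 0b1;
    let data_file_id = ((data & 0b1110) >> 1) as u8;
    let offset = (data & !0xF) as u64 * 0x08;

    assert!(is_synonym); // bit 0 is set
    assert_eq!(data_file_id, 2); // bits 1-3 are 0b010
    assert_eq!(offset, 0x1230 * 0x08); // offset is stored in 8-byte units
}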
@@ -265,7 +265,7 @@ mod tests {
        let mut cursor = Cursor::new(&data);

        let file_entry =
            FileEntry::read_options(&mut cursor, Endian::Little, (&IndexType::Index1,)).unwrap();

        let expected_hash = Hash::SplitPath {
            name: 475005679,
@@ -281,8 +281,8 @@ mod tests {
        {
            let mut write_cursor = Cursor::new(&mut new_data);
            file_entry
                .write_options(&mut write_cursor, Endian::Little, (&IndexType::Index1,))
                .unwrap();
        }

        assert_eq!(new_data, data);

@@ -16,7 +16,7 @@ mod db;
pub use db::SqPackDatabase;

mod index;
pub use index::{IndexEntry, SqPackIndex};

/// The type of this SqPack file.
#[binrw]
@@ -57,7 +57,10 @@ pub(crate) struct SqPackHeader {
    sha1_hash: [u8; 20],
}

pub(crate) fn read_data_block<T: Read + Seek>(
    mut buf: T,
    starting_position: u64,
) -> Option<Vec<u8>> {
    buf.seek(SeekFrom::Start(starting_position)).ok()?;

    let block_header = BlockHeader::read(&mut buf).unwrap();
@@ -96,7 +99,7 @@ pub(crate) fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>>
            decompressed_length,
        } => {
            let compressed_length: usize =
                ((compressed_length as usize + 143) & 0xFFFFFF80) - (block_header.size as usize);

            let mut compressed_data: Vec<u8> = vec![0; compressed_length];
            buf.read_exact(&mut compressed_data).ok()?;
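The +143 constant reads oddly but looks like alignment arithmetic: assuming the usual 16-byte block header, (x + 143) & 0xFFFFFF80 rounds x + 16 up to the next multiple of 128, so header plus compressed payload stay 128-byte aligned. A quick check of that reading of the formula:

fn padded_read_length(compressed_length: usize, header_size: usize) -> usize {
    ((compressed_length + 143) & 0xFFFFFF80) - header_size
}

fn main() {
    // 16-byte header + returned payload length lands on a 128-byte boundary.
    assert_eq!(padded_read_length(100, 16), 112); // 16 + 112 = 128
    assert_eq!(padded_read_length(200, 16), 240); // 16 + 240 = 256
}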