Add DatHeader so we can read gear sets without libxivdat
This is limited to just that one dat type for now, but we can expand it as we add more parsers.
parent ed7618a5fa
commit b61c3383b3

8 changed files with 131 additions and 71 deletions
Binary file not shown.
src/dat.rs (new file, +26)

@@ -0,0 +1,26 @@
+// SPDX-FileCopyrightText: 2025 Joshua Goins <josh@redstrate.com>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+use binrw::binrw;
+
+#[binrw]
+#[derive(Debug)]
+#[brw(little)]
+pub enum DatFileType {
+    /// GEARSET.DAT
+    #[brw(magic = 0x006d0005u32)]
+    Gearset,
+}
+
+#[binrw]
+#[derive(Debug)]
+#[brw(little)]
+pub struct DatHeader {
+    pub file_type: DatFileType,
+    pub max_size: u32,
+    #[brw(pad_after = 4)] // empty bytes
+    pub content_size: u32,
+    #[br(temp)]
+    #[bw(calc = 0xFF)]
+    end_of_header: u8,
+}
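Per the struct above, the header occupies 17 bytes: the u32 magic at 0x00 (0x006d0005 for gear sets), max_size at 0x04, content_size at 0x08 followed by four padding bytes, then a single 0xFF terminator at 0x10. For orientation, a minimal sketch of parsing it from outside the crate; the physis::dat import path follows the pub mod dat export added below in src/lib.rs, and the file path and error handling are illustrative only:

use binrw::BinRead;
use physis::dat::DatHeader; // module path assumed from the src/lib.rs hunk below
use std::fs::File;
use std::io::BufReader;

fn main() -> std::io::Result<()> {
    // Illustrative path; GEARSET.DAT normally sits in the game's user folder.
    let file = File::open("GEARSET.DAT")?;
    let mut reader = BufReader::new(file);

    // #[binrw] + #[brw(little)] derive a read() impl for DatHeader, and the
    // #[brw(magic = ...)] attribute makes parsing fail on unknown .dat types.
    let header = DatHeader::read(&mut reader).expect("not a recognized .dat file");
    println!("{:?}: {} bytes of content", header.file_type, header.content_size);
    Ok(())
}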
@@ -8,7 +8,6 @@ use std::path::PathBuf;
use tracing::{debug, warn};

-use crate::sqpack::{IndexEntry, SqPackData, SqPackIndex};
use crate::ByteBuffer;
use crate::common::{Language, Platform, read_version};
use crate::exd::EXD;
@@ -16,6 +15,7 @@ use crate::exh::EXH;
use crate::exl::EXL;
use crate::patch::{PatchError, ZiPatch};
use crate::repository::{Category, Repository, string_to_category};
+use crate::sqpack::{IndexEntry, SqPackData, SqPackIndex};

/// Framework for operating on game data.
pub struct GameData {
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: GPL-3.0-or-later

use crate::ByteBuffer;
+use crate::dat::DatHeader;
use crate::equipment::Slot;
use binrw::NullString;
use binrw::binrw;
@@ -9,6 +10,7 @@ use binrw::{BinRead, BinWrite};
use std::collections::HashMap;
use std::io::BufWriter;
use std::io::Cursor;
+use std::io::Read;

// FIXME: unclear what this is
const UNKNOWN_FLAG: u32 = 1_000_000;
@@ -223,8 +225,16 @@ const GEARSET_KEY: u8 = 0x73;
impl GearSets {
    /// Parses existing gearsets data.
    pub fn from_existing(buffer: &[u8]) -> Option<GearSets> {
+        let mut cursor = Cursor::new(buffer);
+
+        let header = DatHeader::read(&mut cursor).ok()?;
+
+        let mut buffer = vec![0; header.content_size as usize - 1];
+        cursor.read_exact(&mut buffer).ok()?;
+
        let decoded = buffer.iter().map(|x| *x ^ GEARSET_KEY).collect::<Vec<_>>();
        let mut cursor = Cursor::new(decoded);

        GearSets::read(&mut cursor).ok()
    }
}
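The decode step above is plain XOR obfuscation: every payload byte is XORed with GEARSET_KEY (0x73, per the hunk context). XOR with a fixed key is its own inverse, so the same operation encodes and decodes. A standalone sketch of the scheme; xor_payload is an illustrative helper name, not part of the commit:

/// XOR every byte with a fixed key. Applying it twice returns the input,
/// which is why reading and writing can share one scheme.
fn xor_payload(data: &[u8], key: u8) -> Vec<u8> {
    data.iter().map(|x| *x ^ key).collect()
}

fn main() {
    let plain = b"gear set payload";
    let encoded = xor_payload(plain, 0x73);
    assert_eq!(xor_payload(&encoded, 0x73), plain);
}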
@@ -232,15 +242,33 @@ impl GearSets {
    pub fn write_to_buffer(&self) -> Option<ByteBuffer> {
        let mut buffer = ByteBuffer::new();

        // header
        {
-            let cursor = Cursor::new(&mut buffer);
-            let mut writer = BufWriter::new(cursor);
+            let mut cursor = Cursor::new(&mut buffer);

-            self.write_le(&mut writer).ok()?;
+            let header = DatHeader {
+                file_type: crate::dat::DatFileType::Gearset,
+                max_size: 45205,
+                content_size: 45205,
+            };
+            header.write_le(&mut cursor).ok()?
        }

-        let encoded = buffer.iter().map(|x| *x ^ GEARSET_KEY).collect::<Vec<_>>();
-        Some(encoded)
+        // buffer contents encoded
+        {
+            let mut cursor = Cursor::new(ByteBuffer::new());
+            self.write_le(&mut cursor).ok()?;
+
+            buffer.extend_from_slice(
+                &cursor
+                    .into_inner()
+                    .iter()
+                    .map(|x| *x ^ GEARSET_KEY)
+                    .collect::<Vec<_>>(),
+            );
+        }
+
+        Some(buffer)
    }
}
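Together the two methods form a header-aware round trip: parse strips the DatHeader and XOR-decodes the payload, write emits a fresh header followed by the re-encoded payload. Note that write_to_buffer hardcodes max_size and content_size to 45205, which reads like GEARSET.DAT's fixed size; that is an observation from the diff, not a documented constant. A usage sketch, with the physis::gearsets path and file handling assumed for illustration:

use physis::gearsets::GearSets; // module path assumed from this diff

fn main() {
    let data = std::fs::read("GEARSET.DAT").expect("failed to read GEARSET.DAT");

    // Reads the DatHeader, XOR-decodes content_size bytes, then parses the sets.
    let gearsets = GearSets::from_existing(&data).expect("failed to parse gear sets");

    // Writes a fresh DatHeader followed by the re-encoded payload.
    let buffer = gearsets.write_to_buffer().expect("failed to serialize");
    std::fs::write("GEARSET.DAT.out", buffer).expect("failed to write");
}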
@@ -152,3 +152,6 @@ pub mod existing_dirs;
pub mod patchlist;

mod bcn;
+
+/// Reading the binary .dat files in the user folder (e.g. GEARSET.dat)
+pub mod dat;
@@ -46,17 +46,17 @@ struct TextureLodBlock {
}

pub trait AnyNumberType<'a>:
    BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static
{
}

impl<'a, T> AnyNumberType<'a> for T where
    T: BinRead<Args<'a> = ()>
        + BinWrite<Args<'a> = ()>
        + std::ops::AddAssign
        + Copy
        + Default
        + 'static
{
}
@@ -205,8 +205,8 @@ impl SqPackData {
    /// If the block of data is successfully parsed, it returns the file data - otherwise is None.
    pub fn read_from_offset(&mut self, offset: u64) -> Option<ByteBuffer> {
        self.file
            .seek(SeekFrom::Start(offset))
            .expect("Unable to find offset in file.");

        let file_info = FileInfo::read(&mut self.file).ok()?;
@@ -272,10 +272,10 @@ impl SqPackData {
        buffer.seek(SeekFrom::Start(0x44)).ok()?;

        self.file
            .seek(SeekFrom::Start(
                base_offset + (model_file_info.offset.stack_size as u64),
            ))
            .ok()?;

        // read from stack blocks
        let mut read_model_blocks = |offset: u64, size: usize| -> Option<u64> {
@@ -285,15 +285,15 @@ impl SqPackData {
            let last_pos = &self.file.stream_position().ok()?;

            let data =
                read_data_block(&self.file, *last_pos).expect("Unable to read block data.");
            // write to buffer
            buffer.write_all(data.as_slice()).ok()?;

            self.file
                .seek(SeekFrom::Start(
                    last_pos + (compressed_block_sizes[current_block] as u64),
                ))
                .ok()?;
            current_block += 1;
        }
@@ -310,43 +310,43 @@ impl SqPackData {
        )? as u32;

        let mut process_model_data =
            |i: usize,
             size: u32,
             offset: u32,
             offsets: &mut [u32; 3],
             data_sizes: &mut [u32; 3]| {
                if size != 0 {
                    let current_vertex_offset = buffer.position() as u32;
                    if i == 0 || current_vertex_offset != offsets[i - 1] {
                        offsets[i] = current_vertex_offset;
                    } else {
                        offsets[i] = 0;
                    }

                    self.file
                        .seek(SeekFrom::Start(base_offset + (offset as u64)))
                        .ok();

                    for _ in 0..size {
                        let last_pos = self.file.stream_position().unwrap();

                        let data = read_data_block(&self.file, last_pos)
                            .expect("Unable to read raw model block!");

                        buffer
                            .write_all(data.as_slice())
                            .expect("Unable to write to memory buffer!");

                        data_sizes[i] += data.len() as u32;
                        self.file
                            .seek(SeekFrom::Start(
                                last_pos + (compressed_block_sizes[current_block] as u64),
                            ))
                            .expect("Unable to seek properly.");
                        current_block += 1;
                    }
                }
            };

        // process all 3 lods
        for i in 0..3 {
@@ -405,8 +405,8 @@ impl SqPackData {
        let original_pos = self.file.stream_position().ok()?;

        self.file
            .seek(SeekFrom::Start(offset + file_info.size as u64))
            .ok()?;

        let mut header = vec![0u8; texture_file_info.lods[0].compressed_offset as usize];
        self.file.read_exact(&mut header).ok()?;
@@ -418,9 +418,9 @@ impl SqPackData {

        for i in 0..texture_file_info.num_blocks {
            let mut running_block_total = (texture_file_info.lods[i as usize].compressed_offset
                as u64)
                + offset
                + (file_info.size as u64);

            for _ in 0..texture_file_info.lods[i as usize].block_count {
                let original_pos = self.file.stream_position().ok()?;
@@ -87,8 +87,8 @@ impl BinRead for FileEntryData {
        let data = <u32>::read_options(reader, endian, ())?;
        Ok(Self {
            is_synonym: (data & 0b1) == 0b1,
            data_file_id: ((data & 0b1110) >> 1) as u8,
            offset: (data & !0xF) as u64 * 0x08,
        })
    }
}
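The unpacking above reads three fields out of one u32: bit 0 is the synonym flag, bits 1-3 are the data file id, and the remaining bits (low nibble masked off) hold the offset pre-divided by 8. A sketch of the inverse operation; pack_file_entry is an illustrative helper derived from the masks in the diff, not part of the codebase:

/// Repack the three FileEntryData fields into the on-disk u32.
/// Mirrors the masks in the BinRead impl above; offsets must be
/// multiples of 0x80 so that (offset / 8) has a clear low nibble.
fn pack_file_entry(is_synonym: bool, data_file_id: u8, offset: u64) -> u32 {
    ((offset / 0x08) as u32 & !0xF) | ((data_file_id as u32 & 0b111) << 1) | (is_synonym as u32)
}

fn main() {
    let data = pack_file_entry(false, 2, 0x800);
    // Round-trip against the decode expressions from the diff:
    assert_eq!(data & 0b1, 0);
    assert_eq!(((data & 0b1110) >> 1) as u8, 2);
    assert_eq!((data & !0xF) as u64 * 0x08, 0x800);
}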
@@ -265,7 +265,7 @@ mod tests {
        let mut cursor = Cursor::new(&data);

        let file_entry =
            FileEntry::read_options(&mut cursor, Endian::Little, (&IndexType::Index1,)).unwrap();

        let expected_hash = Hash::SplitPath {
            name: 475005679,
@@ -281,8 +281,8 @@ mod tests {
        {
            let mut write_cursor = Cursor::new(&mut new_data);
            file_entry
                .write_options(&mut write_cursor, Endian::Little, (&IndexType::Index1,))
                .unwrap();
        }

        assert_eq!(new_data, data);
@@ -16,7 +16,7 @@ mod db;
pub use db::SqPackDatabase;

mod index;
-pub use index::{SqPackIndex, IndexEntry};
+pub use index::{IndexEntry, SqPackIndex};

/// The type of this SqPack file.
#[binrw]
@@ -57,7 +57,10 @@ pub(crate) struct SqPackHeader {
    sha1_hash: [u8; 20],
}

-pub(crate) fn read_data_block<T: Read + Seek>(mut buf: T, starting_position: u64) -> Option<Vec<u8>> {
+pub(crate) fn read_data_block<T: Read + Seek>(
+    mut buf: T,
+    starting_position: u64,
+) -> Option<Vec<u8>> {
    buf.seek(SeekFrom::Start(starting_position)).ok()?;

    let block_header = BlockHeader::read(&mut buf).unwrap();
@@ -96,7 +99,7 @@ pub(crate) fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>>
            decompressed_length,
        } => {
            let compressed_length: usize =
                ((compressed_length as usize + 143) & 0xFFFFFF80) - (block_header.size as usize);

            let mut compressed_data: Vec<u8> = vec![0; compressed_length];
            buf.read_exact(&mut compressed_data).ok()?;
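The size expression above is alignment arithmetic: (x + 143) & 0xFFFFFF80 rounds x + 16 up to the next multiple of 128, and the block header's size is then subtracted to get the bytes still to be read. A standalone sketch; the helper name and the reading of 143 as 127 plus a 16-byte block header are assumptions, only the constants come from the diff:

/// Round the compressed length up to the file's 128-byte block padding,
/// then drop the header bytes that were already consumed.
fn padded_payload_len(compressed_length: usize, block_header_size: usize) -> usize {
    ((compressed_length + 143) & 0xFFFF_FF80) - block_header_size
}

fn main() {
    // (100 + 143) & !0x7F = 128; minus a 16-byte header leaves 112 bytes.
    assert_eq!(padded_payload_len(100, 16), 112);
    // An already-aligned payload still lands on the same 128-byte slot.
    assert_eq!(padded_payload_len(112, 16), 112);
}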