Mirror of https://github.com/redstrate/Physis.git
Support chunked dat/index files

Needed for grabbing later expansion content that's split between chunks.

commit a1a50de62e (parent 8ecbd74283)

4 changed files with 159 additions and 106 deletions
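
Note: "chunks" here means that SqPack can split a single repository/category across several numbered index/dat sets (e.g. 040400.win32.index, 040401.win32.index, ..., each with its own dat files, going by the filename format used further down in this diff). The new get_index_filenames() below simply enumerates candidate chunk numbers and lets the ones that don't exist fall out when the index file fails to load. The standalone sketch that follows is not the Physis API; the install path, repository name, category prefix and chunk cap are illustrative assumptions, and it only probes the filesystem to show that on-disk layout.

use std::path::{Path, PathBuf};

// Illustrative sketch only (not the Physis API): probe which chunk files actually
// exist on disk for one category prefix, stopping at the first missing chunk.
// The prefix is "{category:02x}{expansion:02}", matching the format in this commit.
fn existing_chunks(sqpack_dir: &Path, repo: &str, prefix: &str, platform: &str) -> Vec<PathBuf> {
    let mut found = Vec::new();
    for chunk in 0u8..=255 {
        // e.g. "040400.win32.index", "040401.win32.index", ...
        let name = format!("{prefix}{chunk:02}.{platform}.index");
        let path = sqpack_dir.join(repo).join(&name);
        if !path.exists() {
            break;
        }
        found.push(path);
    }
    found
}

fn main() {
    // Hypothetical install location; "0404" = category 0x04 + expansion 04.
    let chunks = existing_chunks(Path::new("/path/to/game/sqpack"), "ex4", "0404", "win32");
    println!("category is split across {} chunk(s)", chunks.len());
}
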
@@ -201,9 +201,7 @@ impl DatFile {
     /// by the function.
     ///
     /// If the block of data is successfully parsed, it returns the file data - otherwise is None.
-    pub fn read_from_offset(&mut self, offset: u32) -> Option<ByteBuffer> {
-        let offset = (offset * 0x80) as u64;
+    pub fn read_from_offset(&mut self, offset: u64) -> Option<ByteBuffer> {
        self.file
            .seek(SeekFrom::Start(offset))
            .expect("Unable to find offset in file.");
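
Note: this signature change moves the `* 0x80` scaling out of DatFile. read_from_offset() now receives an absolute byte offset as u64, which the index-entry decoding already produced (see the `(data & !0xF) * 0x08` calc in src/index.rs further down). A minimal sketch of the caller's side, with a hypothetical dat path; this is not the Physis DatFile type:

use std::fs::File;
use std::io::{Seek, SeekFrom};

// Minimal sketch, not the Physis DatFile API: the caller hands over an absolute byte
// offset as u64, so no `offset * 0x80` scaling happens at seek time anymore.
fn seek_to_entry(file: &mut File, byte_offset: u64) -> std::io::Result<u64> {
    file.seek(SeekFrom::Start(byte_offset))
}

fn main() -> std::io::Result<()> {
    // Hypothetical dat file path; the offset would normally come from a decoded index entry.
    let mut file = File::open("/path/to/0a0000.win32.dat0")?;
    let pos = seek_to_entry(&mut file, 0x1234 * 0x80)?; // raw offset field 0x1234 -> byte 0x91A00
    println!("seeked to {pos:#x}");
    Ok(())
}
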
src/gamedata.rs | 121

@@ -13,7 +13,7 @@ use crate::dat::DatFile;
 use crate::exd::EXD;
 use crate::exh::EXH;
 use crate::exl::EXL;
-use crate::index::{Index2File, IndexFile, IndexHashBitfield};
+use crate::index::{Index2File, IndexEntry, IndexFile};
 use crate::patch::{apply_patch, PatchError};
 use crate::repository::{string_to_category, Category, Repository};
 use crate::ByteBuffer;
@@ -127,14 +127,14 @@ impl GameData {
         self.repositories.sort();
     }

-    fn get_dat_file(&self, path: &str, data_file_id: u32) -> Option<DatFile> {
+    fn get_dat_file(&self, path: &str, chunk: u8, data_file_id: u32) -> Option<DatFile> {
         let (repository, category) = self.parse_repository_category(path).unwrap();

         let dat_path: PathBuf = [
             self.game_directory.clone(),
             "sqpack".to_string(),
             repository.name.clone(),
-            repository.dat_filename(category, data_file_id),
+            repository.dat_filename(chunk, category, data_file_id),
         ]
         .iter()
         .collect();
@@ -161,17 +161,7 @@ impl GameData {
             return false;
         };

-        self.cache_index_file((&index_path.0, &index_path.1));
-
-        if let Some(index_file) = self.get_index_file(&index_path.0) {
-            return index_file.exists(path);
-        }
-
-        if let Some(index2_file) = self.get_index2_file(&index_path.1) {
-            return index2_file.exists(path);
-        }
-
-        false
+        return self.find_entry(path).is_some();
     }

     /// Extracts the file located at `path`. This is returned as an in-memory buffer, and will usually
@@ -194,10 +184,10 @@ impl GameData {

         let slice = self.find_entry(path);
         match slice {
-            Some(entry) => {
-                let mut dat_file = self.get_dat_file(path, entry.data_file_id().into())?;
+            Some((entry, chunk)) => {
+                let mut dat_file = self.get_dat_file(path, chunk, entry.data_file_id.into())?;

-                dat_file.read_from_offset(entry.offset())
+                dat_file.read_from_offset(entry.offset as u64)
             }
             None => None,
         }
@@ -206,45 +196,55 @@ impl GameData {
     /// Parses a path structure and spits out the corresponding category and repository.
     fn parse_repository_category(&self, path: &str) -> Option<(&Repository, Category)> {
         let tokens: Vec<&str> = path.split('/').collect(); // TODO: use split_once here
-        let repository_token = tokens[0];
+
+        if tokens.len() < 2 {
+            return None;
+        }
+
+        let repository_token = tokens[1];

         for repository in &self.repositories {
             if repository.name == repository_token {
-                return Some((repository, string_to_category(tokens[1])?));
+                return Some((repository, string_to_category(tokens[0])?));
             }
         }

         Some((&self.repositories[0], string_to_category(tokens[0])?))
     }

-    fn get_index_filenames(&self, path: &str) -> Option<(String, String)> {
+    fn get_index_filenames(&self, path: &str) -> Option<(Vec<(String, u8)>, Vec<(String, u8)>)> {
         let (repository, category) = self.parse_repository_category(path)?;

-        let index_path: PathBuf = [
-            &self.game_directory,
-            "sqpack",
-            &repository.name,
-            &repository.index_filename(category),
-        ]
-        .iter()
-        .collect();
+        let mut index1_filenames = vec![];
+        let mut index2_filenames = vec![];

-        let index2_path: PathBuf = [
-            &self.game_directory,
-            "sqpack",
-            &repository.name,
-            &repository.index2_filename(category),
-        ]
-        .iter()
-        .collect();
+        for chunk in 0..255 {
+            let index_path: PathBuf = [
+                &self.game_directory,
+                "sqpack",
+                &repository.name,
+                &repository.index_filename(chunk, category),
+            ]
+            .iter()
+            .collect();
+
+            index1_filenames.push((index_path.into_os_string().into_string().unwrap(), chunk));
+
+            let index2_path: PathBuf = [
+                &self.game_directory,
+                "sqpack",
+                &repository.name,
+                &repository.index2_filename(chunk, category),
+            ]
+            .iter()
+            .collect();
+
+            index2_filenames.push((index2_path.into_os_string().into_string().unwrap(), chunk));
+        }

         Some((
-            index_path.into_os_string().into_string().unwrap(),
-            index2_path.into_os_string().into_string().unwrap(),
+            index1_filenames,
+            index2_filenames
         ))
     }
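
Note: the reworked parser reads a game path as "<category>/<maybe repository>/…": the second component is matched against the known repositories (ex1, ex2, ...), and anything else falls back to the first (base) repository. A standalone sketch of that convention follows; the crude starts_with("ex") check, the "ffxiv" fallback name and the sample paths are illustrative stand-ins, not the real matching against Repository names:

// Standalone sketch of the path convention the parser above relies on (names here
// are illustrative, not the Physis API): the first path component is the category
// folder, and the second component names an expansion repository when it looks like "exN".
fn split_category_repository(path: &str) -> Option<(&str, &str)> {
    let mut parts = path.split('/');
    let category = parts.next()?;
    let second = parts.next()?;

    // "bg/ex1/..." lives in the "ex1" repository; "exd/root.exl" falls back to the base repo.
    let repository = if second.starts_with("ex") { second } else { "ffxiv" };
    Some((category, repository))
}

fn main() {
    assert_eq!(split_category_repository("exd/root.exl"), Some(("exd", "ffxiv")));
    assert_eq!(
        split_category_repository("bg/ex1/01_sea_s1/fld/s1f1/level/bg.lgb"),
        Some(("bg", "ex1"))
    );
}
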
@@ -397,17 +397,19 @@ impl GameData {
         Ok(())
     }

-    fn cache_index_file(&mut self, filenames: (&str, &str)) {
-        if !self.index_files.contains_key(filenames.0) {
-            if let Some(index_file) = IndexFile::from_existing(filenames.0) {
-                self.index_files.insert(filenames.0.to_string(), index_file);
+    fn cache_index_file(&mut self, filename: &str) {
+        if !self.index_files.contains_key(filename) {
+            if let Some(index_file) = IndexFile::from_existing(filename) {
+                self.index_files.insert(filename.to_string(), index_file);
             }
         }
+    }

-        if !self.index2_files.contains_key(filenames.1) {
-            if let Some(index_file) = Index2File::from_existing(filenames.1) {
+    fn cache_index2_file(&mut self, filename: &str) {
+        if !self.index2_files.contains_key(filename) {
+            if let Some(index_file) = Index2File::from_existing(filename) {
                 self.index2_files
-                    .insert(filenames.1.to_string(), index_file);
+                    .insert(filename.to_string(), index_file);
             }
         }
     }
@@ -420,25 +422,26 @@ impl GameData {
         self.index2_files.get(filename)
     }

-    fn find_entry(&mut self, path: &str) -> Option<IndexHashBitfield> {
-        let index_path = self.get_index_filenames(path)?;
-        debug!(
-            "Trying index files {index_path}, {index2_path}",
-            index_path = index_path.0,
-            index2_path = index_path.1
-        );
+    fn find_entry(&mut self, path: &str) -> Option<(IndexEntry, u8)> {
+        let (index_paths, index2_paths) = self.get_index_filenames(path)?;

-        self.cache_index_file((&index_path.0, &index_path.1));
+        for (index_path, chunk) in index_paths {
+            self.cache_index_file(&index_path);

-        if let Some(index_file) = self.get_index_file(&index_path.0) {
-            if let Some(entry) = index_file.find_entry(path) {
-                return Some(entry.bitfield);
+            if let Some(index_file) = self.get_index_file(&index_path) {
+                if let Some(entry) = index_file.find_entry(path) {
+                    return Some((entry, chunk));
+                }
             }
         }

-        if let Some(index2_file) = self.get_index2_file(&index_path.1) {
-            if let Some(entry) = index2_file.find_entry(path) {
-                return Some(entry.bitfield);
+        for (index2_path, chunk) in index2_paths {
+            self.cache_index2_file(&index2_path);
+
+            if let Some(index_file) = self.get_index2_file(&index2_path) {
+                if let Some(entry) = index_file.find_entry(path) {
+                    return Some((entry, chunk));
+                }
             }
         }
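
Note: find_entry() now reports which chunk produced the hit alongside the entry, so extract() above can open that chunk's dat file instead of always using chunk 0. A simplified sketch of that probe loop follows; Entry and the lookup closure are placeholders, not the real Physis IndexFile/IndexEntry types:

// Simplified sketch of the chunk-probing idea: try each chunk's index in order and
// keep the chunk number with the hit, so the matching dat file of that same chunk
// can be opened afterwards.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Entry {
    data_file_id: u8,
    offset: u64,
}

fn find_in_chunks<F>(chunks: &[(String, u8)], lookup: F) -> Option<(Entry, u8)>
where
    F: Fn(&str) -> Option<Entry>,
{
    for (index_path, chunk) in chunks {
        if let Some(entry) = lookup(index_path.as_str()) {
            return Some((entry, *chunk));
        }
    }
    None
}

fn main() {
    // Pretend the file we're after only exists in chunk 1.
    let chunks = vec![
        ("0a0000.win32.index".to_string(), 0u8),
        ("0a0001.win32.index".to_string(), 1u8),
    ];
    let hit = find_in_chunks(&chunks, |path| {
        path.ends_with("0a0001.win32.index")
            .then_some(Entry { data_file_id: 0, offset: 0x91A00 })
    });
    assert_eq!(hit, Some((Entry { data_file_id: 0, offset: 0x91A00 }, 1)));
}
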
src/index.rs | 99

@@ -12,9 +12,8 @@ use binrw::BinRead;
 use modular_bitfield::prelude::*;

 #[binrw]
-#[br(magic = b"SqPack")]
+#[br(magic = b"SqPack\0\0")]
 pub struct SqPackHeader {
-    #[br(pad_before = 2)]
     platform_id: Platform,
     #[br(pad_before = 3)]
     size: u32,
@@ -25,26 +24,48 @@ pub struct SqPackHeader {
 #[binrw]
 pub struct SqPackIndexHeader {
     size: u32,
-    file_type: u32,
+    version: u32,
     index_data_offset: u32,
     index_data_size: u32,
-}
-
-#[bitfield]
-#[binrw]
-#[br(map = Self::from_bytes)]
-#[derive(Clone, Copy, Debug)]
-pub struct IndexHashBitfield {
-    pub size: B1,
-    pub data_file_id: B3,
-    pub offset: B28,
+    index_data_hash: [u8; 64],
+    number_of_data_file: u32,
+    synonym_data_offset: u32,
+    synonym_data_size: u32,
+    synonym_data_hash: [u8; 64],
+    empty_block_data_offset: u32,
+    empty_block_data_size: u32,
+    empty_block_data_hash: [u8; 64],
+    dir_index_data_offset: u32,
+    dir_index_data_size: u32,
+    dir_index_data_hash: [u8; 64],
+    index_type: u32,
+    #[br(pad_before = 656)]
+    self_hash: [u8; 64]
 }

 #[binrw]
 pub struct IndexHashTableEntry {
     pub hash: u64,
-    #[br(pad_after = 4)]
-    pub(crate) bitfield: IndexHashBitfield,
+
+    #[br(temp)]
+    #[bw(ignore)]
+    data: u32,
+
+    #[br(temp)]
+    #[bw(ignore)]
+    padding: u32,
+
+    #[br(calc = (data & 0b1) == 0b1)]
+    #[bw(ignore)]
+    pub is_synonym: bool,
+
+    #[br(calc = ((data & 0b1110) >> 1) as u8)]
+    #[bw(ignore)]
+    pub data_file_id: u8,
+
+    #[br(calc = (data & !0xF) * 0x08)]
+    #[bw(ignore)]
+    pub offset: u32,
 }

 // The only difference between index and index2 is how the path hash is stored.
@@ -54,7 +75,22 @@ pub struct IndexHashTableEntry {
 #[derive(Debug)]
 pub struct Index2HashTableEntry {
     pub hash: u32,
-    pub(crate) bitfield: IndexHashBitfield,
+
+    #[br(temp)]
+    #[bw(ignore)]
+    data: u32,
+
+    #[br(calc = (data & 0b1) == 0b1)]
+    #[bw(ignore)]
+    pub is_synonym: bool,
+
+    #[br(calc = ((data & 0b1110) >> 1) as u8)]
+    #[bw(ignore)]
+    pub data_file_id: u8,
+
+    #[br(calc = (data & !0xF) * 0x08)]
+    #[bw(ignore)]
+    pub offset: u32,
 }

 #[derive(Debug)]
@@ -73,8 +109,7 @@ pub struct IndexFile {
     index_header: SqPackIndexHeader,

     #[br(seek_before = SeekFrom::Start(index_header.index_data_offset.into()))]
-    // +4 because of padding
-    #[br(count = index_header.index_data_size / core::mem::size_of::<IndexHashTableEntry>() as u32 + 4)]
+    #[br(count = index_header.index_data_size / 16)]
     pub entries: Vec<IndexHashTableEntry>,
 }
@@ -87,7 +122,7 @@ pub struct Index2File {
     index_header: SqPackIndexHeader,

     #[br(seek_before = SeekFrom::Start(index_header.index_data_offset.into()))]
-    #[br(count = index_header.index_data_size / core::mem::size_of::<Index2HashTableEntry>() as u32)]
+    #[br(count = index_header.index_data_size / 8)]
     pub entries: Vec<Index2HashTableEntry>,
 }
@@ -130,9 +165,18 @@ impl IndexFile {
         self.entries.iter().any(|s| s.hash == hash)
     }

-    pub fn find_entry(&self, path: &str) -> Option<&IndexHashTableEntry> {
+    pub fn find_entry(&self, path: &str) -> Option<IndexEntry> {
         let hash = IndexFile::calculate_hash(path);
-        self.entries.iter().find(|s| s.hash == hash)
+
+        if let Some(entry) = self.entries.iter().find(|s| s.hash == hash) {
+            return Some(IndexEntry {
+                hash: entry.hash,
+                data_file_id: entry.data_file_id,
+                offset: entry.offset,
+            });
+        }
+
+        None
     }
 }
@@ -156,9 +200,18 @@ impl Index2File {
         self.entries.iter().any(|s| s.hash == hash)
     }

-    pub fn find_entry(&self, path: &str) -> Option<&Index2HashTableEntry> {
+    pub fn find_entry(&self, path: &str) -> Option<IndexEntry> {
         let hash = Index2File::calculate_hash(path);
-        self.entries.iter().find(|s| s.hash == hash)
+
+        if let Some(entry) = self.entries.iter().find(|s| s.hash == hash) {
+            return Some(IndexEntry {
+                hash: entry.hash as u64,
+                data_file_id: entry.data_file_id,
+                offset: entry.offset,
+            });
+        }
+
+        None
     }
 }
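
Note: both table entry types now decode one packed little-endian u32 instead of going through the modular_bitfield struct: bit 0 is the synonym flag, bits 1-3 are the data file id, and the value with its low nibble cleared, times 8, is the byte offset into the dat file. A worked decode with a made-up sample value:

// Worked example of the bit layout the #[br(calc = ...)] fields above decode
// (the sample value is made up): bit 0 = synonym flag, bits 1-3 = data file id,
// and (data & !0xF) * 0x08 = byte offset into the .dat file.
fn main() {
    let data: u32 = 0x0002_4685; // example packed value

    let is_synonym = (data & 0b1) == 0b1;
    let data_file_id = ((data & 0b1110) >> 1) as u8;
    let offset = (data & !0xF) * 0x08;

    assert!(is_synonym);           // low nibble 0x5 = 0b0101 -> bit 0 set
    assert_eq!(data_file_id, 2);   // bits 1-3 of 0b0101 are 0b010
    assert_eq!(offset, 0x12_3400); // 0x24680 * 8
    println!("synonym={is_synonym} dat{data_file_id} offset={offset:#x}");
}
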
@@ -70,35 +70,35 @@ impl PartialOrd for Repository {
 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
 pub enum Category {
     /// Common files such as game fonts, and other data that doesn't really fit anywhere else.
-    Common,
+    Common = 0x00,
     /// Shared data between game maps.
-    BackgroundCommon,
+    BackgroundCommon = 0x01,
     /// Game map data such as models, textures, and so on.
-    Background,
+    Background = 0x02,
     /// Cutscene content such as animations.
-    Cutscene,
+    Cutscene = 0x03,
     /// Character model files and more.
-    Character,
+    Character = 0x04,
     /// Compiled shaders used by the retail client.
-    Shader,
+    Shader = 0x05,
     /// UI layouts and textures.
-    UI,
+    UI = 0x06,
     /// Sound effects, basically anything not under `Music`.
-    Sound,
+    Sound = 0x07,
     /// This "VFX" means "visual effects", and contains textures and definitions for stuff like battle effects.
-    VFX,
+    VFX = 0x08,
     /// A leftover from 1.0, where the UI was primarily driven by LUA scripts.
-    UIScript,
+    UIScript = 0x09,
     /// Excel data.
-    EXD,
+    EXD = 0x0A,
     /// Many game events are driven by LUA scripts, such as cutscenes.
-    GameScript,
+    GameScript = 0x0B,
     /// Music!
-    Music,
+    Music = 0x0C,
     /// Unknown purpose, most likely to test SqPack functionality.
-    SqPackTest,
+    SqPackTest = 0x12,
     /// Unknown purpose, most likely debug files.
-    Debug,
+    Debug = 0x13,
 }

 pub fn string_to_category(string: &str) -> Option<Category> {
@@ -170,25 +170,24 @@ impl Repository {
     }

     /// Calculate an index filename for a specific category, like _"0a0000.win32.index"_.
-    pub fn index_filename(&self, category: Category) -> String {
+    pub fn index_filename(&self, chunk: u8, category: Category) -> String {
         format!(
             "{:02x}{:02}{:02}.{}.index",
             category as i32,
             self.expansion(),
-            0,
+            chunk,
             get_platform_string(&self.platform)
         )
     }

     /// Calculate an index2 filename for a specific category, like _"0a0000.win32.index"_.
-    pub fn index2_filename(&self, category: Category) -> String {
-        format!("{}2", self.index_filename(category))
+    pub fn index2_filename(&self, chunk: u8, category: Category) -> String {
+        format!("{}2", self.index_filename(chunk, category))
     }

     /// Calculate a dat filename given a category and a data file id, returns something like _"0a0000.win32.dat0"_.
-    pub fn dat_filename(&self, category: Category, data_file_id: u32) -> String {
-        let expansion = self.expansion();
-        let chunk = 0;
+    pub fn dat_filename(&self, chunk: u8, category: Category, data_file_id: u32) -> String {
+        let expansion = self.expansion();
         let platform = get_platform_string(&self.platform);

         format!(
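
Note: for reference, the filenames the updated helpers produce, reproduced outside the Repository type. The category and expansion values are illustrative, and the dat suffix follows the doc comment above rather than the truncated format string:

// Worked example of the filename pattern above: "{category:02x}{expansion:02}{chunk:02}".
fn index_name(category: u8, expansion: u32, chunk: u8, platform: &str) -> String {
    format!("{:02x}{:02}{:02}.{}.index", category, expansion, chunk, platform)
}

fn main() {
    // EXD (0x0A) in the base game (expansion 0):
    assert_eq!(index_name(0x0A, 0, 0, "win32"), "0a0000.win32.index");
    // The same category's second chunk only differs in the trailing chunk digits:
    assert_eq!(index_name(0x0A, 0, 1, "win32"), "0a0001.win32.index");
    // index2 appends "2"; dat files carry the data file id instead (per the doc comment).
    assert_eq!(format!("{}2", index_name(0x0A, 0, 1, "win32")), "0a0001.win32.index2");
    assert_eq!(
        index_name(0x0A, 0, 1, "win32").replace(".index", ".dat0"),
        "0a0001.win32.dat0"
    );
}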