Repository: https://github.com/redstrate/Physis.git
Re-arrange SqPack-related formats into their own submodule
This is to make way for another dat module (for the stuff under the user folder). This kind of re-organization was inevitable anyway, and I gave the structs new SqPack-y names to fit their new home.
Parent: 591c5f55ed
Commit: b54ee74802

7 changed files with 102 additions and 101 deletions
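In short, the moves and renames this commit makes, as reflected in the hunks below:

    crate::dat::DatFile      -> crate::sqpack::SqPackData
    crate::index::IndexFile  -> crate::sqpack::SqPackIndex
    crate::index::IndexEntry -> crate::sqpack::IndexEntry (re-exported)
    crate::sqdb::SQDB        -> crate::sqpack::SqPackDatabase

    src/dat.rs    -> src/sqpack/data.rs
    src/index.rs  -> src/sqpack/index.rs
    src/sqdb.rs   -> src/sqpack/db.rs
    src/sqpack.rs -> src/sqpack/mod.rs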
src/gamedata.rs

@@ -8,13 +8,12 @@ use std::path::PathBuf;
 
 use tracing::{debug, warn};
 
+use crate::sqpack::{IndexEntry, SqPackData, SqPackIndex};
 use crate::ByteBuffer;
 use crate::common::{Language, Platform, read_version};
-use crate::dat::DatFile;
 use crate::exd::EXD;
 use crate::exh::EXH;
 use crate::exl::EXL;
-use crate::index::{IndexEntry, IndexFile};
 use crate::patch::{PatchError, ZiPatch};
 use crate::repository::{Category, Repository, string_to_category};
 
@@ -26,7 +25,7 @@ pub struct GameData {
     /// Repositories in the game directory.
     pub repositories: Vec<Repository>,
 
-    index_files: HashMap<String, IndexFile>,
+    index_files: HashMap<String, SqPackIndex>,
 }
 
 fn is_valid(path: &str) -> bool {

@@ -125,7 +124,7 @@ impl GameData {
         self.repositories.sort();
     }
 
-    fn get_dat_file(&self, path: &str, chunk: u8, data_file_id: u32) -> Option<DatFile> {
+    fn get_dat_file(&self, path: &str, chunk: u8, data_file_id: u32) -> Option<SqPackData> {
         let (repository, category) = self.parse_repository_category(path).unwrap();
 
         let dat_path: PathBuf = [

@@ -137,7 +136,7 @@ impl GameData {
         .iter()
         .collect();
 
-        DatFile::from_existing(dat_path.to_str()?)
+        SqPackData::from_existing(dat_path.to_str()?)
     }
 
     /// Checks if a file located at `path` exists.

@@ -395,13 +394,13 @@ impl GameData {
 
     fn cache_index_file(&mut self, filename: &str) {
         if !self.index_files.contains_key(filename) {
-            if let Some(index_file) = IndexFile::from_existing(filename) {
+            if let Some(index_file) = SqPackIndex::from_existing(filename) {
                 self.index_files.insert(filename.to_string(), index_file);
             }
         }
     }
 
-    fn get_index_file(&self, filename: &str) -> Option<&IndexFile> {
+    fn get_index_file(&self, filename: &str) -> Option<&SqPackIndex> {
         self.index_files.get(filename)
     }
 
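A side note on the shape of that cache: population takes &mut self while lookup stays &self, the usual split for a HashMap-backed cache in Rust. A minimal standalone sketch of the same pattern (names hypothetical, not the Physis API):

    use std::collections::HashMap;

    struct Index; // stand-in for a parsed index file

    struct Cache {
        index_files: HashMap<String, Index>,
    }

    impl Cache {
        // Mutating populate step: parse and insert only on a cache miss.
        fn cache(&mut self, filename: &str) {
            if !self.index_files.contains_key(filename) {
                self.index_files.insert(filename.to_string(), Index);
            }
        }

        // Immutable lookup step, usable wherever only &self is available.
        fn get(&self, filename: &str) -> Option<&Index> {
            self.index_files.get(filename)
        }
    }

    fn main() {
        let mut c = Cache { index_files: HashMap::new() };
        c.cache("ffxiv/000000.win32.index");
        assert!(c.get("ffxiv/000000.win32.index").is_some());
    }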
src/lib.rs

@@ -20,14 +20,10 @@ pub mod repository;
 /// Handling and updating data in the "boot" directory, which contains the launcher files.
 pub mod bootdata;
 
-/// Common methods and structures relating to the SqPack data format.
+/// SqPack file formats - including Db, Data and Index/Index2 files.
 pub mod sqpack;
 
-/// Reading and writing SqPack index files.
-pub mod index;
-
 mod compression;
-mod dat;
 
 /// Reading model (MDL) files.
 pub mod model;

@@ -157,7 +153,4 @@ pub mod existing_dirs;
 /// Reading patch lists
 pub mod patchlist;
 
-/// Reading SQDB files
-pub mod sqdb;
-
 mod bcn;
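The net effect on the module tree: dat, index, and sqdb disappear as top-level modules and fold into sqpack, so the relevant slice of lib.rs afterwards reads approximately:

    /// SqPack file formats - including Db, Data and Index/Index2 files.
    pub mod sqpack;

    mod compression;

    /// Reading model (MDL) files.
    pub mod model;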
src/dat.rs → src/sqpack/data.rs (128 changed lines; Executable file → Normal file)
@@ -46,17 +46,17 @@ struct TextureLodBlock {
 }
 
 pub trait AnyNumberType<'a>:
     BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static
 {
 }
 
 impl<'a, T> AnyNumberType<'a> for T where
     T: BinRead<Args<'a> = ()>
         + BinWrite<Args<'a> = ()>
         + std::ops::AddAssign
         + Copy
         + Default
         + 'static
 {
 }

@@ -181,7 +181,7 @@ pub struct BlockHeader {
     pub compression: CompressionMode,
 }
 
-pub struct DatFile {
+pub struct SqPackData {
     file: std::fs::File,
 }
 

@@ -191,10 +191,10 @@ fn to_u8_slice(slice: &mut [u16]) -> &mut [u8] {
     unsafe { std::slice::from_raw_parts_mut(slice.as_mut_ptr().cast::<u8>(), byte_len) }
 }
 
-impl DatFile {
+impl SqPackData {
     /// Creates a new reference to an existing dat file.
-    pub fn from_existing(path: &str) -> Option<DatFile> {
-        Some(DatFile {
+    pub fn from_existing(path: &str) -> Option<Self> {
+        Some(Self {
             file: std::fs::File::open(path).ok()?,
         })
     }

@@ -205,8 +205,8 @@ impl DatFile {
     /// If the block of data is successfully parsed, it returns the file data - otherwise is None.
     pub fn read_from_offset(&mut self, offset: u64) -> Option<ByteBuffer> {
         self.file
             .seek(SeekFrom::Start(offset))
             .expect("Unable to find offset in file.");
 
         let file_info = FileInfo::read(&mut self.file).ok()?;
 

@@ -272,10 +272,10 @@ impl DatFile {
         buffer.seek(SeekFrom::Start(0x44)).ok()?;
 
         self.file
             .seek(SeekFrom::Start(
                 base_offset + (model_file_info.offset.stack_size as u64),
             ))
             .ok()?;
 
         // read from stack blocks
         let mut read_model_blocks = |offset: u64, size: usize| -> Option<u64> {

@@ -285,15 +285,15 @@ impl DatFile {
             let last_pos = &self.file.stream_position().ok()?;
 
             let data =
                 read_data_block(&self.file, *last_pos).expect("Unable to read block data.");
             // write to buffer
             buffer.write_all(data.as_slice()).ok()?;
 
             self.file
                 .seek(SeekFrom::Start(
                     last_pos + (compressed_block_sizes[current_block] as u64),
                 ))
                 .ok()?;
             current_block += 1;
         }
 

@@ -310,43 +310,43 @@ impl DatFile {
         )? as u32;
 
         let mut process_model_data =
             |i: usize,
              size: u32,
              offset: u32,
              offsets: &mut [u32; 3],
              data_sizes: &mut [u32; 3]| {
                 if size != 0 {
                     let current_vertex_offset = buffer.position() as u32;
                     if i == 0 || current_vertex_offset != offsets[i - 1] {
                         offsets[i] = current_vertex_offset;
                     } else {
                         offsets[i] = 0;
-            }
-
-            self.file
-                .seek(SeekFrom::Start(base_offset + (offset as u64)))
-                .ok();
-
-            for _ in 0..size {
-                let last_pos = self.file.stream_position().unwrap();
-
-                let data = read_data_block(&self.file, last_pos)
-                    .expect("Unable to read raw model block!");
-
-                buffer
-                    .write_all(data.as_slice())
-                    .expect("Unable to write to memory buffer!");
-
-                data_sizes[i] += data.len() as u32;
-                self.file
-                    .seek(SeekFrom::Start(
-                        last_pos + (compressed_block_sizes[current_block] as u64),
-                    ))
-                    .expect("Unable to seek properly.");
-                current_block += 1;
-            }
                     }
-        };
+
+                    self.file
+                        .seek(SeekFrom::Start(base_offset + (offset as u64)))
+                        .ok();
+
+                    for _ in 0..size {
+                        let last_pos = self.file.stream_position().unwrap();
+
+                        let data = read_data_block(&self.file, last_pos)
+                            .expect("Unable to read raw model block!");
+
+                        buffer
+                            .write_all(data.as_slice())
+                            .expect("Unable to write to memory buffer!");
+
+                        data_sizes[i] += data.len() as u32;
+                        self.file
+                            .seek(SeekFrom::Start(
+                                last_pos + (compressed_block_sizes[current_block] as u64),
+                            ))
+                            .expect("Unable to seek properly.");
+                        current_block += 1;
+                    }
+                }
+            };
 
         // process all 3 lods
         for i in 0..3 {

@@ -405,8 +405,8 @@ impl DatFile {
         let original_pos = self.file.stream_position().ok()?;
 
         self.file
             .seek(SeekFrom::Start(offset + file_info.size as u64))
             .ok()?;
 
         let mut header = vec![0u8; texture_file_info.lods[0].compressed_offset as usize];
         self.file.read_exact(&mut header).ok()?;

@@ -418,9 +418,9 @@ impl DatFile {
 
         for i in 0..texture_file_info.num_blocks {
             let mut running_block_total = (texture_file_info.lods[i as usize].compressed_offset
                 as u64)
                 + offset
                 + (file_info.size as u64);
 
             for _ in 0..texture_file_info.lods[i as usize].block_count {
                 let original_pos = self.file.stream_position().ok()?;

@@ -449,7 +449,7 @@ mod tests {
         d.push("resources/tests");
         d.push("random");
 
-        let mut dat = crate::dat::DatFile::from_existing(d.to_str().unwrap()).unwrap();
+        let mut dat = SqPackData::from_existing(d.to_str().unwrap()).unwrap();
 
         let empty_file_info = FileInfo {
             size: 0,
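Usage under the new name keeps the same two-step flow: open the .dat file, then read a record at an offset that, in practice, comes out of the index. A hedged sketch, assuming ByteBuffer is physis's owned byte-vector alias, with placeholder path and offset values:

    use physis::sqpack::SqPackData;

    fn main() {
        // Placeholder path; a real one points into the game's sqpack directory.
        let mut dat = SqPackData::from_existing("000000.win32.dat0")
            .expect("failed to open dat file");

        // Placeholder offset; normally obtained from SqPackIndex.
        if let Some(data) = dat.read_from_offset(0x2a80) {
            println!("read {} bytes", data.len());
        }
    }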
src/sqdb.rs → src/sqpack/db.rs

@@ -42,7 +42,7 @@ pub struct SQDBEntry {
 #[binrw]
 #[derive(Debug)]
 #[brw(little)]
-pub struct SQDB {
+pub struct SqPackDatabase {
     sqpack_header: SqPackHeader,
 
     header: SQDBHeader,

@@ -51,10 +51,10 @@ pub struct SQDB {
     entries: Vec<SQDBEntry>,
 }
 
-impl SQDB {
+impl SqPackDatabase {
     /// Reads an existing SQDB file
    pub fn from_existing(buffer: ByteSpan) -> Option<Self> {
        let mut cursor = Cursor::new(buffer);
-        SQDB::read(&mut cursor).ok()
+        Self::read(&mut cursor).ok()
    }
 }
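Unlike the dat and index readers, this one parses an in-memory buffer (ByteSpan) rather than opening a path itself. A usage sketch, assuming ByteSpan is physis's borrowed byte-slice alias and using a placeholder file name:

    use physis::sqpack::SqPackDatabase;

    fn main() {
        // Placeholder path; any SqPack .db file would do.
        let buffer = std::fs::read("0000.win32.db").expect("failed to read file");

        // from_existing returns None on malformed input rather than panicking.
        if SqPackDatabase::from_existing(&buffer).is_some() {
            println!("parsed database");
        }
    }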
src/index.rs → src/sqpack/index.rs (20 changed lines; Executable file → Normal file)
@@ -87,8 +87,8 @@ impl BinRead for FileEntryData {
         let data = <u32>::read_options(reader, endian, ())?;
         Ok(Self {
             is_synonym: (data & 0b1) == 0b1,
             data_file_id: ((data & 0b1110) >> 1) as u8,
             offset: (data & !0xF) as u64 * 0x08,
         })
     }
 }
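For reference, the packing this reader undoes is: bit 0 = synonym flag, bits 1-3 = data file id, and the remaining bits = offset in 8-byte units (hence masking with !0xF, then multiplying by 0x08). A worked example with a made-up raw value:

    fn main() {
        let data: u32 = 0x0000_1A35; // hypothetical sample value

        let is_synonym = (data & 0b1) == 0b1;            // bit 0      -> true
        let data_file_id = ((data & 0b1110) >> 1) as u8; // bits 1..=3 -> 2
        let offset = (data & !0xF) as u64 * 0x08;        // 0x1A30 * 8 -> 0xD180

        assert!(is_synonym);
        assert_eq!(data_file_id, 2);
        assert_eq!(offset, 0xD180);
    }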
@@ -149,7 +149,7 @@ pub struct IndexEntry {
 
 #[binrw]
 #[br(little)]
-pub struct IndexFile {
+pub struct SqPackIndex {
     sqpack_header: SqPackHeader,
 
     #[br(seek_before = SeekFrom::Start(sqpack_header.size.into()))]

@@ -164,8 +164,8 @@ pub struct IndexFile {
     pub data_entries: Vec<DataEntry>,
 
     /*#[br(seek_before = SeekFrom::Start(index_header.unknown_descriptor.offset.into()))]
-    #[br(count = index_header.unknown_descriptor.size / 16)]
-    pub unknown_entries: Vec<IndexHashTableEntry>,*/
+     * #[br(count = index_header.unknown_descriptor.size / 16)]
+     * pub unknown_entries: Vec<IndexHashTableEntry>,*/
     #[br(seek_before = SeekFrom::Start(index_header.folder_descriptor.offset.into()))]
     #[br(count = index_header.folder_descriptor.size / 16)]
     pub folder_entries: Vec<FolderEntry>,

@@ -173,7 +173,7 @@ pub struct IndexFile {
 
 const CRC: Jamcrc = Jamcrc::new();
 
-impl IndexFile {
+impl SqPackIndex {
     /// Creates a new reference to an existing index file.
     pub fn from_existing(path: &str) -> Option<Self> {
         let mut index_file = std::fs::File::open(path).ok()?;

@@ -252,7 +252,7 @@ mod tests {
         d.push("random");
 
         // Feeding it invalid data should not panic
-        IndexFile::from_existing(d.to_str().unwrap());
+        SqPackIndex::from_existing(d.to_str().unwrap());
     }
 
     #[test]

@@ -265,7 +265,7 @@ mod tests {
         let mut cursor = Cursor::new(&data);
 
         let file_entry =
             FileEntry::read_options(&mut cursor, Endian::Little, (&IndexType::Index1,)).unwrap();
 
         let expected_hash = Hash::SplitPath {
             name: 475005679,

@@ -281,8 +281,8 @@ mod tests {
         {
             let mut write_cursor = Cursor::new(&mut new_data);
             file_entry
                 .write_options(&mut write_cursor, Endian::Little, (&IndexType::Index1,))
                 .unwrap();
         }
 
         assert_eq!(new_data, data);
src/sqpack.rs → src/sqpack/mod.rs (23 changed lines; Executable file → Normal file)
@@ -4,16 +4,25 @@
 use std::io::{Read, Seek, SeekFrom, Write};
 
 use binrw::{BinRead, BinWrite, binrw};
+use data::{BlockHeader, CompressionMode};
 
 use crate::common::{Platform, Region};
 use crate::compression::no_header_decompress;
-use crate::dat::{BlockHeader, CompressionMode};
+
+mod data;
+pub use data::SqPackData;
+
+mod db;
+pub use db::SqPackDatabase;
+
+mod index;
+pub use index::{SqPackIndex, IndexEntry};
 
 /// The type of this SqPack file.
 #[binrw]
 #[brw(repr = u8)]
 #[derive(Debug)]
-enum SqPackFileType {
+pub(crate) enum SqPackFileType {
     /// FFXIV Explorer says "SQDB", whatever that is.
     SQDB = 0x0,
     /// Dat files.

@@ -25,7 +34,7 @@ enum SqPackFileType {
 #[binrw]
 #[brw(magic = b"SqPack\0\0")]
 #[derive(Debug)]
-pub struct SqPackHeader {
+pub(crate) struct SqPackHeader {
     #[brw(pad_size_to = 4)]
     platform_id: Platform,
     pub size: u32,

@@ -48,7 +57,7 @@ pub struct SqPackHeader {
     sha1_hash: [u8; 20],
 }
 
-pub fn read_data_block<T: Read + Seek>(mut buf: T, starting_position: u64) -> Option<Vec<u8>> {
+pub(crate) fn read_data_block<T: Read + Seek>(mut buf: T, starting_position: u64) -> Option<Vec<u8>> {
     buf.seek(SeekFrom::Start(starting_position)).ok()?;
 
     let block_header = BlockHeader::read(&mut buf).unwrap();

@@ -78,7 +87,7 @@ pub fn read_data_block<T: Read + Seek>(mut buf: T, starting_position: u64) -> Option<Vec<u8>> {
 }
 
 /// A fixed version of read_data_block accounting for differing compressed block sizes in ZiPatch files.
-pub fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
+pub(crate) fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
     let block_header = BlockHeader::read(&mut buf).unwrap();
 
     match block_header.compression {

@@ -87,7 +96,7 @@ pub fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
             decompressed_length,
         } => {
             let compressed_length: usize =
                 ((compressed_length as usize + 143) & 0xFFFFFF80) - (block_header.size as usize);
 
             let mut compressed_data: Vec<u8> = vec![0; compressed_length];
             buf.read_exact(&mut compressed_data).ok()?;
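The `(x + 143) & 0xFFFFFF80` idiom rounds up to the next 128-byte boundary while leaving room for the block header (the constant 143 = 127 + 16 suggests a 16-byte header); write_data_block_patch below applies the same alignment to the whole block. A small demonstration of the arithmetic, with arbitrary sample lengths:

    fn main() {
        // Round `len` plus a 16-byte header up to a multiple of 128.
        let padded = |len: usize| (len + 143) & 0xFFFF_FF80;

        assert_eq!(padded(0), 128);   // even an empty payload occupies one slot
        assert_eq!(padded(100), 128); // 100 + 16 <= 128
        assert_eq!(padded(113), 256); // 113 + 16 > 128 spills into the next slot
    }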
@@ -115,7 +124,7 @@ pub fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
     }
 }
 
-pub fn write_data_block_patch<T: Write + Seek>(mut writer: T, data: Vec<u8>) {
+pub(crate) fn write_data_block_patch<T: Write + Seek>(mut writer: T, data: Vec<u8>) {
     let new_file_size: usize = (data.len() + 143) & 0xFFFFFF80;
 
     // This only adds uncompressed data for now, to simplify implementation
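After the move, downstream code reaches everything through physis::sqpack: the data, db, and index submodules stay private, and their types are re-exported at the module root. A sketch of the new import surface, mirroring the integration-test change below (the path argument is a placeholder):

    use physis::sqpack::SqPackIndex;

    fn main() {
        // Placeholder path; the test builds it from the FFXIV_GAME_DIR variable.
        if SqPackIndex::from_existing("game/sqpack/ffxiv/000000.win32.index").is_some() {
            println!("parsed index");
        }
    }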
@@ -6,14 +6,14 @@ use std::fs::read;
 
 use physis::common::Platform;
 use physis::fiin::FileInfo;
-use physis::index;
+use physis::sqpack::SqPackIndex;
 
 #[test]
 #[cfg_attr(not(feature = "retail_game_testing"), ignore)]
 fn test_index_read() {
     let game_dir = env::var("FFXIV_GAME_DIR").unwrap();
 
-    index::IndexFile::from_existing(
+    SqPackIndex::from_existing(
         format!("{}/game/sqpack/ffxiv/000000.win32.index", game_dir).as_str(),
     );
 }