Mirror of https://github.com/redstrate/Physis.git (synced 2025-04-20 11:47:46 +00:00)
Redesign how IndexFile parsing works, again
Now it reads much more of the index file format, and tries to handle index1/index2 differences more opaquely. I'm not yet happy with the API, it needs a bit more work.
This commit is contained in:
parent f0554c8c27
commit bb7c74fec8

41 changed files with 374 additions and 312 deletions
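The substance of the change is in src/index.rs below: the separate IndexFile/Index2File types are folded into a single IndexFile, and a path hash becomes a Hash enum, either Hash::SplitPath (index1: separate directory and filename CRCs) or Hash::FullPath (index2: one CRC over the whole path). As a rough sketch of the difference, here is how the two flavours can be computed with the crc crate's CRC_32_JAMCRC, the same algorithm the repository's own tests check against; the helper names are illustrative and not part of the physis API:

```rust
use crc::{CRC_32_JAMCRC, Crc};

const JAMCRC: Crc<u32> = Crc::<u32>::new(&CRC_32_JAMCRC);

// index1 ("split path"): hash the lowercased directory and filename separately.
fn index1_hash(path: &str) -> (u32, u32) {
    let lowercase = path.to_lowercase();
    let pos = lowercase.rfind('/').expect("game paths sit inside a folder");
    let (directory, filename) = lowercase.split_at(pos);
    (
        JAMCRC.checksum(&filename.as_bytes()[1..]), // skip the leading '/'
        JAMCRC.checksum(directory.as_bytes()),
    )
}

// index2 ("full path"): one hash over the entire lowercased path.
fn index2_hash(path: &str) -> u32 {
    JAMCRC.checksum(path.to_lowercase().as_bytes())
}

fn main() {
    let (name, dir) = index1_hash("exd/root.exl");
    println!("index1: name = {name:#010x}, path = {dir:#010x}");
    println!("index2: {:#010x}", index2_hash("exd/root.exl"));
}
```

The old API packed the two index1 CRCs into a single u64; the new Hash enum keeps them as named fields, which is what lets one entry type serve both file formats.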
@@ -5,7 +5,7 @@ use std::io::{Cursor, Seek, SeekFrom};
 
 use crate::ByteSpan;
 use binrw::BinRead;
-use binrw::{binread, binrw, BinReaderExt};
+use binrw::{BinReaderExt, binread, binrw};
 
 #[binrw]
 #[derive(Debug)]
@@ -4,8 +4,8 @@
 use std::io::{Cursor, Seek, SeekFrom};
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[br(little)]
@@ -49,8 +49,10 @@ pub fn get_language_code(lang: &Language) -> &'static str {
 #[brw(repr = i16)]
 #[derive(Debug, PartialEq, Eq)]
 pub enum Region {
-    /// The global region.
-    Global = -1, // TODO: find patch codes for other regions :-)
+    /// The global region, used for any region not specified.
+    Global = -1,
+    /// Korea and China clients.
+    KoreaChina = 1,
 }
 
 /// Reads a version file.
@@ -1,9 +1,9 @@
 // SPDX-FileCopyrightText: 2024 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later
 
-use std::ffi::CString;
-use binrw::{binread, BinReaderExt, BinResult};
+use binrw::{BinReaderExt, BinResult, binread};
 use half::f16;
+use std::ffi::CString;
 use std::io::SeekFrom;
 
 pub(crate) fn read_bool_from<T: std::convert::From<u8> + std::cmp::PartialEq>(x: T) -> bool {
@@ -11,11 +11,7 @@ pub(crate) fn read_bool_from<T: std::convert::From<u8> + std::cmp::PartialEq>(x:
 }
 
 pub(crate) fn write_bool_as<T: std::convert::From<u8>>(x: &bool) -> T {
-    if *x {
-        T::from(1u8)
-    } else {
-        T::from(0u8)
-    }
+    if *x { T::from(1u8) } else { T::from(0u8) }
 }
 
 pub(crate) fn read_string(byte_stream: Vec<u8>) -> String {
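For reference, a minimal, self-contained sketch of the two helpers this hunk touches. The write_bool_as body is taken from the diff; read_bool_from's body is an assumption reconstructed from its signature, since only the signature appears above:

```rust
// Assumed body: only the signature is shown in the hunk above.
fn read_bool_from<T: std::convert::From<u8> + std::cmp::PartialEq>(x: T) -> bool {
    x == T::from(1u8)
}

// Body as it appears after this change.
fn write_bool_as<T: std::convert::From<u8>>(x: &bool) -> T {
    if *x { T::from(1u8) } else { T::from(0u8) }
}

fn main() {
    assert!(read_bool_from(1u32));
    assert!(!read_bool_from(0u16));
    let raw: u8 = write_bool_as(&true);
    assert_eq!(raw, 1);
}
```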
@@ -129,18 +125,27 @@ mod tests {
     #[test]
     fn read_string() {
         // The nul terminator is supposed to be removed
-        assert_eq!(crate::common_file_operations::read_string(STRING_DATA.to_vec()), "FOO".to_string());
+        assert_eq!(
+            crate::common_file_operations::read_string(STRING_DATA.to_vec()),
+            "FOO".to_string()
+        );
     }
 
     #[test]
     fn write_string() {
         // Supposed to include the nul terminator
-        assert_eq!(crate::common_file_operations::write_string(&"FOO".to_string()), STRING_DATA.to_vec());
+        assert_eq!(
+            crate::common_file_operations::write_string(&"FOO".to_string()),
+            STRING_DATA.to_vec()
+        );
     }
 
     #[test]
     fn get_string_len() {
         // Supposed to include the nul terminator
-        assert_eq!(crate::common_file_operations::get_string_len(&"FOO".to_string()), 4);
+        assert_eq!(
+            crate::common_file_operations::get_string_len(&"FOO".to_string()),
+            4
+        );
     }
 }
@@ -124,7 +124,7 @@ mod tests {
 
     #[test]
     fn check_jamcrc() {
-        use crc::{Crc, CRC_32_JAMCRC};
+        use crc::{CRC_32_JAMCRC, Crc};
 
         const JAMCR: Crc<u32> = Crc::<u32>::new(&CRC_32_JAMCRC);
 
@@ -7,7 +7,7 @@ use std::io::{Cursor, Read, Seek, SeekFrom};
 use crate::ByteBuffer;
 use binrw::BinRead;
 use binrw::BinWrite;
-use binrw::{binrw, BinReaderExt};
+use binrw::{BinReaderExt, binrw};
 
 use crate::common_file_operations::read_bool_from;
 #[cfg(feature = "visual_data")]
@@ -1,7 +1,7 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later
 
-use crate::race::{get_race_id, Gender, Race, Subrace};
+use crate::race::{Gender, Race, Subrace, get_race_id};
 
 #[repr(u8)]
 #[derive(Debug, PartialEq, Eq)]
@@ -6,9 +6,9 @@ use std::io::{Cursor, Seek, SeekFrom};
 use binrw::binrw;
 use binrw::{BinRead, Endian};
 
-use crate::common::Language;
-use crate::exh::{ColumnDataType, ExcelColumnDefinition, ExcelDataPagination, EXH};
 use crate::ByteSpan;
+use crate::common::Language;
+use crate::exh::{ColumnDataType, EXH, ExcelColumnDefinition, ExcelDataPagination};
 
 #[binrw]
 #[brw(magic = b"EXDF")]
@@ -5,11 +5,11 @@
 
 use std::io::Cursor;
 
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
-use crate::common::Language;
 use crate::ByteSpan;
+use crate::common::Language;
 
 #[binrw]
 #[brw(magic = b"EXHF")]
@@ -22,15 +22,15 @@ pub enum ExistingInstallType {
     /// Installed via XIVOnMac
     XIVOnMac,
     /// Installed via Astra
-    Astra
+    Astra,
 }
 
 /// An existing install location on disk
 pub struct ExistingGameDirectory {
     /// The application where this installation was from
-    pub install_type : ExistingInstallType,
+    pub install_type: ExistingInstallType,
     /// The path to the "main folder" where "game" and "boot" sits
-    pub path: String
+    pub path: String,
 }
 
 /// Finds existing installations on disk. Will only return locations that actually have files in them, and a really basic check to see if the data is valid.
@@ -48,13 +48,15 @@ pub fn find_existing_game_dirs() -> Vec<ExistingGameDirectory> {
         // Official install (Steam)
         install_dirs.push(ExistingGameDirectory {
             install_type: ExistingInstallType::OfficialLauncher,
-            path: from_home_dir(".steam/steam/steamapps/common/FINAL FANTASY XIV - A Realm Reborn")
+            path: from_home_dir(
+                ".steam/steam/steamapps/common/FINAL FANTASY XIV - A Realm Reborn",
+            ),
         });
 
         // XIVLauncherCore location
         install_dirs.push(ExistingGameDirectory {
             install_type: ExistingInstallType::XIVLauncherCore,
-            path: from_home_dir(".xlcore/ffxiv")
+            path: from_home_dir(".xlcore/ffxiv"),
         });
 
         // Astra location. But we have to iterate through each UUID.
@@ -73,7 +75,7 @@ pub fn find_existing_game_dirs() -> Vec<ExistingGameDirectory> {
             .for_each(|path| {
                 install_dirs.push(ExistingGameDirectory {
                     install_type: ExistingInstallType::Astra,
-                    path: path.into_os_string().into_string().unwrap()
+                    path: path.into_os_string().into_string().unwrap(),
                 })
             });
     }
@@ -91,7 +93,9 @@ pub fn find_existing_game_dirs() -> Vec<ExistingGameDirectory> {
         // Official install (Wine)
         install_dirs.push(ExistingGameDirectory {
             install_type: ExistingInstallType::OfficialLauncher,
-            path: "C:\\Program Files (x86)\\SquareEnix\\FINAL FANTASY XIV - A Realm Reborn".parse().unwrap()
+            path: "C:\\Program Files (x86)\\SquareEnix\\FINAL FANTASY XIV - A Realm Reborn"
+                .parse()
+                .unwrap(),
         });
 
         // TODO: Add Astra
@@ -99,15 +103,18 @@ pub fn find_existing_game_dirs() -> Vec<ExistingGameDirectory> {
         &_ => {}
     }
 
-    install_dirs.into_iter().filter(|dir| is_valid_game_dir(&dir.path)).collect()
+    install_dirs
+        .into_iter()
+        .filter(|dir| is_valid_game_dir(&dir.path))
+        .collect()
 }
 
 /// An existing user directory
 pub struct ExistingUserDirectory {
     /// The application where this directory was from
-    pub install_type : ExistingInstallType,
+    pub install_type: ExistingInstallType,
     /// The path to the user folder
-    pub path: String
+    pub path: String,
 }
 
 /// Finds existing user folders on disk. Will only return locations that actually have files in them, and a really basic check to see if the data is valid.
@@ -123,13 +130,13 @@ pub fn find_existing_user_dirs() -> Vec<ExistingUserDirectory> {
         // Official install (Wine)
         user_dirs.push(ExistingUserDirectory {
            install_type: ExistingInstallType::OfficialLauncher,
-            path: from_home_dir("Documents/My Games/FINAL FANTASY XIV - A Realm Reborn")
+            path: from_home_dir("Documents/My Games/FINAL FANTASY XIV - A Realm Reborn"),
         });
 
         // XIVLauncherCore location
         user_dirs.push(ExistingUserDirectory {
             install_type: ExistingInstallType::XIVLauncherCore,
-            path: from_home_dir(".xlcore/ffxivConfig")
+            path: from_home_dir(".xlcore/ffxivConfig"),
         });
 
         // Astra location. But we have to iterate through each UUID.
@@ -148,7 +155,7 @@ pub fn find_existing_user_dirs() -> Vec<ExistingUserDirectory> {
             .for_each(|path| {
                 user_dirs.push(ExistingUserDirectory {
                     install_type: ExistingInstallType::Astra,
-                    path: path.into_os_string().into_string().unwrap()
+                    path: path.into_os_string().into_string().unwrap(),
                 })
             });
     }
@@ -157,7 +164,7 @@ pub fn find_existing_user_dirs() -> Vec<ExistingUserDirectory> {
         // Official install (Wine)
         user_dirs.push(ExistingUserDirectory {
             install_type: ExistingInstallType::OfficialLauncher,
-            path: from_home_dir("Documents/My Games/FINAL FANTASY XIV - A Realm Reborn")
+            path: from_home_dir("Documents/My Games/FINAL FANTASY XIV - A Realm Reborn"),
         })
 
         // TODO: Add XIV on Mac?
@@ -166,7 +173,7 @@ pub fn find_existing_user_dirs() -> Vec<ExistingUserDirectory> {
         // Official install
         user_dirs.push(ExistingUserDirectory {
             install_type: ExistingInstallType::OfficialLauncher,
-            path: from_home_dir("Documents/My Games/FINAL FANTASY XIV - A Realm Reborn")
+            path: from_home_dir("Documents/My Games/FINAL FANTASY XIV - A Realm Reborn"),
         })
 
         // TODO: Add Astra
@@ -174,7 +181,10 @@ pub fn find_existing_user_dirs() -> Vec<ExistingUserDirectory> {
         &_ => {}
     }
 
-    user_dirs.into_iter().filter(|dir| is_valid_user_dir(&dir.path)).collect()
+    user_dirs
+        .into_iter()
+        .filter(|dir| is_valid_user_dir(&dir.path))
+        .collect()
 }
 
 fn from_home_dir(path: &'static str) -> String {
@@ -8,15 +8,15 @@ use std::path::PathBuf;
 
 use tracing::{debug, warn};
 
-use crate::common::{read_version, Language, Platform};
+use crate::ByteBuffer;
+use crate::common::{Language, Platform, read_version};
 use crate::dat::DatFile;
 use crate::exd::EXD;
 use crate::exh::EXH;
 use crate::exl::EXL;
-use crate::index::{Index2File, IndexEntry, IndexFile};
+use crate::index::{IndexEntry, IndexFile};
 use crate::patch::{PatchError, ZiPatch};
-use crate::repository::{string_to_category, Category, Repository};
-use crate::ByteBuffer;
+use crate::repository::{Category, Repository, string_to_category};
 
 /// Framework for operating on game data.
 pub struct GameData {
@@ -27,7 +27,6 @@ pub struct GameData {
     pub repositories: Vec<Repository>,
 
     index_files: HashMap<String, IndexFile>,
-    index2_files: HashMap<String, Index2File>,
 }
 
 fn is_valid(path: &str) -> bool {
@@ -79,7 +78,6 @@ impl GameData {
             game_directory: String::from(directory),
             repositories: vec![],
             index_files: HashMap::new(),
-            index2_files: HashMap::new(),
         };
         data.reload_repositories(platform);
         Some(data)
@@ -157,7 +155,7 @@ impl GameData {
     /// }
     /// ```
     pub fn exists(&mut self, path: &str) -> bool {
-        let Some((_, _)) = self.get_index_filenames(path) else {
+        let Some(_) = self.get_index_filenames(path) else {
            return false;
        };
 
@@ -214,11 +212,10 @@ impl GameData {
         Some((&self.repositories[0], string_to_category(tokens.0)?))
     }
 
-    fn get_index_filenames(&self, path: &str) -> Option<(Vec<(String, u8)>, Vec<(String, u8)>)> {
+    fn get_index_filenames(&self, path: &str) -> Option<Vec<(String, u8)>> {
         let (repository, category) = self.parse_repository_category(path)?;
 
-        let mut index1_filenames = vec![];
-        let mut index2_filenames = vec![];
+        let mut index_filenames = vec![];
 
         for chunk in 0..255 {
             let index_path: PathBuf = [
@@ -230,7 +227,7 @@ impl GameData {
             .iter()
             .collect();
 
-            index1_filenames.push((index_path.into_os_string().into_string().unwrap(), chunk));
+            index_filenames.push((index_path.into_os_string().into_string().unwrap(), chunk));
 
             let index2_path: PathBuf = [
                 &self.game_directory,
@@ -241,10 +238,10 @@ impl GameData {
             .iter()
             .collect();
 
-            index2_filenames.push((index2_path.into_os_string().into_string().unwrap(), chunk));
+            index_filenames.push((index2_path.into_os_string().into_string().unwrap(), chunk));
         }
 
-        Some((index1_filenames, index2_filenames))
+        Some(index_filenames)
     }
 
     /// Read an excel sheet by name (e.g. "Achievement")
@@ -404,24 +401,12 @@ impl GameData {
         }
     }
 
-    fn cache_index2_file(&mut self, filename: &str) {
-        if !self.index2_files.contains_key(filename) {
-            if let Some(index_file) = Index2File::from_existing(filename) {
-                self.index2_files.insert(filename.to_string(), index_file);
-            }
-        }
-    }
-
     fn get_index_file(&self, filename: &str) -> Option<&IndexFile> {
         self.index_files.get(filename)
     }
 
-    fn get_index2_file(&self, filename: &str) -> Option<&Index2File> {
-        self.index2_files.get(filename)
-    }
-
     fn find_entry(&mut self, path: &str) -> Option<(IndexEntry, u8)> {
-        let (index_paths, index2_paths) = self.get_index_filenames(path)?;
+        let index_paths = self.get_index_filenames(path)?;
 
         for (index_path, chunk) in index_paths {
             self.cache_index_file(&index_path);
@@ -433,16 +418,6 @@ impl GameData {
             }
         }
 
-        for (index2_path, chunk) in index2_paths {
-            self.cache_index2_file(&index2_path);
-
-            if let Some(index_file) = self.get_index2_file(&index2_path) {
-                if let Some(entry) = index_file.find_entry(path) {
-                    return Some((entry, chunk));
-                }
-            }
-        }
-
         None
     }
 }
@@ -479,8 +454,9 @@ mod tests {
             data.parse_repository_category("exd/root.exl").unwrap(),
             (&data.repositories[0], EXD)
         );
-        assert!(data
-            .parse_repository_category("what/some_font.dat")
-            .is_none());
+        assert!(
+            data.parse_repository_category("what/some_font.dat")
+                .is_none()
+        );
     }
 }
@@ -3,9 +3,9 @@
 
 #![allow(dead_code)]
 
+use crate::havok::HavokAnimation;
 use crate::havok::object::HavokObject;
 use crate::havok::spline_compressed_animation::HavokSplineCompressedAnimation;
-use crate::havok::HavokAnimation;
 use core::cell::RefCell;
 use std::sync::Arc;
@@ -1,10 +1,10 @@
 // SPDX-FileCopyrightText: 2020 Inseok Lee
 // SPDX-License-Identifier: MIT
 
+use crate::havok::HavokAnimation;
 use crate::havok::byte_reader::ByteReader;
 use crate::havok::object::HavokObject;
 use crate::havok::transform::HavokTransform;
-use crate::havok::HavokAnimation;
 use core::{cell::RefCell, cmp};
 use std::f32;
 use std::sync::Arc;
src/index.rs (204 changed lines)
@@ -7,14 +7,17 @@
 use std::io::SeekFrom;
 
 use crate::common::Platform;
+use crate::common::Region;
 use crate::crc::Jamcrc;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
+/// The type of this SqPack file.
 #[binrw]
 #[brw(repr = u8)]
 enum SqPackFileType {
     /// FFXIV Explorer says "SQDB", whatever that is.
     SQDB = 0x0,
     /// Dat files.
     Data = 0x1,
+    /// Index/Index2 files.
@@ -33,36 +36,54 @@ pub struct SqPackHeader {
     file_type: SqPackFileType,
 
-    // some unknown value, zeroed out for index files
+    // XivAlexandar says date/time, where does that come from?
     unk1: u32,
     unk2: u32,
 
-    // always 0xFFFFFFFF
-    unk3: u32,
+    #[br(pad_size_to = 4)]
+    region: Region,
 
     #[brw(pad_before = 924)]
     #[brw(pad_after = 44)]
     // The SHA1 of the bytes immediately before this
-    sha1_hash: [u8; 20]
+    sha1_hash: [u8; 20],
 }
 
+#[binrw]
+#[derive(Debug)]
+pub struct SegementDescriptor {
+    count: u32,
+    offset: u32,
+    size: u32,
+    #[brw(pad_after = 40)]
+    sha1_hash: [u8; 20],
+}
+
+#[binrw]
+#[brw(repr = u8)]
+#[derive(Debug, PartialEq)]
+pub enum IndexType {
+    Index1,
+    Index2,
+}
+
 #[binrw]
 #[derive(Debug)]
 pub struct SqPackIndexHeader {
     size: u32,
     version: u32,
-    index_data_offset: u32,
-    index_data_size: u32,
-    index_data_hash: [u8; 64],
-    number_of_data_file: u32,
-    synonym_data_offset: u32,
-    synonym_data_size: u32,
-    synonym_data_hash: [u8; 64],
-    empty_block_data_offset: u32,
-    empty_block_data_size: u32,
-    empty_block_data_hash: [u8; 64],
-    dir_index_data_offset: u32,
-    dir_index_data_size: u32,
-    dir_index_data_hash: [u8; 64],
-    index_type: u32,
 
+    #[brw(pad_after = 4)]
+    file_descriptor: SegementDescriptor,
+
+    // Count in this descriptor correlates to the number of dat files.
+    data_descriptor: SegementDescriptor,
+
+    unknown_descriptor: SegementDescriptor,
+
+    folder_descriptor: SegementDescriptor,
+
+    #[brw(pad_size_to = 4)]
+    pub(crate) index_type: IndexType,
+
     #[brw(pad_before = 656)]
     #[brw(pad_after = 44)]
@@ -71,14 +92,30 @@ pub struct SqPackIndexHeader {
 }
 
 #[binrw]
+#[br(import(index_type: &IndexType))]
+#[derive(PartialEq)]
+pub enum Hash {
+    #[br(pre_assert(*index_type == IndexType::Index1))]
+    SplitPath { name: u32, path: u32 },
+    #[br(pre_assert(*index_type == IndexType::Index2))]
+    FullPath(u32),
+}
+
+#[binrw]
+#[br(import(index_type: &IndexType))]
 pub struct IndexHashTableEntry {
-    pub hash: u64,
+    #[br(args(index_type))]
+    pub hash: Hash,
 
     #[br(temp)]
     #[bw(ignore)]
     #[brw(pad_after = 4)]
     data: u32,
 
     #[br(temp)]
     #[bw(calc = 0)]
+    #[br(if(*index_type == IndexType::Index1))]
     padding: u32,
 
     #[br(calc = (data & 0b1) == 0b1)]
     #[bw(ignore)]
     pub is_synonym: bool,
@@ -117,6 +154,23 @@ pub struct Index2HashTableEntry {
     pub offset: u64,
 }
 
+#[binrw]
+#[derive(Debug)]
+pub struct DataEntry {
+    // A bunch of 0xFFFFFFFF
+    unk: [u8; 256],
+}
+
+#[binrw]
+#[derive(Debug)]
+pub struct FolderEntry {
+    hash: u32,
+    files_offset: u32,
+    // Divide by 0x10 to get the number of files
+    #[brw(pad_after = 4)]
+    total_files_size: u32,
+}
+
 #[derive(Debug)]
 pub struct IndexEntry {
     pub hash: u64,
@@ -132,22 +186,19 @@ pub struct IndexFile {
     #[br(seek_before = SeekFrom::Start(sqpack_header.size.into()))]
     index_header: SqPackIndexHeader,
 
-    #[br(seek_before = SeekFrom::Start(index_header.index_data_offset.into()))]
-    #[br(count = index_header.index_data_size / 16)]
+    #[br(seek_before = SeekFrom::Start(index_header.file_descriptor.offset.into()), count = index_header.file_descriptor.size / 16, args { inner: (&index_header.index_type,) })]
     pub entries: Vec<IndexHashTableEntry>,
-}
 
-#[binrw]
-#[br(little)]
-pub struct Index2File {
-    sqpack_header: SqPackHeader,
+    #[br(seek_before = SeekFrom::Start(index_header.data_descriptor.offset.into()))]
+    #[br(count = index_header.data_descriptor.size / 256)]
+    pub data_entries: Vec<DataEntry>,
 
-    #[br(seek_before = SeekFrom::Start(sqpack_header.size.into()))]
-    index_header: SqPackIndexHeader,
-
-    #[br(seek_before = SeekFrom::Start(index_header.index_data_offset.into()))]
-    #[br(count = index_header.index_data_size / 8)]
-    pub entries: Vec<Index2HashTableEntry>,
+    /*#[br(seek_before = SeekFrom::Start(index_header.unknown_descriptor.offset.into()))]
+    #[br(count = index_header.unknown_descriptor.size / 16)]
+    pub unknown_entries: Vec<IndexHashTableEntry>,*/
+    #[br(seek_before = SeekFrom::Start(index_header.folder_descriptor.offset.into()))]
+    #[br(count = index_header.folder_descriptor.size / 16)]
+    pub folder_entries: Vec<FolderEntry>,
 }
 
 const CRC: Jamcrc = Jamcrc::new();
@@ -157,6 +208,8 @@ impl IndexFile {
     pub fn from_existing(path: &str) -> Option<Self> {
         let mut index_file = std::fs::File::open(path).ok()?;
 
+        println!("Reading {}!", path);
+
         Self::read(&mut index_file).ok()
     }
@@ -168,68 +221,45 @@ impl IndexFile {
     }
 
     /// Calculates a hash for `index` files from a game path.
-    pub fn calculate_hash(path: &str) -> u64 {
+    pub fn calculate_hash(&self, path: &str) -> Hash {
         let lowercase = path.to_lowercase();
 
-        if let Some(pos) = lowercase.rfind('/') {
-            let (directory, filename) = lowercase.split_at(pos);
+        return match &self.index_header.index_type {
+            IndexType::Index1 => {
+                if let Some(pos) = lowercase.rfind('/') {
+                    let (directory, filename) = lowercase.split_at(pos);
 
-            let directory_crc = CRC.checksum(directory.as_bytes());
-            let filename_crc = CRC.checksum(filename[1..filename.len()].as_bytes());
+                    let directory_crc = CRC.checksum(directory.as_bytes());
+                    let filename_crc = CRC.checksum(filename[1..filename.len()].as_bytes());
 
-            (directory_crc as u64) << 32 | (filename_crc as u64)
-        } else {
-            CRC.checksum(lowercase.as_bytes()) as u64
-        }
+                    Hash::SplitPath {
+                        name: filename_crc,
+                        path: directory_crc,
+                    }
+                } else {
+                    // TODO: is this ever hit?
+                    panic!("This is unexpected, why is the file sitting outside of a folder?");
+                }
+            }
+            IndexType::Index2 => Hash::FullPath(CRC.checksum(lowercase.as_bytes())),
+        };
     }
 
     // TODO: turn into traits?
     pub fn exists(&self, path: &str) -> bool {
-        let hash = IndexFile::calculate_hash(path);
+        let hash = self.calculate_hash(path);
         self.entries.iter().any(|s| s.hash == hash)
     }
 
     pub fn find_entry(&self, path: &str) -> Option<IndexEntry> {
-        let hash = IndexFile::calculate_hash(path);
+        let hash = self.calculate_hash(path);
 
         if let Some(entry) = self.entries.iter().find(|s| s.hash == hash) {
+            let full_hash = match hash {
+                Hash::SplitPath { name, path } => (path as u64) << 32 | (name as u64),
+                Hash::FullPath(hash) => hash as u64,
+            };
             return Some(IndexEntry {
-                hash: entry.hash,
+                hash: 0,
                 data_file_id: entry.data_file_id,
                 offset: entry.offset,
             });
         }
 
         None
     }
 }
-
-impl Index2File {
-    /// Creates a new reference to an existing index2 file.
-    pub fn from_existing(path: &str) -> Option<Self> {
-        let mut index_file = std::fs::File::open(path).ok()?;
-
-        Self::read(&mut index_file).ok()
-    }
-
-    /// Calculates a hash for `index2` files from a game path.
-    pub fn calculate_hash(path: &str) -> u32 {
-        let lowercase = path.to_lowercase();
-
-        CRC.checksum(lowercase.as_bytes())
-    }
-
-    pub fn exists(&self, path: &str) -> bool {
-        let hash = Index2File::calculate_hash(path);
-        self.entries.iter().any(|s| s.hash == hash)
-    }
-
-    pub fn find_entry(&self, path: &str) -> Option<IndexEntry> {
-        let hash = Index2File::calculate_hash(path);
-
-        if let Some(entry) = self.entries.iter().find(|s| s.hash == hash) {
-            return Some(IndexEntry {
-                hash: entry.hash as u64,
@@ -254,14 +284,4 @@ mod tests {
         // Feeding it invalid data should not panic
         IndexFile::from_existing(d.to_str().unwrap());
     }
-
-    #[test]
-    fn test_index2_invalid() {
-        let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        d.push("resources/tests");
-        d.push("random");
-
-        // Feeding it invalid data should not panic
-        Index2File::from_existing(d.to_str().unwrap());
-    }
 }
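The binrw pattern that makes the unified IndexFile work is the import/pre_assert pair seen above: SqPackIndexHeader carries an IndexType, IndexHashTableEntry imports it, and each Hash variant is gated on it. A stripped-down, runnable sketch of that mechanism follows (simplified types, not the library's actual definitions):

```rust
use binrw::{BinRead, binread};
use std::io::Cursor;

#[derive(Debug, PartialEq)]
enum IndexType {
    Index1,
    Index2,
}

#[binread]
#[br(little, import(index_type: &IndexType))]
#[derive(Debug)]
enum Hash {
    // Only tried when the surrounding header said this is an index1 file.
    #[br(pre_assert(*index_type == IndexType::Index1))]
    SplitPath { name: u32, path: u32 },
    // Only tried for index2: a single CRC over the full path.
    #[br(pre_assert(*index_type == IndexType::Index2))]
    FullPath(u32),
}

fn main() {
    let bytes = [0xAA, 0, 0, 0, 0xBB, 0, 0, 0];
    // The same eight bytes parse differently depending on the imported index type.
    let split = Hash::read_args(&mut Cursor::new(&bytes), (&IndexType::Index1,)).unwrap();
    let full = Hash::read_args(&mut Cursor::new(&bytes), (&IndexType::Index2,)).unwrap();
    println!("{split:?} vs {full:?}"); // SplitPath { name: 170, path: 187 } vs FullPath(170)
}
```

This is also why the header stores index_type with #[brw(pad_size_to = 4)] and the entry list is read with args { inner: (&index_header.index_type,) }: the element type needs the argument threaded through the Vec parser.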
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
@@ -5,7 +5,7 @@ use std::io::{Cursor, Seek, SeekFrom};
 
 use crate::ByteSpan;
 use binrw::binrw;
-use binrw::{binread, BinRead, BinReaderExt};
+use binrw::{BinRead, BinReaderExt, binread};
 
 // From https://github.com/NotAdam/Lumina/tree/40dab50183eb7ddc28344378baccc2d63ae71d35/src/Lumina/Data/Parsing/Layer
@@ -4,8 +4,8 @@
 use std::io::{Cursor, Seek, SeekFrom};
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[allow(dead_code)]
src/model.rs (25 changed lines)
@@ -8,10 +8,13 @@ use std::mem::size_of;
 
 use binrw::BinRead;
 use binrw::BinReaderExt;
-use binrw::{binrw, BinWrite, BinWriterExt};
+use binrw::{BinWrite, BinWriterExt, binrw};
 
 use crate::common_file_operations::{read_bool_from, write_bool_as};
-use crate::model_vertex_declarations::{vertex_element_parser, vertex_element_writer, VertexDeclaration, VertexType, VertexUsage, VERTEX_ELEMENT_SIZE};
+use crate::model_vertex_declarations::{
+    VERTEX_ELEMENT_SIZE, VertexDeclaration, VertexType, VertexUsage, vertex_element_parser,
+    vertex_element_writer,
+};
 use crate::{ByteBuffer, ByteSpan};
 
 pub const NUM_VERTICES: u32 = 17;
@@ -551,7 +554,8 @@ impl MDL {
                             MDL::read_byte_float4(&mut cursor).unwrap();
                         }
                         VertexType::Byte4 => {
-                            vertices[k as usize].bone_weight = MDL::read_tangent(&mut cursor).unwrap();
+                            vertices[k as usize].bone_weight =
+                                MDL::read_tangent(&mut cursor).unwrap();
                         }
                         VertexType::UnsignedShort4 => {
                             let bytes = MDL::read_unsigned_short4(&mut cursor).unwrap();
@@ -746,8 +750,7 @@ impl MDL {
                 for shape_value in shape_values {
                     let old_vertex =
                         vertices[indices[shape_value.base_indices_index as usize] as usize];
-                    let new_vertex = vertices[shape_value.replacing_vertex_index
-                        as usize];
+                    let new_vertex = vertices[shape_value.replacing_vertex_index as usize];
                     let vertex = &mut morphed_vertices
                         [indices[shape_value.base_indices_index as usize] as usize];
 
@@ -786,8 +789,9 @@ impl MDL {
                         .seek(SeekFrom::Start(
                             (model.lods[i as usize].vertex_data_offset
                                 + model.meshes[j as usize].vertex_buffer_offsets
-                                    [stream as usize]
-                                + (z as u32 * stride as u32)) as u64,
+                                    [stream as usize]
+                                + (z as u32 * stride as u32))
+                                as u64,
                         ))
                         .ok()?;
 
@@ -797,7 +801,8 @@ impl MDL {
                 }
 
                 vertex_streams.push(vertex_data);
-                vertex_stream_strides.push(mesh.vertex_buffer_strides[stream as usize] as usize);
+                vertex_stream_strides
+                    .push(mesh.vertex_buffer_strides[stream as usize] as usize);
             }
 
             parts.push(Part {
@@ -808,7 +813,7 @@ impl MDL {
                 submeshes,
                 shapes,
                 vertex_streams,
-                vertex_stream_strides
+                vertex_stream_strides,
             });
         }
 
@@ -1047,7 +1052,7 @@ impl MDL {
                                 &mut cursor,
                                 &MDL::pad_slice(&vert.position, 1.0),
                             )
-                                .ok()?;
+                            .ok()?;
                         }
                         VertexType::Half4 => {
                             MDL::write_half4(
@@ -1,8 +1,8 @@
 // SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
 // SPDX-License-Identifier: GPL-3.0-or-later
 
-use crate::model::MDL;
 use crate::ByteSpan;
+use crate::model::MDL;
 use binrw::{BinReaderExt, BinResult, BinWriterExt};
 use half::f16;
 use std::io::Cursor;
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: GPL-3.0-or-later
 
 use crate::model::NUM_VERTICES;
-use binrw::{binrw, BinRead, BinResult, BinWrite};
+use binrw::{BinRead, BinResult, BinWrite, binrw};
 use std::io::SeekFrom;
 
 /// Marker for end of stream (0xFF)
src/mtrl.rs (22 changed lines)
@@ -5,11 +5,13 @@
 
 use std::io::Cursor;
 
-use crate::common_file_operations::{Half1, Half2, Half3};
 use crate::ByteSpan;
-use binrw::{binread, binrw, BinRead, BinResult};
-use crate::mtrl::ColorDyeTable::{DawntrailColorDyeTable, LegacyColorDyeTable, OpaqueColorDyeTable};
+use crate::common_file_operations::{Half1, Half2, Half3};
+use crate::mtrl::ColorDyeTable::{
+    DawntrailColorDyeTable, LegacyColorDyeTable, OpaqueColorDyeTable,
+};
 use crate::mtrl::ColorTable::{DawntrailColorTable, LegacyColorTable, OpaqueColorTable};
+use binrw::{BinRead, BinResult, binread, binrw};
 
 #[binrw]
 #[derive(Debug)]
@@ -177,7 +179,7 @@ pub struct OpaqueColorTableData {
 pub enum ColorTable {
     LegacyColorTable(LegacyColorTableData),
     DawntrailColorTable(DawntrailColorTableData),
-    OpaqueColorTable(OpaqueColorTableData)
+    OpaqueColorTable(OpaqueColorTableData),
 }
 
 #[binread]
@@ -285,7 +287,7 @@ pub struct OpaqueColorDyeTableData {
 pub enum ColorDyeTable {
     LegacyColorDyeTable(LegacyColorDyeTableData),
     DawntrailColorDyeTable(DawntrailColorDyeTableData),
-    OpaqueColorDyeTable(OpaqueColorDyeTableData)
+    OpaqueColorDyeTable(OpaqueColorDyeTableData),
 }
 
 #[binrw]
@@ -386,7 +388,7 @@ fn parse_color_table(table_dimension_logs: u8) -> BinResult<Option<ColorTable>>
     Ok(Some(match table_dimension_logs {
         0 | 0x42 => LegacyColorTable(LegacyColorTableData::read_options(reader, endian, ())?),
         0x53 => DawntrailColorTable(DawntrailColorTableData::read_options(reader, endian, ())?),
-        _ => OpaqueColorTable(OpaqueColorTableData::read_options(reader, endian, ())?)
+        _ => OpaqueColorTable(OpaqueColorTableData::read_options(reader, endian, ())?),
     }))
 }
 
@@ -394,8 +396,12 @@ fn parse_color_table(table_dimension_logs: u8) -> BinResult<Option<ColorTable>>
 fn parse_color_dye_table(table_dimension_logs: u8) -> BinResult<Option<ColorDyeTable>> {
     Ok(Some(match table_dimension_logs {
         0 => LegacyColorDyeTable(LegacyColorDyeTableData::read_options(reader, endian, ())?),
-        0x50...0x5F => DawntrailColorDyeTable(DawntrailColorDyeTableData::read_options(reader, endian, ())?),
-        _ => OpaqueColorDyeTable(OpaqueColorDyeTableData::read_options(reader, endian, ())?)
+        0x50...0x5F => DawntrailColorDyeTable(DawntrailColorDyeTableData::read_options(
+            reader,
+            endian,
+            (),
+        )?),
+        _ => OpaqueColorDyeTable(OpaqueColorDyeTableData::read_options(reader, endian, ())?),
     }))
 }
 
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
src/patch.rs (89 changed lines)
@@ -7,13 +7,15 @@ use std::fs::{File, OpenOptions, read, read_dir};
 use std::io::{BufWriter, Cursor, Seek, SeekFrom, Write};
 use std::path::{Path, PathBuf};
 
-use binrw::{binrw, BinWrite};
-use binrw::BinRead;
-use tracing::{debug, warn};
+use crate::ByteBuffer;
+use binrw::BinRead;
+use binrw::{BinWrite, binrw};
+use tracing::{debug, warn};
 
-use crate::common::{get_platform_string, Platform, Region};
-use crate::common_file_operations::{get_string_len, read_bool_from, read_string, write_bool_as, write_string};
+use crate::common::{Platform, Region, get_platform_string};
+use crate::common_file_operations::{
+    get_string_len, read_bool_from, read_string, write_bool_as, write_string,
+};
 use crate::sqpack::{read_data_block_patch, write_data_block_patch};
 
 #[binrw]
@@ -460,8 +462,8 @@ impl ZiPatch {
                     &get_expansion_folder_sub(sub_id),
                     &filename,
                 ]
-                .iter()
-                .collect();
+                    .iter()
+                    .collect();
 
                 path.to_str().unwrap().to_string()
             };
@@ -486,8 +488,8 @@ impl ZiPatch {
                     &get_expansion_folder_sub(sub_id),
                     &filename,
                 ]
-                .iter()
-                .collect();
+                    .iter()
+                    .collect();
 
                 path.to_str().unwrap().to_string()
             };
@@ -580,7 +582,8 @@ impl ZiPatch {
                     ),
                 };
 
-                let (left, _) = file_path.rsplit_once('/').ok_or(PatchError::ParseError)?;
+                let (left, _) =
+                    file_path.rsplit_once('/').ok_or(PatchError::ParseError)?;
                 fs::create_dir_all(left)?;
 
                 let mut new_file = OpenOptions::new()
@@ -606,7 +609,8 @@ impl ZiPatch {
                     // reverse reading crc32
                     file.seek(SeekFrom::Current(-4))?;
 
-                    let mut data: Vec<u8> = Vec::with_capacity(fop.file_size as usize);
+                    let mut data: Vec<u8> =
+                        Vec::with_capacity(fop.file_size as usize);
 
                     while data.len() < fop.file_size as usize {
                         data.append(&mut read_data_block_patch(&mut file).unwrap());
@@ -639,10 +643,13 @@ impl ZiPatch {
                     }
                 }
                 SqpkFileOperation::RemoveAll => {
-                    let path: PathBuf =
-                        [data_dir, "sqpack", &get_expansion_folder(fop.expansion_id)]
-                            .iter()
-                            .collect();
+                    let path: PathBuf = [
+                        data_dir,
+                        "sqpack",
+                        &get_expansion_folder(fop.expansion_id),
+                    ]
+                    .iter()
+                    .collect();
 
                     if fs::read_dir(&path).is_ok() {
                         fs::remove_dir_all(&path)?;
@@ -702,18 +709,29 @@ impl ZiPatch {
         let new_files = crate::patch::recurse(new_directory);
 
         // A set of files not present in base, but in new (aka added files)
-        let added_files: Vec<&PathBuf> = new_files.iter().filter(|item| {
-            let metadata = fs::metadata(item).unwrap();
-            !base_files.contains(item) && metadata.len() > 0 // TODO: we filter out zero byte files here, but does SqEx do that?
-        }).collect();
+        let added_files: Vec<&PathBuf> = new_files
+            .iter()
+            .filter(|item| {
+                let metadata = fs::metadata(item).unwrap();
+                !base_files.contains(item) && metadata.len() > 0 // TODO: we filter out zero byte files here, but does SqEx do that?
+            })
+            .collect();
 
         // A set of files not present in the new directory, that used to be in base (aka removedf iles)
-        let removed_files: Vec<&PathBuf> = base_files.iter().filter(|item| !new_files.contains(item)).collect();
+        let removed_files: Vec<&PathBuf> = base_files
+            .iter()
+            .filter(|item| !new_files.contains(item))
+            .collect();
 
         // Process added files
         for file in added_files {
             let file_data = read(file.to_str().unwrap()).unwrap();
-            let relative_path = file.strip_prefix(new_directory).unwrap().to_str().unwrap().to_string();
+            let relative_path = file
+                .strip_prefix(new_directory)
+                .unwrap()
+                .to_str()
+                .unwrap()
+                .to_string();
 
             let add_file_chunk = PatchChunk {
                 size: 0,
@@ -744,7 +762,12 @@ impl ZiPatch {
 
         // Process deleted files
         for file in removed_files {
-            let relative_path = file.strip_prefix(base_directory).unwrap().to_str().unwrap().to_string();
+            let relative_path = file
+                .strip_prefix(base_directory)
+                .unwrap()
+                .to_str()
+                .unwrap()
+                .to_string();
 
             let remove_file_chunk = PatchChunk {
                 size: 0,
@@ -810,7 +833,9 @@ mod tests {
         write(data_dir.clone() + "/test.patch", &read(d).unwrap()).unwrap();
 
         // Feeding it invalid data should not panic
-        let Err(PatchError::ParseError) = ZiPatch::apply(&data_dir.clone(), &(data_dir + "/test.patch")) else {
+        let Err(PatchError::ParseError) =
+            ZiPatch::apply(&data_dir.clone(), &(data_dir + "/test.patch"))
+        else {
             panic!("Expecting a parse error!");
         };
     }
@@ -838,13 +863,19 @@ mod tests {
         let old_files = recurse(&resources_dir);
         let new_files = recurse(&data_dir);
 
-        let mut old_relative_files: Vec<&Path> = old_files.iter().filter(|item| {
-            let metadata = fs::metadata(item).unwrap();
-            metadata.len() > 0 // filter out zero byte files because ZiPatch::create does
-        }).map(|x| x.strip_prefix(&resources_dir).unwrap()).collect();
-        let mut new_relative_files: Vec<&Path> = new_files.iter().map(|x| x.strip_prefix(&data_dir).unwrap()).collect();
+        let mut old_relative_files: Vec<&Path> = old_files
+            .iter()
+            .filter(|item| {
+                let metadata = fs::metadata(item).unwrap();
+                metadata.len() > 0 // filter out zero byte files because ZiPatch::create does
+            })
+            .map(|x| x.strip_prefix(&resources_dir).unwrap())
+            .collect();
+        let mut new_relative_files: Vec<&Path> = new_files
+            .iter()
+            .map(|x| x.strip_prefix(&data_dir).unwrap())
+            .collect();
 
         assert_eq!(old_relative_files.sort(), new_relative_files.sort());
     }
 }
src/patchlist.rs (125 changed lines)
@@ -34,7 +34,7 @@ pub struct PatchList {
     /// The version that was requested from the server.
     pub requested_version: String,
     /// The list of patches.
-    pub patches: Vec<PatchEntry>
+    pub patches: Vec<PatchEntry>,
 }
 
 /// The kind of patch list.
@@ -45,7 +45,7 @@ pub enum PatchListType {
     /// A boot patch list.
     Boot,
     /// A game patch ist.
-    Game
+    Game,
 }
 
 impl PatchList {
@@ -56,7 +56,8 @@ impl PatchList {
         if let Some(patch_length_index) = encoded.find("X-Patch-Length: ") {
             let rest_of_string = &encoded[patch_length_index..];
             if let Some(end_of_number_index) = rest_of_string.find("\r\n") {
-                let patch_length_parse: Result<u64, _> = rest_of_string[0..end_of_number_index].parse();
+                let patch_length_parse: Result<u64, _> =
+                    rest_of_string[0..end_of_number_index].parse();
                 if let Ok(p) = patch_length_parse {
                     patch_length = p;
                 }
@@ -187,7 +188,10 @@ mod tests {
         let patch_list = PatchList::from_string(PatchListType::Boot, test_case);
         assert_eq!(patch_list.patches.len(), 1);
         assert_eq!(patch_list.patches[0].version, "2023.09.14.0000.0001");
-        assert_eq!(patch_list.patches[0].url, "http://patch-dl.ffxiv.com/boot/2b5cbc63/D2023.09.14.0000.0001.patch");
+        assert_eq!(
+            patch_list.patches[0].url,
+            "http://patch-dl.ffxiv.com/boot/2b5cbc63/D2023.09.14.0000.0001.patch"
+        );
         assert_eq!(patch_list.patches[0].size_on_disk, 69674819);
     }
@@ -251,7 +255,10 @@ mod tests {
         let patch_list = PatchList::from_string(PatchListType::Game, test_case);
         assert_eq!(patch_list.patches.len(), 19);
         assert_eq!(patch_list.patches[5].version, "2023.07.26.0000.0001");
-        assert_eq!(patch_list.patches[5].url, "http://patch-dl.ffxiv.com/game/ex1/6b936f08/D2023.07.26.0000.0001.patch");
+        assert_eq!(
+            patch_list.patches[5].url,
+            "http://patch-dl.ffxiv.com/game/ex1/6b936f08/D2023.07.26.0000.0001.patch"
+        );
         assert_eq!(patch_list.patches[5].size_on_disk, 5854598228);
     }
@@ -266,18 +273,17 @@ mod tests {
             id: "477D80B1_38BC_41d4_8B48_5273ADB89CAC".to_string(),
             requested_version: "D2023.04.28.0000.0001".to_string(),
             content_location: "ffxivpatch/2b5cbc63/metainfo/D2023.04.28.0000.0001.http".to_string(),
-            patches: vec![
-                PatchEntry {
-                    url: "http://patch-dl.ffxiv.com/boot/2b5cbc63/D2023.09.14.0000.0001.patch".to_string(),
-                    version: "2023.09.14.0000.0001".to_string(),
-                    hash_block_size: 0,
-                    length: 22221335,
-                    size_on_disk: 69674819,
-                    hashes: vec![],
-                    unknown_a: 19,
-                    unknown_b: 18
-                }
-            ],
+            patches: vec![PatchEntry {
+                url: "http://patch-dl.ffxiv.com/boot/2b5cbc63/D2023.09.14.0000.0001.patch"
+                    .to_string(),
+                version: "2023.09.14.0000.0001".to_string(),
+                hash_block_size: 0,
+                length: 22221335,
+                size_on_disk: 69674819,
+                hashes: vec![],
+                unknown_a: 19,
+                unknown_b: 18,
+            }],
             patch_length: 22221335,
         };
@@ -305,49 +311,48 @@ mod tests {
             id: "477D80B1_38BC_41d4_8B48_5273ADB89CAC".to_string(),
             requested_version: "2023.07.26.0000.0000".to_string(),
             content_location: "ffxivpatch/4e9a232b/metainfo/2023.07.26.0000.0000.http".to_string(),
-            patches: vec![
-                PatchEntry {
-                    url: "http://patch-dl.ffxiv.com/game/4e9a232b/D2023.09.15.0000.0000.patch".to_string(),
-                    version: "2023.09.15.0000.0000".to_string(),
-                    hash_block_size: 50000000,
-                    length: 1479062470,
-                    size_on_disk: 44145529682,
-                    unknown_a: 71,
-                    unknown_b: 11,
-                    hashes: vec![
-                        "1c66becde2a8cf26a99d0fc7c06f15f8bab2d87c".to_string(),
-                        "950725418366c965d824228bf20f0496f81e0b9a".to_string(),
-                        "cabef48f7bf00fbf18b72843bdae2f61582ad264".to_string(),
-                        "53608de567b52f5fdb43fdb8b623156317e26704".to_string(),
-                        "f0bc06cabf9ff6490f36114b25f62619d594dbe8".to_string(),
-                        "3c5e4b962cd8445bd9ee29011ecdb331d108abd8".to_string(),
-                        "88e1a2a322f09de3dc28173d4130a2829950d4e0".to_string(),
-                        "1040667917dc99b9215dfccff0e458c2e8a724a8".to_string(),
-                        "149c7e20e9e3e376377a130e0526b35fd7f43df2".to_string(),
-                        "1bb4e33807355cdf46af93ce828b6e145a9a8795".to_string(),
-                        "a79daff43db488f087da8e22bb4c21fd3a390f3c".to_string(),
-                        "6b04fadb656d467fb8318eba1c7f5ee8f030d967".to_string(),
-                        "a6641e1c894db961a49b70fda2b0d6d87be487a7".to_string(),
-                        "edf419de49f42ef19bd6814f8184b35a25e9e977".to_string(),
-                        "c1525c4df6001b66b575e2891db0284dc3a16566".to_string(),
-                        "01b7628095b07fa3c9c1aed2d66d32d118020321".to_string(),
-                        "991b137ea0ebb11bd668f82149bc2392a4cbcf52".to_string(),
-                        "ad3f74d4fca143a6cf507fc859544a4bcd501d85".to_string(),
-                        "936a0f1711e273519cae6b2da0d8b435fe6aa020".to_string(),
-                        "023f19d8d8b3ecaaf865e3170e8243dd437a384c".to_string(),
-                        "2d9e934de152956961a849e81912ca8d848265ca".to_string(),
-                        "8e32f9aa76c95c60a9dbe0967aee5792b812d5ec".to_string(),
-                        "dee052b9aa1cc8863efd61afc63ac3c2d56f9acc".to_string(),
-                        "fa81225aea53fa13a9bae1e8e02dea07de6d7052".to_string(),
-                        "59b24693b1b62ea1660bc6f96a61f7d41b3f7878".to_string(),
-                        "349b691db1853f6c0120a8e66093c763ba6e3671".to_string(),
-                        "4561eb6f954d80cdb1ece3cc4d58cbd864bf2b50".to_string(),
-                        "de94175c4db39a11d5334aefc7a99434eea8e4f9".to_string(),
-                        "55dd7215f24441d6e47d1f9b32cebdb041f2157f".to_string(),
-                        "2ca09db645cfeefa41a04251dfcb13587418347a".to_string()
-                    ],
-                }
-            ],
+            patches: vec![PatchEntry {
+                url: "http://patch-dl.ffxiv.com/game/4e9a232b/D2023.09.15.0000.0000.patch"
+                    .to_string(),
+                version: "2023.09.15.0000.0000".to_string(),
+                hash_block_size: 50000000,
+                length: 1479062470,
+                size_on_disk: 44145529682,
+                unknown_a: 71,
+                unknown_b: 11,
+                hashes: vec![
+                    "1c66becde2a8cf26a99d0fc7c06f15f8bab2d87c".to_string(),
+                    "950725418366c965d824228bf20f0496f81e0b9a".to_string(),
+                    "cabef48f7bf00fbf18b72843bdae2f61582ad264".to_string(),
+                    "53608de567b52f5fdb43fdb8b623156317e26704".to_string(),
+                    "f0bc06cabf9ff6490f36114b25f62619d594dbe8".to_string(),
+                    "3c5e4b962cd8445bd9ee29011ecdb331d108abd8".to_string(),
+                    "88e1a2a322f09de3dc28173d4130a2829950d4e0".to_string(),
+                    "1040667917dc99b9215dfccff0e458c2e8a724a8".to_string(),
+                    "149c7e20e9e3e376377a130e0526b35fd7f43df2".to_string(),
+                    "1bb4e33807355cdf46af93ce828b6e145a9a8795".to_string(),
+                    "a79daff43db488f087da8e22bb4c21fd3a390f3c".to_string(),
+                    "6b04fadb656d467fb8318eba1c7f5ee8f030d967".to_string(),
+                    "a6641e1c894db961a49b70fda2b0d6d87be487a7".to_string(),
+                    "edf419de49f42ef19bd6814f8184b35a25e9e977".to_string(),
+                    "c1525c4df6001b66b575e2891db0284dc3a16566".to_string(),
+                    "01b7628095b07fa3c9c1aed2d66d32d118020321".to_string(),
+                    "991b137ea0ebb11bd668f82149bc2392a4cbcf52".to_string(),
+                    "ad3f74d4fca143a6cf507fc859544a4bcd501d85".to_string(),
+                    "936a0f1711e273519cae6b2da0d8b435fe6aa020".to_string(),
+                    "023f19d8d8b3ecaaf865e3170e8243dd437a384c".to_string(),
+                    "2d9e934de152956961a849e81912ca8d848265ca".to_string(),
+                    "8e32f9aa76c95c60a9dbe0967aee5792b812d5ec".to_string(),
+                    "dee052b9aa1cc8863efd61afc63ac3c2d56f9acc".to_string(),
+                    "fa81225aea53fa13a9bae1e8e02dea07de6d7052".to_string(),
+                    "59b24693b1b62ea1660bc6f96a61f7d41b3f7878".to_string(),
+                    "349b691db1853f6c0120a8e66093c763ba6e3671".to_string(),
+                    "4561eb6f954d80cdb1ece3cc4d58cbd864bf2b50".to_string(),
+                    "de94175c4db39a11d5334aefc7a99434eea8e4f9".to_string(),
+                    "55dd7215f24441d6e47d1f9b32cebdb041f2157f".to_string(),
+                    "2ca09db645cfeefa41a04251dfcb13587418347a".to_string(),
+                ],
+            }],
             patch_length: 1479062470,
         };
@@ -3,10 +3,10 @@
 
 use std::io::{Cursor, SeekFrom};
 
-use crate::common_file_operations::strings_parser;
 use crate::ByteSpan;
-use binrw::binread;
+use crate::common_file_operations::strings_parser;
 use binrw::BinRead;
+use binrw::binread;
 
 #[binread]
 #[derive(Debug)]
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binread;
 use binrw::BinRead;
+use binrw::binread;
 
 #[binread]
 #[derive(Debug)]
@@ -5,7 +5,7 @@ use std::cmp::Ordering;
 use std::cmp::Ordering::{Greater, Less};
 use std::path::{Path, PathBuf};
 
-use crate::common::{get_platform_string, read_version, Platform};
+use crate::common::{Platform, get_platform_string, read_version};
 use crate::repository::RepositoryType::{Base, Expansion};
 
 /// The type of repository, discerning game data from expansion data.
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binread;
 use binrw::BinRead;
+use binrw::binread;
 
 #[binread]
 #[derive(Debug, Clone, Copy)]
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
@@ -3,9 +3,9 @@
 
 use std::io::{Cursor, SeekFrom};
 
-use crate::crc::XivCrc32;
 use crate::ByteSpan;
-use binrw::{binread, BinRead};
+use crate::crc::XivCrc32;
+use binrw::{BinRead, binread};
 
 #[binread]
 #[br(little, import {
@@ -6,11 +6,11 @@
 #![allow(clippy::upper_case_acronyms)]
 
 use binrw::helpers::until_eof;
-use binrw::{binread, BinRead};
+use binrw::{BinRead, binread};
 use std::io::{Cursor, SeekFrom};
 
-use crate::havok::{HavokAnimationContainer, HavokBinaryTagFileReader};
 use crate::ByteSpan;
+use crate::havok::{HavokAnimationContainer, HavokBinaryTagFileReader};
 
 #[binread]
 #[br(little)]
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
@@ -90,4 +90,3 @@ pub fn write_data_block_patch<T: Write + Seek>(mut writer: T, data: Vec<u8>) {
 
     data.write(&mut writer).unwrap();
 }
-
@@ -5,7 +5,7 @@ use std::io::{Cursor, Seek, SeekFrom};
 
 use crate::ByteSpan;
 use binrw::BinRead;
-use binrw::{binrw, BinReaderExt};
+use binrw::{BinReaderExt, binrw};
 
 /// Maximum number of elements in one row
 const MAX_ELEMENTS: usize = 128;
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug, Clone, Copy)]
@@ -6,8 +6,8 @@
 use std::io::{Cursor, Read, Seek, SeekFrom};
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 use bitflags::bitflags;
 use texture2ddecoder::{decode_bc1, decode_bc3, decode_bc5};
 
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
@@ -4,8 +4,8 @@
 use std::io::Cursor;
 
 use crate::ByteSpan;
-use binrw::binrw;
 use binrw::BinRead;
+use binrw::binrw;
 
 #[binrw]
 #[derive(Debug)]
@@ -3,22 +3,23 @@
 
 use hmac_sha512::Hash;
 use std::env;
-use std::io::Write;
 use std::fs::{read, read_dir};
+use std::io::Write;
 use std::process::Command;
 
 use physis::common::Platform;
 use physis::fiin::FileInfo;
 use physis::index;
+use physis::patch::ZiPatch;
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
-use physis::patch::ZiPatch;
 
 #[cfg(feature = "patch_testing")]
 fn make_temp_install_dir(name: &str) -> String {
     use physis::installer::install_game;
 
-    let installer_exe = env::var("FFXIV_INSTALLER").expect("$FFXIV_INSTALLER needs to point to the retail installer");
+    let installer_exe = env::var("FFXIV_INSTALLER")
+        .expect("$FFXIV_INSTALLER needs to point to the retail installer");
 
     let mut game_dir = env::home_dir().unwrap();
     game_dir.push(name);
@@ -92,7 +93,8 @@ fn physis_install_patch(game_directory: &str, data_directory: &str, patch_name:
 #[cfg(feature = "patch_testing")]
 fn xivlauncher_install_patch(game_directory: &str, data_directory: &str, patch_name: &str) {
     let patch_dir = env::var("FFXIV_PATCH_DIR").unwrap();
-    let patcher_exe = env::var("FFXIV_XIV_LAUNCHER_PATCHER").expect("$FFXIV_XIV_LAUNCHER_PATCHER must point to XIVLauncher.PatchInstaller.exe");
+    let patcher_exe = env::var("FFXIV_XIV_LAUNCHER_PATCHER")
+        .expect("$FFXIV_XIV_LAUNCHER_PATCHER must point to XIVLauncher.PatchInstaller.exe");
 
     let patch_path = format!("Z:\\{}\\{}", patch_dir, &patch_name);
     let game_dir = format!("Z:\\{}\\{}", game_directory, data_directory);
@@ -110,7 +112,7 @@ fn xivlauncher_install_patch(game_directory: &str, data_directory: &str, patch_n
         std::io::stderr().write_all(&output.stderr).unwrap();
     }
 
-    assert!(output. status.success());
+    assert!(output.status.success());
 }
 
 #[cfg(feature = "patch_testing")]
@@ -147,13 +149,14 @@ fn test_patching() {
         "boot/2024.03.07.0000.0001.patch",
         "boot/2024.03.21.0000.0001.patch",
         "boot/2024.04.09.0000.0001.patch",
-        "boot/2024.05.24.0000.0001.patch"
+        "boot/2024.05.24.0000.0001.patch",
     ];
 
     println!("Now beginning boot patching...");
 
     for patch in boot_patches {
-        let patch_dir = env::var("FFXIV_PATCH_DIR").expect("$FFXIV_PATCH_DIR must point to the directory where the patches are stored");
+        let patch_dir = env::var("FFXIV_PATCH_DIR")
+            .expect("$FFXIV_PATCH_DIR must point to the directory where the patches are stored");
         if !Path::new(&(patch_dir + "/" + patch)).exists() {
             println!("Skipping {} because it doesn't exist locally.", patch);
             continue;