2023-08-06 08:25:04 -04:00
|
|
|
// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
|
|
|
|
// SPDX-License-Identifier: GPL-3.0-or-later
|
|
|
|
|
2023-10-13 16:58:08 -04:00
|
|
|
use std::collections::HashMap;
|
2023-08-06 08:25:04 -04:00
|
|
|
use std::fs;
|
|
|
|
use std::fs::{DirEntry, ReadDir};
|
|
|
|
use std::path::PathBuf;
|
|
|
|
|
2023-11-10 17:25:53 -05:00
|
|
|
use tracing::{debug, warn};
|
2023-08-06 08:25:04 -04:00
|
|
|
|
2024-04-20 13:18:03 -04:00
|
|
|
use crate::common::{read_version, Language, Platform};
|
2022-07-19 19:29:41 -04:00
|
|
|
use crate::dat::DatFile;
|
2022-07-21 19:58:58 -04:00
|
|
|
use crate::exd::EXD;
|
|
|
|
use crate::exh::EXH;
|
|
|
|
use crate::exl::EXL;
|
2024-05-04 14:20:45 -04:00
|
|
|
use crate::index::{Index2File, IndexEntry, IndexFile};
|
2022-08-09 21:51:52 -04:00
|
|
|
use crate::patch::{apply_patch, PatchError};
|
2024-04-20 13:18:03 -04:00
|
|
|
use crate::repository::{string_to_category, Category, Repository};
|
|
|
|
use crate::ByteBuffer;
|
2022-07-19 19:29:41 -04:00
|
|
|
|
|
|
|
/// Framework for operating on game data.
pub struct GameData {
    /// The game directory to operate on.
    pub game_directory: String,

    /// Repositories in the game directory.
    pub repositories: Vec<Repository>,

    // Lazily-populated cache of parsed .index files, keyed by index file path.
    index_files: HashMap<String, IndexFile>,
    // Lazily-populated cache of parsed .index2 files, keyed by index2 file path.
    index2_files: HashMap<String, Index2File>,
}
|
|
|
|
|
|
|
|
fn is_valid(path: &str) -> bool {
|
2022-07-27 21:21:50 -04:00
|
|
|
let d = PathBuf::from(path);
|
2022-07-19 19:29:41 -04:00
|
|
|
|
|
|
|
if fs::metadata(d.as_path()).is_err() {
|
2023-11-10 17:25:53 -05:00
|
|
|
warn!("Game directory not found.");
|
2022-07-19 19:29:41 -04:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
true
|
|
|
|
}
|
|
|
|
|
2023-12-02 20:12:12 -05:00
|
|
|
/// Possible actions to repair game files
#[derive(Debug)]
pub enum RepairAction {
    /// Indicates a version file is missing for a repository
    VersionFileMissing,
    /// The version file is missing, but it can be restored via a backup
    /// (a `.bck` file next to where the `.ver` file should be)
    VersionFileCanRestore,
}
|
|
|
|
|
2023-03-31 17:30:08 -04:00
|
|
|
#[derive(Debug)]
/// Possible errors emitted through the repair process
pub enum RepairError<'a> {
    /// Failed to repair a repository; carries a reference to the repository
    /// that could not be repaired.
    FailedRepair(&'a Repository),
}
|
|
|
|
|
2022-07-19 19:29:41 -04:00
|
|
|
impl GameData {
|
|
|
|
/// Read game data from an existing game installation.
|
|
|
|
///
|
|
|
|
/// This will return _None_ if the game directory is not valid, but it does not check the validity
|
|
|
|
/// of each individual file.
|
|
|
|
///
|
|
|
|
/// # Example
|
|
|
|
///
|
|
|
|
/// ```
|
2024-04-15 19:40:34 -04:00
|
|
|
/// # use physis::common::Platform;
|
|
|
|
/// use physis::gamedata::GameData;
|
|
|
|
/// GameData::from_existing(Platform::Win32, "$FFXIV/game");
|
2022-07-19 19:29:41 -04:00
|
|
|
/// ```
|
2024-04-15 19:40:34 -04:00
|
|
|
pub fn from_existing(platform: Platform, directory: &str) -> Option<GameData> {
|
2023-04-06 14:51:40 -04:00
|
|
|
debug!(directory, "Loading game directory");
|
|
|
|
|
2022-07-19 19:29:41 -04:00
|
|
|
match is_valid(directory) {
|
2024-04-15 19:40:34 -04:00
|
|
|
true => {
|
|
|
|
let mut data = Self {
|
|
|
|
game_directory: String::from(directory),
|
|
|
|
repositories: vec![],
|
|
|
|
index_files: HashMap::new(),
|
2024-04-20 13:18:03 -04:00
|
|
|
index2_files: HashMap::new(),
|
2024-04-15 19:40:34 -04:00
|
|
|
};
|
|
|
|
data.reload_repositories(platform);
|
|
|
|
Some(data)
|
|
|
|
}
|
2022-07-19 19:29:41 -04:00
|
|
|
false => {
|
2023-11-10 17:25:53 -05:00
|
|
|
warn!("Game data is not valid!");
|
2022-07-19 19:29:41 -04:00
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2024-04-20 13:18:03 -04:00
|
|
|
|
2024-04-15 19:40:34 -04:00
|
|
|
fn reload_repositories(&mut self, platform: Platform) {
|
2022-07-19 19:29:41 -04:00
|
|
|
self.repositories.clear();
|
|
|
|
|
|
|
|
let mut d = PathBuf::from(self.game_directory.as_str());
|
2023-05-13 17:30:15 -04:00
|
|
|
|
|
|
|
// add initial ffxiv directory
|
2024-04-20 13:18:03 -04:00
|
|
|
if let Some(base_repository) =
|
|
|
|
Repository::from_existing_base(platform.clone(), d.to_str().unwrap())
|
|
|
|
{
|
2023-05-13 17:30:15 -04:00
|
|
|
self.repositories.push(base_repository);
|
|
|
|
}
|
|
|
|
|
|
|
|
// add expansions
|
2022-07-19 19:29:41 -04:00
|
|
|
d.push("sqpack");
|
|
|
|
|
2023-05-13 17:30:15 -04:00
|
|
|
if let Ok(repository_paths) = fs::read_dir(d.as_path()) {
|
2024-04-20 13:18:03 -04:00
|
|
|
let repository_paths: ReadDir = repository_paths;
|
2022-07-19 19:29:41 -04:00
|
|
|
|
2024-04-20 13:18:03 -04:00
|
|
|
let repository_paths: Vec<DirEntry> = repository_paths
|
2023-05-13 17:30:15 -04:00
|
|
|
.filter_map(Result::ok)
|
|
|
|
.filter(|s| s.file_type().unwrap().is_dir())
|
|
|
|
.collect();
|
2023-04-06 14:51:40 -04:00
|
|
|
|
2023-05-13 17:30:15 -04:00
|
|
|
for repository_path in repository_paths {
|
2024-04-20 13:18:03 -04:00
|
|
|
if let Some(expansion_repository) = Repository::from_existing_expansion(
|
|
|
|
platform.clone(),
|
|
|
|
repository_path.path().to_str().unwrap(),
|
|
|
|
) {
|
2023-05-13 17:30:15 -04:00
|
|
|
self.repositories.push(expansion_repository);
|
|
|
|
}
|
|
|
|
}
|
2022-07-19 19:29:41 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
self.repositories.sort();
|
|
|
|
}
|
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
fn get_dat_file(&self, path: &str, chunk: u8, data_file_id: u32) -> Option<DatFile> {
|
2022-08-09 20:03:18 -04:00
|
|
|
let (repository, category) = self.parse_repository_category(path).unwrap();
|
|
|
|
|
2022-08-16 11:52:07 -04:00
|
|
|
let dat_path: PathBuf = [
|
|
|
|
self.game_directory.clone(),
|
2022-08-09 20:03:18 -04:00
|
|
|
"sqpack".to_string(),
|
|
|
|
repository.name.clone(),
|
2024-05-04 14:20:45 -04:00
|
|
|
repository.dat_filename(chunk, category, data_file_id),
|
2022-08-16 11:52:07 -04:00
|
|
|
]
|
|
|
|
.iter()
|
|
|
|
.collect();
|
2022-08-09 20:03:18 -04:00
|
|
|
|
|
|
|
DatFile::from_existing(dat_path.to_str()?)
|
|
|
|
}
|
|
|
|
|
2022-07-19 19:29:41 -04:00
|
|
|
/// Checks if a file located at `path` exists.
|
|
|
|
///
|
|
|
|
/// # Example
|
|
|
|
///
|
|
|
|
/// ```should_panic
|
2024-04-15 19:40:34 -04:00
|
|
|
/// # use physis::common::Platform;
|
|
|
|
/// use physis::gamedata::GameData;
|
|
|
|
/// # let mut game = GameData::from_existing(Platform::Win32, "SquareEnix/Final Fantasy XIV - A Realm Reborn/game").unwrap();
|
2022-07-19 19:29:41 -04:00
|
|
|
/// if game.exists("exd/cid.exl") {
|
|
|
|
/// println!("Cid really does exist!");
|
|
|
|
/// } else {
|
|
|
|
/// println!("Oh noes!");
|
|
|
|
/// }
|
|
|
|
/// ```
|
2023-10-13 16:58:08 -04:00
|
|
|
pub fn exists(&mut self, path: &str) -> bool {
|
2024-04-29 19:09:51 -04:00
|
|
|
let Some(index_path) = self.get_index_filenames(path) else {
|
|
|
|
return false;
|
|
|
|
};
|
2022-07-19 19:29:41 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
return self.find_entry(path).is_some();
|
2022-07-19 19:29:41 -04:00
|
|
|
}
|
|
|
|
|
2022-08-06 18:05:16 -04:00
|
|
|
/// Extracts the file located at `path`. This is returned as an in-memory buffer, and will usually
|
2022-07-19 19:29:41 -04:00
|
|
|
/// have to be further parsed.
|
|
|
|
///
|
|
|
|
/// # Example
|
|
|
|
///
|
|
|
|
/// ```should_panic
|
|
|
|
/// # use physis::gamedata::GameData;
|
|
|
|
/// # use std::io::Write;
|
2024-04-15 19:40:34 -04:00
|
|
|
/// use physis::common::Platform;
|
|
|
|
/// # let mut game = GameData::from_existing(Platform::Win32, "SquareEnix/Final Fantasy XIV - A Realm Reborn/game").unwrap();
|
2022-07-19 19:29:41 -04:00
|
|
|
/// let data = game.extract("exd/root.exl").unwrap();
|
|
|
|
///
|
|
|
|
/// let mut file = std::fs::File::create("root.exl").unwrap();
|
2022-08-09 23:44:11 -04:00
|
|
|
/// file.write(data.as_slice()).unwrap();
|
2022-07-19 19:29:41 -04:00
|
|
|
/// ```
|
2023-10-13 16:58:08 -04:00
|
|
|
pub fn extract(&mut self, path: &str) -> Option<ByteBuffer> {
|
2024-04-20 13:18:03 -04:00
|
|
|
debug!(file = path, "Extracting file");
|
2023-04-06 14:51:40 -04:00
|
|
|
|
2024-04-14 11:24:18 -04:00
|
|
|
let slice = self.find_entry(path);
|
2022-07-19 19:29:41 -04:00
|
|
|
match slice {
|
2024-05-04 14:20:45 -04:00
|
|
|
Some((entry, chunk)) => {
|
|
|
|
let mut dat_file = self.get_dat_file(path, chunk, entry.data_file_id.into())?;
|
2022-07-19 19:29:41 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
dat_file.read_from_offset(entry.offset as u64)
|
2022-07-19 19:29:41 -04:00
|
|
|
}
|
2022-08-16 11:52:07 -04:00
|
|
|
None => None,
|
2022-07-19 19:29:41 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Parses a path structure and spits out the corresponding category and repository.
|
|
|
|
fn parse_repository_category(&self, path: &str) -> Option<(&Repository, Category)> {
|
|
|
|
let tokens: Vec<&str> = path.split('/').collect(); // TODO: use split_once here
|
|
|
|
|
|
|
|
if tokens.len() < 2 {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
let repository_token = tokens[1];
|
|
|
|
|
2022-07-19 19:29:41 -04:00
|
|
|
for repository in &self.repositories {
|
|
|
|
if repository.name == repository_token {
|
2024-05-04 14:20:45 -04:00
|
|
|
return Some((repository, string_to_category(tokens[0])?));
|
2022-07-19 19:29:41 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Some((&self.repositories[0], string_to_category(tokens[0])?))
|
|
|
|
}
|
2022-07-21 19:58:58 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
fn get_index_filenames(&self, path: &str) -> Option<(Vec<(String, u8)>, Vec<(String, u8)>)> {
|
2024-04-29 19:09:51 -04:00
|
|
|
let (repository, category) = self.parse_repository_category(path)?;
|
2023-10-13 16:58:08 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
let mut index1_filenames = vec![];
|
|
|
|
let mut index2_filenames = vec![];
|
2023-10-13 16:58:08 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
for chunk in 0..255 {
|
|
|
|
let index_path: PathBuf = [
|
|
|
|
&self.game_directory,
|
|
|
|
"sqpack",
|
|
|
|
&repository.name,
|
|
|
|
&repository.index_filename(chunk, category),
|
|
|
|
]
|
|
|
|
.iter()
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
index1_filenames.push((index_path.into_os_string().into_string().unwrap(), chunk));
|
|
|
|
|
|
|
|
let index2_path: PathBuf = [
|
|
|
|
&self.game_directory,
|
|
|
|
"sqpack",
|
|
|
|
&repository.name,
|
|
|
|
&repository.index2_filename(chunk, category),
|
|
|
|
]
|
|
|
|
.iter()
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
index2_filenames.push((index2_path.into_os_string().into_string().unwrap(), chunk));
|
|
|
|
}
|
2024-04-14 11:24:18 -04:00
|
|
|
|
2024-04-29 19:09:51 -04:00
|
|
|
Some((
|
2024-05-04 14:20:45 -04:00
|
|
|
index1_filenames,
|
|
|
|
index2_filenames
|
2024-04-29 19:09:51 -04:00
|
|
|
))
|
2023-10-13 16:58:08 -04:00
|
|
|
}
|
|
|
|
|
2023-12-02 20:12:12 -05:00
|
|
|
/// Read an excel sheet by name (e.g. "Achievement")
|
2023-10-13 16:58:08 -04:00
|
|
|
pub fn read_excel_sheet_header(&mut self, name: &str) -> Option<EXH> {
|
2022-08-09 22:43:04 -04:00
|
|
|
let root_exl_file = self.extract("exd/root.exl")?;
|
2022-07-21 19:58:58 -04:00
|
|
|
|
2022-08-09 22:43:04 -04:00
|
|
|
let root_exl = EXL::from_existing(&root_exl_file)?;
|
2022-07-21 19:58:58 -04:00
|
|
|
|
|
|
|
for (row, _) in root_exl.entries {
|
|
|
|
if row == name {
|
|
|
|
let new_filename = name.to_lowercase();
|
|
|
|
|
|
|
|
let path = format!("exd/{new_filename}.exh");
|
|
|
|
|
2022-08-16 11:52:07 -04:00
|
|
|
return EXH::from_existing(&self.extract(&path)?);
|
2022-07-21 19:58:58 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
|
|
|
|
2023-12-02 20:12:12 -05:00
|
|
|
/// Returns all known sheet names listed in the root list
|
2023-10-13 16:58:08 -04:00
|
|
|
pub fn get_all_sheet_names(&mut self) -> Option<Vec<String>> {
|
2023-04-09 15:35:42 -04:00
|
|
|
let root_exl_file = self.extract("exd/root.exl")?;
|
|
|
|
|
|
|
|
let root_exl = EXL::from_existing(&root_exl_file)?;
|
|
|
|
|
|
|
|
let mut names = vec![];
|
|
|
|
for (row, _) in root_exl.entries {
|
|
|
|
names.push(row);
|
|
|
|
}
|
|
|
|
|
|
|
|
Some(names)
|
|
|
|
}
|
|
|
|
|
2023-12-02 20:12:12 -05:00
|
|
|
/// Read an excel sheet
|
2022-08-16 11:52:07 -04:00
|
|
|
pub fn read_excel_sheet(
|
2023-10-13 16:58:08 -04:00
|
|
|
&mut self,
|
2022-08-16 11:52:07 -04:00
|
|
|
name: &str,
|
|
|
|
exh: &EXH,
|
|
|
|
language: Language,
|
|
|
|
page: usize,
|
|
|
|
) -> Option<EXD> {
|
|
|
|
let exd_path = format!(
|
|
|
|
"exd/{}",
|
|
|
|
EXD::calculate_filename(name, language, &exh.pages[page])
|
|
|
|
);
|
2022-07-21 19:58:58 -04:00
|
|
|
|
2023-04-09 15:35:54 -04:00
|
|
|
let exd_file = self.extract(&exd_path)?;
|
2022-07-21 19:58:58 -04:00
|
|
|
|
2022-08-16 11:50:18 -04:00
|
|
|
EXD::from_existing(exh, &exd_file)
|
2022-07-21 19:58:58 -04:00
|
|
|
}
|
2022-08-09 21:51:52 -04:00
|
|
|
|
2023-12-02 20:12:12 -05:00
|
|
|
/// Applies the patch to game data and returns any errors it encounters. This function will not update the version in the GameData struct.
pub fn apply_patch(&self, patch_path: &str) -> Result<(), PatchError> {
    // Thin wrapper: delegates to the free-standing patcher rooted at the game directory.
    apply_patch(&self.game_directory, patch_path)
}
|
2022-10-25 11:03:05 -04:00
|
|
|
|
|
|
|
/// Detects whether or not the game files need a repair, right now it only checks for invalid
|
|
|
|
/// version files.
|
|
|
|
/// If the repair is needed, a list of invalid repositories is given.
|
|
|
|
pub fn needs_repair(&self) -> Option<Vec<(&Repository, RepairAction)>> {
|
2022-10-25 13:02:06 -04:00
|
|
|
let mut repositories: Vec<(&Repository, RepairAction)> = Vec::new();
|
2022-10-25 11:03:05 -04:00
|
|
|
for repository in &self.repositories {
|
|
|
|
if repository.version.is_none() {
|
|
|
|
// Check to see if a .bck file is created, as we might be able to use that
|
|
|
|
let ver_bak_path: PathBuf = [
|
|
|
|
self.game_directory.clone(),
|
|
|
|
"sqpack".to_string(),
|
|
|
|
repository.name.clone(),
|
|
|
|
format!("{}.bck", repository.name),
|
|
|
|
]
|
2022-10-25 13:02:06 -04:00
|
|
|
.iter()
|
|
|
|
.collect();
|
2022-10-25 11:03:05 -04:00
|
|
|
|
|
|
|
let repair_action = if read_version(&ver_bak_path).is_some() {
|
|
|
|
RepairAction::VersionFileCanRestore
|
|
|
|
} else {
|
|
|
|
RepairAction::VersionFileMissing
|
|
|
|
};
|
|
|
|
|
|
|
|
repositories.push((repository, repair_action));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if repositories.is_empty() {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
Some(repositories)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Performs the repair, assuming any damaging effects it may have
|
|
|
|
/// Returns true only if all actions were taken are successful.
|
|
|
|
/// NOTE: This is a destructive operation, especially for InvalidVersion errors.
|
2022-10-25 13:02:06 -04:00
|
|
|
pub fn perform_repair<'a>(
|
|
|
|
&self,
|
|
|
|
repositories: &Vec<(&'a Repository, RepairAction)>,
|
|
|
|
) -> Result<(), RepairError<'a>> {
|
2022-10-25 11:03:05 -04:00
|
|
|
for (repository, action) in repositories {
|
|
|
|
let ver_path: PathBuf = [
|
|
|
|
self.game_directory.clone(),
|
|
|
|
"sqpack".to_string(),
|
|
|
|
repository.name.clone(),
|
|
|
|
format!("{}.ver", repository.name),
|
|
|
|
]
|
2022-10-25 13:02:06 -04:00
|
|
|
.iter()
|
|
|
|
.collect();
|
2022-10-25 11:03:05 -04:00
|
|
|
|
2022-10-25 13:02:06 -04:00
|
|
|
let new_version: String = match action {
|
2022-10-25 11:03:05 -04:00
|
|
|
RepairAction::VersionFileMissing => {
|
|
|
|
let repo_path: PathBuf = [
|
|
|
|
self.game_directory.clone(),
|
|
|
|
"sqpack".to_string(),
|
2022-10-25 13:02:06 -04:00
|
|
|
repository.name.clone(),
|
2022-10-25 11:03:05 -04:00
|
|
|
]
|
2022-10-25 13:02:06 -04:00
|
|
|
.iter()
|
|
|
|
.collect();
|
2022-10-25 11:03:05 -04:00
|
|
|
|
2022-12-17 08:23:19 -05:00
|
|
|
fs::remove_dir_all(&repo_path)
|
2022-10-25 13:02:06 -04:00
|
|
|
.ok()
|
2022-10-25 11:03:05 -04:00
|
|
|
.ok_or(RepairError::FailedRepair(repository))?;
|
|
|
|
|
2022-12-17 08:23:19 -05:00
|
|
|
fs::create_dir_all(&repo_path)
|
2022-10-25 13:02:06 -04:00
|
|
|
.ok()
|
2022-10-25 11:03:05 -04:00
|
|
|
.ok_or(RepairError::FailedRepair(repository))?;
|
|
|
|
|
2022-10-25 13:02:06 -04:00
|
|
|
"2012.01.01.0000.0000".to_string() // TODO: is this correct for expansions?
|
2022-10-25 11:03:05 -04:00
|
|
|
}
|
|
|
|
RepairAction::VersionFileCanRestore => {
|
|
|
|
let ver_bak_path: PathBuf = [
|
|
|
|
self.game_directory.clone(),
|
|
|
|
"sqpack".to_string(),
|
|
|
|
repository.name.clone(),
|
|
|
|
format!("{}.bck", repository.name),
|
|
|
|
]
|
2022-10-25 13:02:06 -04:00
|
|
|
.iter()
|
|
|
|
.collect();
|
2022-10-25 11:03:05 -04:00
|
|
|
|
2022-10-25 13:02:06 -04:00
|
|
|
read_version(&ver_bak_path).ok_or(RepairError::FailedRepair(repository))?
|
2022-10-25 11:03:05 -04:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2023-02-20 16:07:48 -05:00
|
|
|
fs::write(ver_path, new_version)
|
2022-10-25 13:02:06 -04:00
|
|
|
.ok()
|
2022-10-25 11:03:05 -04:00
|
|
|
.ok_or(RepairError::FailedRepair(repository))?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2023-10-13 16:58:08 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
fn cache_index_file(&mut self, filename: &str) {
|
|
|
|
if !self.index_files.contains_key(filename) {
|
|
|
|
if let Some(index_file) = IndexFile::from_existing(filename) {
|
|
|
|
self.index_files.insert(filename.to_string(), index_file);
|
2024-04-14 11:24:18 -04:00
|
|
|
}
|
|
|
|
}
|
2024-05-04 14:20:45 -04:00
|
|
|
}
|
2024-04-14 11:24:18 -04:00
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
fn cache_index2_file(&mut self, filename: &str) {
|
|
|
|
if !self.index2_files.contains_key(filename) {
|
|
|
|
if let Some(index_file) = Index2File::from_existing(filename) {
|
2024-04-20 13:18:03 -04:00
|
|
|
self.index2_files
|
2024-05-04 14:20:45 -04:00
|
|
|
.insert(filename.to_string(), index_file);
|
2023-11-10 17:17:34 -05:00
|
|
|
}
|
2023-10-13 16:58:08 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Looks up a previously cached .index file; `None` if it was never cached.
fn get_index_file(&self, filename: &str) -> Option<&IndexFile> {
    self.index_files.get(filename)
}
|
2024-04-14 11:24:18 -04:00
|
|
|
|
|
|
|
/// Looks up a previously cached .index2 file; `None` if it was never cached.
fn get_index2_file(&self, filename: &str) -> Option<&Index2File> {
    self.index2_files.get(filename)
}
|
|
|
|
|
2024-05-04 14:20:45 -04:00
|
|
|
/// Searches every candidate index file for `path`, returning the matching
/// entry together with the chunk number it was found in.
fn find_entry(&mut self, path: &str) -> Option<(IndexEntry, u8)> {
    let (index_paths, index2_paths) = self.get_index_filenames(path)?;

    // .index files are consulted first, then .index2 as a fallback.
    for (index_path, chunk) in index_paths {
        self.cache_index_file(&index_path);

        let found = self
            .get_index_file(&index_path)
            .and_then(|index_file| index_file.find_entry(path));
        if let Some(entry) = found {
            return Some((entry, chunk));
        }
    }

    for (index2_path, chunk) in index2_paths {
        self.cache_index2_file(&index2_path);

        let found = self
            .get_index2_file(&index2_path)
            .and_then(|index_file| index_file.find_entry(path));
        if let Some(entry) = found {
            return Some((entry, chunk));
        }
    }

    None
}
|
2022-07-19 19:29:41 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use crate::repository::Category::EXD;

    use super::*;

    /// Builds a GameData instance backed by the fixture installation at
    /// resources/tests/valid_sqpack/game.
    fn common_setup_data() -> GameData {
        let mut game_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        game_dir.push("resources/tests");
        game_dir.push("valid_sqpack");
        game_dir.push("game");

        GameData::from_existing(Platform::Win32, game_dir.to_str().unwrap()).unwrap()
    }

    #[test]
    fn repository_ordering() {
        let data = common_setup_data();

        // The base repository must sort before the expansions.
        let names: Vec<&str> = data.repositories.iter().map(|r| r.name.as_str()).collect();
        assert_eq!(&names[..3], &["ffxiv", "ex1", "ex2"]);
    }

    #[test]
    fn repository_and_category_parsing() {
        let data = common_setup_data();

        assert_eq!(
            data.parse_repository_category("exd/root.exl").unwrap(),
            (&data.repositories[0], EXD)
        );
        assert!(data
            .parse_repository_category("what/some_font.dat")
            .is_none());
    }
}
|