1
Fork 0
mirror of https://github.com/redstrate/Physis.git synced 2025-06-08 07:37:46 +00:00

Fix adding multiple files in the same patch, add tests for this

Also fixes patching_test, and makes file paths written in ZiPatches
relative to its base directory.
This commit is contained in:
Joshua Goins 2024-06-29 13:25:24 -04:00
parent 657005ad53
commit f52a4b68fc
3 changed files with 53 additions and 8 deletions

View file

@ -270,7 +270,6 @@ struct SqpkFileOperationData {
// Note: counts the \0 at the end... for some reason // Note: counts the \0 at the end... for some reason
#[br(temp)] #[br(temp)]
#[bw(calc = get_string_len(path) as u32 + 1)] #[bw(calc = get_string_len(path) as u32 + 1)]
#[br(dbg)]
path_length: u32, path_length: u32,
#[brw(pad_after = 2)] #[brw(pad_after = 2)]
@ -426,7 +425,7 @@ fn recurse(path: impl AsRef<Path>) -> Vec<PathBuf> {
return vec![]; return vec![];
}; };
if meta.is_dir() { if meta.is_dir() {
return crate::patch::recurse(entry.path()); return recurse(entry.path());
} }
if meta.is_file() { if meta.is_file() {
return vec![entry.path()]; return vec![entry.path()];
@ -704,7 +703,10 @@ impl ZiPatch {
let new_files = crate::patch::recurse(new_directory); let new_files = crate::patch::recurse(new_directory);
// A set of files not present in base, but in new (aka added files) // A set of files not present in base, but in new (aka added files)
let added_files: Vec<&PathBuf> = new_files.iter().filter(|item| !base_files.contains(item)).collect(); let added_files: Vec<&PathBuf> = new_files.iter().filter(|item| {
let metadata = fs::metadata(item).unwrap();
!base_files.contains(item) && metadata.len() > 0 // TODO: we filter out zero byte files here, but does SqEx do that?
}).collect();
// A set of files not present in the new directory, that used to be in base (aka removed files) // A set of files not present in the new directory, that used to be in base (aka removed files)
let removed_files: Vec<&PathBuf> = base_files.iter().filter(|item| !new_files.contains(item)).collect(); let removed_files: Vec<&PathBuf> = base_files.iter().filter(|item| !new_files.contains(item)).collect();
@ -712,6 +714,7 @@ impl ZiPatch {
// Process added files // Process added files
for file in added_files { for file in added_files {
let file_data = read(file.to_str().unwrap()).unwrap(); let file_data = read(file.to_str().unwrap()).unwrap();
let relative_path = file.strip_prefix(new_directory).unwrap().to_str().unwrap().to_string();
let add_file_chunk = PatchChunk { let add_file_chunk = PatchChunk {
size: 0, size: 0,
@ -722,7 +725,7 @@ impl ZiPatch {
offset: 0, offset: 0,
file_size: file_data.len() as u64, file_size: file_data.len() as u64,
expansion_id: 0, expansion_id: 0,
path: file.to_str().unwrap().parse().unwrap(), path: relative_path,
}), }),
}), }),
crc32: 0, crc32: 0,
@ -742,6 +745,8 @@ impl ZiPatch {
// Process deleted files // Process deleted files
for file in removed_files { for file in removed_files {
let relative_path = file.strip_prefix(base_directory).unwrap().to_str().unwrap().to_string();
let remove_file_chunk = PatchChunk { let remove_file_chunk = PatchChunk {
size: 0, size: 0,
chunk_type: ChunkType::Sqpk(SqpkChunk { chunk_type: ChunkType::Sqpk(SqpkChunk {
@ -751,7 +756,7 @@ impl ZiPatch {
offset: 0, offset: 0,
file_size: 0, file_size: 0,
expansion_id: 0, expansion_id: 0,
path: file.to_str().unwrap().parse().unwrap(), path: relative_path,
}), }),
}), }),
crc32: 0, crc32: 0,
@ -777,6 +782,8 @@ impl ZiPatch {
mod tests { mod tests {
use std::fs::{read, write}; use std::fs::{read, write};
use std::path::PathBuf; use std::path::PathBuf;
use std::thread::sleep;
use std::time::Duration;
use super::*; use super::*;
@ -806,5 +813,41 @@ mod tests {
// Feeding it invalid data should not panic // Feeding it invalid data should not panic
ZiPatch::apply(&data_dir.clone(), &(data_dir + "/test.patch")); ZiPatch::apply(&data_dir.clone(), &(data_dir + "/test.patch"));
} }
#[test]
fn test_add_file_op() {
    // Round-trip check for the AddFile operation: build a patch that
    // recreates the resources directory inside a fresh data directory,
    // apply it, then verify both trees contain the same relative paths.
    let data_dir = prepare_data_dir();

    let mut resources_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    resources_dir.push("resources/tests");

    // Let's create a patch that re-creates the resources dir into our data directory
    let patch = ZiPatch::create(&*data_dir, resources_dir.to_str().unwrap()).unwrap();

    // Don't ignore the Result: if writing the patch fails, the rest of the
    // test would report a misleading failure.
    write(data_dir.clone() + "/test.patch", &patch).expect("failed to write patch file");

    ZiPatch::apply(&data_dir.clone(), &(data_dir.clone() + "/test.patch"))
        .expect("failed to apply patch");

    // FIXME: For some reason, running this test by itself is fine. However when running in the test suite, it trips over itself.
    // So this is a really cruddy way to wait for the files to settle.
    sleep(Duration::new(1, 0));

    fs::remove_file(data_dir.clone() + "/test.patch").expect("failed to remove patch file");

    let old_files = recurse(&resources_dir);
    let new_files = recurse(&data_dir);

    // Filter out zero byte files because ZiPatch::create does, then compare
    // paths relative to their respective roots.
    let old_relative_files: Vec<&Path> = old_files
        .iter()
        .filter(|item| {
            let metadata = fs::metadata(item).unwrap();
            metadata.len() > 0
        })
        .map(|x| x.strip_prefix(&resources_dir).unwrap())
        .collect();
    let new_relative_files: Vec<&Path> = new_files
        .iter()
        .map(|x| x.strip_prefix(&data_dir).unwrap())
        .collect();

    assert_eq!(old_relative_files, new_relative_files);
}
} }

View file

@ -76,10 +76,12 @@ pub fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
} }
pub fn write_data_block_patch<T: Write + Seek>(mut writer: T, data: Vec<u8>) { pub fn write_data_block_patch<T: Write + Seek>(mut writer: T, data: Vec<u8>) {
let new_file_size: usize = (data.len() as usize + 143) & 0xFFFFFF80;
// This only adds uncompressed data for now, to simplify implementation // This only adds uncompressed data for now, to simplify implementation
// TODO: write compressed blocks // TODO: write compressed blocks
let block_header = BlockHeader { let block_header = BlockHeader {
size: 128, // TODO: i have no idea what this value is from size: (new_file_size - data.len()) as u32, // TODO: i have no idea what this value is from
compression: CompressionMode::Uncompressed { compression: CompressionMode::Uncompressed {
file_size: data.len() as i32, file_size: data.len() as i32,
}, },

View file

@ -2,7 +2,6 @@
// SPDX-License-Identifier: GPL-3.0-or-later // SPDX-License-Identifier: GPL-3.0-or-later
use hmac_sha512::Hash; use hmac_sha512::Hash;
use physis::patch::apply_patch;
use std::env; use std::env;
use std::io::Write; use std::io::Write;
use std::fs::{read, read_dir}; use std::fs::{read, read_dir};
@ -13,6 +12,7 @@ use physis::fiin::FileInfo;
use physis::index; use physis::index;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use physis::patch::ZiPatch;
#[cfg(feature = "patch_testing")] #[cfg(feature = "patch_testing")]
fn make_temp_install_dir(name: &str) -> String { fn make_temp_install_dir(name: &str) -> String {
@ -86,7 +86,7 @@ fn physis_install_patch(game_directory: &str, data_directory: &str, patch_name:
let patch_path = format!("{}/{}", patch_dir, &patch_name); let patch_path = format!("{}/{}", patch_dir, &patch_name);
let data_dir = format!("{}/{}", game_directory, data_directory); let data_dir = format!("{}/{}", game_directory, data_directory);
apply_patch(&data_dir, &patch_path).unwrap(); ZiPatch::apply(&data_dir, &patch_path).unwrap();
} }
#[cfg(feature = "patch_testing")] #[cfg(feature = "patch_testing")]