1
Fork 0
mirror of https://github.com/redstrate/Physis.git synced 2025-04-21 20:27:46 +00:00
physis/src/dat.rs

442 lines
14 KiB
Rust
Raw Normal View History

// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
// SPDX-License-Identifier: GPL-3.0-or-later
use std::io::{Cursor, Read, Seek, SeekFrom};
use std::io::Write;
use binrw::{BinReaderExt, binrw};
use binrw::BinRead;
use binrw::BinWrite;
2022-07-19 19:29:41 -04:00
use crate::gamedata::MemoryBuffer;
#[cfg(feature = "visual_data")]
use crate::model::ModelFileHeader;
2022-07-19 19:29:41 -04:00
use crate::sqpack::read_data_block;
#[binrw]
#[brw(repr = i32)]
#[derive(Debug, PartialEq, Eq)]
/// The file type of the data entry.
pub enum FileType {
    /// Empty entry, usually invalid.
    Empty = 1,
    /// Encompasses every file that is not a model or a texture, which are stored in a special fashion.
    Standard,
    /// Model (MDL) files.
    Model,
    /// Texture (TEX) files.
    Texture,
}
/// Extra file-info data present for [`FileType::Standard`] entries: how many
/// data blocks make up the file contents.
#[derive(BinRead)]
struct StandardFileBlock {
    // 8 bytes are skipped before this field; their meaning is not used here.
    #[br(pad_before = 8)]
    num_blocks: u32,
}
/// Block table entry for one texture LOD (mip level), as read by
/// `read_texture_file`. Offsets/sizes refer to the compressed data stream.
#[derive(BinRead, Debug)]
#[allow(dead_code)]
struct TextureLodBlock {
    /// Offset of this LOD's compressed data, relative to the end of the file info header.
    compressed_offset: u32,
    /// Compressed size of this LOD; a value of 0 for LOD 0 means there is no separate header.
    compressed_size: u32,
    /// Size of this LOD once decompressed. (Unused by the reader in this file.)
    decompressed_size: u32,
    /// First block index for this LOD. (Unused by the reader in this file.)
    block_offset: u32,
    /// Number of data blocks that make up this LOD.
    block_count: u32,
}
/// Trait alias for any numeric type usable inside [`ModelMemorySizes`]: it must be
/// readable/writable by binrw with no arguments, accumulate via `+=`, be `Copy`,
/// and have a zero-like `Default` used to seed sums.
pub trait AnyNumberType<'a>: BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static {}

/// Blanket impl: every type that satisfies the bounds automatically implements the alias.
impl<'a, T> AnyNumberType<'a> for T where T: BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static {}
/// Per-category size/offset/count table used by model (MDL) file info.
/// Instantiated with `u32` for byte sizes/offsets and `u16` for block counts
/// (see [`ModelFileBlock`]).
#[derive(BinRead, BinWrite)]
pub struct ModelMemorySizes<T: for <'a> AnyNumberType<'a>>
{
    pub stack_size: T,
    pub runtime_size: T,
    /// One entry per LOD (3 LODs).
    pub vertex_buffer_size: [T; 3],
    /// One entry per LOD (3 LODs).
    pub edge_geometry_vertex_buffer_size: [T; 3],
    /// One entry per LOD (3 LODs).
    pub index_buffer_size: [T; 3],
}
impl<T: for<'a> AnyNumberType<'a>> ModelMemorySizes<T>
2022-08-16 11:52:07 -04:00
{
2022-07-19 19:29:41 -04:00
pub fn total(&self) -> T {
let mut total: T = T::default();
total += self.stack_size;
total += self.runtime_size;
for i in 0..3 {
total += self.vertex_buffer_size[i];
total += self.edge_geometry_vertex_buffer_size[i];
total += self.index_buffer_size[i];
}
total
}
}
/// Extra file-info data present for [`FileType::Model`] entries. Describes how
/// the model's compressed blocks are laid out and what the reconstructed
/// `ModelFileHeader` should contain.
#[derive(BinRead)]
pub struct ModelFileBlock {
    pub num_blocks: u32,
    pub num_used_blocks: u32,
    pub version: u32,
    /// Decompressed sizes per category. (Unused by the reader in this file.)
    pub uncompressed_size: ModelMemorySizes<u32>,
    /// Compressed sizes per category. (Unused by the reader in this file.)
    pub compressed_size: ModelMemorySizes<u32>,
    /// Byte offsets of each category's data, relative to the end of the file info header.
    pub offset: ModelMemorySizes<u32>,
    /// Starting block index per category. (Unused by the reader in this file.)
    pub index: ModelMemorySizes<u16>,
    /// Number of compressed blocks per category; `num.total()` is the size of
    /// the compressed-block-size table read in `read_model_file`.
    pub num: ModelMemorySizes<u16>,
    pub vertex_declaration_num: u16,
    pub material_num: u16,
    pub num_lods: u8,
    // Stored on disk as a single byte; nonzero means true.
    #[br(map = | x: u8 | x != 0)]
    pub index_buffer_streaming_enabled: bool,
    // One trailing padding byte follows this flag.
    #[brw(pad_after = 1)]
    #[br(map = | x: u8 | x != 0)]
    pub edge_geometry_enabled: bool,
}
/// Extra file-info data present for [`FileType::Texture`] entries: one
/// [`TextureLodBlock`] per mip level.
#[derive(BinRead, Debug)]
struct TextureBlock {
    // 8 bytes are skipped before this field; their meaning is not used here.
    #[br(pad_before = 8)]
    num_blocks: u32,
    #[br(count = num_blocks)]
    lods: Vec<TextureLodBlock>,
}
/// A SqPack file info header. It can optionally contain extra information, such as texture or
/// model data depending on the file type.
#[derive(BinRead)]
#[br(little)]
struct FileInfo {
    /// Size of this header in bytes; data blocks start at `entry_offset + size`.
    size: u32,
    file_type: FileType,
    /// Decompressed size of the whole file, used to preallocate output buffers.
    file_size: u32,

    // Exactly one of the following is populated, chosen by `file_type`.
    #[br(if (file_type == FileType::Standard))]
    standard_info: Option<StandardFileBlock>,
    #[br(if (file_type == FileType::Model))]
    model_info: Option<ModelFileBlock>,
    #[br(if (file_type == FileType::Texture))]
    texture_info: Option<TextureBlock>,
}
/// A single entry in a standard file's block table: the block's offset
/// relative to the end of the file info header.
#[binrw]
#[br(little)]
pub struct Block {
    // 4 trailing bytes are skipped; their meaning is not used here.
    #[br(pad_after = 4)]
    offset: i32,
}
/// Whether a data block is compressed or stored raw. Decoded from two raw
/// `i32`s (`x`, `y`) passed in by [`BlockHeader`]: `x < 32000` means the block
/// is compressed with `x` as its compressed length, otherwise the block is
/// uncompressed and `y` is its size. (32000 appears to be SqPack's sentinel
/// for "not compressed" — TODO confirm against the format docs.)
#[binrw]
#[derive(Debug)]
#[br(import { x : i32, y : i32 })]
#[br(map = | _ : i32 | if x < 32000 { CompressionMode::Compressed{ compressed_length : x, decompressed_length : y} } else { CompressionMode::Uncompressed { file_size : y } } )]
pub enum CompressionMode {
    // we manually map here, because for this case the enum value is also a raw value we want to extract :-)
    Compressed {
        compressed_length: i32,
        decompressed_length: i32,
    },
    Uncompressed {
        file_size: i32,
    },
}
/// Header preceding every data block: the header's own size plus the block's
/// compression mode. The two raw `i32`s are read as temporaries and fed to
/// [`CompressionMode`]'s manual mapping; `restore_position` rewinds so the
/// same bytes are also consumed as the mode's underlying value.
#[binrw::binread]
#[derive(Debug)]
#[br(little)]
pub struct BlockHeader {
    // 4 bytes after `size` are skipped; their meaning is not used here.
    #[br(pad_after = 4)]
    pub size: u32,

    #[br(temp)]
    x: i32,

    #[br(temp)]
    y: i32,

    #[br(args { x, y })]
    #[br(restore_position)]
    pub compression: CompressionMode,
}
/// Handle to an open SqPack `.dat` archive file on disk.
pub struct DatFile {
    file: std::fs::File,
}
// from https://users.rust-lang.org/t/how-best-to-convert-u8-to-u16/57551/4
/// Reinterprets a mutable `u16` slice as a mutable byte slice over the same
/// memory, so it can be filled directly by `Read::read_exact`.
fn to_u8_slice(slice: &mut [u16]) -> &mut [u8] {
    let len = slice.len() * std::mem::size_of::<u16>();
    // SAFETY: the byte view covers exactly the memory owned by `slice`
    // (`len` = element count × 2), `u8` has alignment 1 so any pointer is
    // suitably aligned, every bit pattern is a valid `u8`, and the returned
    // borrow inherits `slice`'s lifetime, preventing aliasing.
    unsafe { std::slice::from_raw_parts_mut(slice.as_mut_ptr().cast::<u8>(), len) }
}
impl DatFile {
    /// Creates a new reference to an existing dat file.
    /// Returns `None` if the file cannot be opened.
    pub fn from_existing(path: &str) -> Option<DatFile> {
        Some(DatFile {
            file: std::fs::File::open(path).ok()?,
        })
    }

    /// Reads from a certain offset inside of the dat file. This offset will be fixed automatically
    /// by the function.
    ///
    /// If the block of data is successfully parsed, it returns the file data - otherwise it is None.
    pub fn read_from_offset(&mut self, offset: u32) -> Option<MemoryBuffer> {
        // Index offsets are stored divided by 0x80; scale back to a byte offset.
        let offset = (offset * 0x80) as u64;

        self.file
            .seek(SeekFrom::Start(offset))
            .expect("Unable to find offset in file.");

        let file_info = FileInfo::read(&mut self.file).expect("Failed to parse file info.");

        // Dispatch on the entry type; models and textures use their own layouts.
        match file_info.file_type {
            FileType::Empty => None,
            FileType::Standard => self.read_standard_file(offset, &file_info),
            FileType::Model => {
                #[cfg(feature = "visual_data")]
                {
                    self.read_model_file(offset, &file_info)
                }
                #[cfg(not(feature = "visual_data"))]
                {
                    panic!("Tried to extract a model without the visual_data feature enabled!")
                }
            },
            FileType::Texture => self.read_texture_file(offset, &file_info),
        }
    }

    /// Reads a standard file block.
    ///
    /// Reads the block table that follows the file info header, then
    /// decompresses each block in order and concatenates the results.
    fn read_standard_file(&mut self, offset: u64, file_info: &FileInfo) -> Option<MemoryBuffer> {
        let standard_file_info = file_info.standard_info.as_ref().unwrap();

        let mut blocks: Vec<Block> = Vec::with_capacity(standard_file_info.num_blocks as usize);

        for _ in 0..standard_file_info.num_blocks {
            blocks.push(Block::read(&mut self.file).unwrap());
        }

        let mut data: Vec<u8> = Vec::with_capacity(file_info.file_size as usize);

        // Block offsets are relative to the end of the file info header.
        let starting_position = offset + (file_info.size as u64);

        for i in 0..standard_file_info.num_blocks {
            data.append(
                &mut read_data_block(
                    &mut self.file,
                    starting_position + (blocks[i as usize].offset as u64),
                )
                .expect("Failed to read data block."),
            );
        }

        Some(data)
    }

    /// Reads a model file block.
    ///
    /// Reconstructs a standalone MDL file in memory: decompresses the stack,
    /// runtime, vertex, and index block runs into a buffer, then writes a
    /// [`ModelFileHeader`] describing their offsets/sizes at the front.
    #[cfg(feature = "visual_data")]
    fn read_model_file(&mut self, offset: u64, file_info: &FileInfo) -> Option<MemoryBuffer> {
        let mut buffer = Cursor::new(Vec::new());

        let model_file_info = file_info.model_info.as_ref().unwrap();

        // All on-disk offsets below are relative to the end of the file info header.
        let base_offset = offset + (file_info.size as u64);

        let total_blocks = model_file_info.num.total();

        // Table of compressed block sizes, one u16 per block, read raw from the
        // file into the u16 vector via a byte view. NOTE(review): this assumes a
        // little-endian host, since the bytes are not swapped after reading.
        let mut compressed_block_sizes: Vec<u16> = vec![0; total_blocks as usize];
        let slice: &mut [u8] = to_u8_slice(&mut compressed_block_sizes);

        self.file.read_exact(slice).ok()?;

        // Index into compressed_block_sizes; advanced by both closures below,
        // so the block runs must be processed in on-disk order.
        let mut current_block = 0;

        let mut vertex_data_offsets: [u32; 3] = [0; 3];
        let mut vertex_data_sizes: [u32; 3] = [0; 3];

        let mut index_data_offsets: [u32; 3] = [0; 3];
        let mut index_data_sizes: [u32; 3] = [0; 3];

        // start writing at 0x44 — space reserved for the ModelFileHeader
        // written at the end (presumably 0x44 is its serialized size — TODO confirm).
        buffer.seek(SeekFrom::Start(0x44)).ok()?;

        self.file
            .seek(SeekFrom::Start(
                base_offset + (model_file_info.offset.stack_size as u64),
            ))
            .ok()?;

        // read from stack blocks
        // Decompresses `size` consecutive blocks starting at `offset` into the
        // output buffer, returning how many decompressed bytes were written.
        let mut read_model_blocks = |offset: u64, size: usize| -> Option<u64> {
            self.file.seek(SeekFrom::Start(base_offset + offset)).ok()?;
            let stack_start = buffer.position();
            for _ in 0..size {
                let last_pos = &self.file.stream_position().unwrap();

                let data =
                    read_data_block(&self.file, *last_pos).expect("Unable to read block data.");
                // write to buffer
                buffer.write_all(data.as_slice()).ok()?;

                // Advance by the block's *compressed* size from the size table,
                // not by however far read_data_block moved the cursor.
                self.file
                    .seek(SeekFrom::Start(
                        last_pos + (compressed_block_sizes[current_block] as u64),
                    ))
                    .ok()?;
                current_block += 1;
            }

            Some(buffer.position() - stack_start)
        };

        let stack_size = read_model_blocks(
            model_file_info.offset.stack_size as u64,
            model_file_info.num.stack_size as usize,
        )
        .unwrap() as u32;
        let runtime_size = read_model_blocks(
            model_file_info.offset.runtime_size as u64,
            model_file_info.num.runtime_size as usize,
        )
        .unwrap() as u32;

        // Decompresses one LOD's worth of vertex or index blocks, recording the
        // buffer offset where that LOD starts and its decompressed size.
        let mut process_model_data =
            |i: usize,
             size: u32,
             offset: u32,
             offsets: &mut [u32; 3],
             data_sizes: &mut [u32; 3]| {
                if size != 0 {
                    let current_vertex_offset = buffer.position() as u32;
                    // If this LOD starts exactly where the previous one did
                    // (i.e. the previous LOD was empty/shared), record 0 instead
                    // of a duplicate offset.
                    if i == 0 || current_vertex_offset != offsets[i - 1] {
                        offsets[i] = current_vertex_offset;
                    } else {
                        offsets[i] = 0;
                    }
                    self.file
                        .seek(SeekFrom::Start(base_offset + (offset as u64)))
                        .ok();
                    for _ in 0..size {
                        let last_pos = self.file.stream_position().unwrap();
                        let data = read_data_block(&self.file, last_pos)
                            .expect("Unable to read raw model block!");
                        buffer
                            .write_all(data.as_slice())
                            .expect("Unable to write to memory buffer!");
                        data_sizes[i] += data.len() as u32;
                        // Step to the next block using the compressed-size table.
                        self.file
                            .seek(SeekFrom::Start(
                                last_pos + (compressed_block_sizes[current_block] as u64),
                            ))
                            .expect("Unable to seek properly.");
                        current_block += 1;
                    }
                }
            };

        // process all 3 lods
        for i in 0..3 {
            // process vertices
            process_model_data(
                i,
                model_file_info.num.vertex_buffer_size[i] as u32,
                model_file_info.offset.vertex_buffer_size[i],
                &mut vertex_data_offsets,
                &mut vertex_data_sizes,
            );

            // TODO: process edges

            // process indices
            process_model_data(
                i,
                model_file_info.num.index_buffer_size[i] as u32,
                model_file_info.offset.index_buffer_size[i],
                &mut index_data_offsets,
                &mut index_data_sizes,
            );
        }

        // Assemble the header now that all offsets/sizes are known, and write
        // it into the space reserved at the start of the buffer.
        let header = ModelFileHeader {
            version: model_file_info.version,
            stack_size,
            runtime_size,
            vertex_declaration_count: model_file_info.vertex_declaration_num,
            material_count: model_file_info.material_num,
            vertex_offsets: vertex_data_offsets,
            index_offsets: index_data_offsets,
            vertex_buffer_size: vertex_data_sizes,
            index_buffer_size: index_data_sizes,
            lod_count: model_file_info.num_lods,
            index_buffer_streaming_enabled: model_file_info.index_buffer_streaming_enabled,
            has_edge_geometry: model_file_info.edge_geometry_enabled,
        };
        buffer.seek(SeekFrom::Start(0)).ok()?;
        header.write(&mut buffer).ok()?;
        Some(buffer.into_inner())
    }

    /// Reads a texture file block.
    ///
    /// Copies the raw texture header (if present), then decompresses every
    /// block of every LOD in order, concatenating the results.
    fn read_texture_file(&mut self, offset: u64, file_info: &FileInfo) -> Option<MemoryBuffer> {
        let mut data: Vec<u8> = Vec::with_capacity(file_info.file_size as usize);

        let texture_file_info = file_info.texture_info.as_ref().unwrap();

        // write the header if it exists
        // LOD 0's compressed_offset doubles as the raw header length; a zero
        // compressed_size means there is no header to copy.
        let mipmap_size = texture_file_info.lods[0].compressed_size;
        if mipmap_size != 0 {
            let original_pos = self.file.stream_position().ok()?;

            self.file
                .seek(SeekFrom::Start(offset + file_info.size as u64))
                .ok()?;
            let mut header = vec![0u8; texture_file_info.lods[0].compressed_offset as usize];
            self.file.read_exact(&mut header).ok()?;
            data.append(&mut header);
            // Restore the cursor so block reads below start from the right place.
            self.file.seek(SeekFrom::Start(original_pos)).ok()?;
        }
        for i in 0..texture_file_info.num_blocks {
            // Absolute file position of this LOD's first block.
            let mut running_block_total = (texture_file_info.lods[i as usize].compressed_offset
                as u64)
                + offset
                + (file_info.size as u64);
            for _ in 0..texture_file_info.lods[i as usize].block_count {
                let original_pos = self.file.stream_position().ok()?;
                data.append(&mut read_data_block(&self.file, running_block_total)?);
                self.file.seek(SeekFrom::Start(original_pos)).ok()?;
                // The next block's relative offset is stored as an i16 right
                // after the previous block table entry.
                running_block_total += self.file.read_le::<i16>().ok()? as u64;
            }
        }
        Some(data)
    }
}