Mirror of https://github.com/redstrate/Physis.git (synced 2025-04-25 22:07:44 +00:00)
Fix clippy warnings about unnecessary deref, useless casts

parent d3d814eed9
commit 425f755850

8 changed files with 11 additions and 11 deletions
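The changed lines below remove two kinds of redundant code: casts of a value to the type it already has (flagged by clippy's unnecessary_cast lint) and `&*` derefs or extra `&` borrows that the compiler's auto-deref and coercions make unnecessary. As a rough illustration of the cast pattern from the first hunk, here is a minimal sketch with invented values, not code from Physis:

    fn main() {
        // Hypothetical illustration of the useless-cast pattern; not Physis code.
        let key = [0u8; 8];
        let j: usize = 3;

        let _before = key[j as usize] as u32; // `j as usize` casts a usize to usize; clippy flags it
        let _after = key[j] as u32;           // the widening `as u32` cast still changes the type, so it stays
    }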
@@ -22,7 +22,7 @@ impl Blowfish {
         for i in 0..ROUNDS + 2 {
             let mut data = 0u32;
             for _ in 0..4 {
-                data = (data << 8) | (key[j as usize] as u32);
+                data = (data << 8) | (key[j] as u32);
                 j += 1;
 
                 if j >= (KEYBITS as usize) {
@@ -284,7 +284,7 @@ impl DatFile {
 
             self.file
                 .seek(SeekFrom::Start(
-                    last_pos + (compressed_block_sizes[current_block as usize] as u64),
+                    last_pos + (compressed_block_sizes[current_block] as u64),
                 ))
                 .ok()?;
             current_block += 1;
@@ -56,7 +56,7 @@ impl FileInfo {
         let mut entries = vec![];
 
         for name in file_names {
-            let file = &read(&name).expect("Cannot read file.");
+            let file = &read(name).expect("Cannot read file.");
 
             entries.push(FIINEntry {
                 file_size: file.len() as i32,
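This change, like the `fs::write(&ver_path, ...)` and `fs::create_dir_all(&parent_directory)` changes further down, drops a borrow the call never needed: `std::fs::read`, `fs::write`, and `fs::create_dir_all` are generic over `P: AsRef<Path>`, so the value can be handed over directly. A standalone sketch under that assumption, with hypothetical names:

    use std::fs;
    use std::io;

    // Hypothetical helper, not the FileInfo code: collect the size of each listed file.
    fn file_sizes(file_names: Vec<String>) -> io::Result<Vec<usize>> {
        let mut sizes = Vec::new();
        for name in file_names {
            // `fs::read` accepts any `P: AsRef<Path>`, and `String` already implements
            // that trait, so the extra borrow in `fs::read(&name)` buys nothing.
            let file = fs::read(name)?;
            sizes.push(file.len());
        }
        Ok(sizes)
    }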
@@ -325,7 +325,7 @@ impl GameData {
                 }
             };
 
-            fs::write(&ver_path, new_version)
+            fs::write(ver_path, new_version)
                 .ok()
                 .ok_or(RepairError::FailedRepair(repository))?;
         }
@@ -87,7 +87,7 @@ impl ChatLog {
 
             // TODO: handle the coloring properly, in some way
             entry.message = String::from_utf8_lossy(
-                &*buffer[cursor.position() as usize..new_last_offset as usize].to_vec(),
+                &buffer[cursor.position() as usize..new_last_offset as usize],
             )
             .to_string();
 
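The old line copied the byte range into a temporary `Vec<u8>` with `.to_vec()` and then dereferenced that copy back into a slice; `String::from_utf8_lossy` only wants a `&[u8]`, which the range index on the buffer already yields, so both the copy and the `&*` go away. A small sketch of the same shape, with a hypothetical buffer and offsets:

    // Hypothetical helper showing the shape of the fix, not the ChatLog code.
    fn decode_message(buffer: &[u8], start: usize, end: usize) -> String {
        // Before: String::from_utf8_lossy(&*buffer[start..end].to_vec()).to_string()
        //         allocated a Vec only to immediately re-borrow it as a slice.
        // After: the range index already produces a &[u8], so pass it straight through.
        String::from_utf8_lossy(&buffer[start..end]).to_string()
    }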
@@ -449,7 +449,7 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
 
                     new_file.seek(SeekFrom::Start(add.block_offset as u64))?;
 
-                    new_file.write_all(&*add.block_data)?;
+                    new_file.write_all(&add.block_data)?;
 
                     wipe(&new_file, add.block_delete_number)?;
                 }
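`Write::write_all` takes a `&[u8]`. Assuming `block_data` (and `header_data` in the next hunk) is a `Vec<u8>`, `&add.block_data` already coerces to `&[u8]` through `Vec`'s `Deref` impl, so the explicit `&*` just spells out what the compiler does anyway. A hedged sketch with an invented struct:

    use std::io::{self, Write};

    // Invented stand-in for the patch chunk type; not the Physis definition.
    struct AddBlock {
        block_data: Vec<u8>,
    }

    fn write_block<W: Write>(new_file: &mut W, add: &AddBlock) -> io::Result<()> {
        // `&add.block_data` is a `&Vec<u8>`, which Deref-coerces to the `&[u8]`
        // that `write_all` expects; `&*add.block_data` is the manual spelling of that.
        new_file.write_all(&add.block_data)
    }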
@@ -518,7 +518,7 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
                         new_file.seek(SeekFrom::Start(1024))?;
                     }
 
-                    new_file.write_all(&*header.header_data)?;
+                    new_file.write_all(&header.header_data)?;
                 }
                 SqpkOperation::FileOperation(fop) => {
                     let file_path = format!("{}/{}", data_dir, fop.path);
@@ -526,7 +526,7 @@ pub fn apply_patch(data_dir: &str, patch_path: &str) -> Result<(), PatchError> {
 
                     match fop.operation {
                         SqpkFileOperation::AddFile => {
-                            fs::create_dir_all(&parent_directory)?;
+                            fs::create_dir_all(parent_directory)?;
 
                             // reverse reading crc32
                             file.seek(SeekFrom::Current(-4))?;
@@ -61,7 +61,7 @@ pub fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
             let compressed_length: usize =
                 ((compressed_length as usize + 143) & 0xFFFFFF80) - (block_header.size as usize);
 
-            let mut compressed_data: Vec<u8> = vec![0; compressed_length as usize];
+            let mut compressed_data: Vec<u8> = vec![0; compressed_length];
             buf.read_exact(&mut compressed_data).ok()?;
 
             let mut decompressed_data: Vec<u8> = vec![0; decompressed_length as usize];
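`compressed_length` is bound as `usize` two lines up, and the repeat form `vec![elem; n]` takes its count as a `usize`, so the `as usize` here is another same-type cast. A trivial sketch with a hypothetical length:

    // Hypothetical: allocate a zeroed buffer; the length is already a usize.
    fn alloc_compressed(compressed_length: usize) -> Vec<u8> {
        vec![0u8; compressed_length] // no cast needed; `vec![x; n]` takes n as usize
    }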
@@ -78,7 +78,7 @@ pub fn read_data_block_patch<T: Read + Seek>(mut buf: T) -> Option<Vec<u8>> {
             buf.read_exact(&mut local_data).ok()?;
 
             buf.seek(SeekFrom::Current(
-                (new_file_size as usize - block_header.size as usize - file_size as usize) as i64,
+                (new_file_size - block_header.size as usize - file_size as usize) as i64,
             ))
             .ok()?;
 
@@ -92,7 +92,7 @@ impl Texture {
         cursor.read_exact(src.as_mut_slice()).ok()?;
 
         let mut dst: Vec<u8> =
-            vec![0u8; (header.width as usize * header.height as usize * 4) as usize];
+            vec![0u8; header.width as usize * header.height as usize * 4];
 
         match header.format {
             TextureFormat::B8G8R8A8 => {