Big refactoring commit pt. 1
This is the first of many commits to improve code quality and tame my messy code. Types such as Slot and Race now live under types/ and have dedicated functions to convert between ids and enumerations without a heavy std::map. A new repository API lives in a new SqPack header, replacing the old, crusty way of fetching repository information in GameData. Building equipment paths now lives in libxiv (moved from novus), provided you have a model id. Standard methods to build index and dat filenames are provided as well.
parent d0e016e568
commit b11767dc02

12 changed files with 319 additions and 149 deletions
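Before the diff, a minimal usage sketch of how the reorganized pieces fit together. The install path and the model id 6016 are placeholder values, and the sketch only strings together declarations that appear further down (GameData, build_equipment_path, GameData::extractFile); it is not part of the commit itself.

    #include "gamedata.h"
    #include "equipment.h"

    int main() {
        // Hypothetical data directory; GameData scans it for the ffxiv/exX repository folders.
        GameData data("/path/to/game/sqpack");

        // Build the model path for an equipment model on the body slot.
        const std::string mdl = build_equipment_path(6016, Race::HyurMidlanderMale, Slot::Body);
        // -> "chara/equipment/e6016/model/c0101e6016_top.mdl"

        // Extract it from the game archives into a local file.
        data.extractFile(mdl, "body.mdl");
    }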
CMakeLists.txt

@@ -89,7 +89,10 @@ add_library(libxiv STATIC
        src/patch.cpp
        src/exlparser.cpp
        src/mdlparser.cpp
        src/havokxmlparser.cpp)
        src/havokxmlparser.cpp
        src/types.cpp
        src/equipment.cpp
        src/sqpack.cpp)

target_include_directories(libxiv PUBLIC include PRIVATE src)
target_link_libraries(libxiv PUBLIC ${LIBRARIES} pugixml::pugixml)
target_link_directories(libxiv PUBLIC ${LIB_DIRS})
include/equipment.h (new file, 8 lines)

@@ -0,0 +1,8 @@
#pragma once

#include <string>

#include "types/race.h"
#include "types/slot.h"

std::string build_equipment_path(int model_id, Race race, Slot slot);
include/gamedata.h

@@ -6,6 +6,7 @@
#include "exhparser.h"
#include "exlparser.h"
#include "indexparser.h"
#include "sqpack.h"

/*
 * This handles reading/extracting the raw data from game data packs, such as dat0, index and index2 files.

@@ -41,18 +42,17 @@ public:
    uint64_t calculateHash(std::string_view path);

private:
    /*
     * This returns a proper SQEX-style filename for index, index2, and dat files.
     * filenames are in the format of {category}{expansion}{chunk}.{platform}.{type}
     */
    std::string calculateFilename(int category, int expansion, int chunk, std::string_view platform, std::string_view type);
    Repository& getBaseRepository();

    /*
     * Returns the repository, category for a given game path - respectively.
     */
    std::tuple<std::string, std::string> calculateRepositoryCategory(std::string_view path);
    std::tuple<Repository, std::string> calculateRepositoryCategory(std::string_view path);

    std::string dataDirectory;
    std::vector<Repository> repositories;

    EXL rootEXL;
};
};

std::vector<std::uint8_t> read_data_block(FILE* file, size_t starting_position);
include/indexparser.h

@@ -58,5 +58,17 @@ struct IndexFile {
    std::vector<Entry> entries;
};

IndexFile<IndexHashTableEntry> readIndexFile(const std::string_view path);
IndexFile<Index2HashTableEntry> readIndex2File(const std::string_view path);
struct IndexEntry {
    uint64_t hash = 0;
    uint32_t dataFileId = 0;
    uint32_t offset = 0;
};

struct CombinedIndexFile {
    std::vector<IndexEntry> entries;
};

IndexFile<IndexHashTableEntry> readIndexFile(std::string_view path);
IndexFile<Index2HashTableEntry> readIndex2File(std::string_view path);

CombinedIndexFile read_index_files(std::string_view index_filename, std::string_view index2_filename);
include/sqpack.h (new file, 29 lines)

@@ -0,0 +1,29 @@
#pragma once

#include <cstdint>
#include <string>
#include <vector>

// The type of file inside of a SqPack dat file.
// Standard is everything that isn't covered by Model or Texture, such as exd files.
enum class FileType : int32_t {
    Empty = 1,
    Standard = 2,
    Model = 3,
    Texture = 4
};

// This is a folder containing game data, usually separated by ffxiv (which is always present), and then exX
// where X is the expansion number.
struct Repository {
    enum class Type {
        Base,
        Expansion
    } type = Type::Base;

    std::string name;
    int expansion_number = 0;

    std::pair<std::string, std::string> get_index_filenames(int category);
    std::string get_dat_filename(int category, uint32_t data_file_id);
};
include/types/race.h (new file, 16 lines)

@@ -0,0 +1,16 @@
#pragma once

/*
 * Different genders of the same race are considered
 * different races with different ids in the game data,
 * so we keep the same semantics here.
 */
enum class Race {
    HyurMidlanderMale,
    HyurMidlanderFemale,
};

/*
 * This returns the race id. For example, Hyur Midlander Male returns 101.
 */
int get_race_id(Race race);
include/types/slot.h (new file, 23 lines)

@@ -0,0 +1,23 @@
#pragma once

#include <string_view>
#include <optional>

enum class Slot {
    Head,
    Hands,
    Legs,
    Feet,
    Body,
    Earring,
    Neck,
    Rings,
    Wrists
};

/*
 * This gets the slot abbreviation used in model paths. For example, Head returns "met".
 */
std::string_view get_slot_abbreviation(Slot slot);

std::optional<Slot> get_slot_from_id(int id);
src/equipment.cpp (new file, 10 lines)

@@ -0,0 +1,10 @@
#include "equipment.h"

#include <fmt/format.h>

std::string build_equipment_path(const int model_id, const Race race, const Slot slot) {
    return fmt::format("chara/equipment/e{model_id:04d}/model/c{race_id:04d}e{model_id:04d}_{slot}.mdl",
                       fmt::arg("model_id", model_id),
                       fmt::arg("race_id", get_race_id(race)),
                       fmt::arg("slot", get_slot_abbreviation(slot)));
}
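As a worked example (the model id 279 is arbitrary, not taken from the commit), the format string above expands like this, since get_race_id(Race::HyurMidlanderFemale) is 201 and the head slot abbreviates to "met":

    build_equipment_path(279, Race::HyurMidlanderFemale, Slot::Head)
        -> "chara/equipment/e0279/model/c0201e0279_met.mdl"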
src/gamedata.cpp (202 lines changed)
@@ -10,6 +10,7 @@
#include <unordered_map>
#include <array>
#include <fmt/printf.h>
#include <filesystem>

// TODO: should be enum?
// taken from https://xiv.dev/data-files/sqpack#categories

@@ -34,6 +35,20 @@ std::unordered_map<std::string_view, int> categoryToID = {
GameData::GameData(const std::string_view dataDirectory) {
    this->dataDirectory = dataDirectory;

    for(auto const& dir_entry : std::filesystem::directory_iterator{dataDirectory}) {
        if(!dir_entry.is_directory())
            continue;

        Repository repository;
        repository.name = dir_entry.path().filename().string();
        repository.type = stringContains(repository.name, "ex") ? Repository::Type::Expansion : Repository::Type::Base;

        if(repository.type == Repository::Type::Expansion)
            repository.expansion_number = std::stoi(repository.name.substr(2));

        repositories.push_back(repository);
    }

    extractFile("exd/root.exl", "root.exl");

    rootEXL = readEXL("root.exl");

@@ -42,9 +57,8 @@ GameData::GameData(const std::string_view dataDirectory) {
std::vector<std::string> GameData::getAllSheetNames() {
    std::vector<std::string> names;

    for(auto row : rootEXL.rows) {
    for(auto& row : rootEXL.rows)
        names.push_back(row.name);
    }

    return names;
}
@@ -66,72 +80,50 @@ uint64_t GameData::calculateHash(const std::string_view path) {
    return static_cast<uint64_t>(directoryCrc) << 32 | filenameCrc;
}

std::tuple<std::string, std::string> GameData::calculateRepositoryCategory(std::string_view path) {
    std::string repository, category;
std::tuple<Repository, std::string> GameData::calculateRepositoryCategory(std::string_view path) {
    const auto tokens = tokenize(path, "/");
    const std::string repositoryToken = tokens[0];

    auto tokens = tokenize(path, "/");
    if(stringContains(tokens[1], "ex") && !stringContains(tokens[0], "exd") && !stringContains(tokens[0], "exh")) {
        repository = tokens[1];
    } else {
        repository = "ffxiv";
    for(auto& repository : repositories) {
        if(repository.name == repositoryToken) {
            // if this is an expansion, the next token is the category
            return {repository, tokens[1]};
        }
    }

    category = tokens[0];

    return {repository, category};
}

int getExpansionID(std::string_view repositoryName) {
    if(repositoryName == "ffxiv")
        return 0;

    return std::stoi(std::string(repositoryName.substr(2, 2)));
}

std::string GameData::calculateFilename(const int category, const int expansion, const int chunk, const std::string_view platform, const std::string_view type) {
    if(type == "index") {
        return fmt::sprintf("%02x%02x%02x.%s.%s", category, expansion, chunk, platform, type);
    } else if(type == "dat") {
        return fmt::sprintf("%02x%02x00.%s.%s%01x", category, expansion, platform, type, chunk);
    }
    // if it doesn't match any existing repositories (in the case of accessing base game data),
    // fall back to base repository.
    return {getBaseRepository(), tokens[0]};
}

void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
    const uint64_t hash = calculateHash(dataFilePath);
    auto [repository, category] = calculateRepositoryCategory(dataFilePath);

    fmt::print("repository = {}\n", repository);
    fmt::print("category = {}\n", category);
    auto [index_filename, index2_filename] = repository.get_index_filenames(categoryToID[category]);
    auto index_path = fmt::format("{data_directory}/{repository}/{filename}",
                                  fmt::arg("data_directory", dataDirectory),
                                  fmt::arg("repository", repository.name),
                                  fmt::arg("filename", index_filename));
    auto index2_path = fmt::format("{data_directory}/{repository}/{filename}",
                                   fmt::arg("data_directory", dataDirectory),
                                   fmt::arg("repository", repository.name),
                                   fmt::arg("filename", index2_filename));

    // TODO: handle platforms other than win32
    auto indexFilename = calculateFilename(categoryToID[category], getExpansionID(repository), 0, "win32", "index");
    auto index_file = read_index_files(index_path, index2_path);

    fmt::print("calculated index filename: {}\n", indexFilename);

    // TODO: handle hashes in index2 files (we can read them but it's not setup yet.)
    auto indexFile = readIndexFile(dataDirectory + "/" + repository + "/" + indexFilename);

    for(const auto entry : indexFile.entries) {
    for(const auto entry : index_file.entries) {
        if(entry.hash == hash) {
            auto dataFilename = calculateFilename(categoryToID[category], getExpansionID(repository), entry.dataFileId, "win32", "dat");
            auto data_filename = repository.get_dat_filename(categoryToID[category], entry.dataFileId);

            fmt::print("Opening data file {}...\n", dataFilename);

            FILE* file = fopen((dataDirectory + "/" + repository + "/" + dataFilename).c_str(), "rb");
            FILE* file = fopen((dataDirectory + "/" + repository.name + "/" + data_filename).c_str(), "rb");
            if(file == nullptr) {
                throw std::runtime_error("Failed to open data file: " + dataFilename);
                throw std::runtime_error("Failed to open data file: " + data_filename);
            }

            const size_t offset = entry.offset * 0x80;
            fseek(file, offset, SEEK_SET);

            enum FileType : int32_t {
                Empty = 1,
                Standard = 2,
                Model = 3,
                Texture = 4
            };

            struct FileInfo {
                uint32_t size;
                FileType fileType;
@@ -142,49 +134,12 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {

            fread(&info, sizeof(FileInfo), 1, file);

            fmt::print("file size = {}\n", info.fileSize);

            struct Block {
                int32_t offset;
                int16_t dummy;
                int16_t dummy2;
            };

            const auto readFileBlock = [](FILE* file, size_t startingPos) -> std::vector<std::uint8_t> {
                struct BlockHeader {
                    int32_t size;
                    int32_t dummy;
                    int32_t compressedLength; // < 32000 is uncompressed data
                    int32_t decompressedLength;
                } header;

                fseek(file, startingPos, SEEK_SET);

                fread(&header, sizeof(BlockHeader), 1, file);

                std::vector<uint8_t> localdata;

                bool isCompressed = header.compressedLength < 32000;
                if(isCompressed) {
                    localdata.resize(header.decompressedLength);

                    std::vector<uint8_t> compressed_data;
                    compressed_data.resize(header.compressedLength);
                    fread(compressed_data.data(), header.compressedLength, 1, file);

                    zlib::no_header_decompress(reinterpret_cast<uint8_t*>(compressed_data.data()),
                                               compressed_data.size(),
                                               reinterpret_cast<uint8_t*>(localdata.data()),
                                               header.decompressedLength);
                } else {
                    localdata.resize(header.decompressedLength);

                    fread(localdata.data(), header.decompressedLength, 1, file);
                }

                return localdata;
            };

            if(info.fileType == FileType::Standard) {
                std::vector<Block> blocks;
@@ -199,37 +154,7 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {

                const size_t startingPos = offset + info.size;
                for(auto block : blocks) {
                    struct BlockHeader {
                        int32_t size;
                        int32_t dummy;
                        int32_t compressedLength; // < 32000 is uncompressed data
                        int32_t decompressedLength;
                    } header;

                    fseek(file, startingPos + block.offset, SEEK_SET);

                    fread(&header, sizeof(BlockHeader), 1, file);

                    std::vector<uint8_t> localdata;

                    bool isCompressed = header.compressedLength < 32000;
                    if(isCompressed) {
                        localdata.resize(header.decompressedLength);

                        std::vector<uint8_t> compressed_data;
                        compressed_data.resize(header.compressedLength);
                        fread(compressed_data.data(), header.compressedLength, 1, file);

                        zlib::no_header_decompress(reinterpret_cast<uint8_t*>(compressed_data.data()),
                                                   compressed_data.size(),
                                                   reinterpret_cast<uint8_t*>(localdata.data()),
                                                   header.decompressedLength);
                    } else {
                        localdata.resize(header.decompressedLength);

                        fread(localdata.data(), header.decompressedLength, 1, file);
                    }

                    auto localdata = read_data_block(file, startingPos + block.offset);
                    data.insert(data.end(), localdata.begin(), localdata.end());
                }
@@ -238,7 +163,6 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
                FILE* newFile = fopen(outPath.data(), "w");
                fwrite(data.data(), data.size(), 1, newFile);
                fclose(newFile);

            } else if(info.fileType == FileType::Model) {
                FILE* newFile = fopen(outPath.data(), "w");

@@ -316,7 +240,7 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
                size_t stackStart = ftell(newFile);
                for(int i = 0; i < modelInfo.stackBlockNum; i++) {
                    size_t lastPos = ftell(file);
                    auto data = readFileBlock(file, lastPos);
                    auto data = read_data_block(file, lastPos);
                    fwrite(data.data(), data.size(), 1, newFile); // i think we write this to file?
                    fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
                    currentBlock++;

@@ -329,7 +253,7 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
                size_t runtimeStart = ftell(newFile);
                for(int i = 0; i < modelInfo.runtimeBlockNum; i++) {
                    size_t lastPos = ftell(file);
                    auto data = readFileBlock(file, lastPos);
                    auto data = read_data_block(file, lastPos);
                    fwrite(data.data(), data.size(), 1, newFile);
                    fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
                    currentBlock++;
@@ -338,9 +262,6 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
                size_t runtimeEnd = ftell(newFile);
                runtimeSize = (int)(runtimeEnd - runtimeStart);

                fmt::print("stack size: {}\n", stackSize);
                fmt::print("runtime size: {}\n", runtimeSize);

                // process all 3 lods
                for(int i = 0; i < 3; i++) {
                    if(modelInfo.vertexBlockBufferBlockNum[i] != 0) {

@@ -354,7 +275,7 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {

                        for(int j = 0; j < modelInfo.vertexBlockBufferBlockNum[i]; j++) {
                            size_t lastPos = ftell(file);
                            auto data = readFileBlock(file, lastPos);
                            auto data = read_data_block(file, lastPos);
                            fwrite(data.data(), data.size(), 1, newFile); // i think we write this to file?
                            vertexDataSizes[i] += (int)data.size();
                            fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);

@@ -373,7 +294,7 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {

                        for(int j = 0; j < modelInfo.indexBufferBlockNum[i]; j++) {
                            size_t lastPos = ftell(file);
                            auto data = readFileBlock(file, lastPos);
                            auto data = read_data_block(file, lastPos);
                            fwrite(data.data(), data.size(), 1, newFile); // i think we write this to file?
                            indexDataSizes[i] += (int)data.size();
                            fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
@@ -409,26 +330,24 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
                uint8_t dummy[] = {0};
                fwrite(dummy, sizeof(uint8_t), 1, file);

                fmt::print("data size: {}\n", modelInfo.fileSize);

                fclose(newFile);
                fclose(file);
            } else {
                throw std::runtime_error("File type is not handled yet for " + std::string(dataFilePath));
            }

            fmt::print("Extracted {} to {}!\n", dataFilePath, outPath);

            return;
        }
    }

    fmt::print("Extracted {} to {}\n", dataFilePath, outPath);
    fmt::print("Failed to find file {}.\n", dataFilePath);
}

std::optional<EXH> GameData::readExcelSheet(std::string_view name) {
    fmt::print("Beginning to read excel sheet {}...\n", name);

    for(auto row : rootEXL.rows) {
    for(const auto& row : rootEXL.rows) {
        if(row.name == name) {
            fmt::print("Found row {} at id {}!\n", name, row.id);

            // we want it as lowercase (Item -> item)
            std::string newFilename = name.data();
            std::transform(newFilename.begin(), newFilename.end(), newFilename.begin(),

@@ -441,8 +360,6 @@ std::optional<EXH> GameData::readExcelSheet(std::string_view name) {

            extractFile(exhFilename, outPath);

            fmt::print("Done extracting files, now parsing...\n");

            return readEXH(outPath);
        }
    }
@@ -490,8 +407,6 @@ void GameData::extractSkeleton() {

    fseek(file, dataOffset, SEEK_SET);

    fmt::print("data offset: {}\n", dataOffset);

    std::vector<uint8_t> havokData(end - dataOffset);
    fread(havokData.data(), havokData.size(), 1, file);

@@ -505,7 +420,16 @@ void GameData::extractSkeleton() {
IndexFile<IndexHashTableEntry> GameData::getIndexListing(std::string_view folder) {
    auto [repository, category] = calculateRepositoryCategory(fmt::format("{}/{}", folder, "a"));

    auto indexFilename = calculateFilename(categoryToID[category], getExpansionID(repository), 0, "win32", "index");
    auto [indexFilename, index2Filename] = repository.get_index_filenames(categoryToID[category]);

    return readIndexFile(dataDirectory + "/" + repository + "/" + indexFilename);
    return readIndexFile(dataDirectory + "/" + repository.name + "/" + indexFilename);
}

Repository& GameData::getBaseRepository() {
    for(auto& repository : repositories) {
        if(repository.type == Repository::Type::Base)
            return repository;
    }

    throw std::runtime_error("No base repository found.");
}
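To make the new path resolution concrete: a base-game path such as exd/root.exl (used by the constructor above) does not start with a repository folder, so calculateRepositoryCategory falls back to the base repository and takes the first token as the category; a path whose first token names a repository folder on disk returns that repository with the following token as the category. The second example path below is hypothetical.

    calculateRepositoryCategory("exd/root.exl") -> { ffxiv (base), "exd" }
    calculateRepositoryCategory("ex2/bg")       -> { ex2, "bg" }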
src/indexparser.cpp

@@ -63,4 +63,31 @@ IndexFile<Index2HashTableEntry> readIndex2File(const std::string_view path) {
    }

    return index;
}

CombinedIndexFile read_index_files(std::string_view index_filename, std::string_view index2_filename) {
    CombinedIndexFile final_index_file;

    auto index_parsed = readIndexFile(index_filename);
    auto index2_parsed = readIndex2File(index2_filename);

    for(auto entry : index_parsed.entries) {
        IndexEntry new_entry;
        new_entry.hash = entry.hash;
        new_entry.offset = entry.offset;
        new_entry.dataFileId = entry.dataFileId;

        final_index_file.entries.push_back(new_entry);
    }

    for(auto entry : index2_parsed.entries) {
        IndexEntry new_entry;
        new_entry.hash = entry.hash;
        new_entry.offset = entry.offset;
        new_entry.dataFileId = entry.dataFileId;

        final_index_file.entries.push_back(new_entry);
    }

    return final_index_file;
}
src/sqpack.cpp (new file, 59 lines)

@@ -0,0 +1,59 @@
#include "sqpack.h"
#include "compression.h"

#include <fmt/format.h>

std::pair<std::string, std::string> Repository::get_index_filenames(const int category) {
    std::string base = fmt::format("{category:02x}{expansion:02d}{chunk:02d}.{platform}",
                                   fmt::arg("category", category),
                                   fmt::arg("expansion", expansion_number),
                                   fmt::arg("chunk", 0),
                                   fmt::arg("platform", "win32"));

    return {fmt::format("{}.index", base),
            fmt::format("{}.index2", base)};
}

std::string Repository::get_dat_filename(const int category, const uint32_t data_file_id) {
    return fmt::format("{category:02x}{expansion:02d}{chunk:02d}.{platform}.dat{data_file_id}",
                       fmt::arg("category", category),
                       fmt::arg("expansion", expansion_number),
                       fmt::arg("chunk", 0),
                       fmt::arg("platform", "win32"),
                       fmt::arg("data_file_id", data_file_id));
}

std::vector<std::uint8_t> read_data_block(FILE* file, size_t starting_position) {
    struct BlockHeader {
        int32_t size;
        int32_t dummy;
        int32_t compressedLength; // < 32000 is uncompressed data
        int32_t decompressedLength;
    } header;

    fseek(file, starting_position, SEEK_SET);

    fread(&header, sizeof(BlockHeader), 1, file);

    std::vector<uint8_t> localdata;

    bool isCompressed = header.compressedLength < 32000;
    if(isCompressed) {
        localdata.resize(header.decompressedLength);

        std::vector<uint8_t> compressed_data;
        compressed_data.resize(header.compressedLength);
        fread(compressed_data.data(), header.compressedLength, 1, file);

        zlib::no_header_decompress(reinterpret_cast<uint8_t*>(compressed_data.data()),
                                   compressed_data.size(),
                                   reinterpret_cast<uint8_t*>(localdata.data()),
                                   header.decompressedLength);
    } else {
        localdata.resize(header.decompressedLength);

        fread(localdata.data(), header.decompressedLength, 1, file);
    }

    return localdata;
}
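For reference, with the win32 platform and chunk 0 hard-coded as above, and assuming the exd category id of 0x0a from the xiv.dev category table referenced in gamedata.cpp, a base repository (expansion_number 0) produces:

    get_index_filenames(0x0a) -> { "0a0000.win32.index", "0a0000.win32.index2" }
    get_dat_filename(0x0a, 0) -> "0a0000.win32.dat0"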
src/types.cpp (new file, 59 lines)

@@ -0,0 +1,59 @@
#include "types/race.h"
#include "types/slot.h"

std::string_view get_slot_abbreviation(Slot slot) {
    switch(slot) {
        case Slot::Head:
            return "met";
        case Slot::Hands:
            return "glv";
        case Slot::Legs:
            return "dwn";
        case Slot::Feet:
            return "sho";
        case Slot::Body:
            return "top";
        case Slot::Earring:
            return "ear";
        case Slot::Neck:
            return "nek";
        case Slot::Rings:
            return "rir";
        case Slot::Wrists:
            return "wrs";
    }
}

std::optional<Slot> get_slot_from_id(const int id) {
    switch(id) {
        case 3:
            return Slot::Head;
        case 5:
            return Slot::Hands;
        case 7:
            return Slot::Legs;
        case 8:
            return Slot::Feet;
        case 4:
            return Slot::Body;
        case 9:
            return Slot::Earring;
        case 10:
            return Slot::Neck;
        case 12:
            return Slot::Rings;
        case 11:
            return Slot::Wrists;
        default:
            return {};
    }
}

int get_race_id(Race race) {
    switch(race) {
        case Race::HyurMidlanderMale:
            return 101;
        case Race::HyurMidlanderFemale:
            return 201;
    }
}
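A quick sanity check of the new type helpers; the values follow directly from the switches above:

    get_slot_from_id(3)                    -> Slot::Head
    get_slot_abbreviation(Slot::Head)      -> "met"
    get_race_id(Race::HyurMidlanderFemale) -> 201
    get_slot_from_id(6)                    -> std::nullopt (id 6 is not mapped)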