Big refactoring commit pt. 2
Instead of hammering your working directory, all operations now happen in memory through two new classes, MemoryBuffer and MemorySpan. This has already fixed numerous bugs, especially around handling model files. All operations on files inside the .dat archives now work on memory buffers, with an explicit option to write them out to a file (see the usage sketch below). A lot of now-useless debug messages are removed too, since they made some operations needlessly bound by the speed of your console output.
This commit is contained in: parent b11767dc02, commit c78a1ab245
11 changed files with 344 additions and 250 deletions
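To make the description above concrete, here is a minimal sketch of the intended flow. This snippet is illustrative and not part of the commit: the include names, the main() wrapper and the data-directory path are assumptions, while extractFile, readEXL, MemorySpan and write_buffer_to_file are the APIs introduced or changed in the diff below.

#include "gamedata.h"      // assumed header names for the classes shown in this diff
#include "exlparser.h"
#include "memorybuffer.h"

int main() {
    GameData gameData("path/to/the/game/sqpack"); // illustrative data directory

    // extractFile() no longer writes into the working directory; it returns the
    // decompressed contents as std::optional<MemoryBuffer>.
    auto rootList = gameData.extractFile("exd/root.exl");
    if (!rootList)
        return 1;

    // Parsers now take a MemorySpan view over that buffer instead of a path on disk.
    EXL exl = readEXL(MemorySpan(*rootList));

    // Writing to a file is an explicit, opt-in step.
    write_buffer_to_file(*rootList, "root.exl");
    return 0;
}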
@@ -4,6 +4,8 @@
#include <string>
#include <vector>

#include "memorybuffer.h"

struct EXH;
struct ExcelDataPagination;

@@ -23,4 +25,4 @@ struct EXD {

std::string getEXDFilename(EXH& exh, std::string_view name, std::string_view lang, ExcelDataPagination& page);

EXD readEXD(EXH& exh, std::string_view path, ExcelDataPagination& page);
EXD readEXD(EXH& exh, MemorySpan data, ExcelDataPagination& page);

@@ -4,6 +4,7 @@
#include <vector>

#include "language.h"
#include "memorybuffer.h"

// taken from https://xiv.dev/game-data/file-formats/excel
struct ExhHeader {

@@ -63,4 +64,4 @@ struct EXH {
std::vector<Language> language;
};

EXH readEXH(std::string_view path);
EXH readEXH(MemorySpan data);

@@ -3,6 +3,7 @@
#include <string_view>
#include <vector>
#include <string>
#include "memorybuffer.h"

struct EXLRow {
std::string name;

@@ -13,4 +14,4 @@ struct EXL {
std::vector<EXLRow> rows;
};

EXL readEXL(std::string_view path);
EXL readEXL(MemorySpan data);

@@ -7,6 +7,7 @@
#include "exlparser.h"
#include "indexparser.h"
#include "sqpack.h"
#include "memorybuffer.h"

/*
* This handles reading/extracting the raw data from game data packs, such as dat0, index and index2 files.

@@ -26,7 +27,8 @@ public:
/*
* This extracts the raw file from dataFilePath to outPath;
*/
void extractFile(std::string_view dataFilePath, std::string_view outPath);
[[nodiscard]]
std::optional<MemoryBuffer> extractFile(std::string_view data_file_path);

IndexFile<IndexHashTableEntry> getIndexListing(std::string_view folder);

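A hedged sketch of a caller adapting to the new [[nodiscard]] signature. The function name dumpSkeleton and the include names are illustrative assumptions; the .sklb path and "test.skel" come from extractSkeleton() further down in this commit.

#include <optional>
#include "gamedata.h"      // assumed header name
#include "memorybuffer.h"

bool dumpSkeleton(GameData& gameData) {
    auto skeleton = gameData.extractFile("chara/human/c0201/skeleton/base/b0001/skl_c0201b0001.sklb");
    if (!skeleton.has_value())
        return false;              // the old void overload could only print a failure message

    MemorySpan span(*skeleton);    // cheap, read-only view for the in-memory parsers
    write_buffer_to_file(*skeleton, "test.skel"); // persisting is now explicit
    return true;
}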
@@ -4,6 +4,8 @@
#include <vector>
#include <array>

#include "memorybuffer.h"

struct Vertex {
std::array<float, 3> position;
std::array<float, 3> normal;

@@ -22,4 +24,4 @@ struct Model {
std::vector<Lod> lods;
};

Model parseMDL(const std::string_view path);
Model parseMDL(MemorySpan data);

141 include/memorybuffer.h (new file)

@@ -0,0 +1,141 @@
#pragma once

#include <string_view>
#include <streambuf>
#include <istream>

enum class Seek {
    Current,
    End,
    Set
};

struct MemoryBuffer {
    MemoryBuffer() {}
    MemoryBuffer(const std::vector<uint8_t>& new_data) : data(new_data) {}

    void seek(const size_t pos, const Seek seek_type) {
        switch(seek_type) {
        case Seek::Current:
            position += pos;
            break;
        case Seek::End:
            position = size() - pos;
            break;
        case Seek::Set:
            position = pos;
            break;
        }
    }

    template<typename T>
    void write(const T& t) {
        size_t end = position + sizeof(T);
        if(end > data.size())
            data.resize(end);

        memcpy(data.data() + position, &t, sizeof(T));

        position = end;
    }

    template <>
    void write<std::vector<uint8_t>>(const std::vector<uint8_t>& t) {
        size_t end = position + (sizeof(uint8_t) * t.size());
        if(end > data.size())
            data.resize(end);

        data.insert(data.begin() + position, t.begin(), t.end());
        position = end;
    }

    size_t size() const {
        return data.size();
    }

    size_t current_position() const {
        return position;
    }

    std::vector<uint8_t> data;

private:
    size_t position = 0;
};

struct MemorySpan {
    MemorySpan(const MemoryBuffer& new_buffer) : buffer(new_buffer) {}

    std::istream read_as_stream() {
        auto char_data = cast_data<char>();
        mem = std::make_unique<membuf>(char_data, char_data + size());
        return std::istream(mem.get());
    }

    template<typename T>
    void read(T* t) {
        *t = *reinterpret_cast<const T*>(buffer.data.data() + position);
        position += sizeof(T);
    }

    template<typename T>
    void read(T* t, const size_t size) {
        *t = *reinterpret_cast<const T*>(buffer.data.data() + position);
        position += size;
    }

    template<typename T>
    void read_structures(std::vector<T>* vector, const size_t count) {
        vector->resize(count);
        for(size_t i = 0; i < count; i++)
            read(&vector->at(i));
    }

    template<typename T>
    void read_array(T* array, const size_t count) {
        for(size_t i = 0; i < count; i++)
            read((array + i));
    }

    void seek(const size_t pos, const Seek seek_type) {
        switch(seek_type) {
        case Seek::Current:
            position += pos;
            break;
        case Seek::End:
            position = buffer.size() - pos;
            break;
        case Seek::Set:
            position = pos;
            break;
        }
    }

    size_t size() const {
        return buffer.data.size();
    }

    size_t current_position() const {
        return position;
    }

private:
    const MemoryBuffer& buffer;

    struct membuf : std::streambuf {
        inline membuf(char* begin, char* end) {
            this->setg(begin, begin, end);
        }
    };

    template<typename T>
    T* cast_data() {
        return (T*)(buffer.data.data());
    }

    std::unique_ptr<membuf> mem;

    size_t position = 0;
};

void write_buffer_to_file(const MemoryBuffer& buffer, std::string_view path);

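A quick round trip through the two new types, as a minimal usage sketch. It assumes <vector>, <memory>, <cstring> and <cstdint> are available (the header uses them but does not include them directly), the output filename is illustrative, and the magic value is the SKLB constant reused from extractSkeleton() later in this commit.

MemoryBuffer buffer;
uint32_t magic = 0x736B6C62;
buffer.write(magic);                       // grows the byte vector and advances the write cursor
write_buffer_to_file(buffer, "magic.bin"); // writing to disk is an explicit call now

MemorySpan span(buffer);                   // non-owning view with its own read cursor
uint32_t readBack = 0;
span.read(&readBack);                      // readBack == 0x736B6C62
span.seek(0, Seek::Set);                   // rewind to read again if needed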
@@ -27,18 +27,19 @@ struct ExcelDataRowHeader {
};

template<typename T>
T readDataRaw(FILE* file, int offset) {
fseek(file, offset, SEEK_SET);
T readDataRaw(MemorySpan& span, int offset) {
span.seek(offset, Seek::Set);

T value;
fread(&value, sizeof value, 1, file);
span.read(&value);
endianSwap(&value);

return value;
}

template<typename T>
std::string readData(FILE* file, int offset) {
return std::to_string(readDataRaw<T>(file, offset));
std::string readData(MemorySpan& span, int offset) {
return std::to_string(readDataRaw<T>(span, offset));
}

std::string getEXDFilename(EXH& exh, std::string_view name, std::string_view lang, ExcelDataPagination& page) {

@@ -49,23 +50,16 @@ std::string getEXDFilename(EXH& exh, std::string_view name, std::string_view lan
}
}

EXD readEXD(EXH& exh, std::string_view path, ExcelDataPagination& page) {
EXD readEXD(EXH& exh, MemorySpan data, ExcelDataPagination& page) {
EXD exd;

FILE* file = fopen(path.data(), "rb");
if(file == nullptr) {
throw std::runtime_error("Failed to open exd file " + std::string(path));
}

ExcelDataHeader header;
fread(&header, sizeof(ExcelDataHeader), 1, file);
data.read(&header);

endianSwap(&header.indexSize);

std::vector<ExcelDataOffset> dataOffsets;
dataOffsets.resize(header.indexSize / sizeof(ExcelDataOffset));

fread(dataOffsets.data(), sizeof(ExcelDataOffset) * dataOffsets.size(), 1, file);
data.read_structures(&dataOffsets, header.indexSize / sizeof(ExcelDataOffset));

for(auto& offset : dataOffsets) {
endianSwap(&offset.offset);

@@ -75,17 +69,17 @@ EXD readEXD(EXH& exh, std::string_view path, ExcelDataPagination& page) {
for(int i = 0; i < exh.header.rowCount; i++) {
for(auto& offset : dataOffsets) {
if (offset.rowId == i) {
fseek(file, offset.offset, SEEK_SET);
data.seek(offset.offset, Seek::Set);

ExcelDataRowHeader rowHeader;
fread(&rowHeader, sizeof(ExcelDataRowHeader), 1, file);
data.read(&rowHeader);

endianSwap(&rowHeader.dataSize);
endianSwap(&rowHeader.rowCount);

const int headerOffset = offset.offset + 6;

const auto readRow = [&exd, &exh, file, rowHeader](int offset) {
const auto readRow = [&exd, &exh, &data, rowHeader](int offset) {
Row row;

for (auto column: exh.columnDefinitions) {

@@ -93,22 +87,22 @@ EXD readEXD(EXH& exh, std::string_view path, ExcelDataPagination& page) {

switch (column.type) {
case String: {
fseek(file, offset + column.offset, SEEK_SET);
data.seek(offset + column.offset, Seek::Set);

uint32_t stringLength = 0; // this is actually offset?
fread(&stringLength, sizeof(uint32_t), 1, file);
data.read(&stringLength);

endianSwap(&stringLength);

fseek(file, offset + exh.header.dataOffset + stringLength, SEEK_SET);
data.seek(offset + exh.header.dataOffset + stringLength, Seek::Set);

std::string string;

uint8_t byte;
fread(&byte, sizeof(uint8_t), 1, file);
data.read(&byte);
while(byte != 0) {
string.push_back(byte);
fread(&byte, sizeof(uint8_t), 1, file);
data.read(&byte);
}

c.data = string;

@@ -116,46 +110,46 @@ EXD readEXD(EXH& exh, std::string_view path, ExcelDataPagination& page) {
}
break;
case Int8:
c.data = readData<int8_t>(file, offset + column.offset);
c.data = readData<int8_t>(data, offset + column.offset);
c.type = "Int";
c.uint64Data = readDataRaw<int8_t>(file, offset + column.offset);
c.uint64Data = readDataRaw<int8_t>(data, offset + column.offset);
break;
case UInt8:
c.data = readData<uint8_t>(file, offset + column.offset);
c.data = readData<uint8_t>(data, offset + column.offset);
c.type = "Unsigned Int";
c.uint64Data = readDataRaw<uint8_t>(file, offset + column.offset);
c.uint64Data = readDataRaw<uint8_t>(data, offset + column.offset);
break;
case Int16:
c.data = readData<int16_t>(file, offset + column.offset);
c.data = readData<int16_t>(data, offset + column.offset);
c.type = "Int";
c.uint64Data = readDataRaw<int16_t>(file, offset + column.offset);
c.uint64Data = readDataRaw<int16_t>(data, offset + column.offset);
break;
case UInt16:
c.data = readData<uint16_t>(file, offset + column.offset);
c.data = readData<uint16_t>(data, offset + column.offset);
c.type = "Unsigned Int";
break;
case Int32:
c.data = readData<int32_t>(file, offset + column.offset);
c.data = readData<int32_t>(data, offset + column.offset);
c.type = "Int";
c.uint64Data = readDataRaw<int32_t>(file, offset + column.offset);
c.uint64Data = readDataRaw<int32_t>(data, offset + column.offset);
break;
case UInt32:
c.data = readData<uint32_t>(file, offset + column.offset);
c.data = readData<uint32_t>(data, offset + column.offset);
c.type = "Unsigned Int";
break;
case Float32:
c.data = readData<float>(file, offset + column.offset);
c.data = readData<float>(data, offset + column.offset);
c.type = "Float";
break;
case Int64:
c.data = readData<int64_t>(file, offset + column.offset);
c.data = readData<int64_t>(data, offset + column.offset);
c.type = "Int";
c.uint64Data = readDataRaw<int64_t>(file, offset + column.offset);
c.uint64Data = readDataRaw<int64_t>(data, offset + column.offset);
break;
case UInt64:
c.data = readData<uint64_t>(file, offset + column.offset);
c.data = readData<uint64_t>(data, offset + column.offset);
c.type = "Unsigned Int";
c.uint64Data = readDataRaw<uint64_t>(file, offset + column.offset);
c.uint64Data = readDataRaw<uint64_t>(data, offset + column.offset);
break;
case PackedBool0:
case PackedBool1:

@@ -167,8 +161,8 @@ EXD readEXD(EXH& exh, std::string_view path, ExcelDataPagination& page) {
case PackedBool7: {
int shift = (int) column.type - (int) PackedBool0;
int bit = 1 << shift;
int32_t data = readDataRaw<int32_t>(file, offset + column.offset);
c.data = std::to_string((data & bit) == bit);
int32_t boolData = readDataRaw<int32_t>(data, offset + column.offset);
c.data = std::to_string((boolData & bit) == bit);
c.type = "Boolean";
}
break;

@@ -7,17 +7,12 @@
#include <algorithm>
#include <string>

EXH readEXH(const std::string_view path) {
EXH readEXH(MemorySpan data) {
EXH exh;

FILE* file = fopen(path.data(), "rb");
if(file == nullptr) {
throw std::runtime_error("Failed to open exh file " + std::string(path));
}
data.read(&exh.header);

fread(&exh.header, sizeof(ExhHeader), 1, file);

fseek(file, 0x20, SEEK_SET);
data.seek(0x20, Seek::Set);

endianSwap(&exh.header.dataOffset);
endianSwap(&exh.header.columnCount);

@@ -25,16 +20,9 @@ EXH readEXH(const std::string_view path) {
endianSwap(&exh.header.languageCount);
endianSwap(&exh.header.rowCount);

exh.columnDefinitions.resize(exh.header.columnCount);

fread(exh.columnDefinitions.data(), sizeof(ExcelColumnDefinition) * exh.header.columnCount, 1, file);

exh.pages.resize(exh.header.pageCount);

fread(exh.pages.data(), sizeof(ExcelDataPagination) * exh.header.pageCount, 1, file);

exh.language.resize(exh.header.languageCount);
fread(exh.language.data(), sizeof(Language) * exh.header.languageCount, 1, file);
data.read_structures(&exh.columnDefinitions, exh.header.columnCount);
data.read_structures(&exh.pages, exh.header.pageCount);
data.read_structures(&exh.language, exh.header.languageCount);

for(auto& columnDef : exh.columnDefinitions) {
endianSwap(&columnDef.offset);

@@ -3,18 +3,13 @@
#include <stdexcept>
#include <fstream>

EXL readEXL(std::string_view path) {
std::fstream file;
file.open(path.data(), std::iostream::in);

if(!file.is_open()) {
throw std::runtime_error("Failed to read exl file from " + std::string(path.data()));
}
EXL readEXL(MemorySpan data) {
auto stream = data.read_as_stream();

EXL exl;

std::string line;
while (std::getline(file, line)) {
while (std::getline(stream, line)) {
const size_t comma = line.find_first_of(',');

std::string name = line.substr(0, comma);

145 src/gamedata.cpp

@@ -49,9 +49,8 @@ GameData::GameData(const std::string_view dataDirectory) {
repositories.push_back(repository);
}

extractFile("exd/root.exl", "root.exl");

rootEXL = readEXL("root.exl");
auto root_exl_data = extractFile("exd/root.exl");
rootEXL = readEXL(*root_exl_data);
}

std::vector<std::string> GameData::getAllSheetNames() {

@@ -96,9 +95,9 @@ std::tuple<Repository, std::string> GameData::calculateRepositoryCategory(std::s
return {getBaseRepository(), tokens[0]};
}

void GameData::extractFile(std::string_view dataFilePath, std::string_view outPath) {
const uint64_t hash = calculateHash(dataFilePath);
auto [repository, category] = calculateRepositoryCategory(dataFilePath);
std::optional<MemoryBuffer> GameData::extractFile(const std::string_view data_file_path) {
const uint64_t hash = calculateHash(data_file_path);
auto [repository, category] = calculateRepositoryCategory(data_file_path);

auto [index_filename, index2_filename] = repository.get_index_filenames(categoryToID[category]);
auto index_path = fmt::format("{data_directory}/{repository}/{filename}",

@@ -160,11 +159,9 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPa

fclose(file);

FILE* newFile = fopen(outPath.data(), "w");
fwrite(data.data(), data.size(), 1, newFile);
fclose(newFile);
return {data};
} else if(info.fileType == FileType::Model) {
FILE* newFile = fopen(outPath.data(), "w");
MemoryBuffer buffer;

// reset
fseek(file, offset, SEEK_SET);

@@ -223,49 +220,54 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPa

std::vector<uint16_t> compressedBlockSizes(totalBlocks);
fread(compressedBlockSizes.data(), compressedBlockSizes.size() * sizeof(uint16_t), 1, file);

int currentBlock = 0;
int stackSize = 0;
int runtimeSize = 0;
uint32_t stackSize = 0;
uint32_t runtimeSize = 0;

std::array<int, 3> vertexDataOffsets;
std::array<int, 3> indexDataOffsets;
std::array<uint32_t, 3> vertexDataOffsets;
std::array<uint32_t, 3> indexDataOffsets;

std::array<int, 3> vertexDataSizes;
std::array<int, 3> indexDataSizes;
std::array<uint32_t, 3> vertexDataSizes;
std::array<uint32_t, 3> indexDataSizes;

// data.append 0x44
fseek(newFile, 0x44, SEEK_SET);
buffer.seek(0x44, Seek::Set);

fseek(file, baseOffset + modelInfo.stackOffset, SEEK_SET);
size_t stackStart = ftell(newFile);
size_t stackStart = buffer.current_position();
for(int i = 0; i < modelInfo.stackBlockNum; i++) {
size_t lastPos = ftell(file);

auto data = read_data_block(file, lastPos);
fwrite(data.data(), data.size(), 1, newFile); // i think we write this to file?
buffer.write(data);

fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
currentBlock++;
}

size_t stackEnd = ftell(newFile);
size_t stackEnd = buffer.current_position();
stackSize = (int)(stackEnd - stackStart);

fseek(file, baseOffset + modelInfo.runtimeOffset, SEEK_SET);
size_t runtimeStart = ftell(newFile);
size_t runtimeStart = buffer.current_position();
for(int i = 0; i < modelInfo.runtimeBlockNum; i++) {
size_t lastPos = ftell(file);

auto data = read_data_block(file, lastPos);
fwrite(data.data(), data.size(), 1, newFile);
buffer.write(data);

fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
currentBlock++;
}

size_t runtimeEnd = ftell(newFile);
size_t runtimeEnd = buffer.current_position();
runtimeSize = (int)(runtimeEnd - runtimeStart);

// process all 3 lods
for(int i = 0; i < 3; i++) {
if(modelInfo.vertexBlockBufferBlockNum[i] != 0) {
int currentVertexOffset = ftell(newFile);
int currentVertexOffset = buffer.current_position();
if(i == 0 || currentVertexOffset != vertexDataOffsets[i - 1])
vertexDataOffsets[i] = currentVertexOffset;
else

@@ -275,8 +277,10 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPa

for(int j = 0; j < modelInfo.vertexBlockBufferBlockNum[i]; j++) {
size_t lastPos = ftell(file);

auto data = read_data_block(file, lastPos);
fwrite(data.data(), data.size(), 1, newFile); // i think we write this to file?
buffer.write(data);

vertexDataSizes[i] += (int)data.size();
fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
currentBlock++;

@@ -286,7 +290,7 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPa
// TODO: lol no edge geometry

if(modelInfo.indexBufferBlockNum[i] != 0) {
int currentIndexOffset = ftell(newFile);
int currentIndexOffset = buffer.current_position();
if(i == 0 || currentIndexOffset != indexDataOffsets[i - 1])
indexDataOffsets[i] = currentIndexOffset;
else

@@ -294,8 +298,10 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPa

for(int j = 0; j < modelInfo.indexBufferBlockNum[i]; j++) {
size_t lastPos = ftell(file);

auto data = read_data_block(file, lastPos);
fwrite(data.data(), data.size(), 1, newFile); // i think we write this to file?
buffer.write(data);

indexDataSizes[i] += (int)data.size();
fseek(file, lastPos + compressedBlockSizes[currentBlock], SEEK_SET);
currentBlock++;

@@ -304,45 +310,43 @@ void GameData::extractFile(std::string_view dataFilePath, std::string_view outPa
}

// now write mdl header
fseek(newFile, 0, SEEK_SET);
fwrite(&modelInfo.version, sizeof(uint32_t), 1, newFile);
fwrite(&stackSize, sizeof(uint32_t), 1, newFile);
fwrite(&runtimeSize, sizeof(uint32_t), 1, newFile);
fwrite(&modelInfo.vertexDeclarationNum, sizeof(unsigned short), 1, newFile);
fwrite(&modelInfo.materialNum, sizeof(unsigned short), 1, newFile);
buffer.seek(0, Seek::Set);

buffer.write(modelInfo.version);
buffer.write(stackSize);
buffer.write(runtimeSize);
buffer.write(modelInfo.vertexDeclarationNum);
buffer.write(modelInfo.materialNum);

for(int i = 0; i < 3; i++)
fwrite(&vertexDataOffsets[i], sizeof(uint32_t), 1, newFile);
buffer.write(vertexDataOffsets[i]);

for(int i = 0; i < 3; i++)
fwrite(&indexDataOffsets[i], sizeof(uint32_t), 1, newFile);
buffer.write(indexDataOffsets[i]);

for(int i = 0; i < 3; i++)
fwrite(&vertexDataSizes[i], sizeof(uint32_t), 1, newFile);
buffer.write(vertexDataSizes[i]);

for(int i = 0; i < 3; i++)
fwrite(&indexDataSizes[i], sizeof(uint32_t), 1, newFile);
buffer.write(indexDataSizes[i]);

fwrite(&modelInfo.numLods, sizeof(uint8_t), 1, file);
fwrite(&modelInfo.indexBufferStreamingEnabled, sizeof(bool), 1, file);
fwrite(&modelInfo.edgeGeometryEnabled, sizeof(bool), 1, file);
buffer.write(modelInfo.numLods);
buffer.write(modelInfo.indexBufferStreamingEnabled);
buffer.write(modelInfo.edgeGeometryEnabled);

uint8_t dummy[] = {0};
fwrite(dummy, sizeof(uint8_t), 1, file);
uint8_t dummy = 0;
buffer.write(dummy);

fclose(newFile);
fclose(file);

return {buffer};
} else {
throw std::runtime_error("File type is not handled yet for " + std::string(dataFilePath));
throw std::runtime_error("File type is not handled yet for " + std::string(data_file_path));
}

fmt::print("Extracted {} to {}!\n", dataFilePath, outPath);

return;
}
}

fmt::print("Failed to find file {}.\n", dataFilePath);
fmt::print("Failed to find file {}.\n", data_file_path);
}

std::optional<EXH> GameData::readExcelSheet(std::string_view name) {

@@ -358,9 +362,8 @@ std::optional<EXH> GameData::readExcelSheet(std::string_view name) {
std::string outPath = newFilename + ".exh";
std::replace(outPath.begin(), outPath.end(), '/', '_');

extractFile(exhFilename, outPath);

return readEXH(outPath);
auto exh_data = extractFile(exhFilename);
return readEXH(*exh_data);
}
}

@@ -368,53 +371,45 @@ std::optional<EXH> GameData::readExcelSheet(std::string_view name) {
}

void GameData::extractSkeleton() {
std::string path = fmt::format("chara/human/c0201/skeleton/base/b0001/skl_c0201b0001.sklb");

extractFile(path, "test.skel");

FILE* file = fopen("test.skel", "rb");

fseek(file, 0, SEEK_END);
size_t end = ftell(file);
fseek(file, 0, SEEK_SET);
const std::string path = fmt::format("chara/human/c0201/skeleton/base/b0001/skl_c0201b0001.sklb");
auto skel_data = extractFile(path);
auto skel_span = MemorySpan(*skel_data);

int32_t magic;
fread(&magic, sizeof(int32_t), 1, file);

int32_t format;
fread(&format, sizeof(int32_t), 1, file);

fseek(file, sizeof(uint16_t), SEEK_CUR);
skel_span.read(&magic);

if(magic != 0x736B6C62)
fmt::print("INVALID SKLB magic");

size_t dataOffset = 0;
int32_t format;
skel_span.read(&format);

skel_span.seek(sizeof(uint16_t), Seek::Current);

int16_t dataOffset = 0;
switch(format) {
case 0x31323030:
fread(&dataOffset, sizeof(int16_t), 1, file);
skel_span.read(&dataOffset);
break;
case 0x31333030:
case 0x31333031:
fseek(file, sizeof(uint16_t), SEEK_CUR);
fread(&dataOffset, sizeof(int16_t), 1, file);
skel_span.seek(sizeof(uint16_t), Seek::Current);
skel_span.read(&dataOffset);
break;
default:
fmt::print("INVALID SKLB format {}", format);
break;
}

fseek(file, dataOffset, SEEK_SET);
skel_span.seek(dataOffset, Seek::Set);

std::vector<uint8_t> havokData(end - dataOffset);
fread(havokData.data(), havokData.size(), 1, file);
std::vector<uint8_t> havokData(skel_span.size() - dataOffset);
skel_span.read_structures(&havokData, havokData.size());

FILE* newFile = fopen("test.sklb.havok", "wb");
fwrite(havokData.data(), havokData.size(), 1, newFile);

fclose(newFile);
fclose(file);
}

IndexFile<IndexHashTableEntry> GameData::getIndexListing(std::string_view folder) {

@@ -8,19 +8,7 @@
#include <fstream>
#include <algorithm>

Model parseMDL(const std::string_view path) {
FILE* file = fopen(path.data(), "rb");
if(file == nullptr) {
throw std::runtime_error("Failed to open exh file " + std::string(path));
}

enum class FileType : int32_t {
Empty = 1,
Standard = 2,
Model = 3,
Texture = 4
};

Model parseMDL(MemorySpan data) {
struct ModelFileHeader {
uint32_t version;
uint32_t stackSize;

@@ -37,9 +25,7 @@ Model parseMDL(const std::string_view path) {
uint8_t padding;
} modelFileHeader;

fread(&modelFileHeader, sizeof(ModelFileHeader), 1, file);

fmt::print("stack size: {}\n", modelFileHeader.stackSize);
data.read(&modelFileHeader);

struct VertexElement {
uint8_t stream, offset, type, usage, usageIndex;

@@ -53,29 +39,28 @@ Model parseMDL(const std::string_view path) {
std::vector<VertexDeclaration> vertexDecls(modelFileHeader.vertexDeclarationCount);
for(int i = 0; i < modelFileHeader.vertexDeclarationCount; i++) {
VertexElement element {};
fread(&element, sizeof(VertexElement), 1, file);
data.read(&element);

do {
vertexDecls[i].elements.push_back(element);
fread(&element, sizeof(VertexElement), 1, file);
data.read(&element);
} while (element.stream != 255);

int toSeek = 17 * 8 - (vertexDecls[i].elements.size() + 1) * 8;
fseek(file, toSeek, SEEK_CUR);
data.seek(toSeek, Seek::Current);
}

uint16_t stringCount;
fread(&stringCount, sizeof(uint16_t), 1, file);

fmt::print("string count: {}\n", stringCount);
data.read(&stringCount);

// dummy
fseek(file, sizeof(uint16_t), SEEK_CUR);
data.seek(sizeof(uint16_t), Seek::Current);

uint32_t stringSize;
fread(&stringSize, sizeof(uint32_t), 1, file);
data.read(&stringSize);

std::vector<uint8_t> strings(stringSize);
fread(strings.data(), stringSize, 1, file);
std::vector<uint8_t> strings;
data.read_structures(&strings, stringSize);

enum ModelFlags1 : uint8_t
{

@@ -135,28 +120,23 @@ Model parseMDL(const std::string_view path) {
uint8_t padding[6];
} modelHeader;

fread(&modelHeader, sizeof(modelHeader), 1, file);

fmt::print("mesh count: {}\n", modelHeader.meshCount);
fmt::print("attribute count: {}\n", modelHeader.attributeCount);
data.read(&modelHeader);

struct ElementId {
unsigned int elementId;
unsigned int parentBoneName;
uint32_t elementId;
uint32_t parentBoneName;
std::vector<float> translate;
std::vector<float> rotate;
};

std::vector<ElementId> elementIds(modelHeader.elementIdCount);
for(int i = 0; i < modelHeader.elementIdCount; i++) {
fread(&elementIds[i].elementId, sizeof(uint32_t), 1, file);
fread(&elementIds[i].parentBoneName, sizeof(uint32_t), 1, file);
data.read(&elementIds[i].elementId);
data.read(&elementIds[i].parentBoneName);

elementIds[i].translate.resize(3); // FIXME: these always seem to be 3, convert to static array? then we could probably fread this all in one go!
elementIds[i].rotate.resize(3);

fread(elementIds[i].translate.data(), sizeof(float) * 3, 1, file);
fread(elementIds[i].rotate.data(), sizeof(float) * 3, 1, file);
// FIXME: these always seem to be 3, convert to static array? then we could probably read this all in one go!
data.read_structures(&elementIds[i].translate, 3);
data.read_structures(&elementIds[i].rotate, 3);
}

struct Lod {

@@ -184,10 +164,10 @@ Model parseMDL(const std::string_view path) {
unsigned int indexDataOffset;
};

std::array<Lod, 3> lods;
fread(lods.data(), sizeof(Lod) * 3, 1, file);
std::vector<Lod> lods;

// TODO: support models that support more than 3 lods
data.read_structures(&lods, 3);

struct Mesh {
unsigned short vertexCount;

@@ -199,7 +179,7 @@ Model parseMDL(const std::string_view path) {
unsigned short boneTableIndex;
unsigned int startIndex;

std::vector<unsigned int> vertexBufferOffset;
std::vector<uint32_t> vertexBufferOffset;
std::vector<uint8_t> vertexBufferStride;

uint8_t vertexStreamCount;

@@ -207,26 +187,23 @@ Model parseMDL(const std::string_view path) {

std::vector<Mesh> meshes(modelHeader.meshCount);
for(int i = 0; i < modelHeader.meshCount; i++) {
fread(&meshes[i].vertexCount, sizeof(uint16_t), 1, file);
fread(&meshes[i].padding, sizeof(uint16_t), 1, file);
fread(&meshes[i].indexCount, sizeof(uint32_t), 1, file);
fread(&meshes[i].materialIndex, sizeof(uint16_t), 1, file);
fread(&meshes[i].subMeshIndex, sizeof(uint16_t), 1, file);
fread(&meshes[i].subMeshCount, sizeof(uint16_t), 1, file);
fread(&meshes[i].boneTableIndex, sizeof(uint16_t), 1, file);
fread(&meshes[i].startIndex, sizeof(uint32_t), 1, file);
data.read(&meshes[i].vertexCount);
data.read(&meshes[i].padding);
data.read(&meshes[i].indexCount);
data.read(&meshes[i].materialIndex);
data.read(&meshes[i].subMeshIndex);
data.read(&meshes[i].subMeshCount);
data.read(&meshes[i].boneTableIndex);
data.read(&meshes[i].startIndex);

meshes[i].vertexBufferOffset.resize(3);
fread(meshes[i].vertexBufferOffset.data(), sizeof(uint32_t) * 3, 1, file);
data.read_structures(&meshes[i].vertexBufferOffset, 3);
data.read_structures(&meshes[i].vertexBufferStride, 3);

meshes[i].vertexBufferStride.resize(3);
fread(meshes[i].vertexBufferStride.data(), sizeof(uint8_t) * 3, 1, file);

fread(&meshes[i].vertexStreamCount, sizeof(uint8_t), 1, file);
data.read(&meshes[i].vertexStreamCount);
}

std::vector<uint32_t> attributeNameOffsets(modelHeader.attributeCount);
fread(attributeNameOffsets.data(), sizeof(uint32_t) * modelHeader.attributeCount, 1, file);
std::vector<uint32_t> attributeNameOffsets;
data.read_structures(&attributeNameOffsets, modelHeader.attributeCount);

// TODO: implement terrain shadow meshes

@@ -238,69 +215,60 @@ Model parseMDL(const std::string_view path) {
unsigned short boneCount;
};

std::vector<Submesh> submeshes(modelHeader.submeshCount);
for(int i = 0; i < modelHeader.submeshCount; i++) {
fread(&submeshes[i], sizeof(Submesh), 1, file);
}
std::vector<Submesh> submeshes;
data.read_structures(&submeshes, modelHeader.submeshCount);

// TODO: implement terrain shadow submeshes

std::vector<uint32_t> materialNameOffsets(modelHeader.materialCount);
fread(materialNameOffsets.data(), sizeof(uint32_t) * modelHeader.materialCount, 1, file);
std::vector<uint32_t> materialNameOffsets;
data.read_structures(&materialNameOffsets, modelHeader.materialCount);

std::vector<uint32_t> boneNameOffsets(modelHeader.boneCount);
fread(boneNameOffsets.data(), sizeof(uint32_t) * modelHeader.boneCount, 1, file);
std::vector<uint32_t> boneNameOffsets;
data.read_structures(&boneNameOffsets, modelHeader.boneCount);

struct BoneTable {
std::vector<unsigned short> boneIndex;
std::vector<uint16_t> boneIndex;
uint8_t boneCount;
std::vector<uint8_t> padding;
};

std::vector<BoneTable> boneTables(modelHeader.boneTableCount);
for(int i = 0; i < modelHeader.boneTableCount; i++) {
boneTables[i].boneIndex.resize(64);
fread(boneTables[i].boneIndex.data(), 64 * sizeof(uint16_t), 1, file);
fread(&boneTables[i].boneCount, sizeof(uint8_t), 1, file);
boneTables[i].padding.resize(3);
fread(boneTables[i].padding.data(), sizeof(uint8_t) * 3, 1, file);
data.read_structures(&boneTables[i].boneIndex, 64);

fmt::print("bone count: {}\n", boneTables[i].boneCount);
data.read(&boneTables[i].boneCount);

data.read_structures(&boneTables[i].padding, 3);
}

// TODO: implement shapes

unsigned int submeshBoneMapSize;
fread(&submeshBoneMapSize, sizeof(uint32_t), 1, file);
uint32_t submeshBoneMapSize;
data.read(&submeshBoneMapSize);

std::vector<uint16_t > submeshBoneMap((int)submeshBoneMapSize / 2);
fread(submeshBoneMap.data(), submeshBoneMap.size() * sizeof(uint16_t), 1, file);
std::vector<uint16_t> submeshBoneMap;
data.read_structures(&submeshBoneMap, (int)submeshBoneMapSize / 2);

uint8_t paddingAmount;
fread(&paddingAmount, sizeof(uint8_t), 1, file);
data.read(&paddingAmount);

fseek(file, paddingAmount, SEEK_CUR);
data.seek(paddingAmount, Seek::Current);

struct BoundingBox {
std::array<float, 4> min, max;
};

BoundingBox boundingBoxes, modelBoundingBoxes, waterBoundingBoxes, verticalFogBoundingBoxes;
fread(&boundingBoxes, sizeof(BoundingBox), 1, file);
fread(&modelBoundingBoxes, sizeof(BoundingBox), 1, file);
fread(&waterBoundingBoxes, sizeof(BoundingBox), 1, file);
fread(&verticalFogBoundingBoxes, sizeof(BoundingBox), 1, file);
data.read(&boundingBoxes);
data.read(&modelBoundingBoxes);
data.read(&waterBoundingBoxes);
data.read(&verticalFogBoundingBoxes);

std::vector<BoundingBox> boneBoundingBoxes(modelHeader.boneCount);
fread(boneBoundingBoxes.data(), modelHeader.boneCount * sizeof(BoundingBox), 1, file);

fmt::print("Successfully read mdl file!\n");

fmt::print("Now exporting as test.obj...\n");
std::vector<BoundingBox> boneBoundingBoxes;
data.read_structures(&boneBoundingBoxes, modelHeader.boneCount);

Model model;

// TODO: doesn't work for lod above 0
for(int i = 0; i < modelHeader.lodCount; i++) {
::Lod lod;

@@ -333,44 +301,48 @@ Model parseMDL(const std::string_view path) {
std::vector<Vertex> vertices(vertexCount);

for(int k = 0; k < vertexCount; k++) {
for(auto & orderedElement : decl.elements) {
VertexType type = (VertexType)orderedElement.type;
VertexUsage usage = (VertexUsage)orderedElement.usage;
for(auto& orderedElement : decl.elements) {
auto type = static_cast<VertexType>(orderedElement.type);
auto usage = static_cast<VertexUsage>(orderedElement.usage);

const int stream = orderedElement.stream;

fseek(file, lods[i].vertexDataOffset + meshes[j].vertexBufferOffset[stream] + orderedElement.offset + meshes[i].vertexBufferStride[stream] * k, SEEK_SET);
data.seek(lods[i].vertexDataOffset + meshes[j].vertexBufferOffset[stream] + orderedElement.offset + meshes[i].vertexBufferStride[stream] * k, Seek::Set);

std::array<float, 4> floatData = {};

switch(type) {
case VertexType::Single3:
fread(floatData.data(), sizeof(float) * 3, 1, file);
data.read_array(floatData.data(), 3);
break;
case VertexType::Single4:
fread(floatData.data(), sizeof(float) * 4, 1, file);
data.read_array(floatData.data(), 4);
break;
case VertexType::UInt:
fseek(file, sizeof(uint8_t) * 4, SEEK_CUR);
data.seek(sizeof(uint8_t) * 4, Seek::Current);
break;
case VertexType::ByteFloat4:
case VertexType::ByteFloat4: {
uint8_t values[4];
fread(values, sizeof(uint8_t) * 4, 1, file);
data.read_array(values, 4);

floatData[0] = byte_to_float(values[0]);
floatData[1] = byte_to_float(values[1]);
floatData[2] = byte_to_float(values[2]);
floatData[3] = byte_to_float(values[3]);
}
break;
case VertexType::Half2: {
uint16_t values[2];
fread(values, sizeof(uint16_t) * 2, 1, file);
data.read_array(values, 2);

floatData[0] = half_to_float(values[0]);
floatData[1] = half_to_float(values[1]);
}
break;
case VertexType::Half4: {
uint16_t values[4];
fread(values, sizeof(uint16_t) * 4, 1, file);
data.read_array(values, 4);

floatData[0] = half_to_float(values[0]);
floatData[1] = half_to_float(values[1]);
floatData[2] = half_to_float(values[2]);

@@ -390,9 +362,10 @@ Model parseMDL(const std::string_view path) {
}
}

fseek(file, modelFileHeader.indexOffsets[i] + (meshes[j].startIndex * 2), SEEK_SET);
std::vector<uint16_t> indices(meshes[j].indexCount);
fread(indices.data(), meshes[j].indexCount * sizeof(uint16_t), 1, file);
data.seek(modelFileHeader.indexOffsets[i] + (meshes[j].startIndex * 2), Seek::Set);

std::vector<uint16_t> indices;
data.read_structures(&indices, meshes[j].indexCount);

part.indices = indices;
part.vertices = vertices;