Add race-specific skeleton extraction
parent 319cc658d0
commit a51734a220
3 changed files with 11 additions and 6 deletions
@@ -8,6 +8,7 @@
 #include "indexparser.h"
 #include "sqpack.h"
 #include "memorybuffer.h"
+#include "types/race.h"

 /*
  * This handles reading/extracting the raw data from game data packs, such as dat0, index and index2 files.
@@ -34,7 +35,7 @@ public:

     IndexFile<IndexHashTableEntry> getIndexListing(std::string_view folder);

-    void extractSkeleton();
+    void extractSkeleton(Race race);

     std::optional<EXH> readExcelSheet(std::string_view name);

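Note: the new parameter relies on the Race type and the get_race_id() helper from types/race.h, which is not part of this diff. A hedged sketch of the shape that header could take (the enumerator names and numeric ids below are illustrative assumptions, not the project's actual values):

    // types/race.h -- illustrative sketch only, not the real header.
    #pragma once

    #include <cstdint>

    enum class Race {
        HyurMidlanderMale,
        HyurMidlanderFemale,
        // ... other playable races
    };

    // Maps a Race onto the numeric id embedded in "chara/human/cXXXX/..."
    // paths; 201 would reproduce the c0201 path hard-coded before this change.
    constexpr uint32_t get_race_id(Race race) {
        switch (race) {
        case Race::HyurMidlanderMale:   return 101; // assumed id
        case Race::HyurMidlanderFemale: return 201; // assumed id
        default:                        return 101; // placeholder fallback
        }
    }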
@@ -397,8 +397,9 @@ std::optional<EXH> GameData::readExcelSheet(std::string_view name) {
     return {};
 }

-void GameData::extractSkeleton() {
-    const std::string path = fmt::format("chara/human/c0201/skeleton/base/b0001/skl_c0201b0001.sklb");
+void GameData::extractSkeleton(Race race) {
+    const std::string path = fmt::format("chara/human/c{race:04d}/skeleton/base/b0001/skl_c{race:04d}b0001.sklb",
+                                         fmt::arg("race", get_race_id(race)));
     auto skel_data = extractFile(path);
     auto skel_span = MemorySpan(*skel_data);

@@ -433,7 +434,10 @@ void GameData::extractSkeleton() {
     std::vector<uint8_t> havokData(skel_span.size() - dataOffset);
     skel_span.read_structures(&havokData, havokData.size());

-    FILE* newFile = fopen("test.sklb.havok", "wb");
+    const std::string outputName = fmt::format("skl_c{race:04d}b0001.sklb",
+                                               fmt::arg("race", get_race_id(race)));
+
+    FILE* newFile = fopen(outputName.c_str(), "wb");
     fwrite(havokData.data(), havokData.size(), 1, newFile);

     fclose(newFile);

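Note: fmt::arg binds the named {race} placeholder and the :04d spec zero-pads it, so a race id of 201 yields chara/human/c0201/... and an output file named skl_c0201b0001.sklb, matching the previously hard-coded path. A hedged usage sketch (the header name, constructor argument, and enumerator are assumptions, not shown in this diff):

    #include "gamedata.h" // assumed header name for the GameData class

    int main() {
        GameData data("/path/to/game/sqpack"); // assumed: ctor takes the sqpack directory

        // Writes the extracted Havok payload to skl_cXXXXb0001.sklb in the
        // working directory instead of the old fixed "test.sklb.havok".
        data.extractSkeleton(Race::HyurMidlanderFemale);
        return 0;
    }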
@@ -33,7 +33,7 @@ Skeleton parseHavokXML(const std::string_view path) {
     pugi::xml_document doc;
     doc.load_file(path.data());

-    pugi::xpath_node build_tool = doc.select_node("//hkobject[@name=\"#0052\"]/hkparam[@name=\"bones\"]");
+    pugi::xpath_node build_tool = doc.select_node("//hkobject/hkparam[@name=\"bones\"]");

     auto bonesNode = build_tool.node();

@@ -71,7 +71,7 @@ Skeleton parseHavokXML(const std::string_view path) {

     walkSkeleton(skeleton, skeleton.root_bone);

-    pugi::xpath_node build_tool2 = doc.select_node("//hkobject[@name=\"#0052\"]/hkparam[@name=\"referencePose\"]");
+    pugi::xpath_node build_tool2 = doc.select_node("//hkobject/hkparam[@name=\"referencePose\"]");

     fmt::print("num ref poses: {}\n", build_tool2.node().attribute("numelements").as_int());

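Note: the old selectors pinned the skeleton data to the hkobject literally named "#0052", an object id that presumably differs between exported skeleton files, so other races' skeletons would not match. Dropping the name predicate makes select_node return the first hkobject carrying the wanted hkparam regardless of its id. A small standalone pugixml sketch of the effect (the XML snippet is made up for illustration):

    #include <pugixml.hpp>
    #include <cstdio>

    int main() {
        pugi::xml_document doc;
        // "#0089" stands in for whatever object id another race's export uses.
        doc.load_string(R"(<hkobject name="#0089">
                               <hkparam name="bones" numelements="2"/>
                           </hkobject>)");

        // //hkobject[@name="#0052"]/hkparam[@name="bones"] would miss this
        // document; the relaxed selector from the diff still finds it.
        pugi::xpath_node bones = doc.select_node("//hkobject/hkparam[@name=\"bones\"]");
        std::printf("numelements: %d\n", bones.node().attribute("numelements").as_int());
        return 0;
    }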