mirror of
https://github.com/redstrate/Auracite.git
synced 2025-04-23 05:07:46 +00:00
Improve character_archive & prepare for future supported UI
Now we pack the different files into one ZIP archive and return it as bytes. It's then possible to turn this into a Base64 string, perfect for data URIs.
parent 0acea2646e
commit aa5318c84a
6 changed files with 915 additions and 243 deletions
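The commit message describes the new flow: write each exported file into an in-memory ZIP, take the resulting bytes, and Base64-encode them into a data URI. Below is a minimal sketch of that flow using the zip and base64 crates this commit adds; it is illustrative only, using a growable Vec-backed cursor and a hypothetical zip_to_data_uri helper rather than the fixed 2 MiB buffer the commit itself allocates.

// Sketch only: pack two in-memory files into a ZIP, then wrap the bytes in a data URI.
// Deviates from the commit by using a growable Vec<u8> instead of a preallocated buffer.
use std::io::{Cursor, Write};

use base64::prelude::*;
use zip::{write::SimpleFileOptions, CompressionMethod, ZipWriter};

fn zip_to_data_uri(json: &[u8], portrait: &[u8]) -> zip::result::ZipResult<String> {
    let mut zip = ZipWriter::new(Cursor::new(Vec::new()));
    let options = SimpleFileOptions::default().compression_method(CompressionMethod::Stored);

    // Same entry names the commit uses for the archive contents.
    zip.start_file("character.json", options)?;
    zip.write_all(json)?;

    zip.start_file("portrait.jpg", options)?;
    zip.write_all(portrait)?;

    // finish() hands back the cursor; into_inner() yields the raw ZIP bytes.
    let bytes = zip.finish()?.into_inner();
    Ok(format!(
        "data:application/octet-stream;base64,{}",
        BASE64_STANDARD.encode(bytes)
    ))
}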
968  Cargo.lock  (generated)
File diff suppressed because it is too large.
47  Cargo.toml

@@ -6,6 +6,15 @@ description = "Export your FFXIV character in portable, generic formats"
 
 [lib]
 crate-type = ["cdylib", "rlib"]
 
+[features]
+# Builds the Qt UI for Auracite
+ui = []# ["cxx-qt-build", "cxx-qt", "cxx-qt-lib", "cxx-kde-frameworks"]
+
+# Builds the CLI for Auracite
+cli = ["clap", "clap_derive"]
+
+[build-dependencies]
+cxx-qt-build = { git = "https://github.com/KDAB/cxx-qt", branch = "main", features = ["link_qt_object_files"] }
 [dependencies]
 # Used to scrape the Lodestone HTML pages
@@ -18,10 +27,6 @@ serde_json = "1.0"
 # Used to do some misc regex operations during scraping
 regex = "1.11"
 
-# CLI interface
-clap = { version = "4.5", features = ["derive"] }
-clap_derive = "4.5"
-
 # Used to generate the HTML page to easily preview your exported data
 minijinja = "2.0"
@@ -32,6 +37,13 @@ touche = { git = "https://github.com/redstrate/touche" }
 
 # Used to generate the WebAssembly version
 wasm-bindgen = "0.2.95"
+wasm-bindgen-futures = "0.4"
+
+# Download files on WebAssembly
+reqwest = { version = "0.12" }
+
+# Zip the character archive
+zip = { version = "2.2", default-features = false }
 
 # Not used directly by us, but to disable the "std" feature and is used by the scraper crate.
 ahash = { version = "0.8.0", default-features = false }
@@ -39,6 +51,29 @@ ahash = { version = "0.8.0", default-features = false }
 # Ditto, but used by the ahash crate.
 getrandom = { version = "0.2", features = ["js"] }
 
+[target.'cfg(target_family = "wasm")'.dependencies]
+# Used to access Web APIs in WebAssembly
+web-sys = { version = "0.3", features = ["console"] }
+
+# For async
+tokio = { version = "1.41", features = ["rt", "macros"] }
+
+# Encoding the character archive to base64 so the browser can download it
+base64 = "0.22"
+
 [target.'cfg(not(target_family = "wasm"))'.dependencies]
-# Download HTML pages, images, etc
-downloader = "0.2"
+# For async
+tokio = { version = "1.41", features = ["rt", "rt-multi-thread", "macros"] }
+
+# Download files
+http_req = "0.13"
+
+# CLI interface
+clap = { version = "4.5", features = ["derive"], optional = true }
+clap_derive = { version = "4.5", optional = true }
+
+# Used for the Qt UI
+cxx = "1.0"
+cxx-qt = { git = "https://github.com/KDAB/cxx-qt", branch = "main" }
+cxx-qt-lib = { git = "https://github.com/KDAB/cxx-qt", branch = "main", features = ["full"] }
+cxx-kde-frameworks = { git = "https://github.com/mystchonky/cxx-kde-frameworks", branch = "master" }
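The new `cli` feature only toggles the optional clap/clap_derive dependencies; the code that uses them has to be gated on the feature, in the same way build.rs below gates on `ui`. A hedged sketch of that pattern follows — the Args struct and its flags are hypothetical, not Auracite's actual CLI.

// Illustrative only: gate a CLI entry point behind the new `cli` feature,
// which is what enables the optional clap/clap_derive dependencies.
#[cfg(feature = "cli")]
use clap::Parser;

#[cfg(feature = "cli")]
#[derive(Parser)]
struct Args {
    /// Character name to archive (hypothetical flag, for illustration).
    name: String,
    /// Whether to pull extra data via Dalamud (hypothetical flag).
    #[arg(long)]
    dalamud: bool,
}

#[cfg(feature = "cli")]
fn main() {
    let args = Args::parse();
    println!("archiving {} (dalamud: {})", args.name, args.dalamud);
}

#[cfg(not(feature = "cli"))]
fn main() {}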
15  build.rs  (new file)

@@ -0,0 +1,15 @@
+#[cfg(feature = "ui")]
+use cxx_qt_build::{CxxQtBuilder, QmlModule};
+
+fn main() {
+    #[cfg(feature = "ui")]
+    CxxQtBuilder::new()
+        .qt_module("Quick")
+        .qml_module(QmlModule {
+            uri: "zone.xiv.auracite",
+            rust_files: &["src/bin/ui/bridge.rs"],
+            qml_files: &["src/bin/ui/Main.qml"],
+            ..Default::default()
+        })
+        .build();
+}
src/downloader.rs

@@ -1,35 +1,13 @@
-#[cfg(not(target_family = "wasm"))]
-use downloader::{Download, Downloader};
-use std::path::Path;
+use reqwest::Url;
 
-pub fn download(url: &str, path: &Path) -> Result<(), ()> {
-    #[cfg(target_family = "wasm")]
-    {
-        // TODO: Implement
-        Ok(())
-    }
-
-    #[cfg(not(target_family = "wasm"))]
-    {
-        let mut downloader = Downloader::builder().build().unwrap();
-
-        let mut dl = Download::new(url);
-        dl = dl.file_name(path);
-
-        if !path.exists() {
-            let result = downloader.download(&[dl]).unwrap();
-
-            for r in result {
-                return match r {
-                    Err(e) => {
-                        println!("Error: {}", e.to_string());
-                        Err(())
-                    }
-                    Ok(s) => Ok(()),
-                };
-            }
-        }
-
-        Ok(())
-    }
-}
+pub async fn download(url: &Url) -> Result<Vec<u8>, ()> {
+    let client = reqwest::Client::builder()
+        .build()
+        .unwrap();
+
+    let body = client.get(url.to_string())
+        .send()
+        .await;
+
+    Ok(body.unwrap().bytes().await.unwrap().to_vec())
+}
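For reference, a usage sketch of the reworked downloader: it now takes a parsed reqwest::Url, runs asynchronously, and returns the response body as bytes instead of writing to a path. The crate name `auracite`, the example URL, and the tokio runtime setup are assumptions for illustration.

// Minimal usage sketch of the new async download() on a native target.
use reqwest::Url;

#[tokio::main]
async fn main() {
    // Example URL only; not necessarily a request Auracite itself makes.
    let url = Url::parse("https://na.finalfantasyxiv.com/lodestone/character?q=Example").unwrap();
    let bytes = auracite::downloader::download(&url)
        .await
        .expect("download failed");
    println!("fetched {} bytes", bytes.len());
}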
82  src/lib.rs

@@ -3,18 +3,21 @@ pub mod downloader;
 pub mod html;
 pub mod parser;
 
-use clap::Parser;
 use serde::Deserialize;
 use std::convert::Infallible;
-use std::fs::{read, write};
-use std::path::Path;
+use std::io::Write;
 use std::sync::{Arc, Mutex};
+use reqwest::Url;
 use touche::server::Service;
 use touche::{Body, HttpBody, Request, Response, Server, StatusCode};
 use wasm_bindgen::prelude::wasm_bindgen;
+use zip::write::SimpleFileOptions;
+use zip::ZipWriter;
 use crate::downloader::download;
 use crate::html::write_html;
 use crate::parser::parse_search;
+#[cfg(target_family = "wasm")]
+use base64::prelude::*;
 
 const LODESTONE_HOST: &str = "https://na.finalfantasyxiv.com";
 
@@ -58,41 +61,50 @@ impl Service for PackageService<'_> {
     }
 }
 
-#[wasm_bindgen]
-pub extern fn archive_character(character_name: &str, use_dalamud: bool) {
-    let search_page_path = Path::new("/tmp/search.html");
-    download(
-        &format!("{LODESTONE_HOST}/lodestone/character/?q={}", character_name),
-        search_page_path,
-    )
-    .expect("Failed to download the search page from the Lodestone.");
+/// Archives the character named `character_name` and gives a ZIP file as bytes that can be written to disk.
+pub async extern fn archive_character(character_name: &str, use_dalamud: bool) -> Vec<u8> {
+    let search_page = download(&Url::parse_with_params(&format!("{LODESTONE_HOST}/lodestone/character?"), &[("q", character_name)]).unwrap())
+        .await
+        .expect("Failed to download the search page from the Lodestone.");
 
-    let href = parse_search(&String::from_utf8(read(search_page_path).unwrap()).unwrap());
+    let href = parse_search(&String::from_utf8(search_page).unwrap());
     if href.is_empty() {
         println!("Unable to find character!");
     }
 
-    let char_page_path = Path::new("/tmp/character.html");
-    download(&format!("{LODESTONE_HOST}{}", href), char_page_path)
-        .expect("Failed to download the character page from the Lodestone.");
+    let char_page = download(&Url::parse(&format!("{LODESTONE_HOST}{}", href)).unwrap())
+        .await
+        .expect("Failed to download the character page from the Lodestone.");
 
-    let mut char_data = crate::parser::parse_lodestone(&String::from_utf8(read(char_page_path).unwrap()).unwrap());
+    let mut char_data = crate::parser::parse_lodestone(&String::from_utf8(char_page).unwrap());
 
-    let character_folder = Path::new(&character_name);
-    if !character_folder.exists() {
-        std::fs::create_dir(character_folder).unwrap();
-    }
+    // 2 MiB, for one JSON and two images
+    let mut buf = vec![0; 2097152];
+    let mut zip = ZipWriter::new(std::io::Cursor::new(&mut buf[..]));
+
+    let options = SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
+    zip.start_file("character.json", options);
+    zip.write_all(serde_json::to_string(&char_data).unwrap().as_ref());
 
     if !char_data.portrait_url.is_empty() {
-        download(
-            &char_data.portrait_url,
-            &character_folder.join("portrait.jpg"),
-        )
-        .expect("Failed to download the character portrait image.");
+        let portrait_url = char_data.portrait_url.replace("img2.finalfantasyxiv.com", "img-tunnel.ryne.moe");
+
+        let portrait = download(&Url::parse(&portrait_url).unwrap())
+            .await
+            .expect("Failed to download the character portrait image.");
+
+        zip.start_file("portrait.jpg", options);
+        zip.write_all(&*portrait);
     }
     if !char_data.face_url.is_empty() {
-        download(&char_data.face_url, &character_folder.join("face.jpg"))
-            .expect("Failed to download the character face image.");
+        let face_url = char_data.face_url.replace("img2.finalfantasyxiv.com", "img-tunnel.ryne.moe");
+
+        let face = download(&Url::parse(&face_url).unwrap())
+            .await
+            .expect("Failed to download the character face image.");
+
+        zip.start_file("face.jpg", options);
+        zip.write_all(&*face);
     }
 
     if use_dalamud {
@@ -115,16 +127,11 @@ pub extern fn archive_character(character_name: &str, use_dalamud: bool) {
         char_data.player_commendations = package.player_commendations; // TODO: fetch from the lodestone?
     }
 
-    let serialized = serde_json::to_string(&char_data).unwrap();
-    write(character_folder.join("character.json"), serialized)
-        .expect("Failed to write the character JSON file.");
-
-    println!(
-        "Download complete! The archive is located at: {}",
-        character_folder.file_name().unwrap().to_str().unwrap()
-    );
-
-    write_html(
+    zip.finish();
+
+    return buf;
+
+    /*write_html(
         &char_data,
         &character_folder
             .join("character.html")
@@ -132,5 +139,16 @@ pub extern fn archive_character(character_name: &str, use_dalamud: bool) {
             .into_string()
             .unwrap(),
     )
-    .expect("Failed to write the character HTML file.");
+    .expect("Failed to write the character HTML file.");*/
 }
+
+
+/// Archives the character named `character_name` and converts the ZIP file to Base64. Useful for downloading via data URIs.
+#[cfg(target_family = "wasm")]
+#[wasm_bindgen]
+pub async extern fn archive_character_base64(character_name: &str, use_dalamud: bool) -> String {
+    let buf = archive_character(character_name, use_dalamud).await;
+
+    let base64 = BASE64_STANDARD.encode(buf);
+    return format!("data:application/octet-stream;charset=utf-16le;base64,{base64}").into();
+}
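A sketch of how a native caller might consume the new API: archive_character now returns the ZIP archive as bytes instead of writing a folder to disk, so the caller decides where the archive goes (on WebAssembly, archive_character_base64 would be used instead to obtain a data URI). The crate name, character name, and runtime setup below are assumptions for illustration.

// Sketch only: call the async archive_character on a native target and
// persist the returned ZIP bytes to a file of the caller's choosing.
#[tokio::main]
async fn main() {
    // "Example Character" is a placeholder name, not real data.
    let bytes = auracite::archive_character("Example Character", false).await;
    std::fs::write("character.zip", &bytes).expect("failed to write archive");
}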
src/parser.rs

@@ -7,9 +7,15 @@ const ENTRY_NAME_SELECTOR: &str = ".entry__name";
 
 /// Parses the HTML from `data` and returns the relative Lodestone URL for the first search entry.
 pub fn parse_search(data: &str) -> String {
+    #[cfg(target_family = "wasm")]
+    web_sys::console::log_1(&"parsing doc...".into());
+
     let document = Html::parse_document(data);
     let mut href = String::new();
 
+    #[cfg(target_family = "wasm")]
+    web_sys::console::log_1(&"done!".into());
+
     for element in document.select(&Selector::parse(ENTRY_SELECTOR).unwrap()) {
         if let Some(block_name) = element
             .select(&Selector::parse(ENTRY_NAME_SELECTOR).unwrap())