1
Fork 0
mirror of https://github.com/redstrate/Auracite.git synced 2025-04-20 19:57:46 +00:00

Begin working on WebAssembly support

It now compiles, but lacks a way to actually download anything.
This commit is contained in:
Joshua Goins 2024-10-30 16:27:32 -04:00
parent 0e769682bd
commit 6ac3f5ced7
5 changed files with 53 additions and 30 deletions

4
Cargo.lock generated
View file

@@ -92,9 +92,11 @@
 name = "auracite"
 version = "0.1.0"
 dependencies = [
+ "ahash",
  "clap",
  "clap_derive",
  "downloader",
+ "getrandom",
  "minijinja",
  "regex",
  "scraper",
@@ -523,8 +525,10 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
 dependencies = [
  "cfg-if",
+ "js-sys",
  "libc",
  "wasi",
+ "wasm-bindgen",
 ]

 [[package]]

View file

@@ -4,6 +4,9 @@
 version = "0.1.0"
 edition = "2021"
 description = "Export your FFXIV character in portable, generic formats"

+[lib]
+crate-type = ["cdylib", "rlib"]
+
 [dependencies]
 # Used to scrape the Lodestone HTML pages
 scraper = "0.20"
@@ -19,13 +22,20 @@
 clap = { version = "4.5", features = ["derive"] }
 clap_derive = "4.5"

-# Download HTML pages, images, etc
-downloader = "0.2"
-
 # Used to generate the HTML page to easily preview your exported data
 minijinja = "2.0"

 # Used to communicate with the Dalamud plugin
 # Needs my fork for allowing server shutdown
 # TODO: upstream this or poke upstream to add this
 touche = { git = "https://github.com/redstrate/touche" }

+# Not used directly by us, but to disable the "std" feature; it is used by the scraper crate.
+ahash = { version = "0.8.0", default-features = false }
+# Ditto, but used by the ahash crate.
+getrandom = { version = "0.2", features = ["js"] }
+
+[target.'cfg(not(target_family = "wasm"))'.dependencies]
+# Download HTML pages, images, etc
+downloader = "0.2"

View file

@@ -1,25 +1,35 @@
+#[cfg(not(target_family = "wasm"))]
 use downloader::{Download, Downloader};
 use std::path::Path;

 pub fn download(url: &str, path: &Path) -> Result<(), ()> {
+    #[cfg(target_family = "wasm")]
+    {
+        // TODO: Implement
+        Ok(())
+    }
+
+    #[cfg(not(target_family = "wasm"))]
+    {
         let mut downloader = Downloader::builder().build().unwrap();
         let mut dl = Download::new(url);
         dl = dl.file_name(path);
         if !path.exists() {
             let result = downloader.download(&[dl]).unwrap();
             for r in result {
                 return match r {
                     Err(e) => {
                         println!("Error: {}", e.to_string());
                         Err(())
                     }
                     Ok(s) => Ok(()),
                 };
             }
         }
         Ok(())
+    }
 }

4
src/lib.rs Normal file
View file

@ -0,0 +1,4 @@
pub mod data;
pub mod downloader;
pub mod html;
pub mod parser;

View file

@@ -1,11 +1,6 @@
-mod data;
-mod downloader;
-mod html;
-mod parser;
-
-use crate::downloader::download;
-use crate::html::write_html;
-use crate::parser::{parse_lodestone, parse_search};
+use auracite::downloader::download;
+use auracite::html::write_html;
+use auracite::parser::{parse_lodestone, parse_search};
 use clap::Parser;
 use serde::Deserialize;
 use std::convert::Infallible;