Downloading and extracting mod archives from profile uuid
commit 897857fc98
.gitignore
@@ -0,0 +1,2 @@
/target
/notes
Cargo.lock: file diff suppressed because it is too large.
Cargo.toml
@@ -0,0 +1,11 @@
[package]
name = "munsikka"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
base64 = "0.22.1"
reqwest = { version = "0.12.7", features = [ "blocking", "json" ] }
yaml-rust2 = "0.8.1"
src/lib.rs
@@ -0,0 +1,182 @@
use std::{
    env::var,
    path::{Path, PathBuf},
    process::Command,
};

use reqwest::blocking::Client;
use yaml_rust2::Yaml;

pub const BASE_URL: &str = "http://thunderstore.io";

/// A single mod entry parsed from the profile's mods.yml
#[derive(Debug)]
pub struct ModInfo {
    pub name: String,
    pub author: String,
    pub version: (i64, i64, i64),
    pub website_url: String,
    pub enabled: bool,
}

impl ModInfo {
    pub fn from_yaml(yaml: &Yaml) -> Option<Self> {
        Some(Self {
            name: yaml["displayName"].as_str()?.to_owned(),
            author: yaml["authorName"].as_str()?.to_owned(),
            version: (
                yaml["versionNumber"]["major"].as_i64()?,
                yaml["versionNumber"]["minor"].as_i64()?,
                yaml["versionNumber"]["patch"].as_i64()?,
            ),
            website_url: yaml["websiteUrl"].as_str()?.to_owned(),
            enabled: yaml["enabled"].as_bool()?,
        })
    }

    /// Builds the package download URL from the mod's website URL.
    /// Relies on `website_url` ending with a trailing `/` so the version can be appended directly.
    pub fn download_url(&self) -> String {
        let url = self
            .website_url
            .replace("c/lethal-company/p", "package/download");
        format!("{url}{}", self.version_string())
    }

    pub fn unique_name(&self) -> String {
        format!(
            "{name}_{author}_{version}",
            name = self.name,
            author = self.author,
            version = self.version_string(),
        )
    }

    pub fn version_string(&self) -> String {
        format!("{}.{}.{}", self.version.0, self.version.1, self.version.2)
    }
}

/// Downloads archives into `base_dir` and extracts them next to the archive,
/// optionally running the raw response through `preprocessor` first.
pub struct FetchExtractor {
    pub client: Client,
    pub base_dir: PathBuf,
    pub preprocessor: Option<Box<dyn Fn(Vec<u8>) -> Vec<u8>>>,
}

impl FetchExtractor {
    pub fn archive_path(&self, unique_name: &str) -> PathBuf {
        self.base_dir.join(format!("{unique_name}.zip"))
    }

    pub fn extracted_path(&self, unique_name: &str) -> PathBuf {
        self.base_dir.join(unique_name)
    }

    /// Returns the archive path if the archive has already been downloaded
    pub fn is_fetched(&self, unique_name: &str) -> Option<PathBuf> {
        let path = self.archive_path(unique_name);
        path.exists().then_some(path)
    }

    /// Returns the extraction directory if the archive has already been extracted
    pub fn is_extracted(&self, unique_name: &str) -> Option<PathBuf> {
        let path = self.extracted_path(unique_name);
        path.exists().then_some(path)
    }

    /// Removes the archive and extracted files
    pub fn clean(&self, unique_name: &str) -> std::io::Result<()> {
        if unique_name.is_empty() {
            return Err(std::io::Error::new(
                std::io::ErrorKind::Other,
                "unique_name is empty, this may lead to unexpected behaviour",
            ));
        }

        if let Some(parent_dir) = self.archive_path(unique_name).parent() {
            std::fs::create_dir_all(parent_dir)?;
        }

        if let Some(parent_dir) = self.extracted_path(unique_name).parent() {
            std::fs::create_dir_all(parent_dir)?;
        }

        if let Some(archive_path) = self.is_fetched(unique_name) {
            if archive_path.is_file() {
                std::fs::remove_file(&archive_path)?;
            } else {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    format!("{:?} is not a file", archive_path),
                ));
            }
        }

        if let Some(extract_path) = self.is_extracted(unique_name) {
            if extract_path.is_dir() {
                std::fs::remove_dir_all(&extract_path)?;
            } else {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    format!("{:?} is not a directory", extract_path),
                ));
            }
        }

        Ok(())
    }

    /// Fetches the archive and writes it to `self.archive_path()`
    pub fn fetch(&self, url: &str, unique_name: &str) -> reqwest::Result<()> {
        // Don't re-download if it already exists
        if self.is_fetched(unique_name).is_some() {
            return Ok(());
        }

        let mut zip_data = self.client.get(url).send()?.bytes()?.to_vec();
        if let Some(preprocessor) = &self.preprocessor {
            zip_data = preprocessor(zip_data);
        }

        // Filesystem errors can't be converted into `reqwest::Error`, so fail hard on them
        let path = self.archive_path(unique_name);
        if let Some(parent_dir) = path.parent() {
            std::fs::create_dir_all(parent_dir).unwrap();
        }
        std::fs::write(&path, zip_data).unwrap();
        Ok(())
    }

    /// Extracts the archive to `self.extracted_path()`
    pub fn extract(&self, unique_name: &str) -> std::io::Result<()> {
        // Don't re-extract if it already exists
        if self.is_extracted(unique_name).is_some() {
            return Ok(());
        }

        if let Some(parent_dir) = self.extracted_path(unique_name).parent() {
            std::fs::create_dir_all(parent_dir)?;
        }
        unzip_to(
            &self.archive_path(unique_name),
            &self.extracted_path(unique_name),
        )
        .map_err(|_| std::io::Error::new(std::io::ErrorKind::Other, "unzip failed"))?;
        Ok(())
    }
}

fn unzip_to(from: &Path, to: &Path) -> Result<(), ()> {
    // TODO: use the zip library instead of shelling out to `unzip`
    let output = Command::new("unzip")
        .arg(from)
        .arg("-d")
        .arg(to)
        .output()
        .map_err(|_| ())?;
    // Report failure instead of silently returning Ok when unzip exits non-zero
    if output.status.success() {
        Ok(())
    } else {
        Err(())
    }
}

/// Data directory for downloaded profiles and mods: `$HOME/.local/share/munsikka`
pub fn data_dir() -> PathBuf {
    PathBuf::from(var("HOME").unwrap()).join(".local/share/munsikka")
}
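For reference, the FetchExtractor added in src/lib.rs can also be driven on its own. The sketch below is not part of the commit: the URL, the "scratch" directory and the unique name are placeholder values, and the archive behind the URL is assumed to be a plain zip that needs no preprocessing.

use std::path::PathBuf;

use munsikka::{data_dir, FetchExtractor};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical standalone use of FetchExtractor; URL and names are placeholders.
    let fetcher = FetchExtractor {
        client: reqwest::blocking::Client::new(),
        base_dir: data_dir().join(PathBuf::from("scratch")),
        preprocessor: None,
    };
    let unique_name = "ExampleMod_ExampleAuthor_1.0.0";

    // Remove any stale copy, then download and unpack the archive.
    fetcher.clean(unique_name)?;
    fetcher.fetch("https://example.com/archive.zip", unique_name)?;
    fetcher.extract(unique_name)?;
    println!("extracted to {:?}", fetcher.extracted_path(unique_name));
    Ok(())
}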
src/main.rs
@@ -0,0 +1,99 @@
use std::{error::Error, io::Write, path::PathBuf};

use munsikka::*;

use base64::prelude::*;
use yaml_rust2::{Yaml, YamlLoader};

fn main() {
    // First and only argument is the (legacy) profile UUID from a mod manager user
    let profile_uuid = std::env::args()
        .nth(1)
        .expect("The program expects a profile UUID as an argument to fetch the mod list.");

    let http_client = reqwest::blocking::Client::builder()
        .user_agent("munsikka-mod-manager")
        .build()
        .unwrap();

    let profile_fetcher = FetchExtractor {
        base_dir: data_dir().join(PathBuf::from("profile")),
        client: http_client.clone(),
        preprocessor: Some(Box::new(profile_preprocessor)),
    };

    let mod_fetcher = FetchExtractor {
        base_dir: data_dir().join(PathBuf::from("mod")),
        client: http_client.clone(),
        preprocessor: None,
    };

    let mod_list = fetch_mod_list(&profile_fetcher, &profile_uuid).unwrap();
    fetch_mods(&mod_fetcher, &mod_list).unwrap();
}

/// Map the legacy profile response into a zip binary
fn profile_preprocessor(profile_response: Vec<u8>) -> Vec<u8> {
    // The response is a base64-encoded zip file with some junk data (blank and
    // comment-style lines) before it, so strip that off first.
    let base64_zip = String::from_utf8(profile_response).unwrap();
    let base64_zip = base64_zip
        .lines()
        .find(|line| !line.is_empty() && !line.starts_with('#'))
        .unwrap();

    BASE64_STANDARD.decode(base64_zip).unwrap()
}

fn fetch_mod_list(
    fetcher: &FetchExtractor,
    profile_uuid: &str,
) -> Result<Vec<ModInfo>, Box<dyn Error>> {
    let profile_url = format!("{BASE_URL}/api/experimental/legacyprofile/get/{profile_uuid}");

    fetcher.clean(profile_uuid)?;
    fetcher.fetch(&profile_url, profile_uuid)?;
    fetcher.extract(profile_uuid)?;

    let mods_yaml_path = fetcher
        .extracted_path(profile_uuid)
        .join(PathBuf::from("mods.yml"));
    let mods_yaml = std::fs::read_to_string(&mods_yaml_path)?;

    // Parse the YAML document and keep only the mods that are enabled
    let mods = &YamlLoader::load_from_str(&mods_yaml)?[0];
    Ok(mods
        .as_vec()
        .ok_or(std::io::Error::new(
            std::io::ErrorKind::Other,
            format!("YAML file is not a list: {:?}", mods_yaml_path),
        ))?
        .iter()
        .filter(|yaml| yaml["enabled"] == Yaml::Boolean(true))
        .map(|yaml| ModInfo::from_yaml(yaml).unwrap())
        .collect())
}

fn fetch_mods(fetcher: &FetchExtractor, mod_list: &[ModInfo]) -> Result<(), Box<dyn Error>> {
    println!("Fetching {} mods...", mod_list.len());

    for mod_info in mod_list.iter() {
        print!(
            " {version: >10} | {name: <30} | {author: <30} ",
            name = mod_info.name,
            author = mod_info.author,
            version = mod_info.version_string(),
        );
        // Flush so the row is visible while the mod downloads
        std::io::stdout().flush()?;
        // Skip download & extract if the mod is already extracted
        if fetcher.is_extracted(&mod_info.unique_name()).is_none() {
            fetcher.fetch(&mod_info.download_url(), &mod_info.unique_name())?;
            fetcher.extract(&mod_info.unique_name())?;
            println!("✅");
        } else {
            println!("📦");
        }
    }
    Ok(())
}
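The strip-and-decode step in profile_preprocessor is easiest to see on a small made-up payload. The sketch below is not from the commit: the "#r2modman" header line and the payload are invented purely to illustrate the kind of junk the function skips, and "UEsDBA==" is simply the base64 encoding of a zip file's "PK\x03\x04" magic bytes.

use base64::prelude::*;

fn main() {
    // Made-up response body: a comment-style header, a blank line, then the base64 payload.
    let response = "#r2modman\n\nUEsDBA==\n";
    let payload = response
        .lines()
        .find(|line| !line.is_empty() && !line.starts_with('#'))
        .unwrap();
    let bytes = BASE64_STANDARD.decode(payload).unwrap();
    assert!(bytes.starts_with(b"PK")); // zip local file header magic
    println!("decoded {} bytes", bytes.len());
}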