2021-07-09 04:37:08 +00:00
|
|
|
use anyhow::Result;
|
2021-07-23 01:50:28 +00:00
|
|
|
use argh::FromArgs;
|
2021-06-03 16:30:04 +00:00
|
|
|
use compress_tools::{list_archive_files, uncompress_archive_file};
|
|
|
|
use dotenv::dotenv;
|
2021-07-12 02:49:29 +00:00
|
|
|
use reqwest::StatusCode;
|
2021-06-03 16:30:04 +00:00
|
|
|
use skyrim_cell_dump::parse_plugin;
|
|
|
|
use sqlx::postgres::PgPoolOptions;
|
2021-07-24 03:40:05 +00:00
|
|
|
use std::borrow::Borrow;
|
2021-06-03 16:30:04 +00:00
|
|
|
use std::convert::TryInto;
|
|
|
|
use std::env;
|
|
|
|
use std::io::Seek;
|
|
|
|
use std::io::SeekFrom;
|
2021-07-22 01:35:11 +00:00
|
|
|
use std::path::Path;
|
2021-07-17 18:19:43 +00:00
|
|
|
use std::process::Command;
|
2021-07-09 04:37:08 +00:00
|
|
|
use std::time::Duration;
|
|
|
|
use tempfile::tempdir;
|
2021-07-26 23:31:25 +00:00
|
|
|
use tokio::fs::create_dir_all;
|
|
|
|
use tokio::io::AsyncWriteExt;
|
2021-06-03 16:30:04 +00:00
|
|
|
use tokio::io::{AsyncReadExt, AsyncSeekExt};
|
2021-06-14 02:30:40 +00:00
|
|
|
use tokio::time::sleep;
|
2021-07-23 01:50:28 +00:00
|
|
|
use tracing::{debug, error, info, info_span, warn};
|
2021-07-03 20:00:18 +00:00
|
|
|
use unrar::Archive;
|
2021-06-03 16:30:04 +00:00
|
|
|
|
2021-07-09 01:19:16 +00:00
|
|
|
mod models;
|
2021-07-09 04:37:08 +00:00
|
|
|
mod nexus_api;
|
|
|
|
mod nexus_scraper;
|
2021-07-09 01:19:16 +00:00
|
|
|
|
2021-07-12 02:49:29 +00:00
|
|
|
use models::game;
|
|
|
|
use models::plugin;
|
2021-07-22 01:35:11 +00:00
|
|
|
use models::{cell, cell::UnsavedCell};
|
2021-07-12 02:49:29 +00:00
|
|
|
use models::{file, file::File};
|
2021-07-22 03:20:51 +00:00
|
|
|
use models::{
|
|
|
|
game_mod,
|
|
|
|
game_mod::{Mod, UnsavedMod},
|
|
|
|
};
|
2021-07-22 01:35:11 +00:00
|
|
|
use models::{plugin_cell, plugin_cell::UnsavedPluginCell};
|
|
|
|
use models::{plugin_world, plugin_world::UnsavedPluginWorld};
|
|
|
|
use models::{world, world::UnsavedWorld};
|
2021-07-09 04:37:08 +00:00
|
|
|
use nexus_api::{GAME_ID, GAME_NAME};
|
2021-06-14 02:30:40 +00:00
|
|
|
|
2021-07-23 01:50:28 +00:00
|
|
|
// Command-line arguments, parsed by argh. The `///` doc comments below are
// consumed by argh and become the --help output, so they are left untouched.
#[derive(FromArgs)]
/// Downloads every mod off nexus mods, parses CELL and WRLD data from plugins in each, and saves the data to the database.
struct Args {
    // Starting page of the Nexus Mods listing; defaults to the first page.
    // Useful for resuming a previously interrupted scrape part-way through.
    #[argh(option, short = 'p', default = "1")]
    /// the page number to start scraping for mods on nexus mods.
    page: usize,
}
|
|
|
|
|
2021-07-22 01:35:11 +00:00
|
|
|
fn get_local_form_id_and_master<'a>(
|
|
|
|
form_id: u32,
|
|
|
|
masters: &'a [&str],
|
|
|
|
file_name: &'a str,
|
|
|
|
) -> Result<(i32, &'a str)> {
|
|
|
|
let master_index = (form_id >> 24) as usize;
|
|
|
|
let local_form_id = (form_id & 0xFFFFFF).try_into()?;
|
|
|
|
if master_index >= masters.len() {
|
|
|
|
return Ok((local_form_id, file_name));
|
|
|
|
}
|
|
|
|
Ok((local_form_id, masters[master_index]))
|
|
|
|
}
|
|
|
|
|
2021-07-26 23:31:25 +00:00
|
|
|
async fn process_plugin(
|
2021-07-04 04:01:59 +00:00
|
|
|
plugin_buf: &mut [u8],
|
2021-07-03 20:00:18 +00:00
|
|
|
pool: &sqlx::Pool<sqlx::Postgres>,
|
|
|
|
db_file: &File,
|
2021-07-26 23:31:25 +00:00
|
|
|
db_mod: &Mod,
|
2021-07-22 01:35:11 +00:00
|
|
|
file_path: &str,
|
2021-07-26 23:31:25 +00:00
|
|
|
) -> Result<()> {
|
2021-07-12 14:43:03 +00:00
|
|
|
if plugin_buf.len() == 0 {
|
|
|
|
warn!("skipping processing of invalid empty plugin");
|
|
|
|
return Ok(());
|
|
|
|
}
|
2021-07-12 02:49:29 +00:00
|
|
|
info!(bytes = plugin_buf.len(), "parsing plugin");
|
2021-07-18 19:09:23 +00:00
|
|
|
match parse_plugin(&plugin_buf) {
|
|
|
|
Ok(plugin) => {
|
2021-07-22 01:35:11 +00:00
|
|
|
info!(
|
|
|
|
num_worlds = plugin.worlds.len(),
|
|
|
|
num_cells = plugin.cells.len(),
|
|
|
|
"parse finished"
|
|
|
|
);
|
2021-07-18 19:09:23 +00:00
|
|
|
let hash = seahash::hash(&plugin_buf);
|
2021-07-22 01:35:11 +00:00
|
|
|
let file_name = Path::new(file_path)
|
|
|
|
.file_name()
|
|
|
|
.expect("plugin path ends in a valid file_name")
|
|
|
|
.to_string_lossy();
|
2021-07-24 03:40:05 +00:00
|
|
|
let author = plugin.header.author.as_deref();
|
|
|
|
let description = plugin.header.description.as_deref();
|
|
|
|
let masters: Vec<&str> = plugin.header.masters.iter().map(|s| s.borrow()).collect();
|
2021-07-18 19:09:23 +00:00
|
|
|
let plugin_row = plugin::insert(
|
|
|
|
&pool,
|
|
|
|
&db_file.name,
|
|
|
|
hash as i64,
|
|
|
|
db_file.id,
|
2021-07-22 01:35:11 +00:00
|
|
|
plugin.header.version as f64,
|
2021-07-18 19:09:23 +00:00
|
|
|
plugin_buf.len() as i64,
|
2021-07-24 03:40:05 +00:00
|
|
|
author,
|
|
|
|
description,
|
|
|
|
&masters,
|
2021-07-22 01:35:11 +00:00
|
|
|
&file_name,
|
|
|
|
file_path,
|
2021-07-18 19:09:23 +00:00
|
|
|
)
|
|
|
|
.await?;
|
2021-07-22 01:35:11 +00:00
|
|
|
|
|
|
|
let worlds: Vec<UnsavedWorld> = plugin
|
|
|
|
.worlds
|
|
|
|
.iter()
|
|
|
|
.map(|world| {
|
2021-07-24 03:40:05 +00:00
|
|
|
let (form_id, master) =
|
|
|
|
get_local_form_id_and_master(world.form_id, &masters, &file_name)
|
|
|
|
.expect("form_id to be a valid i32");
|
2021-07-22 03:20:51 +00:00
|
|
|
UnsavedWorld { form_id, master }
|
2021-07-22 01:35:11 +00:00
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
let db_worlds = world::batched_insert(&pool, &worlds).await?;
|
|
|
|
let plugin_worlds: Vec<UnsavedPluginWorld> = db_worlds
|
|
|
|
.iter()
|
|
|
|
.zip(&plugin.worlds)
|
|
|
|
.map(|(db_world, plugin_world)| UnsavedPluginWorld {
|
|
|
|
plugin_id: plugin_row.id,
|
|
|
|
world_id: db_world.id,
|
2021-07-22 03:20:51 +00:00
|
|
|
editor_id: &plugin_world.editor_id,
|
2021-07-22 01:35:11 +00:00
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
plugin_world::batched_insert(&pool, &plugin_worlds).await?;
|
|
|
|
|
|
|
|
let cells: Vec<UnsavedCell> = plugin
|
|
|
|
.cells
|
|
|
|
.iter()
|
|
|
|
.map(|cell| {
|
|
|
|
let world_id = if let Some(world_form_id) = cell.world_form_id {
|
2021-07-24 03:40:05 +00:00
|
|
|
let (form_id, master) =
|
|
|
|
get_local_form_id_and_master(world_form_id, &masters, &file_name)
|
|
|
|
.expect("form_id to be valid i32");
|
2021-07-22 01:35:11 +00:00
|
|
|
Some(
|
|
|
|
db_worlds
|
|
|
|
.iter()
|
|
|
|
.find(|&world| world.form_id == form_id && world.master == master)
|
|
|
|
.expect("cell references world in the plugin worlds")
|
|
|
|
.id,
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
2021-07-24 03:40:05 +00:00
|
|
|
let (form_id, master) =
|
|
|
|
get_local_form_id_and_master(cell.form_id, &masters, &file_name)
|
|
|
|
.expect("form_id is a valid i32");
|
2021-07-22 01:35:11 +00:00
|
|
|
UnsavedCell {
|
|
|
|
form_id,
|
2021-07-22 03:20:51 +00:00
|
|
|
master,
|
2021-07-22 01:35:11 +00:00
|
|
|
x: cell.x,
|
|
|
|
y: cell.y,
|
|
|
|
world_id,
|
|
|
|
is_persistent: cell.is_persistent,
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
let db_cells = cell::batched_insert(&pool, &cells).await?;
|
|
|
|
let plugin_cells: Vec<UnsavedPluginCell> = db_cells
|
|
|
|
.iter()
|
|
|
|
.zip(&plugin.cells)
|
|
|
|
.map(|(db_cell, plugin_cell)| UnsavedPluginCell {
|
|
|
|
plugin_id: plugin_row.id,
|
|
|
|
cell_id: db_cell.id,
|
2021-07-22 03:20:51 +00:00
|
|
|
editor_id: plugin_cell.editor_id.as_ref().map(|id| id.as_ref()),
|
2021-07-22 01:35:11 +00:00
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
plugin_cell::batched_insert(&pool, &plugin_cells).await?;
|
2021-07-18 19:09:23 +00:00
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
warn!(error = %err, "Failed to parse plugin, skipping plugin");
|
|
|
|
}
|
2021-07-03 20:00:18 +00:00
|
|
|
}
|
|
|
|
|
2021-07-26 23:31:25 +00:00
|
|
|
let plugin_path = format!(
|
|
|
|
"plugins/{}/{}/{}/{}",
|
|
|
|
GAME_NAME, db_mod.nexus_mod_id, db_file.nexus_file_id, file_path
|
2021-07-09 04:37:08 +00:00
|
|
|
);
|
2021-07-26 23:31:25 +00:00
|
|
|
let plugin_path = Path::new(&plugin_path);
|
|
|
|
if let Some(dir) = plugin_path.parent() {
|
|
|
|
create_dir_all(dir).await?;
|
|
|
|
}
|
|
|
|
let mut file = tokio::fs::File::create(plugin_path).await?;
|
|
|
|
|
|
|
|
info!(path = %plugin_path.display(), "saving plugin to disk");
|
|
|
|
file.write_all(&plugin_buf).await?;
|
2021-07-09 04:37:08 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2021-06-03 16:30:04 +00:00
|
|
|
/// Entry point: walks the Nexus Mods listing page by page, downloads each
/// newly-seen mod's files, extracts any plugin files (.esp/.esm/.esl) from
/// the downloaded archives, and hands them to `process_plugin` for parsing
/// and database insertion.
#[tokio::main]
pub async fn main() -> Result<()> {
    // Load env vars (e.g. DATABASE_URL) from a .env file if one exists.
    dotenv().ok();

    tracing_subscriber::fmt::init();

    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(&env::var("DATABASE_URL")?)
        .await?;
    // Upsert the game row all scraped mods will reference.
    let game = game::insert(&pool, GAME_NAME, GAME_ID as i32).await?;
    let client = reqwest::Client::new();

    let args: Args = argh::from_env();
    let mut page = args.page;
    let mut has_next_page = true;

    // One iteration per listing page until the scraper reports no next page.
    while has_next_page {
        let page_span = info_span!("page", page);
        let _page_span = page_span.enter();
        let mod_list_resp = nexus_scraper::get_mod_list_page(&client, page).await?;
        let scraped = mod_list_resp.scrape_mods()?;

        has_next_page = scraped.has_next_page;
        // Skip mods already in the database; only scrape-new ones are created.
        let present_mods = game_mod::bulk_get_present_nexus_mod_ids(
            &pool,
            &scraped
                .mods
                .iter()
                .map(|scraped_mod| scraped_mod.nexus_mod_id)
                .collect::<Vec<i32>>(),
        )
        .await?;
        let mods_to_create: Vec<UnsavedMod> = scraped
            .mods
            .iter()
            .filter(|scraped_mod| !present_mods.contains(&scraped_mod.nexus_mod_id))
            .map(|scraped_mod| UnsavedMod {
                name: scraped_mod.name,
                nexus_mod_id: scraped_mod.nexus_mod_id,
                author: scraped_mod.author,
                category: scraped_mod.category,
                description: scraped_mod.desc,
                game_id: game.id,
            })
            .collect();

        let mods = game_mod::batched_insert(&pool, &mods_to_create).await?;

        for db_mod in mods {
            let mod_span = info_span!("mod", name = ?&db_mod.name, id = &db_mod.nexus_mod_id);
            let _mod_span = mod_span.enter();
            let files_resp = nexus_api::files::get(&client, db_mod.nexus_mod_id).await?;

            // `wait` is a duration supplied by the API client — presumably
            // derived from rate-limit headers; TODO confirm in nexus_api.
            debug!(duration = ?files_resp.wait, "sleeping");
            sleep(files_resp.wait).await;

            // Filter out replaced/deleted files (indicated by null category) and archived files
            let files = files_resp
                .files()?
                .into_iter()
                .filter(|file| match file.category {
                    None => {
                        info!(
                            name = file.file_name,
                            id = file.file_id,
                            "skipping file with no category"
                        );
                        false
                    }
                    Some(category) if category == "ARCHIVED" => false,
                    Some(_) => true,
                });

            for api_file in files {
                let file_span =
                    info_span!("file", name = &api_file.file_name, id = &api_file.file_id);
                let _file_span = file_span.enter();
                let db_file = file::insert(
                    &pool,
                    api_file.name,
                    api_file.file_name,
                    api_file.file_id as i32,
                    db_mod.id,
                    api_file.category,
                    api_file.version,
                    api_file.mod_version,
                    api_file.size,
                    api_file.uploaded_at,
                )
                .await?;

                // Cheap pre-check: the server's content-preview metadata can
                // tell us a file contains no plugin, letting us skip the
                // download entirely. Metadata failures are non-fatal.
                let mut checked_metadata = false;
                match nexus_api::metadata::contains_plugin(&client, &api_file).await {
                    Ok(contains_plugin) => {
                        if let Some(contains_plugin) = contains_plugin {
                            checked_metadata = true;
                            if !contains_plugin {
                                info!("file metadata does not contain a plugin, skip downloading");
                                continue;
                            }
                        } else {
                            warn!("file has no metadata link, continuing with download");
                        }
                    }
                    Err(err) => {
                        warn!(error = %err, "error retreiving metadata for file, continuing with download");
                    }
                };

                let download_link_resp =
                    nexus_api::download_link::get(&client, db_mod.nexus_mod_id, api_file.file_id)
                        .await;
                // A 404 on the download link is recorded on the file row and
                // the file is skipped; any other error propagates below.
                if let Err(err) = &download_link_resp {
                    if let Some(reqwest_err) = err.downcast_ref::<reqwest::Error>() {
                        if reqwest_err.status() == Some(StatusCode::NOT_FOUND) {
                            warn!(
                                status = ?reqwest_err.status(),
                                "failed to get download link for file, skipping file"
                            );
                            file::update_has_download_link(&pool, db_file.id, false).await?;
                            continue;
                        }
                    }
                }
                let download_link_resp = download_link_resp?;

                let mut tokio_file = match download_link_resp.download_file(&client).await {
                    Ok(file) => {
                        info!(bytes = api_file.size, "download finished");
                        file::update_downloaded_at(&pool, db_file.id).await?;
                        file
                    }
                    Err(err) => {
                        warn!(error = %err, "failed all attempts at downloading file, skipping file");
                        continue;
                    }
                };

                // Sniff the archive type from its first 8 bytes so we can
                // pick an extraction strategy.
                let mut initial_bytes = [0; 8];
                tokio_file.seek(SeekFrom::Start(0)).await?;
                match tokio_file.read_exact(&mut initial_bytes).await {
                    Err(err) => {
                        warn!(error = %err, "failed to read initial bytes, skipping file");
                        continue;
                    }
                    _ => {}
                }
                let kind = infer::get(&initial_bytes).expect("unknown file type of file download");
                info!(
                    mime_type = kind.mime_type(),
                    "inferred mime_type of downloaded archive"
                );

                match kind.mime_type() {
                    "application/vnd.rar" => {
                        info!("downloaded archive is RAR archive, attempt to uncompress entire archive");
                        // Use unrar to uncompress the entire .rar file to avoid bugs with compress_tools uncompressing certain .rar files:
                        // https://github.com/libarchive/libarchive/issues/373, https://github.com/libarchive/libarchive/issues/1426
                        tokio_file.seek(SeekFrom::Start(0)).await?;
                        let mut file = tokio_file.try_clone().await?.into_std().await;
                        let temp_dir = tempdir()?;
                        let temp_file_path = temp_dir.path().join("download.rar");
                        let mut temp_file = std::fs::File::create(&temp_file_path)?;
                        std::io::copy(&mut file, &mut temp_file)?;

                        // List the archive first and collect plugin paths by
                        // extension; extraction only happens if any exist.
                        let mut plugin_file_paths = Vec::new();
                        let list =
                            Archive::new(&temp_file_path.to_string_lossy().to_string())?.list();
                        if let Ok(list) = list {
                            for entry in list {
                                if let Ok(entry) = entry {
                                    if let Some(extension) = entry.filename.extension() {
                                        if entry.is_file()
                                            && (extension == "esp"
                                                || extension == "esm"
                                                || extension == "esl")
                                        {
                                            plugin_file_paths.push(entry.filename);
                                        }
                                    }
                                }
                            }
                        } else {
                            // Unreadable archive: skip quietly unless the
                            // server metadata said a plugin should be here —
                            // then it is an invariant violation and we panic.
                            if !checked_metadata {
                                warn!("failed to read archive and server has no metadata, skipping file");
                                continue;
                            } else {
                                error!("failed to read archive, but server had metadata");
                                panic!("failed to read archive, but server had metadata");
                            }
                        }
                        info!(
                            num_plugin_files = plugin_file_paths.len(),
                            "listed plugins in downloaded archive"
                        );

                        if plugin_file_paths.len() > 0 {
                            info!("uncompressing downloaded archive");
                            let extract =
                                Archive::new(&temp_file_path.to_string_lossy().to_string())?
                                    .extract_to(temp_dir.path().to_string_lossy().to_string());
                            extract
                                .expect("failed to extract")
                                .process()
                                .expect("failed to extract");

                            for file_path in plugin_file_paths.iter() {
                                info!(
                                    ?file_path,
                                    "processing uncompressed file from downloaded archive"
                                );
                                let mut plugin_buf =
                                    std::fs::read(temp_dir.path().join(file_path))?;
                                process_plugin(
                                    &mut plugin_buf,
                                    &pool,
                                    &db_file,
                                    &db_mod,
                                    &file_path.to_string_lossy(),
                                )
                                .await?;
                            }
                        }
                        temp_dir.close()?;
                    }
                    _ => {
                        // All non-RAR archives go through compress_tools
                        // (libarchive), with an `unzip` shell-out fallback for
                        // zip formats libarchive can't handle.
                        tokio_file.seek(SeekFrom::Start(0)).await?;
                        let mut file = tokio_file.try_clone().await?.into_std().await;
                        let mut plugin_file_paths = Vec::new();

                        let archive_files = match list_archive_files(&file) {
                            Ok(files) => Ok(files),
                            Err(err) => {
                                // Same policy as the RAR branch, except the
                                // metadata-contradiction case returns the
                                // error (propagated by `?`) instead of
                                // panicking.
                                if !checked_metadata {
                                    warn!(error = %err, "failed to read archive and server has no metadata, skipping file");
                                    continue;
                                } else {
                                    error!(error = %err, "failed to read archive, but server had metadata");
                                    Err(err)
                                }
                            }
                        }?;
                        for file_path in archive_files {
                            if file_path.ends_with(".esp")
                                || file_path.ends_with(".esm")
                                || file_path.ends_with(".esl")
                            {
                                plugin_file_paths.push(file_path);
                            }
                        }
                        info!(
                            num_plugin_files = plugin_file_paths.len(),
                            "listed plugins in downloaded archive"
                        );

                        for file_path in plugin_file_paths.iter() {
                            // Deliberately not `_plugin_span`: the guard is
                            // dropped explicitly before the unzip fallback.
                            let plugin_span = info_span!("plugin", name = ?file_path);
                            let plugin_span = plugin_span.enter();
                            file.seek(SeekFrom::Start(0))?;
                            let mut buf = Vec::default();
                            info!("uncompressing plugin file from downloaded archive");
                            match uncompress_archive_file(&mut file, &mut buf, file_path) {
                                Ok(_) => Ok(()),
                                Err(err) => {
                                    if kind.mime_type() == "application/zip" {
                                        // compress_tools or libarchive failed to extract zip file (e.g. archive is deflate64 compressed)
                                        // Attempt to uncompress the archive using `unzip` unix command instead
                                        warn!(error = %err, "failed to extract file with compress_tools, extracting whole archive with unzip instead");
                                        drop(plugin_span);
                                        file.seek(SeekFrom::Start(0))?;
                                        let temp_dir = tempdir()?;
                                        let temp_file_path = temp_dir
                                            .path()
                                            .join(format!("download.{}", kind.extension()));
                                        let mut temp_file = std::fs::File::create(&temp_file_path)?;
                                        std::io::copy(&mut file, &mut temp_file)?;
                                        let extracted_path = temp_dir.path().join("extracted");

                                        Command::new("unzip")
                                            .args(&[
                                                &temp_file_path.to_string_lossy(),
                                                "-d",
                                                &extracted_path.to_string_lossy(),
                                            ])
                                            .status()?;

                                        // The fallback extracts everything at
                                        // once, so process ALL plugin paths
                                        // here and then leave the outer loop.
                                        for file_path in plugin_file_paths.iter() {
                                            let plugin_span =
                                                info_span!("plugin", name = ?file_path);
                                            let _plugin_span = plugin_span.enter();
                                            info!("processing uncompressed file from downloaded archive");
                                            let mut plugin_buf =
                                                std::fs::read(extracted_path.join(file_path))?;
                                            process_plugin(
                                                &mut plugin_buf,
                                                &pool,
                                                &db_file,
                                                &db_mod,
                                                file_path,
                                            )
                                            .await?;
                                        }

                                        break;
                                    }
                                    Err(err)
                                }
                            }?;
                            process_plugin(&mut buf, &pool, &db_file, &db_mod, file_path).await?;
                        }
                    }
                }

                // Honor the API-provided wait before the next file download.
                debug!(duration = ?download_link_resp.wait, "sleeping");
                sleep(download_link_resp.wait).await;
            }
        }

        page += 1;
        debug!(?page, ?has_next_page, "sleeping 1 second");
        sleep(Duration::from_secs(1)).await;
    }

    Ok(())
}
|