Refactor main.rs a bit, some fixes to 7zip fallback
Also adds reqwest timeouts
parent 3f48f97080
commit b2d17f6217

Cargo.lock (generated, 30 lines changed)

@@ -1009,6 +1009,7 @@ dependencies = [
  "tracing-appender",
  "tracing-subscriber",
  "unrar",
+ "walkdir",
  "zip",
 ]

@@ -1606,6 +1607,15 @@ version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"

+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
 [[package]]
 name = "schannel"
 version = "0.1.19"
@@ -2351,6 +2361,17 @@ version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"

+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
 [[package]]
 name = "want"
 version = "0.3.0"
@@ -2483,6 +2504,15 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"

+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
 [[package]]
 name = "winapi-x86_64-pc-windows-gnu"
 version = "0.4.0"

Cargo.toml

@@ -31,5 +31,6 @@ tracing-appender = "0.1"
 tracing-subscriber = "0.2"
 # Need this unicode fix: https://github.com/muja/unrar.rs/commit/3af9a6015dc89c1329a2fe5d6f4a7f69ded8ba1d
 unrar = { git = "https://github.com/muja/unrar.rs.git" }
+walkdir = "2"
 # Need `ZipWriter::append_new` from https://github.com/zip-rs/zip/commit/ce272616ac69b798bb7b0925147a8a710dc2bb65
 zip = { git = "https://github.com/zip-rs/zip.git" }

src/extractors/compress_tools.rs (new file, 81 lines)

@@ -0,0 +1,81 @@
+//! Extracts zip files most of the time with some exceptions. If this fails we'll fall back to other methods.
+use anyhow::{Context, Result};
+use compress_tools::{list_archive_files, uncompress_archive_file};
+use std::collections::VecDeque;
+use std::fmt::Display;
+use std::io::Seek;
+use std::io::SeekFrom;
+use tracing::{info, info_span};
+
+#[derive(Debug)]
+pub struct ExtractorError;
+
+impl Display for ExtractorError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "extraction error")
+    }
+}
+pub struct Extractor<'a> {
+    file: &'a mut std::fs::File,
+    plugin_file_paths: Option<VecDeque<String>>,
+}
+
+impl<'a> Extractor<'a> {
+    pub fn new(file: &mut std::fs::File) -> Extractor {
+        Extractor {
+            file,
+            plugin_file_paths: None,
+        }
+    }
+
+    fn list_plugins(&mut self) -> Result<()> {
+        let mut plugin_file_paths = VecDeque::new();
+        let archive_files = list_archive_files(&mut self.file)?;
+        for file_path in archive_files {
+            if file_path.ends_with(".esp")
+                || file_path.ends_with(".esm")
+                || file_path.ends_with(".esl")
+            {
+                plugin_file_paths.push_back(file_path);
+            }
+        }
+        info!(
+            num_plugin_files = plugin_file_paths.len(),
+            "listed plugins in downloaded archive"
+        );
+        self.plugin_file_paths = Some(plugin_file_paths);
+        Ok(())
+    }
+
+    fn get_plugin(&mut self, file_path: &str) -> Result<Vec<u8>> {
+        let plugin_span = info_span!("plugin", name = ?file_path);
+        let _plugin_span = plugin_span.enter();
+        self.file.seek(SeekFrom::Start(0))?;
+        let mut buf = Vec::default();
+        info!("uncompressing plugin file from downloaded archive");
+        uncompress_archive_file(&mut self.file, &mut buf, &file_path)?;
+        Ok(buf)
+    }
+}
+
+impl<'a> Iterator for Extractor<'a> {
+    type Item = Result<(String, Vec<u8>)>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.plugin_file_paths.is_none() {
+            match self.list_plugins() {
+                Err(err) => return Some(Err(err).context(ExtractorError)),
+                _ => {}
+            }
+        }
+        if let Some(plugin_file_paths) = &mut self.plugin_file_paths {
+            if let Some(file_path) = plugin_file_paths.pop_front() {
+                return match self.get_plugin(&file_path) {
+                    Err(err) => Some(Err(err).context(ExtractorError)),
+                    Ok(plugin_buf) => Some(Ok((file_path, plugin_buf))),
+                };
+            }
+        }
+        None
+    }
+}
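
The Extractor above lists plugin entries lazily on the first call to next() and then yields one (path, bytes) pair per plugin. As a usage example, a minimal sketch of how a caller might drive it with the surrounding database work stripped out; the count_plugin_bytes helper is hypothetical and assumes it lives inside this crate:

fn count_plugin_bytes(file: &mut std::fs::File) -> anyhow::Result<u64> {
    let mut total = 0;
    // Each item is a Result<(String, Vec<u8>)>; any listing or extraction
    // failure arrives wrapped in the ExtractorError context defined above.
    for plugin in crate::extractors::compress_tools::Extractor::new(file) {
        let (path, bytes) = plugin?;
        println!("{}: {} bytes", path, bytes.len());
        total += bytes.len() as u64;
    }
    Ok(total)
}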

src/extractors/mod.rs (new file, 1 line)

@@ -0,0 +1 @@
+pub mod compress_tools;

src/main.rs (436 lines changed)

@@ -1,43 +1,37 @@
 use anyhow::Result;
 use argh::FromArgs;
-use compress_tools::{list_archive_files, uncompress_archive_file};
 use dotenv::dotenv;
+use models::file::File;
+use models::game_mod::Mod;
 use reqwest::StatusCode;
-use skyrim_cell_dump::parse_plugin;
 use sqlx::postgres::PgPoolOptions;
-use std::borrow::Borrow;
 use std::collections::HashSet;
-use std::convert::TryInto;
 use std::env;
 use std::io::Seek;
 use std::io::SeekFrom;
-use std::path::Path;
 use std::process::Command;
 use std::time::Duration;
 use tempfile::tempdir;
-use tokio::fs::create_dir_all;
-use tokio::io::AsyncWriteExt;
 use tokio::io::{AsyncReadExt, AsyncSeekExt};
 use tokio::time::sleep;
 use tracing::{debug, error, info, info_span, warn};
 use unrar::Archive;
+use walkdir::WalkDir;

+mod extractors;
 mod models;
 mod nexus_api;
 mod nexus_scraper;
+mod plugin_processor;

+use models::file;
 use models::game;
-use models::plugin;
-use models::{cell, cell::UnsavedCell};
-use models::{file, file::File};
-use models::{
-    game_mod,
-    game_mod::{Mod, UnsavedMod},
-};
-use models::{plugin_cell, plugin_cell::UnsavedPluginCell};
-use models::{plugin_world, plugin_world::UnsavedPluginWorld};
-use models::{world, world::UnsavedWorld};
+use models::{game_mod, game_mod::UnsavedMod};
 use nexus_api::{GAME_ID, GAME_NAME};
+use plugin_processor::process_plugin;
+
+const REQUEST_TIMEOUT: Duration = Duration::from_secs(7200); // 2 hours
+const CONNECT_TIMEOUT: Duration = Duration::from_secs(30);

 #[derive(FromArgs)]
 /// Downloads every mod off nexus mods, parses CELL and WRLD data from plugins in each, and saves the data to the database.
@@ -47,143 +41,139 @@ struct Args {
     page: usize,
 }

-fn get_local_form_id_and_master<'a>(
-    form_id: u32,
-    masters: &'a [&str],
-    file_name: &'a str,
-) -> Result<(i32, &'a str)> {
-    let master_index = (form_id >> 24) as usize;
-    let local_form_id = (form_id & 0xFFFFFF).try_into()?;
-    if master_index >= masters.len() {
-        return Ok((local_form_id, file_name));
-    }
-    Ok((local_form_id, masters[master_index]))
-}
-
-async fn process_plugin(
-    plugin_buf: &mut [u8],
+async fn extract_with_compress_tools(
+    file: &mut std::fs::File,
     pool: &sqlx::Pool<sqlx::Postgres>,
     db_file: &File,
     db_mod: &Mod,
-    file_path: &str,
 ) -> Result<()> {
-    if plugin_buf.len() == 0 {
-        warn!("skipping processing of invalid empty plugin");
-        return Ok(());
+    let extractor = extractors::compress_tools::Extractor::new(file);
+    for plugin in extractor.into_iter() {
+        let (file_path, mut plugin_buf) = plugin?;
+        let plugin_span = info_span!("plugin", name = ?file_path);
+        let _plugin_span = plugin_span.enter();
+        process_plugin(&mut plugin_buf, &pool, &db_file, &db_mod, &file_path).await?;
     }
-    info!(bytes = plugin_buf.len(), "parsing plugin");
-    match parse_plugin(&plugin_buf) {
-        Ok(plugin) => {
+    Ok(())
+}
+
+async fn extract_with_7zip(
+    file: &mut std::fs::File,
+    pool: &sqlx::Pool<sqlx::Postgres>,
+    db_file: &File,
+    db_mod: &Mod,
+) -> Result<()> {
+    file.seek(SeekFrom::Start(0))?;
+    let temp_dir = tempdir()?;
+    let temp_file_path = temp_dir.path().join("download.zip");
+    let mut temp_file = std::fs::File::create(&temp_file_path)?;
+    std::io::copy(file, &mut temp_file)?;
+    drop(temp_file); // close handle to temp file so 7zip process can open it
+    let extracted_path = temp_dir.path().join("extracted");
+
+    Command::new("7z")
+        .args(&[
+            "x",
+            &format!("-o{}", &extracted_path.to_string_lossy()),
+            &temp_file_path.to_string_lossy().to_string(),
+        ])
+        .status()?;
+
+    for entry in WalkDir::new(&extracted_path)
+        .contents_first(true)
+        .into_iter()
+        .filter_entry(|e| {
+            if let Some(extension) = e.path().extension() {
+                extension == "esp" || extension == "esm" || extension == "esl"
+            } else {
+                false
+            }
+        })
+    {
+        let entry = entry?;
+        let file_path = entry.path();
+        let plugin_span = info_span!("plugin", name = ?file_path);
+        let _plugin_span = plugin_span.enter();
+        info!("processing uncompressed file from downloaded archive");
+        let mut plugin_buf = std::fs::read(extracted_path.join(file_path))?;
+        process_plugin(
+            &mut plugin_buf,
+            &pool,
+            &db_file,
+            &db_mod,
+            &file_path.to_string_lossy(),
+        )
+        .await?;
+    }
+    Ok(())
+}
+
+async fn extract_with_unrar(
+    file: &mut std::fs::File,
+    pool: &sqlx::Pool<sqlx::Postgres>,
+    db_file: &File,
+    db_mod: &Mod,
+    checked_metadata: bool,
+) -> Result<()> {
+    let temp_dir = tempdir()?;
+    let temp_file_path = temp_dir.path().join("download.rar");
+    let mut temp_file = std::fs::File::create(&temp_file_path)?;
+    std::io::copy(file, &mut temp_file)?;
+
+    let mut plugin_file_paths = Vec::new();
+    let list = Archive::new(&temp_file_path.to_string_lossy().to_string())?.list();
+    if let Ok(list) = list {
+        for entry in list {
+            if let Ok(entry) = entry {
+                if let Some(extension) = entry.filename.extension() {
+                    if entry.is_file()
+                        && (extension == "esp" || extension == "esm" || extension == "esl")
+                    {
+                        plugin_file_paths.push(entry.filename);
+                    }
+                }
+            }
+        }
+    } else {
+        if !checked_metadata {
+            warn!("failed to read archive and server has no metadata, skipping file");
+            file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
+            return Ok(());
+        } else {
+            error!("failed to read archive, but server had metadata");
+            panic!("failed to read archive, but server had metadata");
+        }
+    }
+    info!(
+        num_plugin_files = plugin_file_paths.len(),
+        "listed plugins in downloaded archive"
+    );
+
+    if plugin_file_paths.len() > 0 {
+        info!("uncompressing downloaded archive");
+        let extract = Archive::new(&temp_file_path.to_string_lossy().to_string())?
+            .extract_to(temp_dir.path().to_string_lossy().to_string());
+        extract
+            .expect("failed to extract")
+            .process()
+            .expect("failed to extract");
+
+        for file_path in plugin_file_paths.iter() {
             info!(
-                num_worlds = plugin.worlds.len(),
-                num_cells = plugin.cells.len(),
-                "parse finished"
+                ?file_path,
+                "processing uncompressed file from downloaded archive"
             );
-            let hash = seahash::hash(&plugin_buf);
-            let file_name = Path::new(file_path)
-                .file_name()
-                .expect("plugin path ends in a valid file_name")
-                .to_string_lossy();
-            let author = plugin.header.author.as_deref();
-            let description = plugin.header.description.as_deref();
-            let masters: Vec<&str> = plugin.header.masters.iter().map(|s| s.borrow()).collect();
-            let plugin_row = plugin::insert(
+            let mut plugin_buf = std::fs::read(temp_dir.path().join(file_path))?;
+            process_plugin(
+                &mut plugin_buf,
                 &pool,
-                &db_file.name,
-                hash as i64,
-                db_file.id,
-                plugin.header.version as f64,
-                plugin_buf.len() as i64,
-                author,
-                description,
-                &masters,
-                &file_name,
-                file_path,
+                &db_file,
+                &db_mod,
+                &file_path.to_string_lossy(),
             )
             .await?;
-
-            let worlds: Vec<UnsavedWorld> = plugin
-                .worlds
-                .iter()
-                .map(|world| {
-                    let (form_id, master) =
-                        get_local_form_id_and_master(world.form_id, &masters, &file_name)
-                            .expect("form_id to be a valid i32");
-                    UnsavedWorld { form_id, master }
-                })
-                .collect();
-            let db_worlds = world::batched_insert(&pool, &worlds).await?;
-            let plugin_worlds: Vec<UnsavedPluginWorld> = db_worlds
-                .iter()
-                .zip(&plugin.worlds)
-                .map(|(db_world, plugin_world)| UnsavedPluginWorld {
-                    plugin_id: plugin_row.id,
-                    world_id: db_world.id,
-                    editor_id: &plugin_world.editor_id,
-                })
-                .collect();
-            plugin_world::batched_insert(&pool, &plugin_worlds).await?;
-
-            let cells: Vec<UnsavedCell> = plugin
-                .cells
-                .iter()
-                .map(|cell| {
-                    let world_id = if let Some(world_form_id) = cell.world_form_id {
-                        let (form_id, master) =
-                            get_local_form_id_and_master(world_form_id, &masters, &file_name)
-                                .expect("form_id to be valid i32");
-                        Some(
-                            db_worlds
-                                .iter()
-                                .find(|&world| world.form_id == form_id && world.master == master)
-                                .expect("cell references world in the plugin worlds")
-                                .id,
-                        )
-                    } else {
-                        None
-                    };
-                    let (form_id, master) =
-                        get_local_form_id_and_master(cell.form_id, &masters, &file_name)
-                            .expect("form_id is a valid i32");
-                    UnsavedCell {
-                        form_id,
-                        master,
-                        x: cell.x,
-                        y: cell.y,
-                        world_id,
-                        is_persistent: cell.is_persistent,
-                    }
-                })
-                .collect();
-            let db_cells = cell::batched_insert(&pool, &cells).await?;
-            let plugin_cells: Vec<UnsavedPluginCell> = db_cells
-                .iter()
-                .zip(&plugin.cells)
-                .map(|(db_cell, plugin_cell)| UnsavedPluginCell {
-                    plugin_id: plugin_row.id,
-                    cell_id: db_cell.id,
-                    editor_id: plugin_cell.editor_id.as_ref().map(|id| id.as_ref()),
-                })
-                .collect();
-            plugin_cell::batched_insert(&pool, &plugin_cells).await?;
-        }
-        Err(err) => {
-            warn!(error = %err, "Failed to parse plugin, skipping plugin");
         }
     }
-
-    let plugin_path = format!(
-        "plugins/{}/{}/{}/{}",
-        GAME_NAME, db_mod.nexus_mod_id, db_file.nexus_file_id, file_path
-    );
-    let plugin_path = Path::new(&plugin_path);
-    if let Some(dir) = plugin_path.parent() {
-        create_dir_all(dir).await?;
-    }
-    let mut file = tokio::fs::File::create(plugin_path).await?;
-
-    info!(path = %plugin_path.display(), "saving plugin to disk");
-    file.write_all(&plugin_buf).await?;
     Ok(())
 }

@@ -198,7 +188,10 @@ pub async fn main() -> Result<()> {
         .connect(&env::var("DATABASE_URL")?)
         .await?;
     let game = game::insert(&pool, GAME_NAME, GAME_ID as i32).await?;
-    let client = reqwest::Client::new();
+    let client = reqwest::Client::builder()
+        .timeout(REQUEST_TIMEOUT)
+        .connect_timeout(CONNECT_TIMEOUT)
+        .build()?;

     let args: Args = argh::from_env();
     let mut page = args.page;
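
For reference, the two timeouts configured above do different jobs: `timeout` caps the entire request (set to 2 hours here, since mod archives can be very large), while `connect_timeout` only caps establishing the connection. A standalone, hedged sketch of the same builder pattern and of how an exceeded limit surfaces to the caller; the URL and durations are placeholders, not values from this repo:

use std::time::Duration;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::builder()
        .timeout(Duration::from_secs(7200)) // overall request deadline
        .connect_timeout(Duration::from_secs(30)) // connection-establishment deadline
        .build()?;

    match client.get("https://example.com/some-large-file").send().await {
        Ok(resp) => println!("status: {}", resp.status()),
        // An exceeded overall timeout is reported as a reqwest::Error whose is_timeout() is true.
        Err(err) if err.is_timeout() => eprintln!("request timed out: {}", err),
        Err(err) => eprintln!("request failed: {}", err),
    }
    Ok(())
}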
@@ -361,160 +354,31 @@
                     // https://github.com/libarchive/libarchive/issues/373, https://github.com/libarchive/libarchive/issues/1426
                     tokio_file.seek(SeekFrom::Start(0)).await?;
                     let mut file = tokio_file.try_clone().await?.into_std().await;
-                    let temp_dir = tempdir()?;
-                    let temp_file_path = temp_dir.path().join("download.rar");
-                    let mut temp_file = std::fs::File::create(&temp_file_path)?;
-                    std::io::copy(&mut file, &mut temp_file)?;
-
-                    let mut plugin_file_paths = Vec::new();
-                    let list =
-                        Archive::new(&temp_file_path.to_string_lossy().to_string())?.list();
-                    if let Ok(list) = list {
-                        for entry in list {
-                            if let Ok(entry) = entry {
-                                if let Some(extension) = entry.filename.extension() {
-                                    if entry.is_file()
-                                        && (extension == "esp"
-                                            || extension == "esm"
-                                            || extension == "esl")
-                                    {
-                                        plugin_file_paths.push(entry.filename);
-                                    }
-                                }
-                            }
-                        }
-                    } else {
-                        if !checked_metadata {
-                            warn!("failed to read archive and server has no metadata, skipping file");
-                            file::update_unable_to_extract_plugins(&pool, db_file.id, true)
-                                .await?;
-                            continue;
-                        } else {
-                            error!("failed to read archive, but server had metadata");
-                            panic!("failed to read archive, but server had metadata");
-                        }
-                    }
-                    info!(
-                        num_plugin_files = plugin_file_paths.len(),
-                        "listed plugins in downloaded archive"
-                    );
-
-                    if plugin_file_paths.len() > 0 {
-                        info!("uncompressing downloaded archive");
-                        let extract =
-                            Archive::new(&temp_file_path.to_string_lossy().to_string())?
-                                .extract_to(temp_dir.path().to_string_lossy().to_string());
-                        extract
-                            .expect("failed to extract")
-                            .process()
-                            .expect("failed to extract");
-
-                        for file_path in plugin_file_paths.iter() {
-                            info!(
-                                ?file_path,
-                                "processing uncompressed file from downloaded archive"
-                            );
-                            let mut plugin_buf =
-                                std::fs::read(temp_dir.path().join(file_path))?;
-                            process_plugin(
-                                &mut plugin_buf,
-                                &pool,
-                                &db_file,
-                                &db_mod,
-                                &file_path.to_string_lossy(),
-                            )
-                            .await?;
-                        }
-                    }
-                    temp_dir.close()?;
+                    extract_with_unrar(&mut file, &pool, &db_file, &db_mod, checked_metadata)
+                        .await?;
                 }
                 _ => {
                     tokio_file.seek(SeekFrom::Start(0)).await?;
                     let mut file = tokio_file.try_clone().await?.into_std().await;
-                    let mut plugin_file_paths = Vec::new();
-
-                    let archive_files = match list_archive_files(&file) {
-                        Ok(files) => Ok(files),
+                    match extract_with_compress_tools(&mut file, &pool, &db_file, &db_mod).await
+                    {
+                        Ok(_) => Ok(()),
                         Err(err) => {
-                            if !checked_metadata {
-                                warn!(error = %err, "failed to read archive and server has no metadata, skipping file");
-                                file::update_unable_to_extract_plugins(&pool, db_file.id, true)
-                                    .await?;
-                                continue;
+                            if err
+                                .downcast_ref::<extractors::compress_tools::ExtractorError>()
+                                .is_some()
+                                && kind.mime_type() == "application/zip"
+                            {
+                                // compress_tools or libarchive failed to extract zip file (e.g. archive is deflate64 compressed)
+                                // Attempt to uncompress the archive using `7z` unix command instead
+                                warn!(error = %err, "failed to extract file with compress_tools, extracting whole archive with 7z instead");
+                                extract_with_7zip(&mut file, &pool, &db_file, &db_mod).await
                             } else {
-                                error!(error = %err, "failed to read archive, but server had metadata");
                                 Err(err)
                             }
                         }
                     }?;
-                    for file_path in archive_files {
-                        if file_path.ends_with(".esp")
-                            || file_path.ends_with(".esm")
-                            || file_path.ends_with(".esl")
-                        {
-                            plugin_file_paths.push(file_path);
-                        }
-                    }
-                    info!(
-                        num_plugin_files = plugin_file_paths.len(),
-                        "listed plugins in downloaded archive"
-                    );
-
-                    for file_path in plugin_file_paths.iter() {
-                        let plugin_span = info_span!("plugin", name = ?file_path);
-                        let plugin_span = plugin_span.enter();
-                        file.seek(SeekFrom::Start(0))?;
-                        let mut buf = Vec::default();
-                        info!("uncompressing plugin file from downloaded archive");
-                        match uncompress_archive_file(&mut file, &mut buf, file_path) {
-                            Ok(_) => Ok(()),
-                            Err(err) => {
-                                if kind.mime_type() == "application/zip" {
-                                    // compress_tools or libarchive failed to extract zip file (e.g. archive is deflate64 compressed)
-                                    // Attempt to uncompress the archive using `unzip` unix command instead
-                                    warn!(error = %err, "failed to extract file with compress_tools, extracting whole archive with unzip instead");
-                                    drop(plugin_span);
-                                    file.seek(SeekFrom::Start(0))?;
-                                    let temp_dir = tempdir()?;
-                                    let temp_file_path = temp_dir
-                                        .path()
-                                        .join(format!("download.{}", kind.extension()));
-                                    let mut temp_file = std::fs::File::create(&temp_file_path)?;
-                                    std::io::copy(&mut file, &mut temp_file)?;
-                                    let extracted_path = temp_dir.path().join("extracted");
-
-                                    Command::new("unzip")
-                                        .args(&[
-                                            &temp_file_path.to_string_lossy(),
-                                            "-d",
-                                            &extracted_path.to_string_lossy(),
-                                        ])
-                                        .status()?;
-
-                                    for file_path in plugin_file_paths.iter() {
-                                        let plugin_span =
-                                            info_span!("plugin", name = ?file_path);
-                                        let _plugin_span = plugin_span.enter();
-                                        info!("processing uncompressed file from downloaded archive");
-                                        let mut plugin_buf =
-                                            std::fs::read(extracted_path.join(file_path))?;
-                                        process_plugin(
-                                            &mut plugin_buf,
-                                            &pool,
-                                            &db_file,
-                                            &db_mod,
-                                            file_path,
-                                        )
-                                        .await?;
-                                    }
-
-                                    break;
-                                }
-                                Err(err)
-                            }
-                        }?;
-                        process_plugin(&mut buf, &pool, &db_file, &db_mod, file_path).await?;
-                    }
                 }
             }

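
The fallback above hinges on the ExtractorError marker added in src/extractors/compress_tools.rs: failures from the compress_tools path are wrapped with `.context(ExtractorError)`, so main() can tell "the extractor itself failed" apart from other errors and only then shell out to 7z (the real code additionally checks the detected mime type via `kind`, which is defined outside this hunk). A self-contained sketch of that anyhow context/downcast pattern, with hypothetical extract and main functions standing in for the real ones:

use anyhow::{anyhow, Context, Result};
use std::fmt;

// Marker type mirroring extractors::compress_tools::ExtractorError.
#[derive(Debug)]
struct ExtractorError;

impl fmt::Display for ExtractorError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "extraction error")
    }
}

// Stand-in for the compress_tools-based extraction failing on, say, deflate64.
fn extract() -> Result<Vec<u8>> {
    Err(anyhow!("Unsupported compression method")).context(ExtractorError)
}

fn main() {
    match extract() {
        Ok(bytes) => println!("extracted {} bytes", bytes.len()),
        // downcast_ref finds the attached context value, so this arm matches only extractor failures.
        Err(err) if err.downcast_ref::<ExtractorError>().is_some() => {
            eprintln!("extractor failed, would fall back to 7z: {:#}", err);
        }
        Err(err) => eprintln!("unrelated error: {:#}", err),
    }
}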

src/plugin_processor.rs (new file, 162 lines)

@@ -0,0 +1,162 @@
+use anyhow::Result;
+use skyrim_cell_dump::parse_plugin;
+use std::borrow::Borrow;
+use std::convert::TryInto;
+use std::path::{Path, PathBuf};
+use tokio::fs::create_dir_all;
+use tokio::io::AsyncWriteExt;
+use tracing::{info, warn};
+
+use crate::models::file::File;
+use crate::models::game_mod::Mod;
+use crate::models::plugin;
+use crate::models::{cell, cell::UnsavedCell};
+use crate::models::{plugin_cell, plugin_cell::UnsavedPluginCell};
+use crate::models::{plugin_world, plugin_world::UnsavedPluginWorld};
+use crate::models::{world, world::UnsavedWorld};
+use crate::nexus_api::GAME_NAME;
+
+fn get_local_form_id_and_master<'a>(
+    form_id: u32,
+    masters: &'a [&str],
+    file_name: &'a str,
+) -> Result<(i32, &'a str)> {
+    let master_index = (form_id >> 24) as usize;
+    let local_form_id = (form_id & 0xFFFFFF).try_into()?;
+    if master_index >= masters.len() {
+        return Ok((local_form_id, file_name));
+    }
+    Ok((local_form_id, masters[master_index]))
+}
+
+pub async fn process_plugin(
+    plugin_buf: &mut [u8],
+    pool: &sqlx::Pool<sqlx::Postgres>,
+    db_file: &File,
+    db_mod: &Mod,
+    file_path: &str,
+) -> Result<()> {
+    if plugin_buf.len() == 0 {
+        warn!("skipping processing of invalid empty plugin");
+        return Ok(());
+    }
+    info!(bytes = plugin_buf.len(), "parsing plugin");
+    match parse_plugin(&plugin_buf) {
+        Ok(plugin) => {
+            info!(
+                num_worlds = plugin.worlds.len(),
+                num_cells = plugin.cells.len(),
+                "parse finished"
+            );
+            let hash = seahash::hash(&plugin_buf);
+            let file_name = Path::new(file_path)
+                .file_name()
+                .expect("plugin path ends in a valid file_name")
+                .to_string_lossy();
+            let author = plugin.header.author.as_deref();
+            let description = plugin.header.description.as_deref();
+            let masters: Vec<&str> = plugin.header.masters.iter().map(|s| s.borrow()).collect();
+            let plugin_row = plugin::insert(
+                &pool,
+                &db_file.name,
+                hash as i64,
+                db_file.id,
+                plugin.header.version as f64,
+                plugin_buf.len() as i64,
+                author,
+                description,
+                &masters,
+                &file_name,
+                file_path,
+            )
+            .await?;
+
+            let worlds: Vec<UnsavedWorld> = plugin
+                .worlds
+                .iter()
+                .map(|world| {
+                    let (form_id, master) =
+                        get_local_form_id_and_master(world.form_id, &masters, &file_name)
+                            .expect("form_id to be a valid i32");
+                    UnsavedWorld { form_id, master }
+                })
+                .collect();
+            let db_worlds = world::batched_insert(&pool, &worlds).await?;
+            let plugin_worlds: Vec<UnsavedPluginWorld> = db_worlds
+                .iter()
+                .zip(&plugin.worlds)
+                .map(|(db_world, plugin_world)| UnsavedPluginWorld {
+                    plugin_id: plugin_row.id,
+                    world_id: db_world.id,
+                    editor_id: &plugin_world.editor_id,
+                })
+                .collect();
+            plugin_world::batched_insert(&pool, &plugin_worlds).await?;
+
+            let cells: Vec<UnsavedCell> = plugin
+                .cells
+                .iter()
+                .map(|cell| {
+                    let world_id = if let Some(world_form_id) = cell.world_form_id {
+                        let (form_id, master) =
+                            get_local_form_id_and_master(world_form_id, &masters, &file_name)
+                                .expect("form_id to be valid i32");
+                        Some(
+                            db_worlds
+                                .iter()
+                                .find(|&world| world.form_id == form_id && world.master == master)
+                                .expect("cell references world in the plugin worlds")
+                                .id,
+                        )
+                    } else {
+                        None
+                    };
+                    let (form_id, master) =
+                        get_local_form_id_and_master(cell.form_id, &masters, &file_name)
+                            .expect("form_id is a valid i32");
+                    UnsavedCell {
+                        form_id,
+                        master,
+                        x: cell.x,
+                        y: cell.y,
+                        world_id,
+                        is_persistent: cell.is_persistent,
+                    }
+                })
+                .collect();
+            let db_cells = cell::batched_insert(&pool, &cells).await?;
+            let plugin_cells: Vec<UnsavedPluginCell> = db_cells
+                .iter()
+                .zip(&plugin.cells)
+                .map(|(db_cell, plugin_cell)| UnsavedPluginCell {
+                    plugin_id: plugin_row.id,
+                    cell_id: db_cell.id,
+                    editor_id: plugin_cell.editor_id.as_ref().map(|id| id.as_ref()),
+                })
+                .collect();
+            plugin_cell::batched_insert(&pool, &plugin_cells).await?;
+        }
+        Err(err) => {
+            warn!(error = %err, "Failed to parse plugin, skipping plugin");
+        }
+    }
+
+    let plugin_path = [
+        "plugins",
+        GAME_NAME,
+        &format!("{}", db_mod.nexus_mod_id),
+        &format!("{}", db_file.nexus_file_id),
+        file_path,
+    ]
+    .iter()
+    .collect::<PathBuf>();
+    let plugin_path = plugin_path.as_path();
+    if let Some(dir) = plugin_path.parent() {
+        create_dir_all(dir).await?;
+    }
+    let mut file = tokio::fs::File::create(plugin_path).await?;
+
+    info!(path = %plugin_path.display(), "saving plugin to disk");
+    file.write_all(&plugin_buf).await?;
+    Ok(())
+}
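
get_local_form_id_and_master above splits a raw FormID into its plugin-local part and the file that defines it: the high byte is an index into the plugin's master list, the low 24 bits are the record's local id, and an index past the end of the master list means the record is defined in the plugin itself. A small hedged sketch of that arithmetic with made-up example values; the split_form_id helper is illustrative only and drops the i32 conversion used by the real function:

fn split_form_id<'a>(form_id: u32, masters: &[&'a str], file_name: &'a str) -> (u32, &'a str) {
    let master_index = (form_id >> 24) as usize; // high byte selects a master
    let local_form_id = form_id & 0x00FF_FFFF; // low 24 bits are the local record id
    if master_index >= masters.len() {
        // Past the master list: the record is defined in this plugin itself.
        (local_form_id, file_name)
    } else {
        (local_form_id, masters[master_index])
    }
}

fn main() {
    let masters = ["Skyrim.esm", "Update.esm"];
    // 0x01000D62: master index 0x01 -> "Update.esm", local id 0x000D62.
    assert_eq!(
        split_form_id(0x0100_0D62, &masters, "MyMod.esp"),
        (0x000D62, "Update.esm")
    );
    // 0x02000800: index 0x02 is past the two masters -> record lives in MyMod.esp.
    assert_eq!(
        split_form_id(0x0200_0800, &masters, "MyMod.esp"),
        (0x000800, "MyMod.esp")
    );
    println!("form id examples check out");
}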