Update deps, rustfmt, clippy fixes
@@ -3,8 +3,8 @@ use std::time::Duration;
use tokio::time::sleep;
use tracing::{debug, info, info_span};

use crate::nexus_scraper;
use crate::nexus_api::SSE_GAME_ID;
use crate::nexus_scraper;

const REQUEST_TIMEOUT: Duration = Duration::from_secs(7200); // 2 hours
const CONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -25,7 +25,8 @@ pub async fn backfill_is_translation(pool: &sqlx::Pool<sqlx::Postgres>) -> Resul
while has_next_page {
let page_span = info_span!("page", page);
let _page_span = page_span.enter();
let mod_list_resp = nexus_scraper::get_mod_list_page(&client, page, SSE_GAME_ID, true).await?;
let mod_list_resp =
nexus_scraper::get_mod_list_page(&client, page, SSE_GAME_ID, true).await?;
let scraped = mod_list_resp.scrape_mods()?;
let scraped_ids: Vec<i32> = scraped.mods.iter().map(|m| m.nexus_mod_id).collect();
@@ -9,16 +9,22 @@ use crate::models::cell;
pub async fn dump_cell_data(pool: &sqlx::Pool<sqlx::Postgres>, dir: &str) -> Result<()> {
for x in -77..75 {
for y in -50..44 {
if let Ok(data) = cell::get_cell_data(&pool, "Skyrim.esm", 1, x, y).await {
if let Ok(data) = cell::get_cell_data(pool, "Skyrim.esm", 1, x, y).await {
let path = format!("{}/{}", &dir, x);
let path = Path::new(&path);
create_dir_all(&path)?;
create_dir_all(path)?;
let path = path.join(format!("{}.json", y));
info!(x = x, y = y, form_id = data.form_id, "dumping cell data to {}", path.display());
info!(
x = x,
y = y,
form_id = data.form_id,
"dumping cell data to {}",
path.display()
);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&data)?)?;
}
}
}
return Ok(());
Ok(())
}
@@ -10,14 +10,18 @@ pub async fn dump_cell_edit_counts(pool: &sqlx::Pool<sqlx::Postgres>, path: &str
let mut cell_mod_edit_counts = HashMap::new();
for x in -77..75 {
for y in -50..44 {
if let Some(count) = cell::count_mod_edits(&pool, "Skyrim.esm", 1, x, y).await? {
if let Some(count) = cell::count_mod_edits(pool, "Skyrim.esm", 1, x, y).await? {
info!(x = x, y = y, count = count, "read cell edit count");
cell_mod_edit_counts.insert(format!("{},{}", x, y), count);
}
}
}
info!("writing {} cell edit counts to {}", cell_mod_edit_counts.len(), path);
info!(
"writing {} cell edit counts to {}",
cell_mod_edit_counts.len(),
path
);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&cell_mod_edit_counts)?)?;
return Ok(());
Ok(())
}
@@ -7,26 +7,36 @@ use tracing::info;

use crate::models::file;

pub async fn dump_file_data(pool: &sqlx::Pool<sqlx::Postgres>, dir: &str, updated_after: Option<NaiveDateTime>) -> Result<()> {
pub async fn dump_file_data(
pool: &sqlx::Pool<sqlx::Postgres>,
dir: &str,
updated_after: Option<NaiveDateTime>,
) -> Result<()> {
let mut page = 1;
let page_size = 20;
let mut last_id = None;
loop {
let files =
file::batched_get_with_cells(&pool, page_size, last_id, "Skyrim.esm", 1, updated_after).await?;
file::batched_get_with_cells(pool, page_size, last_id, "Skyrim.esm", 1, updated_after)
.await?;
if files.is_empty() {
break;
}
for file_with_cells in files {
let path = Path::new(&dir);
std::fs::create_dir_all(&path)?;
std::fs::create_dir_all(path)?;
let path = path.join(format!("{}.json", file_with_cells.nexus_file_id));
info!(page = page, nexus_file_id = file_with_cells.nexus_file_id, "dumping file data to {}", path.display());
info!(
page = page,
nexus_file_id = file_with_cells.nexus_file_id,
"dumping file data to {}",
path.display()
);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&file_with_cells)?)?;
last_id = Some(file_with_cells.id);
}
page += 1;
}
return Ok(());
Ok(())
}
@@ -6,9 +6,9 @@ use tracing::info;
use crate::models::game;

pub async fn dump_games(pool: &sqlx::Pool<sqlx::Postgres>, path: &str) -> Result<()> {
let games = game::get_all(&pool).await?;
let games = game::get_all(pool).await?;
info!("writing {} games to {}", games.len(), path);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&games)?)?;
return Ok(());
Ok(())
}
@@ -13,12 +13,17 @@ pub async fn dump_mod_cell_counts(pool: &sqlx::Pool<sqlx::Postgres>, path: &str)
let mut counts = HashMap::new();
loop {
let mod_cell_counts =
game_mod::batched_get_cell_counts(&pool, page_size, last_id, "Skyrim.esm", 1).await?;
game_mod::batched_get_cell_counts(pool, page_size, last_id, "Skyrim.esm", 1).await?;
if mod_cell_counts.is_empty() {
break;
}
for mod_cell_count in mod_cell_counts {
info!(page = page, nexus_mod_id = mod_cell_count.nexus_mod_id, count = mod_cell_count.cells.unwrap_or(0), "read mod cell count");
info!(
page = page,
nexus_mod_id = mod_cell_count.nexus_mod_id,
count = mod_cell_count.cells.unwrap_or(0),
"read mod cell count"
);
counts.insert(mod_cell_count.nexus_mod_id, mod_cell_count.cells);
last_id = Some(mod_cell_count.nexus_mod_id);
}
@@ -27,5 +32,5 @@ pub async fn dump_mod_cell_counts(pool: &sqlx::Pool<sqlx::Postgres>, path: &str)
info!("writing {} mod cell counts to {}", counts.len(), path);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&counts)?)?;
return Ok(());
Ok(())
}
@@ -9,27 +9,51 @@ use tracing::info;
use crate::models::game;
use crate::models::game_mod;

pub async fn dump_mod_data(pool: &sqlx::Pool<sqlx::Postgres>, dir: &str, updated_after: Option<NaiveDateTime>) -> Result<()> {
pub async fn dump_mod_data(
pool: &sqlx::Pool<sqlx::Postgres>,
dir: &str,
updated_after: Option<NaiveDateTime>,
) -> Result<()> {
let mut page = 1;
let page_size = 20;
let mut last_id = None;
let game_id_to_name: HashMap<_, _> = game::get_all(&pool).await?.into_iter().map(|game| (game.id, game.name)).collect();
let game_id_to_name: HashMap<_, _> = game::get_all(pool)
.await?
.into_iter()
.map(|game| (game.id, game.name))
.collect();
loop {
let mods =
game_mod::batched_get_with_cells_and_files(&pool, page_size, last_id, "Skyrim.esm", 1, updated_after).await?;
let mods = game_mod::batched_get_with_cells_and_files(
pool,
page_size,
last_id,
"Skyrim.esm",
1,
updated_after,
)
.await?;
if mods.is_empty() {
break;
}
for mod_with_cells in mods {
let path = Path::new(&dir).join(game_id_to_name.get(&mod_with_cells.game_id).expect("valid mod.game_id"));
let path = Path::new(&dir).join(
game_id_to_name
.get(&mod_with_cells.game_id)
.expect("valid mod.game_id"),
);
std::fs::create_dir_all(&path)?;
let path = path.join(format!("{}.json", mod_with_cells.nexus_mod_id));
info!(page = page, nexus_mod_id = mod_with_cells.nexus_mod_id, "dumping mod data to {}", path.display());
info!(
page = page,
nexus_mod_id = mod_with_cells.nexus_mod_id,
"dumping mod data to {}",
path.display()
);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&mod_with_cells)?)?;
last_id = Some(mod_with_cells.id);
}
page += 1;
}
return Ok(());
Ok(())
}
@@ -13,19 +13,27 @@ struct ModForSearchIdTranslated {
id: i32,
}

pub async fn dump_mod_search_index(pool: &sqlx::Pool<sqlx::Postgres>, game: &str, path: &str) -> Result<()> {
pub async fn dump_mod_search_index(
pool: &sqlx::Pool<sqlx::Postgres>,
game: &str,
path: &str,
) -> Result<()> {
let mut page = 1;
let mut search_index = vec![];
let page_size = 20;
let mut last_id = None;
let game_id = game::get_id_by_name(&pool, game).await?;
let game_id = game::get_id_by_name(pool, game).await?;
loop {
let mods = game_mod::batched_get_for_search(&pool, game_id, page_size, last_id).await?;
let mods = game_mod::batched_get_for_search(pool, game_id, page_size, last_id).await?;
if mods.is_empty() {
break;
}
for mod_for_search in mods {
info!(page = page, nexus_mod_id = mod_for_search.nexus_mod_id, "read mod name for search index");
info!(
page = page,
nexus_mod_id = mod_for_search.nexus_mod_id,
"read mod name for search index"
);
search_index.push(ModForSearchIdTranslated {
name: mod_for_search.name,
id: mod_for_search.nexus_mod_id,
@@ -34,8 +42,12 @@ pub async fn dump_mod_search_index(pool: &sqlx::Pool<sqlx::Postgres>, game: &str
}
page += 1;
}
info!("writing {} mod names for search index to {}", search_index.len(), path);
info!(
"writing {} mod names for search index to {}",
search_index.len(),
path
);
let mut file = File::create(path)?;
write!(file, "{}", serde_json::to_string(&search_index)?)?;
return Ok(());
Ok(())
}
@@ -5,23 +5,39 @@ use std::io::Write;
use std::path::Path;
use tracing::info;

use crate::models::{plugin, format_radix};
use crate::models::{format_radix, plugin};

pub async fn dump_plugin_data(pool: &sqlx::Pool<sqlx::Postgres>, dir: &str, updated_after: Option<NaiveDateTime>) -> Result<()> {
pub async fn dump_plugin_data(
pool: &sqlx::Pool<sqlx::Postgres>,
dir: &str,
updated_after: Option<NaiveDateTime>,
) -> Result<()> {
let mut page: u32 = 1;
let page_size = 20;
let mut last_hash = None;
loop {
let plugins =
plugin::batched_get_by_hash_with_mods(pool, page_size, last_hash, "Skyrim.esm", 1, updated_after).await?;
let plugins = plugin::batched_get_by_hash_with_mods(
pool,
page_size,
last_hash,
"Skyrim.esm",
1,
updated_after,
)
.await?;
if plugins.is_empty() {
break;
}
for plugin in plugins {
let path = Path::new(&dir);
create_dir_all(&path)?;
create_dir_all(path)?;
let path = path.join(format!("{}.json", format_radix(plugin.hash as u64, 36)));
info!(page = page, hash = plugin.hash, "dumping plugin data to {}", path.display());
info!(
page = page,
hash = plugin.hash,
"dumping plugin data to {}",
path.display()
);
let mut file = File::create(path)?;
let json_val = serde_json::to_string(&plugin)?;
write!(file, "{}", json_val)?;
@@ -2,21 +2,21 @@ pub mod backfills;
pub mod download_tiles;
pub mod dump_cell_data;
pub mod dump_cell_edit_counts;
pub mod dump_file_data;
pub mod dump_games;
pub mod dump_mod_cell_counts;
pub mod dump_mod_data;
pub mod dump_mod_search_index;
pub mod dump_plugin_data;
pub mod dump_file_data;
pub mod dump_games;
pub mod update;

pub use download_tiles::download_tiles;
pub use dump_cell_data::dump_cell_data;
pub use dump_cell_edit_counts::dump_cell_edit_counts;
pub use dump_file_data::dump_file_data;
pub use dump_games::dump_games;
pub use dump_mod_cell_counts::dump_mod_cell_counts;
pub use dump_mod_data::dump_mod_data;
pub use dump_mod_search_index::dump_mod_search_index;
pub use dump_plugin_data::dump_plugin_data;
pub use dump_file_data::dump_file_data;
pub use dump_games::dump_games;
pub use update::update;
@@ -1,6 +1,6 @@
use anyhow::Result;
use chrono::{NaiveDateTime, NaiveTime};
use humansize::{file_size_opts, FileSize};
use humansize::{format_size_i, DECIMAL};
use reqwest::StatusCode;
use std::collections::HashSet;
use std::io::SeekFrom;
@@ -36,7 +36,7 @@ pub async fn update(
.build()?;

let game_id = get_game_id(game_name).expect("valid game name");
let game = game::insert(&pool, game_name, game_id).await?;
let game = game::insert(pool, game_name, game_id).await?;

while has_next_page {
if !full && pages_with_no_updates >= 50 {
@@ -46,13 +46,18 @@ pub async fn update(

let page_span = info_span!("page", page, game_name, include_translations);
let _page_span = page_span.enter();
let mod_list_resp =
nexus_scraper::get_mod_list_page(&client, page, game.nexus_game_id, include_translations).await?;
let mod_list_resp = nexus_scraper::get_mod_list_page(
&client,
page,
game.nexus_game_id,
include_translations,
)
.await?;
let scraped = mod_list_resp.scrape_mods()?;

has_next_page = scraped.has_next_page;
let processed_mods = game_mod::bulk_get_last_updated_by_nexus_mod_ids(
&pool,
pool,
game.id,
&scraped
.mods
@@ -101,7 +106,7 @@ pub async fn update(
})
.collect();

let mods = game_mod::batched_insert(&pool, &mods_to_create_or_update).await?;
let mods = game_mod::batched_insert(pool, &mods_to_create_or_update).await?;

if mods.is_empty() {
pages_with_no_updates += 1;
@@ -112,7 +117,8 @@ pub async fn update(
for db_mod in mods {
let mod_span = info_span!("mod", name = ?&db_mod.name, id = &db_mod.nexus_mod_id);
let _mod_span = mod_span.enter();
let files_resp = nexus_api::files::get(&client, game_name, db_mod.nexus_mod_id).await?;
let files_resp =
nexus_api::files::get(&client, game_name, db_mod.nexus_mod_id).await?;

debug!(duration = ?files_resp.wait, "sleeping");
sleep(files_resp.wait).await;
@@ -135,7 +141,7 @@ pub async fn update(
});

let processed_file_ids: HashSet<i32> =
file::get_processed_nexus_file_ids_by_mod_id(&pool, db_mod.id)
file::get_processed_nexus_file_ids_by_mod_id(pool, db_mod.id)
.await?
.into_iter()
.collect();
@@ -150,7 +156,7 @@ pub async fn update(
continue;
}
let db_file = file::insert(
&pool,
pool,
&file::UnsavedFile {
name: api_file.name,
file_name: api_file.file_name,
@@ -174,7 +180,7 @@ pub async fn update(
info!(
"file metadata does not contain a plugin, skip downloading"
);
file::update_has_plugin(&pool, db_file.id, false).await?;
file::update_has_plugin(pool, db_file.id, false).await?;
continue;
}
} else {
@@ -186,10 +192,7 @@ pub async fn update(
}
};

let humanized_size = api_file
.size
.file_size(file_size_opts::CONVENTIONAL)
.expect("unable to create human-readable file size");
let humanized_size = format_size_i(api_file.size, DECIMAL);
info!(size = %humanized_size, "decided to download file");
let download_link_resp = nexus_api::download_link::get(
&client,
@@ -205,7 +208,7 @@ pub async fn update(
status = ?reqwest_err.status(),
"failed to get download link for file, skipping file"
);
file::update_has_download_link(&pool, db_file.id, false).await?;
file::update_has_download_link(pool, db_file.id, false).await?;
continue;
}
}
@@ -215,7 +218,7 @@ pub async fn update(
let mut tokio_file = match download_link_resp.download_file(&client).await {
Ok(file) => {
info!(bytes = api_file.size, "download finished");
file::update_downloaded_at(&pool, db_file.id).await?;
file::update_downloaded_at(pool, db_file.id).await?;
file
}
Err(err) => {
@@ -228,14 +231,14 @@ pub async fn update(
tokio_file.seek(SeekFrom::Start(0)).await?;
if let Err(err) = tokio_file.read_exact(&mut initial_bytes).await {
warn!(error = %err, "failed to read initial bytes, skipping file");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(pool, db_file.id, true).await?;
continue;
}
let kind = match infer::get(&initial_bytes) {
Some(kind) => kind,
None => {
warn!(initial_bytes = ?initial_bytes, "unable to determine file type of archive, skipping file");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(pool, db_file.id, true).await?;
continue;
}
};
@@ -253,7 +256,7 @@ pub async fn update(
let mut file = tokio_file.try_clone().await?.into_std().await;
match extract_with_unrar(
&mut file,
&pool,
pool,
&db_file,
&db_mod,
game_name,
@@ -266,7 +269,15 @@ pub async fn update(
// unrar failed to extract rar file (e.g. archive has unicode filenames)
// Attempt to uncompress the archive using `7z` unix command instead
warn!(error = %err, "failed to extract file with unrar, extracting whole archive with 7z instead");
extract_with_7zip(&mut file, &pool, &db_file, &db_mod, game_name, checked_metadata).await
extract_with_7zip(
&mut file,
pool,
&db_file,
&db_mod,
game_name,
checked_metadata,
)
.await
}
}?;
}
@@ -274,8 +285,10 @@ pub async fn update(
tokio_file.seek(SeekFrom::Start(0)).await?;
let mut file = tokio_file.try_clone().await?.into_std().await;

match extract_with_compress_tools(&mut file, &pool, &db_file, &db_mod, game_name)
.await
match extract_with_compress_tools(
&mut file, pool, &db_file, &db_mod, game_name,
)
.await
{
Ok(_) => Ok(()),
Err(err) => {
@@ -289,11 +302,24 @@ pub async fn update(
// compress_tools or libarchive failed to extract zip/7z file (e.g. archive is deflate64 compressed)
// Attempt to uncompress the archive using `7z` unix command instead
warn!(error = %err, "failed to extract file with compress_tools, extracting whole archive with 7z instead");
extract_with_7zip(&mut file, &pool, &db_file, &db_mod, game_name, checked_metadata).await
} else if kind.mime_type() == "application/vnd.microsoft.portable-executable" {
extract_with_7zip(
&mut file,
pool,
&db_file,
&db_mod,
game_name,
checked_metadata,
)
.await
} else if kind.mime_type()
== "application/vnd.microsoft.portable-executable"
{
// we tried to extract this .exe file, but it's not an archive so there's nothing we can do
warn!("archive is an .exe file that cannot be extracted, skipping file");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(
pool, db_file.id, true,
)
.await?;
continue;
} else {
Err(err)
@@ -307,7 +333,7 @@ pub async fn update(
sleep(download_link_resp.wait).await;
}

game_mod::update_last_updated_files_at(&pool, db_mod.id).await?;
game_mod::update_last_updated_files_at(pool, db_mod.id).await?;
}

page += 1;
@@ -57,7 +57,7 @@ impl<'a> Extractor<'a> {
self.file.seek(SeekFrom::Start(0))?;
let mut buf = Vec::default();
info!("uncompressing plugin file from downloaded archive");
uncompress_archive_file(&mut self.file, &mut buf, &file_path)?;
uncompress_archive_file(&mut self.file, &mut buf, file_path)?;
Ok(buf)
}
}
@@ -95,8 +95,16 @@ pub async fn extract_with_compress_tools(
let (file_path, mut plugin_buf) = plugin?;
let plugin_span = info_span!("plugin", name = ?file_path);
let _plugin_span = plugin_span.enter();
let safe_file_path = file_path.replace("\\", "/");
process_plugin(&mut plugin_buf, &pool, &db_file, &db_mod, &safe_file_path, game_name).await?;
let safe_file_path = file_path.replace('\\', "/");
process_plugin(
&mut plugin_buf,
pool,
db_file,
db_mod,
&safe_file_path,
game_name,
)
.await?;
}
Ok(())
}
@@ -5,8 +5,8 @@ use tempfile::tempdir;
use tracing::{info, info_span, warn};
use walkdir::WalkDir;

use crate::models::{file, file::File};
use crate::models::game_mod::Mod;
use crate::models::{file, file::File};
use crate::plugin_processor::process_plugin;

pub async fn extract_with_7zip(
@@ -26,16 +26,16 @@ pub async fn extract_with_7zip(
let extracted_path = temp_dir.path().join("extracted");

let status = Command::new("7z")
.args(&[
.args([
"x",
&format!("-o{}", &extracted_path.to_string_lossy()),
&temp_file_path.to_string_lossy().to_string(),
&temp_file_path.to_string_lossy(),
])
.status()?;

if !status.success() && !checked_metadata {
warn!("failed to extract archive and server has no metadata, skipping file");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(pool, db_file.id, true).await?;
return Ok(());
}

@@ -43,7 +43,9 @@ pub async fn extract_with_7zip(
.contents_first(true)
.into_iter()
.filter_entry(|e| {
if e.file_type().is_dir() { return false }
if e.file_type().is_dir() {
return false;
}
if let Some(extension) = e.path().extension() {
extension == "esp" || extension == "esm" || extension == "esl"
} else {
@@ -59,9 +61,9 @@ pub async fn extract_with_7zip(
let mut plugin_buf = std::fs::read(extracted_path.join(file_path))?;
process_plugin(
&mut plugin_buf,
&pool,
&db_file,
&db_mod,
pool,
db_file,
db_mod,
&file_path.to_string_lossy(),
game_name,
)
@@ -37,7 +37,7 @@ pub async fn extract_with_unrar(
Err(_) => {
if !checked_metadata {
warn!("failed to read archive and server has no metadata, skipping file");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(pool, db_file.id, true).await?;
return Ok(());
} else {
error!("failed to read archive, but server had metadata");
@@ -58,14 +58,14 @@ pub async fn extract_with_unrar(
let mut extract = match extract {
Err(err) => {
warn!(error = %err, "failed to extract with unrar");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(pool, db_file.id, true).await?;
return Ok(());
}
Ok(extract) => extract,
};
if let Err(err) = extract.process() {
warn!(error = %err, "failed to extract with unrar");
file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
file::update_unable_to_extract_plugins(pool, db_file.id, true).await?;
return Ok(());
}

@@ -77,9 +77,9 @@ pub async fn extract_with_unrar(
let mut plugin_buf = std::fs::read(temp_dir.path().join(file_path))?;
process_plugin(
&mut plugin_buf,
&pool,
&db_file,
&db_mod,
pool,
db_file,
db_mod,
&file_path.to_string_lossy(),
game_name,
)
src/main.rs
@@ -14,7 +14,8 @@ mod plugin_processor;

use commands::{
backfills::backfill_is_translation, download_tiles, dump_cell_data, dump_cell_edit_counts,
dump_mod_cell_counts, dump_mod_data, dump_mod_search_index, dump_plugin_data, dump_file_data, dump_games, update,
dump_file_data, dump_games, dump_mod_cell_counts, dump_mod_data, dump_mod_search_index,
dump_plugin_data, update,
};

#[derive(FromArgs)]
@@ -24,7 +25,11 @@ struct Args {
/// the page number to start scraping for mods on nexus mods
page: usize,

#[argh(option, short = 'g', default = "String::from(\"skyrimspecialedition\")")]
#[argh(
option,
short = 'g',
default = "String::from(\"skyrimspecialedition\")"
)]
/// name of nexus game to scrape (e.g. "skyrim" or "skyrimspecialedition")
game: String,

@@ -121,5 +126,5 @@ pub async fn main() -> Result<()> {
return backfill_is_translation(&pool).await;
}

return update(&pool, args.page, &args.game, args.full).await;
update(&pool, args.page, &args.game, args.full).await
}
@@ -268,4 +268,4 @@ pub async fn batched_get_with_cells(
.await
.context("Failed to batch get with cells")
}
}
}
@@ -34,28 +34,18 @@ pub async fn insert(
}

#[instrument(level = "debug", skip(pool))]
pub async fn get_all(
pool: &sqlx::Pool<sqlx::Postgres>,
) -> Result<Vec<Game>> {
sqlx::query_as!(
Game,
"SELECT * FROM games"
)
.fetch_all(pool)
.await
.context("Failed to fetch games")
pub async fn get_all(pool: &sqlx::Pool<sqlx::Postgres>) -> Result<Vec<Game>> {
sqlx::query_as!(Game, "SELECT * FROM games")
.fetch_all(pool)
.await
.context("Failed to fetch games")
}

#[instrument(level = "debug", skip(pool))]
pub async fn get_id_by_name(
pool: &sqlx::Pool<sqlx::Postgres>,
name: &str,
) -> Result<i32> {
sqlx::query_scalar!(
"SELECT id FROM games WHERE name = $1",
name
)
.fetch_one(pool)
.await
.context("Failed to fetch game id by name")
}
pub async fn get_id_by_name(pool: &sqlx::Pool<sqlx::Postgres>, name: &str) -> Result<i32> {
sqlx::query_scalar!("SELECT id FROM games WHERE name = $1", name)
.fetch_one(pool)
.await
.context("Failed to fetch game id by name")
}
@@ -474,42 +474,45 @@ pub async fn batched_get_with_cells_and_files(
.await
.context("Failed to batch get mod files")?;

Ok(mods.into_iter().map(|m| {
let id = m.id;
ModWithCellsAndFiles {
id: m.id,
name: m.name,
nexus_mod_id: m.nexus_mod_id,
author_name: m.author_name,
author_id: m.author_id,
category_name: m.category_name,
category_id: m.category_id,
description: m.description,
thumbnail_link: m.thumbnail_link,
game_id: m.game_id,
is_translation: m.is_translation,
updated_at: m.updated_at,
created_at: m.created_at,
last_update_at: m.last_update_at,
first_upload_at: m.first_upload_at,
last_updated_files_at: m.last_updated_files_at,
cells: mod_cells
.iter()
.find(|c| c.mod_id == id)
.map(|c| c.cells.clone())
.unwrap_or_else(|| Some(serde_json::Value::Array(vec![]))),
files: mod_files
.iter()
.find(|f| f.mod_id == id)
.map(|f| f.files.clone())
.unwrap_or_else(|| Some(serde_json::Value::Array(vec![]))),
plugin_count: plugins_count
.iter()
.find(|p| p.mod_id == id)
.map(|p| p.plugin_count)
.unwrap_or(Some(0)),
}
}).collect())
Ok(mods
.into_iter()
.map(|m| {
let id = m.id;
ModWithCellsAndFiles {
id: m.id,
name: m.name,
nexus_mod_id: m.nexus_mod_id,
author_name: m.author_name,
author_id: m.author_id,
category_name: m.category_name,
category_id: m.category_id,
description: m.description,
thumbnail_link: m.thumbnail_link,
game_id: m.game_id,
is_translation: m.is_translation,
updated_at: m.updated_at,
created_at: m.created_at,
last_update_at: m.last_update_at,
first_upload_at: m.first_upload_at,
last_updated_files_at: m.last_updated_files_at,
cells: mod_cells
.iter()
.find(|c| c.mod_id == id)
.map(|c| c.cells.clone())
.unwrap_or_else(|| Some(serde_json::Value::Array(vec![]))),
files: mod_files
.iter()
.find(|f| f.mod_id == id)
.map(|f| f.files.clone())
.unwrap_or_else(|| Some(serde_json::Value::Array(vec![]))),
plugin_count: plugins_count
.iter()
.find(|p| p.mod_id == id)
.map(|p| p.plugin_count)
.unwrap_or(Some(0)),
}
})
.collect())
}

#[instrument(level = "debug", skip(pool))]
@@ -1,8 +1,8 @@
use anyhow::{Context, Result};
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use sqlx::types::Json;
use sqlx::FromRow;
use tracing::instrument;

use super::hash_to_string;
@@ -16,7 +16,12 @@ pub struct DownloadLinkResponse {
}

#[instrument(skip(client))]
pub async fn get(client: &Client, game_name: &str, mod_id: i32, file_id: i64) -> Result<DownloadLinkResponse> {
pub async fn get(
client: &Client,
game_name: &str,
mod_id: i32,
file_id: i64,
) -> Result<DownloadLinkResponse> {
for attempt in 1..=3 {
let res = match client
.get(format!(