Fix clippy lints, add 7z fallback for failed unrar
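Replaces single-arm `match` statements with `if let` (clippy's single_match), swaps `len() > 0` / `len() == 0` checks for `is_empty()` (len_zero), calls `iter()` instead of `into_iter()` on references (into_iter_on_ref), flattens a manual `if let Ok(...)` loop (manual_flatten), and drops a redundant `.ok()` before `.expect(...)` (ok_expect). The long positional argument lists of `file::insert` and `plugin::insert` are folded into new `UnsavedFile` and `UnsavedPlugin` structs. When unrar fails to extract a rar archive (e.g. one with unicode filenames), extraction now falls back to the `7z` command instead of failing the whole file.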
parent be9c78217d
commit ba6ca0c928
@@ -63,9 +63,8 @@ impl<'a> Iterator for Extractor<'a> {
 
     fn next(&mut self) -> Option<Self::Item> {
         if self.plugin_file_paths.is_none() {
-            match self.list_plugins() {
-                Err(err) => return Some(Err(err).context(ExtractorError)),
-                _ => {}
+            if let Err(err) = self.list_plugins() {
+                return Some(Err(err).context(ExtractorError));
             }
         }
         if let Some(plugin_file_paths) = &mut self.plugin_file_paths {
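This is the shape clippy's single_match lint flags throughout the commit: a `match` whose only meaningful arm sits next to an empty `_ => {}` catch-all reads more directly as `if let`. A minimal, self-contained sketch of the rewrite (names hypothetical):

    // Stand-in for self.list_plugins() above.
    fn list_plugins() -> Result<(), String> {
        Err("no plugins listed".to_string())
    }

    fn next_item() -> Option<Result<(), String>> {
        // Before (clippy::single_match fires on the empty catch-all arm):
        //     match list_plugins() {
        //         Err(err) => return Some(Err(err)),
        //         _ => {}
        //     }
        // After: the one interesting arm becomes an if let.
        if let Err(err) = list_plugins() {
            return Some(Err(err));
        }
        None
    }

    fn main() {
        assert!(next_item().is_some());
    }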
src/main.rs | 78
@@ -123,9 +123,9 @@ async fn extract_with_unrar(
 
     let mut plugin_file_paths = Vec::new();
     let list = Archive::new(&temp_file_path.to_string_lossy().to_string())?.list();
-    if let Ok(list) = list {
-        for entry in list {
-            if let Ok(entry) = entry {
+    match list {
+        Ok(list) => {
+            for entry in list.flatten() {
                 if let Some(extension) = entry.filename.extension() {
                     if entry.is_file()
                         && (extension == "esp" || extension == "esm" || extension == "esl")
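`list.flatten()` works because `Result` implements `IntoIterator`: flattening an iterator of `Result`s yields only the `Ok` values, which is exactly what the removed `if let Ok(entry) = entry` did by hand (clippy calls this manual_flatten). A standalone illustration:

    fn main() {
        let entries: Vec<Result<&str, &str>> = vec![Ok("a.esp"), Err("corrupt"), Ok("b.esm")];

        // flatten() skips the Err items and unwraps the Ok items in one step.
        let readable: Vec<&str> = entries.into_iter().flatten().collect();
        assert_eq!(readable, vec!["a.esp", "b.esm"]);
    }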
@@ -135,14 +135,15 @@ async fn extract_with_unrar(
                 }
             }
         }
-    } else {
+        Err(_) => {
             if !checked_metadata {
                 warn!("failed to read archive and server has no metadata, skipping file");
                 file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
                 return Ok(());
             } else {
                 error!("failed to read archive, but server had metadata");
                 panic!("failed to read archive, but server had metadata");
             }
+        }
     }
     info!(
@@ -150,7 +151,7 @@ async fn extract_with_unrar(
         "listed plugins in downloaded archive"
     );
 
-    if plugin_file_paths.len() > 0 {
+    if !plugin_file_paths.is_empty() {
         info!("uncompressing downloaded archive");
         let extract = Archive::new(&temp_file_path.to_string_lossy().to_string())?
             .extract_to(temp_dir.path().to_string_lossy().to_string());
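`!plugin_file_paths.is_empty()` states the intent of `len() > 0` directly; clippy's len_zero lint flags length comparisons against zero wherever an `is_empty()` method exists.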
@@ -163,13 +164,10 @@ async fn extract_with_unrar(
             }
             Ok(extract) => extract
         };
-        match extract.process() {
-            Err(err) => {
-                warn!(error = %err, "failed to extract with unrar");
-                file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
-                return Ok(())
-            }
-            _ => {}
+        if let Err(err) = extract.process() {
+            warn!(error = %err, "failed to extract with unrar");
+            file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
+            return Ok(())
         }
 
         for file_path in plugin_file_paths.iter() {
@@ -285,15 +283,17 @@ pub async fn main() -> Result<()> {
             }
             let db_file = file::insert(
                 &pool,
-                api_file.name,
-                api_file.file_name,
-                api_file.file_id as i32,
-                db_mod.id,
-                api_file.category,
-                api_file.version,
-                api_file.mod_version,
-                api_file.size,
-                api_file.uploaded_at,
+                &file::UnsavedFile {
+                    name: api_file.name,
+                    file_name: api_file.file_name,
+                    nexus_file_id: api_file.file_id as i32,
+                    mod_id: db_mod.id,
+                    category: api_file.category,
+                    version: api_file.version,
+                    mod_version: api_file.mod_version,
+                    size: api_file.size,
+                    uploaded_at: api_file.uploaded_at,
+                },
             )
             .await?;
 
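Folding nine positional arguments into `&file::UnsavedFile { ... }` (the shape clippy's too_many_arguments lint warns about) makes the call site self-labeling: it is no longer possible to silently transpose two arguments of the same type, such as `name` and `file_name` or `version` and `mod_version`.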
@@ -350,13 +350,10 @@ pub async fn main() -> Result<()> {
 
             let mut initial_bytes = [0; 8];
             tokio_file.seek(SeekFrom::Start(0)).await?;
-            match tokio_file.read_exact(&mut initial_bytes).await {
-                Err(err) => {
-                    warn!(error = %err, "failed to read initial bytes, skipping file");
-                    file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
-                    continue;
-                }
-                _ => {}
+            if let Err(err) = tokio_file.read_exact(&mut initial_bytes).await {
+                warn!(error = %err, "failed to read initial bytes, skipping file");
+                file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
+                continue;
             }
             let kind = match infer::get(&initial_bytes) {
                 Some(kind) => kind,
@@ -378,8 +375,15 @@ pub async fn main() -> Result<()> {
                 // https://github.com/libarchive/libarchive/issues/373, https://github.com/libarchive/libarchive/issues/1426
                 tokio_file.seek(SeekFrom::Start(0)).await?;
                 let mut file = tokio_file.try_clone().await?.into_std().await;
-                extract_with_unrar(&mut file, &pool, &db_file, &db_mod, checked_metadata)
-                    .await?;
+                match extract_with_unrar(&mut file, &pool, &db_file, &db_mod, checked_metadata).await {
+                    Ok(_) => Ok(()),
+                    Err(err) => {
+                        // unrar failed to extract rar file (e.g. archive has unicode filenames)
+                        // Attempt to uncompress the archive using `7z` unix command instead
+                        warn!(error = %err, "failed to extract file with unrar, extracting whole archive with 7z instead");
+                        extract_with_7zip(&mut file, &pool, &db_file, &db_mod).await
+                    }
+                }?;
             }
             _ => {
                 tokio_file.seek(SeekFrom::Start(0)).await?;
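`extract_with_7zip` itself is outside this diff. A minimal sketch of the fallback it implies, assuming the `tempfile` and `walkdir` crates and a `7z` binary on PATH (the function name, signature, and flags here are illustrative, not the project's actual implementation):

    use std::io::{Read, Seek, SeekFrom, Write};
    use std::path::PathBuf;
    use std::process::Command;

    // Hypothetical fallback: dump the archive to disk, let `7z x` unpack
    // everything, then scan the output tree for plugin files.
    fn extract_all_with_7zip(file: &mut std::fs::File) -> std::io::Result<Vec<PathBuf>> {
        let temp_dir = tempfile::tempdir()?;
        let archive_path = temp_dir.path().join("download.archive");

        // 7z reads from disk, so copy the in-memory handle out to a temp file.
        let mut bytes = Vec::new();
        file.seek(SeekFrom::Start(0))?;
        file.read_to_end(&mut bytes)?;
        std::fs::File::create(&archive_path)?.write_all(&bytes)?;

        // `x` extracts with full paths, `-y` auto-confirms prompts,
        // `-o<dir>` sets the output directory (no space after -o).
        let status = Command::new("7z")
            .arg("x")
            .arg("-y")
            .arg(format!("-o{}", temp_dir.path().display()))
            .arg(&archive_path)
            .status()?;
        if !status.success() {
            return Err(std::io::Error::new(std::io::ErrorKind::Other, "7z exited with an error"));
        }

        // Mirror the unrar path: keep only .esp/.esm/.esl files. Note that a
        // real implementation would process these before temp_dir is dropped,
        // since dropping it deletes the extracted files.
        let mut plugins = Vec::new();
        for entry in walkdir::WalkDir::new(temp_dir.path()).into_iter().flatten() {
            if let Some(extension) = entry.path().extension() {
                if entry.file_type().is_file()
                    && (extension == "esp" || extension == "esm" || extension == "esl")
                {
                    plugins.push(entry.path().to_path_buf());
                }
            }
        }
        Ok(plugins)
    }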
@@ -73,7 +73,7 @@ pub async fn batched_insert<'a>(
     let mut ys: Vec<Option<i32>> = vec![];
     let mut world_ids: Vec<Option<i32>> = vec![];
     let mut is_persistents: Vec<bool> = vec![];
-    batch.into_iter().for_each(|unsaved_cell| {
+    batch.iter().for_each(|unsaved_cell| {
         form_ids.push(unsaved_cell.form_id);
         masters.push(unsaved_cell.master);
         xs.push(unsaved_cell.x);
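Each `batch` yielded by `chunks(BATCH_SIZE)` is a slice reference, so the old `into_iter()` could never take ownership and already behaved like `iter()`; clippy's into_iter_on_ref lint asks for the explicit spelling. The same fix repeats in the other `batched_insert` functions below. A tiny demonstration:

    fn main() {
        let cells = [10, 20, 30, 40];
        for batch in cells.chunks(2) {
            // batch is a &[i32]: iter() and into_iter() both yield &i32 here,
            // so the explicit iter() is what clippy::into_iter_on_ref asks for.
            let sum: i32 = batch.iter().sum();
            assert!(sum > 0);
        }
    }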
@@ -23,6 +23,19 @@ pub struct File {
     pub unable_to_extract_plugins: bool,
 }
 
+#[derive(Debug)]
+pub struct UnsavedFile<'a> {
+    pub name: &'a str,
+    pub file_name: &'a str,
+    pub nexus_file_id: i32,
+    pub mod_id: i32,
+    pub category: Option<&'a str>,
+    pub version: Option<&'a str>,
+    pub mod_version: Option<&'a str>,
+    pub size: i64,
+    pub uploaded_at: NaiveDateTime,
+}
+
 #[instrument(level = "debug", skip(pool))]
 pub async fn get_by_nexus_file_id(
     pool: &sqlx::Pool<sqlx::Postgres>,
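With every text field borrowed as `&'a str`, `UnsavedFile` adds no allocations over the old positional parameters: the caller assembles it from references it already holds (the `api_file` fields at the call site above) and the struct only needs to outlive the `insert` call.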
@@ -59,17 +72,9 @@ pub async fn get_processed_nexus_file_ids_by_mod_id(
 }
 
 #[instrument(level = "debug", skip(pool))]
-pub async fn insert(
+pub async fn insert<'a>(
     pool: &sqlx::Pool<sqlx::Postgres>,
-    name: &str,
-    file_name: &str,
-    nexus_file_id: i32,
-    mod_id: i32,
-    category: Option<&str>,
-    version: Option<&str>,
-    mod_version: Option<&str>,
-    size: i64,
-    uploaded_at: NaiveDateTime,
+    unsaved_file: &UnsavedFile<'a>,
 ) -> Result<File> {
     sqlx::query_as!(
         File,
@@ -80,15 +85,15 @@ pub async fn insert(
         SET (name, file_name, category, version, mod_version, uploaded_at, updated_at) =
             (EXCLUDED.name, EXCLUDED.file_name, EXCLUDED.category, EXCLUDED.version, EXCLUDED.mod_version, EXCLUDED.uploaded_at, now())
         RETURNING *",
-        name,
-        file_name,
-        nexus_file_id,
-        mod_id,
-        category,
-        version,
-        mod_version,
-        size,
-        uploaded_at
+        unsaved_file.name,
+        unsaved_file.file_name,
+        unsaved_file.nexus_file_id,
+        unsaved_file.mod_id,
+        unsaved_file.category,
+        unsaved_file.version,
+        unsaved_file.mod_version,
+        unsaved_file.size,
+        unsaved_file.uploaded_at
     )
     .fetch_one(pool)
     .await
@@ -106,7 +106,7 @@ pub async fn batched_insert<'a>(
     let mut categories: Vec<Option<&str>> = vec![];
     let mut descriptions: Vec<Option<&str>> = vec![];
     let mut game_ids: Vec<i32> = vec![];
-    batch.into_iter().for_each(|unsaved_mod| {
+    batch.iter().for_each(|unsaved_mod| {
         names.push(unsaved_mod.name);
         nexus_mod_ids.push(unsaved_mod.nexus_mod_id);
         authors.push(unsaved_mod.author);
@@ -21,19 +21,24 @@ pub struct Plugin {
     pub created_at: NaiveDateTime,
 }
 
+#[derive(Debug)]
+pub struct UnsavedPlugin<'a> {
+    pub name: &'a str,
+    pub hash: i64,
+    pub file_id: i32,
+    pub version: f64,
+    pub size: i64,
+    pub author: Option<&'a str>,
+    pub description: Option<&'a str>,
+    pub masters: &'a [&'a str],
+    pub file_name: &'a str,
+    pub file_path: &'a str,
+}
+
 #[instrument(level = "debug", skip(pool))]
-pub async fn insert(
+pub async fn insert<'a>(
     pool: &sqlx::Pool<sqlx::Postgres>,
-    name: &str,
-    hash: i64,
-    file_id: i32,
-    version: f64,
-    size: i64,
-    author: Option<&str>,
-    description: Option<&str>,
-    masters: &[&str],
-    file_name: &str,
-    file_path: &str,
+    unsaved_plugin: &UnsavedPlugin<'a>,
 ) -> Result<Plugin> {
     // sqlx doesn't understand slices of &str with the query_as! macro: https://github.com/launchbadge/sqlx/issues/280
     sqlx::query_as(
@@ -45,16 +50,16 @@ pub async fn insert(
             (EXCLUDED.name, EXCLUDED.hash, EXCLUDED.version, EXCLUDED.author, EXCLUDED.description, EXCLUDED.masters, EXCLUDED.file_name, now())
         RETURNING *"#,
     )
-    .bind(name)
-    .bind(hash)
-    .bind(file_id)
-    .bind(version)
-    .bind(size)
-    .bind(author)
-    .bind(description)
-    .bind(masters)
-    .bind(file_name)
-    .bind(file_path)
+    .bind(unsaved_plugin.name)
+    .bind(unsaved_plugin.hash)
+    .bind(unsaved_plugin.file_id)
+    .bind(unsaved_plugin.version)
+    .bind(unsaved_plugin.size)
+    .bind(unsaved_plugin.author)
+    .bind(unsaved_plugin.description)
+    .bind(unsaved_plugin.masters)
+    .bind(unsaved_plugin.file_name)
+    .bind(unsaved_plugin.file_path)
     .fetch_one(pool)
     .await
     .context("Failed to insert plugin")
@@ -57,7 +57,7 @@ pub async fn batched_insert<'a>(
     let mut plugin_ids: Vec<i32> = vec![];
     let mut cell_ids: Vec<i32> = vec![];
     let mut editor_ids: Vec<Option<&str>> = vec![];
-    batch.into_iter().for_each(|unsaved_plugin_cell| {
+    batch.iter().for_each(|unsaved_plugin_cell| {
         plugin_ids.push(unsaved_plugin_cell.plugin_id);
         cell_ids.push(unsaved_plugin_cell.cell_id);
         editor_ids.push(unsaved_plugin_cell.editor_id);
@@ -57,10 +57,10 @@ pub async fn batched_insert<'a>(
     let mut plugin_ids: Vec<i32> = vec![];
     let mut world_ids: Vec<i32> = vec![];
     let mut editor_ids: Vec<&str> = vec![];
-    batch.into_iter().for_each(|unsaved_plugin_world| {
+    batch.iter().for_each(|unsaved_plugin_world| {
         plugin_ids.push(unsaved_plugin_world.plugin_id);
         world_ids.push(unsaved_plugin_world.world_id);
-        editor_ids.push(unsaved_plugin_world.editor_id.clone());
+        editor_ids.push(unsaved_plugin_world.editor_id);
     });
     saved_plugin_worlds.append(
         &mut sqlx::query_as(
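The dropped `.clone()` was a no-op: `editor_id` is a `&str`, and cloning a shared reference just copies the reference (clippy's clone_on_copy warns about clones of `Copy` types like this).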
@@ -52,7 +52,7 @@ pub async fn batched_insert<'a>(
     for batch in worlds.chunks(BATCH_SIZE) {
         let mut form_ids: Vec<i32> = vec![];
         let mut masters: Vec<&str> = vec![];
-        batch.into_iter().for_each(|unsaved_world| {
+        batch.iter().for_each(|unsaved_world| {
            form_ids.push(unsaved_world.form_id);
            masters.push(unsaved_world.master);
        });
@@ -70,7 +70,7 @@ impl FilesResponse {
             .as_array()
             .ok_or_else(|| anyhow!("files value in API response is not an array"))?;
         let files: Vec<ApiFile> = files
-            .into_iter()
+            .iter()
             .map(|file| {
                 let file_id = file
                     .get("file_id")
@@ -76,7 +76,6 @@ impl ModListResponse {
                     .attr("data-mod-id")
                     .expect("Missing mod id attribute")
                     .parse::<i32>()
-                    .ok()
                     .expect("Failed to parse mod id");
                 let name_elem = right
                     .select(&name_select)
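The removed `.ok()` converted the `Result` from `parse::<i32>()` into an `Option` only to `expect` on it. Clippy's ok_expect lint notes that `Result::expect` works directly, and its panic message then includes the underlying parse error instead of discarding it.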
@@ -9,7 +9,7 @@ use tracing::{info, warn};
 
 use crate::models::file::File;
 use crate::models::game_mod::Mod;
-use crate::models::plugin;
+use crate::models::{plugin, plugin::UnsavedPlugin};
 use crate::models::{cell, cell::UnsavedCell};
 use crate::models::{plugin_cell, plugin_cell::UnsavedPluginCell};
 use crate::models::{plugin_world, plugin_world::UnsavedPluginWorld};
@@ -36,7 +36,7 @@ pub async fn process_plugin(
     db_mod: &Mod,
     file_path: &str,
 ) -> Result<()> {
-    if plugin_buf.len() == 0 {
+    if plugin_buf.is_empty() {
         warn!("skipping processing of invalid empty plugin");
         return Ok(());
     }
@@ -58,16 +58,18 @@ pub async fn process_plugin(
     let masters: Vec<&str> = plugin.header.masters.iter().map(|s| s.borrow()).collect();
     let plugin_row = plugin::insert(
         &pool,
-        &db_file.name,
-        hash as i64,
-        db_file.id,
-        plugin.header.version as f64,
-        plugin_buf.len() as i64,
-        author,
-        description,
-        &masters,
-        &file_name,
-        file_path,
+        &UnsavedPlugin {
+            name: &db_file.name,
+            hash: hash as i64,
+            file_id: db_file.id,
+            version: plugin.header.version as f64,
+            size: plugin_buf.len() as i64,
+            author,
+            description,
+            masters: &masters,
+            file_name: &file_name,
+            file_path,
+        },
     )
     .await?;
 
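Note the mix inside the `UnsavedPlugin` literal: `author`, `description`, and `file_path` use field init shorthand because the local bindings already carry the field names, while the remaining fields are written out where the expression differs from the field name.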