Fix clippy lints, add 7z fallback for failed unrar

Tyler Hallada 2021-08-14 11:43:09 -06:00
parent be9c78217d
commit ba6ca0c928
12 changed files with 114 additions and 100 deletions


@@ -63,9 +63,8 @@ impl<'a> Iterator for Extractor<'a> {
     fn next(&mut self) -> Option<Self::Item> {
         if self.plugin_file_paths.is_none() {
-            match self.list_plugins() {
-                Err(err) => return Some(Err(err).context(ExtractorError)),
-                _ => {}
+            if let Err(err) = self.list_plugins() {
+                return Some(Err(err).context(ExtractorError));
             }
         }
         if let Some(plugin_file_paths) = &mut self.plugin_file_paths {
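This first hunk is the shape of most of the lint fixes in this commit: clippy's single_match lint fires on a match whose only meaningful arm is paired with an empty _ => {} catch-all, and suggests if let instead. A minimal standalone sketch of the rewrite (the fallible function is illustrative):

fn fallible() -> Result<(), String> {
    Err("boom".to_string())
}

fn main() {
    // Before: only the Err arm does anything.
    match fallible() {
        Err(err) => eprintln!("failed: {}", err),
        _ => {}
    }
    // After: `if let` names the one interesting case directly.
    if let Err(err) = fallible() {
        eprintln!("failed: {}", err);
    }
}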


@@ -123,9 +123,9 @@ async fn extract_with_unrar(
     let mut plugin_file_paths = Vec::new();
     let list = Archive::new(&temp_file_path.to_string_lossy().to_string())?.list();
-    if let Ok(list) = list {
-        for entry in list {
-            if let Ok(entry) = entry {
+    match list {
+        Ok(list) => {
+            for entry in list.flatten() {
                 if let Some(extension) = entry.filename.extension() {
                     if entry.is_file()
                         && (extension == "esp" || extension == "esm" || extension == "esl")
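The for entry in list.flatten() change addresses clippy's manual_flatten lint: Result implements IntoIterator, so an iterator over Results can be flattened to visit only the Ok values, replacing the inner if let Ok(entry) = entry. A standalone sketch with illustrative values:

fn main() {
    let entries: Vec<Result<i32, String>> = vec![Ok(1), Err("unreadable".into()), Ok(3)];
    // flatten() silently skips the Err items and yields 1 and 3.
    for value in entries.into_iter().flatten() {
        println!("{}", value);
    }
}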
@@ -135,14 +135,15 @@ async fn extract_with_unrar(
                 }
             }
         }
-    } else {
-        if !checked_metadata {
-            warn!("failed to read archive and server has no metadata, skipping file");
-            file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
-            return Ok(());
-        } else {
-            error!("failed to read archive, but server had metadata");
-            panic!("failed to read archive, but server had metadata");
+        Err(_) => {
+            if !checked_metadata {
+                warn!("failed to read archive and server has no metadata, skipping file");
+                file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
+                return Ok(());
+            } else {
+                error!("failed to read archive, but server had metadata");
+                panic!("failed to read archive, but server had metadata");
+            }
         }
     }
     info!(
@@ -150,7 +151,7 @@ async fn extract_with_unrar(
         "listed plugins in downloaded archive"
     );
-    if plugin_file_paths.len() > 0 {
+    if !plugin_file_paths.is_empty() {
         info!("uncompressing downloaded archive");
         let extract = Archive::new(&temp_file_path.to_string_lossy().to_string())?
             .extract_to(temp_dir.path().to_string_lossy().to_string());
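plugin_file_paths.len() > 0 trips clippy's len_zero lint; is_empty() expresses the emptiness check directly:

fn main() {
    let paths: Vec<String> = Vec::new();
    // Preferred over `paths.len() > 0` / `paths.len() == 0`.
    if !paths.is_empty() {
        println!("{} plugins found", paths.len());
    }
}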
@@ -163,13 +164,10 @@ async fn extract_with_unrar(
             }
             Ok(extract) => extract
         };
-        match extract.process() {
-            Err(err) => {
-                warn!(error = %err, "failed to extract with unrar");
-                file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
-                return Ok(())
-            }
-            _ => {}
+        if let Err(err) = extract.process() {
+            warn!(error = %err, "failed to extract with unrar");
+            file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
+            return Ok(())
         }

         for file_path in plugin_file_paths.iter() {
@@ -285,15 +283,17 @@ pub async fn main() -> Result<()> {
             }
             let db_file = file::insert(
                 &pool,
-                api_file.name,
-                api_file.file_name,
-                api_file.file_id as i32,
-                db_mod.id,
-                api_file.category,
-                api_file.version,
-                api_file.mod_version,
-                api_file.size,
-                api_file.uploaded_at,
+                &file::UnsavedFile {
+                    name: api_file.name,
+                    file_name: api_file.file_name,
+                    nexus_file_id: api_file.file_id as i32,
+                    mod_id: db_mod.id,
+                    category: api_file.category,
+                    version: api_file.version,
+                    mod_version: api_file.mod_version,
+                    size: api_file.size,
+                    uploaded_at: api_file.uploaded_at,
+                },
             )
             .await?;
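file::insert used to take nine positional arguments, which trips clippy's too_many_arguments lint and lets same-typed arguments be swapped silently; the new UnsavedFile struct (defined later in this diff) names every field at the call site, and the same refactor is applied to plugin::insert further down. A minimal illustration of the refactor's shape, with abbreviated, hypothetical fields:

struct Unsaved<'a> {
    name: &'a str,
    size: i64,
}

// Field names are now compiler-checked at every call site,
// instead of relying on argument order.
fn insert(unsaved: &Unsaved) {
    println!("{} ({} bytes)", unsaved.name, unsaved.size);
}

fn main() {
    insert(&Unsaved { name: "plugin.esp", size: 1024 });
}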
@@ -350,13 +350,10 @@ pub async fn main() -> Result<()> {
             let mut initial_bytes = [0; 8];
             tokio_file.seek(SeekFrom::Start(0)).await?;
-            match tokio_file.read_exact(&mut initial_bytes).await {
-                Err(err) => {
-                    warn!(error = %err, "failed to read initial bytes, skipping file");
-                    file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
-                    continue;
-                }
-                _ => {}
+            if let Err(err) = tokio_file.read_exact(&mut initial_bytes).await {
+                warn!(error = %err, "failed to read initial bytes, skipping file");
+                file::update_unable_to_extract_plugins(&pool, db_file.id, true).await?;
+                continue;
             }
             let kind = match infer::get(&initial_bytes) {
                 Some(kind) => kind,
@@ -378,8 +375,15 @@ pub async fn main() -> Result<()> {
                     // https://github.com/libarchive/libarchive/issues/373, https://github.com/libarchive/libarchive/issues/1426
                     tokio_file.seek(SeekFrom::Start(0)).await?;
                     let mut file = tokio_file.try_clone().await?.into_std().await;
-                    extract_with_unrar(&mut file, &pool, &db_file, &db_mod, checked_metadata)
-                        .await?;
+                    match extract_with_unrar(&mut file, &pool, &db_file, &db_mod, checked_metadata).await {
+                        Ok(_) => Ok(()),
+                        Err(err) => {
+                            // unrar failed to extract rar file (e.g. archive has unicode filenames)
+                            // Attempt to uncompress the archive using `7z` unix command instead
+                            warn!(error = %err, "failed to extract file with unrar, extracting whole archive with 7z instead");
+                            extract_with_7zip(&mut file, &pool, &db_file, &db_mod).await
+                        }
+                    }?;
                 }
                 _ => {
                     tokio_file.seek(SeekFrom::Start(0)).await?;
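This hunk is the commit's headline change: extract_with_unrar's error now propagates to this match instead of unwinding via ?, and the Err arm retries with extract_with_7zip. Note the closing }?;, which applies ? to the Result the whole match evaluates to, so a failure of the 7z fallback still bubbles up. The body of extract_with_7zip is not part of this diff; presumably it shells out to the 7z binary, along the lines of this hedged sketch (an assumption, not the actual implementation, which also takes &pool, &db_file, and so on):

use std::process::Command;

// Hypothetical sketch only: extract an archive by invoking the `7z` CLI.
fn extract_all_with_7z(archive_path: &str, out_dir: &str) -> std::io::Result<()> {
    let status = Command::new("7z")
        .arg("x") // extract, preserving directory structure
        .arg(archive_path)
        .arg(format!("-o{}", out_dir)) // -o<dir>: output directory (no space)
        .arg("-y") // assume Yes on all prompts
        .status()?;
    if !status.success() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            "7z exited with a failure status",
        ));
    }
    Ok(())
}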


@@ -73,7 +73,7 @@ pub async fn batched_insert<'a>(
         let mut ys: Vec<Option<i32>> = vec![];
         let mut world_ids: Vec<Option<i32>> = vec![];
         let mut is_persistents: Vec<bool> = vec![];
-        batch.into_iter().for_each(|unsaved_cell| {
+        batch.iter().for_each(|unsaved_cell| {
             form_ids.push(unsaved_cell.form_id);
             masters.push(unsaved_cell.master);
             xs.push(unsaved_cell.x);
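batch here is a chunk borrowed from a slice, so into_iter() on it already iterates by reference; clippy (into_iter_on_ref) suggests iter() to say so explicitly. The same one-line fix appears in several of the model files below. A standalone sketch:

fn main() {
    let batch: &[i32] = &[1, 2, 3];
    // On a shared slice these are equivalent; iter() is the explicit spelling.
    let a: Vec<&i32> = batch.into_iter().collect();
    let b: Vec<&i32> = batch.iter().collect();
    assert_eq!(a, b);
}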


@@ -23,6 +23,19 @@ pub struct File {
     pub unable_to_extract_plugins: bool,
 }
 
+#[derive(Debug)]
+pub struct UnsavedFile<'a> {
+    pub name: &'a str,
+    pub file_name: &'a str,
+    pub nexus_file_id: i32,
+    pub mod_id: i32,
+    pub category: Option<&'a str>,
+    pub version: Option<&'a str>,
+    pub mod_version: Option<&'a str>,
+    pub size: i64,
+    pub uploaded_at: NaiveDateTime,
+}
+
 #[instrument(level = "debug", skip(pool))]
 pub async fn get_by_nexus_file_id(
     pool: &sqlx::Pool<sqlx::Postgres>,
@@ -59,17 +72,9 @@ pub async fn get_processed_nexus_file_ids_by_mod_id(
 }
 
 #[instrument(level = "debug", skip(pool))]
-pub async fn insert(
+pub async fn insert<'a>(
     pool: &sqlx::Pool<sqlx::Postgres>,
-    name: &str,
-    file_name: &str,
-    nexus_file_id: i32,
-    mod_id: i32,
-    category: Option<&str>,
-    version: Option<&str>,
-    mod_version: Option<&str>,
-    size: i64,
-    uploaded_at: NaiveDateTime,
+    unsaved_file: &UnsavedFile<'a>,
 ) -> Result<File> {
     sqlx::query_as!(
         File,
@@ -80,15 +85,15 @@ pub async fn insert(
             SET (name, file_name, category, version, mod_version, uploaded_at, updated_at) =
             (EXCLUDED.name, EXCLUDED.file_name, EXCLUDED.category, EXCLUDED.version, EXCLUDED.mod_version, EXCLUDED.uploaded_at, now())
             RETURNING *",
-        name,
-        file_name,
-        nexus_file_id,
-        mod_id,
-        category,
-        version,
-        mod_version,
-        size,
-        uploaded_at
+        unsaved_file.name,
+        unsaved_file.file_name,
+        unsaved_file.nexus_file_id,
+        unsaved_file.mod_id,
+        unsaved_file.category,
+        unsaved_file.version,
+        unsaved_file.mod_version,
+        unsaved_file.size,
+        unsaved_file.uploaded_at
     )
     .fetch_one(pool)
     .await


@@ -106,7 +106,7 @@ pub async fn batched_insert<'a>(
         let mut categories: Vec<Option<&str>> = vec![];
         let mut descriptions: Vec<Option<&str>> = vec![];
         let mut game_ids: Vec<i32> = vec![];
-        batch.into_iter().for_each(|unsaved_mod| {
+        batch.iter().for_each(|unsaved_mod| {
             names.push(unsaved_mod.name);
             nexus_mod_ids.push(unsaved_mod.nexus_mod_id);
             authors.push(unsaved_mod.author);


@@ -21,19 +21,24 @@ pub struct Plugin {
     pub created_at: NaiveDateTime,
 }
 
+#[derive(Debug)]
+pub struct UnsavedPlugin<'a> {
+    pub name: &'a str,
+    pub hash: i64,
+    pub file_id: i32,
+    pub version: f64,
+    pub size: i64,
+    pub author: Option<&'a str>,
+    pub description: Option<&'a str>,
+    pub masters: &'a [&'a str],
+    pub file_name: &'a str,
+    pub file_path: &'a str,
+}
+
 #[instrument(level = "debug", skip(pool))]
-pub async fn insert(
+pub async fn insert<'a>(
     pool: &sqlx::Pool<sqlx::Postgres>,
-    name: &str,
-    hash: i64,
-    file_id: i32,
-    version: f64,
-    size: i64,
-    author: Option<&str>,
-    description: Option<&str>,
-    masters: &[&str],
-    file_name: &str,
-    file_path: &str,
+    unsaved_plugin: &UnsavedPlugin<'a>,
 ) -> Result<Plugin> {
     // sqlx doesn't understand slices of &str with the query_as! macro: https://github.com/launchbadge/sqlx/issues/280
     sqlx::query_as(
@@ -45,16 +50,16 @@ pub async fn insert(
             (EXCLUDED.name, EXCLUDED.hash, EXCLUDED.version, EXCLUDED.author, EXCLUDED.description, EXCLUDED.masters, EXCLUDED.file_name, now())
             RETURNING *"#,
     )
-    .bind(name)
-    .bind(hash)
-    .bind(file_id)
-    .bind(version)
-    .bind(size)
-    .bind(author)
-    .bind(description)
-    .bind(masters)
-    .bind(file_name)
-    .bind(file_path)
+    .bind(unsaved_plugin.name)
+    .bind(unsaved_plugin.hash)
+    .bind(unsaved_plugin.file_id)
+    .bind(unsaved_plugin.version)
+    .bind(unsaved_plugin.size)
+    .bind(unsaved_plugin.author)
+    .bind(unsaved_plugin.description)
+    .bind(unsaved_plugin.masters)
+    .bind(unsaved_plugin.file_name)
+    .bind(unsaved_plugin.file_path)
     .fetch_one(pool)
    .await
    .context("Failed to insert plugin")


@@ -57,7 +57,7 @@ pub async fn batched_insert<'a>(
         let mut plugin_ids: Vec<i32> = vec![];
         let mut cell_ids: Vec<i32> = vec![];
         let mut editor_ids: Vec<Option<&str>> = vec![];
-        batch.into_iter().for_each(|unsaved_plugin_cell| {
+        batch.iter().for_each(|unsaved_plugin_cell| {
             plugin_ids.push(unsaved_plugin_cell.plugin_id);
             cell_ids.push(unsaved_plugin_cell.cell_id);
             editor_ids.push(unsaved_plugin_cell.editor_id);


@@ -57,10 +57,10 @@ pub async fn batched_insert<'a>(
         let mut plugin_ids: Vec<i32> = vec![];
         let mut world_ids: Vec<i32> = vec![];
         let mut editor_ids: Vec<&str> = vec![];
-        batch.into_iter().for_each(|unsaved_plugin_world| {
+        batch.iter().for_each(|unsaved_plugin_world| {
             plugin_ids.push(unsaved_plugin_world.plugin_id);
             world_ids.push(unsaved_plugin_world.world_id);
-            editor_ids.push(unsaved_plugin_world.editor_id.clone());
+            editor_ids.push(unsaved_plugin_world.editor_id);
         });
         saved_plugin_worlds.append(
             &mut sqlx::query_as(
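The dropped .clone() was called on a &str; cloning a shared reference only copies the pointer (clippy's clone_on_copy / clone_double_ref territory), so the call added noise without producing an owned String:

fn main() {
    let editor_id: &str = "Tamriel";
    let mut ids: Vec<&str> = vec![];
    // `editor_id.clone()` would clone the reference, not the string data;
    // pushing the &str directly has the identical effect.
    ids.push(editor_id);
    println!("{:?}", ids);
}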


@@ -52,7 +52,7 @@ pub async fn batched_insert<'a>(
     for batch in worlds.chunks(BATCH_SIZE) {
         let mut form_ids: Vec<i32> = vec![];
         let mut masters: Vec<&str> = vec![];
-        batch.into_iter().for_each(|unsaved_world| {
+        batch.iter().for_each(|unsaved_world| {
             form_ids.push(unsaved_world.form_id);
             masters.push(unsaved_world.master);
         });


@@ -70,7 +70,7 @@ impl FilesResponse {
             .as_array()
             .ok_or_else(|| anyhow!("files value in API response is not an array"))?;
         let files: Vec<ApiFile> = files
-            .into_iter()
+            .iter()
             .map(|file| {
                 let file_id = file
                     .get("file_id")


@@ -76,7 +76,6 @@ impl ModListResponse {
                     .attr("data-mod-id")
                     .expect("Missing mod id attribute")
                     .parse::<i32>()
-                    .ok()
                     .expect("Failed to parse mod id");
                 let name_elem = right
                     .select(&name_select)
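Dropping .ok() fixes clippy's ok_expect lint: converting the Result to an Option before expect throws away the parse error, while Result::expect panics with that error included in the message:

fn main() {
    let raw = "42";
    // Before: raw.parse::<i32>().ok().expect("Failed to parse mod id")
    // After: expect() on the Result keeps the ParseIntError in the panic output.
    let id = raw.parse::<i32>().expect("Failed to parse mod id");
    println!("{}", id);
}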


@@ -9,7 +9,7 @@ use tracing::{info, warn};
 use crate::models::file::File;
 use crate::models::game_mod::Mod;
-use crate::models::plugin;
+use crate::models::{plugin, plugin::UnsavedPlugin};
 use crate::models::{cell, cell::UnsavedCell};
 use crate::models::{plugin_cell, plugin_cell::UnsavedPluginCell};
 use crate::models::{plugin_world, plugin_world::UnsavedPluginWorld};
@@ -36,7 +36,7 @@ pub async fn process_plugin(
     db_mod: &Mod,
     file_path: &str,
 ) -> Result<()> {
-    if plugin_buf.len() == 0 {
+    if plugin_buf.is_empty() {
         warn!("skipping processing of invalid empty plugin");
         return Ok(());
     }
@@ -58,16 +58,18 @@ pub async fn process_plugin(
     let masters: Vec<&str> = plugin.header.masters.iter().map(|s| s.borrow()).collect();
     let plugin_row = plugin::insert(
         &pool,
-        &db_file.name,
-        hash as i64,
-        db_file.id,
-        plugin.header.version as f64,
-        plugin_buf.len() as i64,
-        author,
-        description,
-        &masters,
-        &file_name,
-        file_path,
+        &UnsavedPlugin {
+            name: &db_file.name,
+            hash: hash as i64,
+            file_id: db_file.id,
+            version: plugin.header.version as f64,
+            size: plugin_buf.len() as i64,
+            author,
+            description,
+            masters: &masters,
+            file_name: &file_name,
+            file_path,
+        },
     )
     .await?;