Log warning and skip invalid plugins
There's not much I can do for plugins with invalid headers (e.g. TES4 Oblivion plugins), so just log the error and skip over them. A plugin row will not be created in the DB for invalid plugins. Crashing the process on parse errors was useful while I was working out the kinks in the parser, but I'm confident enough in it now to skip over parse failures and analyze them after the fact, to make sure there still isn't a parser bug that caused a valid TES5 plugin to be skipped.

Also fixes the plugin span logging in the deflate64 error case when extracting zip files.
parent 62c0f5295f
commit a0aa24f360

src/main.rs | 81
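In short, process_plugin now matches on the result of parse_plugin instead of bubbling the error up with `?`. A minimal sketch of that warn-and-skip pattern follows; Plugin, parse_plugin, and process here are hypothetical stand-ins so the snippet compiles on its own, not the project's real definitions (the real code also does the DB inserts shown in the diff below):

    use tracing::{info, warn};

    // Stand-in type so the sketch is self-contained.
    struct Plugin {
        cells: Vec<()>,
    }

    // Stand-in parser: treat an empty buffer as an invalid plugin.
    fn parse_plugin(buf: &[u8]) -> Result<Plugin, String> {
        if buf.is_empty() {
            return Err("invalid plugin header".to_string());
        }
        Ok(Plugin { cells: Vec::new() })
    }

    fn process(plugin_buf: &[u8]) {
        match parse_plugin(plugin_buf) {
            Ok(plugin) => {
                info!(num_cells = plugin.cells.len(), "parse finished");
                // ...insert the plugin row and its cells into the DB...
            }
            Err(err) => {
                // No plugin row is written for invalid plugins; log and move on.
                warn!(error = %err, "Failed to parse plugin, skipping plugin");
            }
        }
    }

    fn main() {
        process(b"");            // emits a warning event and skips
        process(b"plugin data"); // emits "parse finished"
        // (install a tracing subscriber to actually see the log output)
    }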
--- a/src/main.rs
+++ b/src/main.rs
@@ -46,38 +46,44 @@ where
         return Ok(());
     }
     info!(bytes = plugin_buf.len(), "parsing plugin");
-    let plugin = parse_plugin(&plugin_buf)?;
-    info!(num_cells = plugin.cells.len(), "parse finished");
-    let hash = seahash::hash(&plugin_buf);
-    let plugin_row = plugin::insert(
-        &pool,
-        &db_file.name,
-        hash as i64,
-        db_file.id,
-        Some(plugin.header.version as f64),
-        plugin_buf.len() as i64,
-        plugin.header.author,
-        plugin.header.description,
-        Some(
-            &plugin
-                .header
-                .masters
-                .iter()
-                .map(|s| s.to_string())
-                .collect::<Vec<String>>(),
-        ),
-    )
-    .await?;
-    for cell in plugin.cells {
-        let cell_row = cell::insert(
-            &pool,
-            cell.form_id.try_into().unwrap(),
-            cell.x,
-            cell.y,
-            cell.is_persistent,
-        )
-        .await?;
-        plugin_cell::insert(&pool, plugin_row.id, cell_row.id, cell.editor_id).await?;
-    }
+    match parse_plugin(&plugin_buf) {
+        Ok(plugin) => {
+            info!(num_cells = plugin.cells.len(), "parse finished");
+            let hash = seahash::hash(&plugin_buf);
+            let plugin_row = plugin::insert(
+                &pool,
+                &db_file.name,
+                hash as i64,
+                db_file.id,
+                Some(plugin.header.version as f64),
+                plugin_buf.len() as i64,
+                plugin.header.author,
+                plugin.header.description,
+                Some(
+                    &plugin
+                        .header
+                        .masters
+                        .iter()
+                        .map(|s| s.to_string())
+                        .collect::<Vec<String>>(),
+                ),
+            )
+            .await?;
+            for cell in plugin.cells {
+                let cell_row = cell::insert(
+                    &pool,
+                    cell.form_id.try_into().unwrap(),
+                    cell.x,
+                    cell.y,
+                    cell.is_persistent,
+                )
+                .await?;
+                plugin_cell::insert(&pool, plugin_row.id, cell_row.id, cell.editor_id).await?;
+            }
+        }
+        Err(err) => {
+            warn!(error = %err, "Failed to parse plugin, skipping plugin");
+        }
+    }
     plugin_archive.start_file(
         format!(
@@ -338,7 +344,7 @@ pub async fn main() -> Result<()> {
 
             for file_name in plugin_file_paths.iter() {
                 let plugin_span = info_span!("plugin", name = ?file_name);
-                let _plugin_span = plugin_span.enter();
+                let plugin_span = plugin_span.enter();
                 file.seek(SeekFrom::Start(0))?;
                 let mut buf = Vec::default();
                 info!("uncompressing plugin file from downloaded archive");
@@ -349,6 +355,7 @@ pub async fn main() -> Result<()> {
                     // compress_tools or libarchive failed to extract zip file (e.g. archive is deflate64 compressed)
                     // Attempt to uncompress the archive using `unzip` unix command instead
                     warn!(error = %err, "failed to extract file with compress_tools, extracting whole archive with unzip instead");
+                    drop(plugin_span);
                     file.seek(SeekFrom::Start(0))?;
                     let temp_dir = tempdir()?;
                     let temp_file_path = temp_dir
@@ -367,10 +374,10 @@ pub async fn main() -> Result<()> {
                         .status()?;
 
                     for file_name in plugin_file_paths.iter() {
-                        info!(
-                            ?file_name,
-                            "processing uncompressed file from downloaded archive"
-                        );
+                        let plugin_span =
+                            info_span!("plugin", name = ?file_name);
+                        let _plugin_span = plugin_span.enter();
+                        info!("processing uncompressed file from downloaded archive");
                         let mut plugin_buf =
                             std::fs::read(extracted_path.join(file_name))?;
                         process_plugin(
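On the span fix: tracing's Span::enter() returns a guard, and every event emitted while that guard is alive is recorded inside the span. In the deflate64 fallback above, the unzip loop enters a fresh plugin span per extracted file, so the guard from the failed extraction attempt has to be dropped first; that is what the new drop(plugin_span) does. A small illustrative sketch (file names are made up; assumes the tracing and tracing-subscriber crates):

    use tracing::{info, info_span};

    fn main() {
        // Print events with their span context so the nesting is visible.
        tracing_subscriber::fmt::init();

        let plugin_span = info_span!("plugin", name = "A.esp");
        // Shadow the span with its guard, like the code above does; events
        // below are recorded inside plugin{name="A.esp"}.
        let plugin_span = plugin_span.enter();

        info!("failed to extract file with compress_tools, extracting whole archive with unzip instead");

        // Exit the stale span before the fallback loop; otherwise each
        // per-file span below would be nested under the old file name.
        drop(plugin_span);

        for name in ["A.esp", "B.esp"] {
            let span = info_span!("plugin", name = name);
            let _guard = span.enter();
            info!("processing uncompressed file from downloaded archive");
        }
    }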