diff --git a/src/main.rs b/src/main.rs
index a6f9cd9..68645a1 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -16,10 +16,9 @@ use sqlx::postgres::PgPoolOptions;
use std::convert::TryInto;
use std::env;
use std::fs::OpenOptions;
-use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
-use tempfile::{tempfile, tempdir};
+use tempfile::{tempdir, tempfile};
use tokio::io::{AsyncReadExt, AsyncSeekExt};
use tokio::time::sleep;
use tokio_util::compat::FuturesAsyncReadCompatExt;
@@ -268,7 +267,6 @@ async fn insert_plugin_cell(
}
fn rate_limit_wait_duration(res: &Response) -> Result<Option<Duration>> {
- dbg!(res.headers().get("x-rl-daily-remaining"));
let daily_remaining = res
.headers()
.get("x-rl-daily-remaining")
@@ -301,7 +299,7 @@ fn rate_limit_wait_duration(res: &Response) -> Result<Option<Duration>> {
async fn process_plugin<W>(
- plugin_buf: &[u8],
+ plugin_buf: &mut [u8],
pool: &sqlx::Pool<sqlx::Postgres>,
plugin_archive: &mut ZipWriter<W>,
name: &str,
@@ -310,7 +308,8 @@ async fn process_plugin<W>(
file_id: i64,
file_name: &str,
) -> Result<()>
- where W: std::io::Write + std::io::Seek
+where
+ W: std::io::Write + std::io::Seek,
{
let plugin = parse_plugin(&plugin_buf)?;
let hash = seahash::hash(&plugin_buf);
@@ -341,13 +340,7 @@ async fn process_plugin<W>(
cell.is_persistent,
)
.await?;
- insert_plugin_cell(
- &pool,
- plugin_row.id,
- cell_row.id,
- cell.editor_id,
- )
- .await?;
+ insert_plugin_cell(&pool, plugin_row.id, cell_row.id, cell.editor_id).await?;
}
plugin_archive.start_file(
format!(
@@ -356,7 +349,9 @@ async fn process_plugin<W>(
),
FileOptions::default(),
)?;
- std::io::copy(plugin_buf, plugin_archive)?;
+
+ let mut reader = std::io::Cursor::new(&plugin_buf);
+ std::io::copy(&mut reader, plugin_archive)?;
Ok(())
}
@@ -458,7 +453,7 @@ pub async fn main() -> Result<()> {
.await?;
for mod_obj in mods {
- dbg!(&mod_obj);
+ dbg!(&mod_obj.name);
let res = client
.get(format!(
"https://api.nexusmods.com/v1/games/{}/mods/{}/files.json",
@@ -496,7 +491,6 @@ pub async fn main() -> Result<()> {
.ok_or_else(|| anyhow!("Missing file_id key in file in API response"))?
.as_i64()
.ok_or_else(|| anyhow!("file_id value in API response file is not a number"))?;
- let file_id = 18422; // DELETEME: temp test bad rar file
dbg!(file_id);
let name = file
.get("name")
@@ -585,9 +579,6 @@ pub async fn main() -> Result<()> {
tokio::io::copy(&mut byte_stream, &mut tokio_file).await?;
- // let bytes = res.bytes().await?;
- // let reader = std::io::Cursor::new(&bytes);
-
let mut plugin_archive = ZipWriter::new(
OpenOptions::new()
.write(true)
@@ -610,6 +601,7 @@ pub async fn main() -> Result<()> {
tokio_file.seek(SeekFrom::Start(0)).await?;
tokio_file.read_exact(&mut initial_bytes).await?;
let kind = infer::get(&initial_bytes).expect("unknown file type of file download");
+ dbg!(kind.mime_type());
match kind.mime_type() {
// "application/zip" => {
// let mut archive = ZipArchive::new(reader)?;
@@ -638,16 +630,17 @@ pub async fn main() -> Result<()> {
// Use unrar to uncompress the entire .rar file to avoid a bug with compress_tools panicking when uncompressing
// certain .rar files: https://github.com/libarchive/libarchive/issues/373
- "application/x-rar-compressed" => {
+ "application/x-rar-compressed" | "application/vnd.rar" => {
tokio_file.seek(SeekFrom::Start(0)).await?;
let mut file = tokio_file.into_std().await;
let temp_dir = tempdir()?;
let temp_file_path = temp_dir.path().join("download.rar");
- let mut temp_file = std::fs::File::create(temp_file_path)?;
+ let mut temp_file = std::fs::File::create(&temp_file_path)?;
std::io::copy(&mut file, &mut temp_file)?;
-
+
let mut plugin_file_paths = Vec::new();
- let list = Archive::new(temp_file_path.to_string_lossy().to_string()).list();
+ let list =
+ Archive::new(temp_file_path.to_string_lossy().to_string()).list();
if let Ok(list) = list {
for entry in list {
if let Ok(entry) = entry {
@@ -659,22 +652,35 @@ pub async fn main() -> Result<()> {
}
}
}
- }
+ }
if plugin_file_paths.len() > 0 {
- let extract = Archive::new(temp_file_path.to_string_lossy().to_string()).extract_to(temp_dir.path().to_string_lossy().to_string());
- extract.expect("failed to extract").process().expect("failed to extract");
+ let extract =
+ Archive::new(temp_file_path.to_string_lossy().to_string())
+ .extract_to(temp_dir.path().to_string_lossy().to_string());
+ extract
+ .expect("failed to extract")
+ .process()
+ .expect("failed to extract");
for file_name in plugin_file_paths.iter() {
dbg!(file_name);
- let plugin_file = std::fs::File::open(temp_dir.path().join(file_name))?;
- let mut plugin_buf = Vec::new();
- plugin_file.read(&mut plugin_buf)?;
- process_plugin(&plugin_buf, &pool, &mut plugin_archive, name, &db_file, &mod_obj, file_id, file_name).await?;
+ let mut plugin_buf =
+ std::fs::read(temp_dir.path().join(file_name))?;
+ process_plugin(
+ &mut plugin_buf,
+ &pool,
+ &mut plugin_archive,
+ name,
+ &db_file,
+ &mod_obj,
+ file_id,
+ file_name,
+ )
+ .await?;
}
- dbg!("uncompressed!");
}
temp_dir.close()?;
- },
+ }
_ => {
tokio_file.seek(SeekFrom::Start(0)).await?;
let mut file = tokio_file.into_std().await;
@@ -694,7 +700,17 @@ pub async fn main() -> Result<()> {
dbg!(file_name);
let mut buf = Vec::default();
uncompress_archive_file(&mut file, &mut buf, file_name)?;
- process_plugin(&buf, &pool, &mut plugin_archive, name, &db_file, &mod_obj, file_id, file_name).await?;
+ process_plugin(
+ &mut buf,
+ &pool,
+ &mut plugin_archive,
+ name,
+ &db_file,
+ &mod_obj,
+ file_id,
+ file_name,
+ )
+ .await?;
}
}
};
@@ -703,13 +719,10 @@ pub async fn main() -> Result<()> {
if let Some(duration) = duration {
sleep(duration).await;
}
- break;
}
- break;
}
page += 1;
- break;
}
Ok(())