diff --git a/Cargo.lock b/Cargo.lock
index 64d1fd8..652f184 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -421,7 +421,7 @@ dependencies = [
 
 [[package]]
 name = "nhentairs"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "reqwest",
  "serde",
diff --git a/Cargo.toml b/Cargo.toml
index 340e7b8..899fb71 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "nhentairs"
-version = "0.3.0"
+version = "0.4.0"
 authors = ["blank X "]
 edition = "2018"
 
diff --git a/src/commands/download.rs b/src/commands/download.rs
index 32772ae..8353b99 100644
--- a/src/commands/download.rs
+++ b/src/commands/download.rs
@@ -2,11 +2,11 @@
 use crate::utils;
 use crate::structs;
 use std::env;
+use std::sync::Arc;
 use std::path::Path;
 use std::process::exit;
+use tokio::sync::Mutex;
 use tokio::task::JoinHandle;
-use std::collections::HashMap;
-use tokio::sync::{mpsc, oneshot};
 use std::fs::{rename, create_dir, write};
 extern crate tokio;
 extern crate reqwest;
@@ -20,23 +20,27 @@ pub async fn run(args: env::Args) {
         exit(1);
     }
     let client = reqwest::Client::new();
-    let mut handles: Vec<JoinHandle<()>> = Vec::new();
-    let mut hashm = HashMap::new();
-    let (tx, mut rx) = mpsc::channel(100);
-    for sauce in sauces {
-        let cloned_client = client.clone();
-        let mut cloned_tx = tx.clone();
-        handles.push(tokio::spawn(async move {
-            let sauce_info = match utils::get_sauce_info(cloned_client, sauce).await.unwrap() {
-                structs::GalleryInfo::Info(sauce_info) => sauce_info,
-                structs::GalleryInfo::Error(sauce_error) => panic!("{} returned: {}", sauce, sauce_error.error)
-            };
+    let mut pages_vec: Vec<(String, String)> = Vec::new();
+    {
+        let mut handles: Vec<JoinHandle<structs::SauceInfo>> = Vec::with_capacity(sauces.len());
+        let mut sauce_info_vec: Vec<structs::SauceInfo> = Vec::with_capacity(sauces.len());
+        for sauce in sauces {
+            let cloned_client = client.clone();
+            handles.push(tokio::spawn(async move {
+                match utils::get_sauce_info(cloned_client, sauce).await.unwrap() {
+                    structs::GalleryInfo::Info(sauce_info) => sauce_info,
+                    structs::GalleryInfo::Error(sauce_error) => panic!("{} returned: {}", sauce, sauce_error.error)
+                }
+            }));
+        }
+        for handle in handles {
+            sauce_info_vec.push(handle.await.unwrap());
+        }
+        for sauce_info in sauce_info_vec {
             let base_path = sauce_info.id.to_string();
             let base_path = Path::new(&base_path);
             match create_dir(base_path) {
-                Ok(()) => {
-                    write(base_path.join("info.txt"), format!("{}\n", utils::human_sauce_info(&sauce_info))).unwrap();
-                },
+                Ok(()) => write(base_path.join("info.txt"), format!("{}\n", utils::human_sauce_info(&sauce_info))).unwrap(),
                 Err(err) => match err.kind() {
                     std::io::ErrorKind::AlreadyExists => (),
                     _ => panic!("Got a weird error while creating dir: {}", err)
@@ -54,65 +58,44 @@ pub async fn run(args: env::Args) {
                 file_name.push_str(file_ext);
                 let file_path = base_path.join(&file_name);
                 if !file_path.exists() {
-                    cloned_tx.send((
+                    pages_vec.push((
                         String::from(file_path.to_str().unwrap()),
                         format!("https://i.nhentai.net/galleries/{}/{}", sauce_info.media_id, file_name)
-                    )).await.unwrap();
+                    ));
                 }
                 page_num += 1;
             }
-        }));
-    }
-    drop(tx);
-    while let Some((file_path, url)) = rx.recv().await {
-        hashm.insert(file_path, url);
-    }
-    for handle in handles {
-        handle.await.unwrap();
+        }
     }
 
     let mut handles = Vec::with_capacity(DOWNLOAD_WORKERS);
-    let (tx, mut rx) = mpsc::channel(DOWNLOAD_WORKERS);
-    tokio::spawn(async move {
-        while let Some(ntx) = rx.recv().await {
-            let ntx: oneshot::Sender<(String, String)> = ntx;
-            ntx.send(match hashm.iter().next() {
-                Some((key, value)) => {
-                    let key = key.to_string();
-                    let value = value.to_string();
-                    hashm.remove(&key).unwrap();
-                    (key, value)
-                },
-                None => (String::new(), String::new()),
-            }).unwrap();
-        }
-    });
+    let mutex = Arc::new(Mutex::new(pages_vec));
 
     for worker_id in 0..DOWNLOAD_WORKERS {
         let tcloned_client = client.clone();
-        let mut cloned_tx = tx.clone();
+        let tcloned_mutex = Arc::clone(&mutex);
         handles.push(tokio::spawn(async move {
-            println!("[DW{}] Up!", worker_id);
+            eprintln!("[DW{}] Up!", worker_id);
             loop {
                 let cloned_client = tcloned_client.clone();
-                let (ntx, nrx) = oneshot::channel();
-                cloned_tx.send(ntx).await.unwrap();
-                let (file_path, url) = nrx.await.unwrap();
-                if file_path.is_empty() && url.is_empty() {
-                    println!("[DW{}] Down!", worker_id);
+                let cloned_mutex = Arc::clone(&tcloned_mutex);
+                let mut pages_vec = cloned_mutex.lock().await;
+                if pages_vec.len() < 1 {
+                    eprintln!("[DW{}] Down!", worker_id);
                     break;
                 }
-                println!("[DW{}] Downloading {} to {}", worker_id, url, file_path);
-                let mut tmp_file_path = String::from(&file_path);
+                let (file_path, url) = pages_vec.remove(0);
+                drop(pages_vec);
+                eprintln!("[DW{}] Downloading {} to {}", worker_id, url, file_path);
+                let mut tmp_file_path = file_path.clone();
                 tmp_file_path.push_str(".tmp");
                 utils::download_file(cloned_client, &url, &tmp_file_path).await.unwrap();
                 rename(&tmp_file_path, &file_path).unwrap();
-                println!("[DW{}] {} downloaded", worker_id, file_path);
+                eprintln!("[DW{}] {} downloaded", worker_id, file_path);
             }
         }));
    }
 
-    drop(tx);
     for handle in handles {
         handle.await.unwrap();
     }
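
Not part of the patch, only an illustration of the pattern the new worker loop uses: the old mpsc/oneshot hand-off is replaced by a shared Arc<Mutex<Vec<(String, String)>>> queue that the DOWNLOAD_WORKERS tasks drain until it is empty. The sketch below is a self-contained, assumed-name version of that pattern (WORKERS, fake (path, url) items, tokio with the "full" feature), not the crate's actual types or download helper.

// Standalone sketch: N tokio workers drain a shared Arc<Mutex<Vec<...>>> queue.
// `WORKERS` and the fake (path, url) items are illustrative only.
use std::sync::Arc;
use tokio::sync::Mutex;

const WORKERS: usize = 4;

#[tokio::main]
async fn main() {
    let items: Vec<(String, String)> = (1..=10)
        .map(|i| (format!("{}.jpg", i), format!("https://example.com/{}", i)))
        .collect();
    let queue = Arc::new(Mutex::new(items));

    let mut handles = Vec::with_capacity(WORKERS);
    for worker_id in 0..WORKERS {
        let queue = Arc::clone(&queue);
        handles.push(tokio::spawn(async move {
            loop {
                // Hold the lock only long enough to take one item; the guard
                // is dropped at the end of this statement, before the "work".
                let item = queue.lock().await.pop();
                let (path, url) = match item {
                    Some(pair) => pair,
                    None => break, // queue drained: this worker shuts down
                };
                eprintln!("[DW{}] {} -> {}", worker_id, url, path);
                // the real code downloads `url` to a .tmp file here, then renames it
            }
        }));
    }
    for handle in handles {
        handle.await.unwrap();
    }
}

Taking one item per lock acquisition and releasing the guard before the slow work keeps the critical section short, which is the same reason the patched loop calls drop(pages_vec) before utils::download_file.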