use crate::structs;
use crate::utils;

use std::collections::HashMap;
use std::env;
use std::fs::{create_dir, rename, write};
use std::path::Path;
use std::process::exit;

use tokio::sync::{mpsc, oneshot};
use tokio::task::JoinHandle;

/// Number of concurrent download workers.
const DOWNLOAD_WORKERS: usize = 5;

/// Resolves each sauce to its gallery info, queues every missing page,
/// then downloads the queued pages with `DOWNLOAD_WORKERS` concurrent workers.
pub async fn run(args: env::Args) {
    let sauces = utils::get_arg_sauces(args).unwrap();
    if sauces.is_empty() {
        eprintln!("Missing sauce(s)");
        exit(1);
    }

    let client = reqwest::Client::new();
    let mut handles: Vec<JoinHandle<()>> = Vec::new();
    let mut hashm = HashMap::new();
    let (tx, mut rx) = mpsc::channel(100);

    // For each sauce: fetch its gallery info, create the gallery directory,
    // and queue every missing page as a (file_path, url) pair on the channel.
    for sauce in sauces {
        let cloned_client = client.clone();
        let mut cloned_tx = tx.clone();

        handles.push(tokio::spawn(async move {
            let sauce_info = match utils::get_sauce_info(cloned_client, sauce).await.unwrap() {
                structs::GalleryInfo::Info(sauce_info) => sauce_info,
                structs::GalleryInfo::Error(sauce_error) => {
                    panic!("{} returned: {}", sauce, sauce_error.error)
                }
            };

            let base_path = sauce_info.id.to_string();
            let base_path = Path::new(&base_path);

            match create_dir(base_path) {
                Ok(()) => {
                    write(
                        base_path.join("info.txt"),
                        format!("{}\n", utils::human_sauce_info(&sauce_info)),
                    )
                    .unwrap();
                }
                Err(err) => match err.kind() {
                    std::io::ErrorKind::AlreadyExists => (),
                    _ => panic!("Got a weird error while creating dir: {}", err),
                },
            };

            let mut page_num: usize = 1;
            for page in sauce_info.images.pages {
                let file_ext = match page.t.as_str() {
                    "j" => ".jpg",
                    "p" => ".png",
                    "g" => ".gif",
                    _ => panic!("Unknown extension type: {}", page.t),
                };

                let mut file_name = page_num.to_string();
                file_name.push_str(file_ext);
                let file_path = base_path.join(&file_name);

                // Only queue pages that are not already on disk.
                if !file_path.exists() {
                    cloned_tx
                        .send((
                            String::from(file_path.to_str().unwrap()),
                            format!(
                                "https://i.nhentai.net/galleries/{}/{}",
                                sauce_info.media_id, file_name
                            ),
                        ))
                        .await
                        .unwrap();
                }

                page_num += 1;
            }
        }));
    }

    // Drop the original sender so the receive loop ends once every
    // spawned task has finished and dropped its clone.
    drop(tx);

    while let Some((file_path, url)) = rx.recv().await {
        hashm.insert(file_path, url);
    }

    for handle in handles {
        handle.await.unwrap();
    }

    let mut handles = Vec::with_capacity(DOWNLOAD_WORKERS);
    let (tx, mut rx) = mpsc::channel::<oneshot::Sender<(String, String)>>(DOWNLOAD_WORKERS);

    // Dispatcher task: each worker sends a oneshot sender to request work and
    // receives one (file_path, url) pair back, or empty strings once the map
    // is exhausted.
    tokio::spawn(async move {
        while let Some(ntx) = rx.recv().await {
            let next_key = hashm.keys().next().cloned();
            ntx.send(match next_key {
                Some(key) => {
                    let value = hashm.remove(&key).unwrap();
                    (key, value)
                }
                None => (String::new(), String::new()),
            })
            .unwrap();
        }
    });

    for worker_id in 0..DOWNLOAD_WORKERS {
        let tcloned_client = client.clone();
        let mut cloned_tx = tx.clone();

        handles.push(tokio::spawn(async move {
            println!("[DW{}] Up!", worker_id);

            loop {
                let cloned_client = tcloned_client.clone();
                let (ntx, nrx) = oneshot::channel();

                cloned_tx.send(ntx).await.unwrap();
                let (file_path, url) = nrx.await.unwrap();

                // Empty strings signal that there is nothing left to download.
                if file_path.is_empty() && url.is_empty() {
                    println!("[DW{}] Down!", worker_id);
                    break;
                }

                println!("[DW{}] Downloading {} to {}", worker_id, url, file_path);

                // Download to a .tmp file first, then rename, so a partial
                // download is never mistaken for a finished page.
                let mut tmp_file_path = String::from(&file_path);
                tmp_file_path.push_str(".tmp");

                utils::download_file(cloned_client, &url, &tmp_file_path)
                    .await
                    .unwrap();
                rename(&tmp_file_path, &file_path).unwrap();

                println!("[DW{}] {} downloaded", worker_id, file_path);
            }
        }));
    }

    drop(tx);

    for handle in handles {
        handle.await.unwrap();
    }
}