// nhentairs/src/commands/download.rs — `download` subcommand implementation.
use crate::utils;
use crate::structs;
use std::env;
use std::sync::Arc;
use std::path::Path;
use std::process::exit;
use tokio::sync::Mutex;
use tokio::task::JoinHandle;
use tokio::time::{sleep, Duration};
use std::fs::{rename, create_dir, write};
extern crate tokio;
extern crate reqwest;
// Number of concurrent download worker tasks draining the page queue.
const DOWNLOAD_WORKERS: usize = 5;
// Delay in milliseconds before retrying a failed page download.
const FAIL_DOWNLOAD_WAIT_TIME: u64 = 5000;
pub async fn run(args: env::Args) {
let sauces = utils::get_arg_sauces(args).unwrap();
if sauces.len() < 1 {
eprintln!("Missing sauce(s)");
exit(1);
}
let client = reqwest::Client::new();
let mut pages_vec: Vec<(String, String)> = Vec::new();
{
let mut handles: Vec<JoinHandle<structs::GalleryInfoSuccess>> = Vec::with_capacity(sauces.len());
let mut sauce_info_vec: Vec<structs::GalleryInfoSuccess> = Vec::with_capacity(sauces.len());
for sauce in sauces {
let cloned_client = client.clone();
handles.push(tokio::spawn(async move {
match utils::get_sauce_info(cloned_client, sauce).await.unwrap() {
structs::GalleryInfo::Info(sauce_info) => sauce_info,
structs::GalleryInfo::Error(sauce_error) => panic!("{} returned: {}", sauce, sauce_error.error)
}
}));
}
for handle in handles {
sauce_info_vec.push(handle.await.unwrap());
}
for sauce_info in sauce_info_vec {
let base_path = sauce_info.id.to_string();
let base_path = Path::new(&base_path);
match create_dir(base_path) {
Ok(()) => write(base_path.join("info.txt"), format!("{}\n", &sauce_info)).unwrap(),
Err(err) => match err.kind() {
std::io::ErrorKind::AlreadyExists => (),
_ => panic!("Got a weird error while creating dir: {}", err)
}
};
let mut page_num: i32 = 1;
for page in sauce_info.images.pages {
let file_ext = match page.t.as_str() {
"j" => ".jpg",
"p" => ".png",
"g" => ".gif",
_ => panic!("Unknown extension type: {}", page.t)
};
let mut file_name = page_num.to_string();
file_name.push_str(file_ext);
let file_path = base_path.join(&file_name);
if !file_path.exists() {
pages_vec.push((
String::from(file_path.to_str().unwrap()),
format!("https://i.nhentai.net/galleries/{}/{}",
sauce_info.media_id,
file_name)
));
}
page_num += 1;
}
}
}
let mut handles = Vec::with_capacity(DOWNLOAD_WORKERS);
let mutex = Arc::new(Mutex::new(pages_vec));
for worker_id in 0..DOWNLOAD_WORKERS {
let tcloned_client = client.clone();
let tcloned_mutex = Arc::clone(&mutex);
handles.push(tokio::spawn(async move {
eprintln!("[DW{}] Up!", worker_id);
loop {
let cloned_client = tcloned_client.clone();
let cloned_mutex = Arc::clone(&tcloned_mutex);
let mut pages_vec = cloned_mutex.lock().await;
if pages_vec.len() < 1 {
eprintln!("[DW{}] Down!", worker_id);
break;
}
let (file_path, url) = pages_vec.remove(0);
drop(pages_vec);
eprintln!("[DW{}] Downloading {} to {}", worker_id, url, file_path);
let mut tmp_file_path = file_path.clone();
tmp_file_path.push_str(".tmp");
loop {
match utils::download_file(cloned_client.clone(), &url, &tmp_file_path).await {
Ok(success) => {
if success {
break;
}
},
Err(err) => eprintln!("[DW{}] Failed to download {} due to {}, sleeping for {}ms", worker_id, file_path, err, FAIL_DOWNLOAD_WAIT_TIME)
};
sleep(Duration::from_millis(FAIL_DOWNLOAD_WAIT_TIME)).await;
}
rename(&tmp_file_path, &file_path).unwrap();
eprintln!("[DW{}] {} downloaded", worker_id, file_path);
}
}));
}
for handle in handles {
handle.await.unwrap();
}
}