use crate::utils;
use clap::ArgMatches;
use reqwest::redirect::Policy;
use reqwest::Client;
use std::collections::HashMap;
use std::fs::rename;
use std::path::Path;
use std::process::{exit, Command};

const MAX_DOWNLOAD_ATTEMPTS: i32 = 5;

pub async fn download(arg_m: &ArgMatches) {
    let print_only = arg_m.is_present("print");
    let resolution = arg_m.value_of("resolution");
    let ids = arg_m.values_of("id").unwrap().collect::<Vec<_>>();

    // Follow redirects up to a limit, but stop early if we get redirected to /404.
    let policy = Policy::custom(|attempt| {
        if attempt.previous().len() > 10 {
            attempt.error("too many redirects")
        } else if attempt.url().path() == "/404" {
            attempt.stop()
        } else {
            attempt.follow()
        }
    });
    let client = Client::builder().redirect(policy).build().unwrap();

    let mut return_fail = false;
    for id in ids {
        let hentai_info = utils::get_hentai(client.clone(), id).await;
        match hentai_info {
            Ok(hentai_info) => {
                match hentai_info {
                    Some(hentai_info) => {
                        let slug = hentai_info.state.data.video.hentai_video.slug;
                        let filename = format!("{}.mkv", &slug);
                        // Skip videos that have already been downloaded.
                        if !print_only && Path::new(&filename).exists() {
                            continue;
                        }

                        // Collect one URL per resolution, keeping permanent and temporary
                        // servers separate. If a permanent server offers the requested
                        // resolution, use it directly.
                        let mut download_url = None;
                        let mut perm_urls: HashMap<String, (String, i32)> = HashMap::new();
                        let mut temp_urls: HashMap<String, (String, i32)> = HashMap::new();
                        for server in hentai_info.state.data.video.videos_manifest.servers {
                            let mut to_hashmap = match server.is_permanent {
                                true => perm_urls.clone(),
                                false => temp_urls.clone(),
                            };
                            for stream in server.streams {
                                if stream.url.is_empty() {
                                    continue;
                                }
                                if server.is_permanent
                                    && Some(stream.height.as_str()) == resolution
                                {
                                    download_url = Some((stream.url, stream.filesize_mbs));
                                    break;
                                }
                                if !to_hashmap.contains_key(&stream.height) {
                                    to_hashmap
                                        .insert(stream.height, (stream.url, stream.filesize_mbs));
                                };
                            }
                            if download_url.is_some() {
                                break;
                            }
                            match server.is_permanent {
                                true => perm_urls.extend(to_hashmap),
                                false => temp_urls.extend(to_hashmap),
                            };
                        }

                        // No exact match on a permanent server: try the requested resolution
                        // on a temporary server, then fall back to the highest available
                        // resolution, preferring permanent servers.
                        if download_url.is_none() {
                            if resolution.is_some() {
                                download_url = temp_urls
                                    .get(resolution.unwrap())
                                    .map(|i| (i.0.to_string(), i.1));
                            }
                            if download_url.is_none() {
                                download_url =
                                    magic_thing(perm_urls).or_else(|| magic_thing(temp_urls));
                                if download_url.is_none() {
                                    eprintln!("Failed to get {}: cannot get download url", id);
                                    return_fail = true;
                                    continue;
                                }
                            }
                        }
                        let (download_url, filesize_mbs) = download_url.unwrap();

                        if print_only {
                            println!("{}", download_url);
                        } else {
                            // Download with ffmpeg into a temporary file and rename it into
                            // place once ffmpeg exits successfully.
                            let mut fail_dl = true;
                            let tmp_filename = format!("{}.tmp", &slug);
                            for i in 0..MAX_DOWNLOAD_ATTEMPTS {
                                eprintln!(
                                    "Downloading {} ({}MB, attempt {})",
                                    &filename, filesize_mbs, i
                                );
                                match Command::new("ffmpeg")
                                    .args(&[
                                        "-v",
                                        "warning",
                                        "-stats",
                                        "-nostdin",
                                        "-y",
                                        "-i",
                                        &download_url,
                                        "-c",
                                        "copy",
                                        "-f",
                                        "matroska",
                                        &tmp_filename,
                                    ])
                                    .spawn()
                                {
                                    Ok(mut child) => {
                                        match child.wait() {
                                            Ok(exit_status) => {
                                                if exit_status.success() {
                                                    fail_dl = false;
                                                    match rename(&tmp_filename, &filename) {
                                                        Ok(_) => (),
                                                        Err(err) => eprintln!(
                                                            "Failed to rename {} to {} due to {}",
                                                            &tmp_filename, &filename, err
                                                        ),
                                                    };
                                                    break;
                                                }
                                                eprintln!(
                                                    "ffmpeg exited with {:?}",
                                                    exit_status.code()
                                                );
                                            }
                                            Err(err) => eprintln!(
                                                "Failed to wait on ffmpeg process due to {}",
                                                err
                                            ),
                                        };
                                    }
                                    Err(err) => {
                                        eprintln!("Failed to spawn ffmpeg process due to {}", err)
                                    }
                                };
                            }
                            if fail_dl {
                                eprintln!("Failed to download {}", &filename);
                                return_fail = true;
                            }
                        }
                    }
                    None => {
                        eprintln!("Failed to get {}: does not exist", id);
                        return_fail = true;
                    }
                };
            }
            Err(err) => {
                eprintln!("Failed to get {}: {}", id, err);
                return_fail = true;
            }
        };
    }
    if return_fail {
        exit(1);
    }
}

/// Picks the highest-resolution entry from a height -> (url, filesize_mbs) map:
/// numerically when every key parses as an integer, otherwise by falling back
/// to the lexicographically greatest key.
fn magic_thing(map: HashMap<String, (String, i32)>) -> Option<(String, i32)> {
    let mut keys = Vec::new();
    for i in map.keys() {
        match i.parse::<i32>() {
            Ok(i) => keys.push(i),
            Err(_) => {
                keys.clear();
                break;
            }
        };
    }
    keys.sort();
    match keys.pop() {
        Some(key) => map.get(&key.to_string()).map(|i| i.clone()),
        None => {
            let mut keys: Vec<_> = map.keys().collect();
            keys.sort();
            match keys.pop() {
                Some(key) => map.get(key.as_str()).map(|i| i.clone()),
                None => None,
            }
        }
    }
}
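
// A minimal illustrative test of magic_thing's selection order, using hypothetical
// resolution keys, URLs, and filesizes: when every key parses as an integer, the
// numerically largest resolution wins, regardless of lexicographic order.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn magic_thing_prefers_highest_numeric_resolution() {
        let mut urls: HashMap<String, (String, i32)> = HashMap::new();
        urls.insert("720".to_string(), ("http://example.com/720".to_string(), 200));
        urls.insert("1080".to_string(), ("http://example.com/1080".to_string(), 400));
        // "720" > "1080" lexicographically, but numeric parsing makes 1080 win.
        assert_eq!(
            magic_thing(urls),
            Some(("http://example.com/1080".to_string(), 400))
        );
    }
}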