Show file size in view and download

blank X 2021-02-04 12:59:11 +07:00
parent 15530a4287
commit 6c2e981509
Signed by: blankie
GPG Key ID: CC15FC822C7F61F5
4 changed files with 31 additions and 23 deletions

Cargo.lock (generated)

@@ -202,7 +202,7 @@ dependencies = [
 [[package]]
 name = "hanimers"
-version = "0.1.3"
+version = "0.1.4"
 dependencies = [
  "clap",
  "quick-xml",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "hanimers"
-version = "0.1.3"
+version = "0.1.4"
 authors = ["blank X <theblankx@protonmail.com>"]
 edition = "2018"

@@ -37,8 +37,8 @@ pub async fn download(arg_m: &ArgMatches<'_>) {
 continue;
 }
 let mut download_url = None;
-let mut perm_urls: HashMap<String, String> = HashMap::new();
-let mut temp_urls: HashMap<String, String> = HashMap::new();
+let mut perm_urls: HashMap<String, (String, i32)> = HashMap::new();
+let mut temp_urls: HashMap<String, (String, i32)> = HashMap::new();
 for server in hentai_info.state.data.video.videos_manifest.servers {
 let mut to_hashmap = match server.is_permanent {
 true => perm_urls.clone(),
@@ -49,11 +49,11 @@ pub async fn download(arg_m: &ArgMatches<'_>) {
 continue;
 }
 if server.is_permanent && Some(stream.height.as_str()) == resolution {
-download_url = Some(stream.url);
+download_url = Some((stream.url, stream.filesize_mbs));
 break;
 }
 if !to_hashmap.contains_key(&stream.height) {
-to_hashmap.insert(stream.height, stream.url);
+to_hashmap.insert(stream.height, (stream.url, stream.filesize_mbs));
 };
 }
 if download_url.is_some() {
@@ -66,7 +66,7 @@ pub async fn download(arg_m: &ArgMatches<'_>) {
 }
 if download_url.is_none() {
 if resolution.is_some() {
-download_url = temp_urls.get(resolution.unwrap()).map(|i| i.to_string());
+download_url = temp_urls.get(resolution.unwrap()).map(|i| (i.0.to_string(), i.1));
 }
 if download_url.is_none() {
 download_url = magic_thing(perm_urls).or_else(|| { magic_thing(temp_urls) });
@@ -77,14 +77,14 @@ pub async fn download(arg_m: &ArgMatches<'_>) {
 }
 }
 }
-let download_url = download_url.unwrap();
+let (download_url, filesize_mbs) = download_url.unwrap();
 if print_only {
 println!("{}", download_url);
 } else {
 let mut fail_dl = true;
 let tmp_filename = format!("{}.tmp", &slug);
 for i in 0..MAX_DOWNLOAD_ATTEMPTS {
-eprintln!("Downloading {} (attempt {})", &filename, i);
+eprintln!("Downloading {} ({}MB, attempt {})", &filename, filesize_mbs, i);
 match Command::new("ffmpeg").args(&["-v", "warning", "-stats", "-nostdin", "-y", "-i", &download_url, "-c", "copy", "-f", "matroska", &tmp_filename]).spawn() {
 Ok(mut child) => {
 match child.wait() {
@@ -128,7 +128,7 @@ pub async fn download(arg_m: &ArgMatches<'_>) {
 }
 }
-fn magic_thing(map: HashMap<String, String>) -> Option<String> {
+fn magic_thing(map: HashMap<String, (String, i32)>) -> Option<(String, i32)> {
 let mut keys = Vec::new();
 for i in map.keys() {
 match i.parse::<i32>() {
@@ -141,12 +141,12 @@ fn magic_thing(map: HashMap<String, String>) -> Option<String> {
 }
 keys.sort();
 match keys.pop() {
-Some(key) => Some(map.get(&key.to_string()).unwrap().to_string()),
+Some(key) => map.get(&key.to_string()).map(|i| i.clone()),
 None => {
 let mut keys: Vec<_> = map.keys().collect();
 keys.sort();
 match keys.pop() {
-Some(key) => Some(map.get(&key.to_string()).unwrap().to_string()),
+Some(key) => map.get(key.as_str()).map(|i| i.clone()),
 None => None
 }
 }
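
For readers skimming the diff: download_url is now an Option<(String, i32)> holding the stream URL together with its size in megabytes, and magic_thing selects the best entry from a HashMap<String, (String, i32)> keyed by resolution height. A minimal standalone sketch of that selection step (pick_best and the sample data are illustrative, not the committed code):

use std::collections::HashMap;

// Pick the stream for the highest resolution. Heights are compared numerically
// when every key parses as an integer ("1080" > "720"); otherwise fall back to
// plain string ordering, mirroring the fallback in the commit.
fn pick_best(map: &HashMap<String, (String, i32)>) -> Option<(String, i32)> {
    let numeric: Option<Vec<i32>> = map.keys().map(|k| k.parse::<i32>().ok()).collect();
    let key = match numeric {
        Some(mut heights) => {
            heights.sort();
            heights.pop().map(|h| h.to_string())
        }
        None => map.keys().max().cloned(),
    };
    key.and_then(|k| map.get(&k).cloned())
}

fn main() {
    let mut streams = HashMap::new();
    // Sample entries; the real ones come from the videos_manifest servers.
    streams.insert("720".to_string(), ("https://example.com/720.m3u8".to_string(), 150));
    streams.insert("1080".to_string(), ("https://example.com/1080.m3u8".to_string(), 310));
    if let Some((url, size_mb)) = pick_best(&streams) {
        println!("Downloading {} ({}MB)", url, size_mb);
    }
}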

@@ -1,5 +1,6 @@
 use std::fmt;
 use std::marker::PhantomData;
+use std::collections::HashMap;
 use serde::de::{self, Visitor};
 use serde::{Deserialize, Deserializer};
 use quick_xml::Reader;
@@ -84,33 +85,39 @@ impl fmt::Display for HentaiInfo {
 );
 let servers = &self.state.data.video.videos_manifest.servers;
 if !servers.is_empty() {
-let mut string_servers = Vec::new();
+let mut string_servers = HashMap::new();
 for server in servers {
 let mut tmp_string_servers = string_servers.clone();
 for stream in &server.streams {
-if !stream.url.is_empty() && !tmp_string_servers.contains(&stream.height) {
-tmp_string_servers.push(stream.height.clone());
+if !stream.url.is_empty() && !tmp_string_servers.contains_key(&stream.height) {
+tmp_string_servers.insert(&stream.height, stream.filesize_mbs);
 }
 }
 string_servers.extend(tmp_string_servers);
 }
 if !string_servers.is_empty() {
-string_servers.sort();
-let mut int_servers = Vec::with_capacity(string_servers.len());
-for i in &string_servers {
+let mut int_servers = HashMap::with_capacity(string_servers.len());
+for (i, j) in &string_servers {
 match i.parse::<i32>() {
-Ok(i) => int_servers.push(i),
+Ok(i) => int_servers.insert(i, j),
 Err(_) => {
 int_servers.clear();
 break;
 }
 };
 }
-int_servers.sort();
 text.push_str(&format!("Resolution: {}\n",
 match int_servers.is_empty() {
-true => string_servers.join(", "),
-false => int_servers.iter().map(|i| i.to_string()).collect::<Vec<_>>().join(", ")
+true => {
+let mut keys: Vec<&&String> = string_servers.keys().collect();
+keys.sort();
+keys.iter().map(|&i| format!("{} ({}MB)", i, string_servers.get(i).unwrap())).collect::<Vec<_>>().join(", ")
+},
+false => {
+let mut keys: Vec<&i32> = int_servers.keys().collect();
+keys.sort();
+keys.iter().map(|i| format!("{} ({}MB)", i, int_servers.get(i).unwrap())).collect::<Vec<_>>().join(", ")
+}
 }
 ));
 }
@@ -160,7 +167,8 @@ pub struct VideoServer {
 #[derive(Deserialize, Debug)]
 pub struct VideoStream {
 pub height: String,
-pub url: String
+pub url: String,
+pub filesize_mbs: i32
 }
 fn remove_html<'de, D>(deserializer: D) -> Result<String, D::Error>
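
The Display change above makes the listing print each resolution with its size, along the lines of "Resolution: 480 (98MB), 720 (150MB), 1080 (310MB)" (sizes here are made up). A rough sketch of the numeric branch, assuming a height-to-size map has already been collected; format_resolutions and the sample values are illustrative:

use std::collections::HashMap;

// Sort the resolution heights and append each one with its size in megabytes,
// as the false => { ... } arm in the commit does for int_servers.
fn format_resolutions(sizes: &HashMap<i32, i32>) -> String {
    let mut heights: Vec<i32> = sizes.keys().copied().collect();
    heights.sort();
    heights
        .iter()
        .map(|h| format!("{} ({}MB)", h, sizes[h]))
        .collect::<Vec<_>>()
        .join(", ")
}

fn main() {
    let sizes: HashMap<i32, i32> = vec![(480, 98), (720, 150), (1080, 310)].into_iter().collect();
    println!("Resolution: {}", format_resolutions(&sizes));
}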