some more

This commit is contained in:
blank X 2021-11-18 17:25:24 +07:00
parent e4586cd5f3
commit c5ceea00fb
Signed by: blankie
GPG Key ID: CC15FC822C7F61F5
3 changed files with 233 additions and 4 deletions

main.go
View File

@@ -2,7 +2,9 @@ package main
import (
"bufio"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"os"
@@ -148,6 +150,121 @@ func main() {
fmt.Fprintf(os.Stderr, "Failed to get index: %s\n", err)
os.Exit(1)
}
- url := urls[i]
- fmt.Println(url)
+ unparsedUrl := urls[i]
+ fmt.Println(unparsedUrl)
parsedUrl, err := url.Parse(unparsedUrl)
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to parse url: %s\n", err)
os.Exit(1)
}
domain := submission.Domain
if domain == "" {
domain = parsedUrl.Hostname()
}
path := parsedUrl.EscapedPath()
pathExtless, pathExt := SplitExt(path)
urls = make([]string, 1)
urls[0] = unparsedUrl
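// Start with the submission URL as the only candidate; the host-specific branches below rewrite or expand it.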
if domain == "imgur.com" || strings.HasSuffix(domain, ".imgur.com") {
parsedUrl.Host = "i.imgur.com"
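// Albums and galleries need a separate request to list every image they contain.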
if strings.HasPrefix(path, "/a/") || strings.HasPrefix(path, "/gallery/") {
albumId := strings.SplitN(path, "/", 4)[2]
imgurImages, err := GetImgurAlbum(client, albumId)
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to get imgur album: %s\n", err)
os.Exit(1)
}
urls = nil
for _, i := range imgurImages {
urls = append(urls, "https://i.imgur.com/"+i.Hash+i.Ext)
}
} else {
if pathExt == ".gifv" {
// .gifv pages wrap an .mp4; rewrite Path rather than only RawPath, since URL.String() ignores a RawPath that no longer matches Path
parsedUrl.Path = pathExtless + ".mp4"
parsedUrl.RawPath = ""
}
urls[0] = parsedUrl.String()
}
} else if domain == "gfycat.com" {
gfyMp4, err := GetGfycat(client, pathExtless)
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to get gfycat video: %s\n", err)
os.Exit(1)
}
if gfyMp4 != "" {
urls[0] = gfyMp4
}
} else if submission.IsRedditMediaDomain && submission.Preview != nil {
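// For reddit-hosted media, pick the gif's mp4/gif variant, or the preview source URL for videos.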
preview := submission.Preview.Images[0]
if pathExt == ".gif" {
if preview.Variants.Mp4 != nil {
urls[0] = preview.Variants.Mp4.Url
} else if preview.Variants.Gif != nil {
urls[0] = preview.Variants.Gif.Url
}
} else if submission.IsVideo {
urls[0] = preview.Source.Url
}
}
i, err = InteractivelyAskIndex(stdin, urls)
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to get index: %s\n", err)
os.Exit(1)
}
unparsedUrl = urls[i]
fmt.Println(unparsedUrl)
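// Download the selected URL, streaming the response body to disk with a progress readout.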
response, err := client.Get(unparsedUrl)
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to get response: %s\n", err)
os.Exit(1)
}
defer response.Body.Close()
ext, err := GetExtension(response.Header.Get("Content-Type"))
if err != nil {
fmt.Fprintf(os.Stderr, "Warning: Failed to get file extension: %s\n", err)
} else {
filename = filename + ext
}
fmt.Printf("Downloading to %s\n", filename)
// O_TRUNC ensures an existing file of the same name is fully overwritten instead of partially kept
file, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o664)
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to open file for writing: %s\n", err)
os.Exit(1)
}
defer file.Close()
writer := bufio.NewWriter(file)
// flush buffered bytes before the deferred Close runs (deferred calls run last-in, first-out)
defer writer.Flush()
reader := bufio.NewReader(response.Body)
contentLengthString := ""
if response.ContentLength >= 0 {
contentLengthString = FormatBytes(float64(response.ContentLength))
}
lastOutputLength := 0
bytesDownloaded := 0
toWrite := make([]byte, 1024*1024)
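// Copy the body in 1 MiB chunks, redrawing a single progress line with "\r" on each pass.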
for {
output := fmt.Sprintf("%s downloaded", FormatBytes(float64(bytesDownloaded)))
if response.ContentLength >= 0 {
output = fmt.Sprintf("%s out of %s downloaded (%.2f%%)", FormatBytes(float64(bytesDownloaded)), contentLengthString, float64(bytesDownloaded)/float64(response.ContentLength)*100.0)
}
fmt.Print(output)
for i := 0; i < lastOutputLength-len(output); i++ {
fmt.Print(" ")
}
lastOutputLength = len(output)
fmt.Print("\r")
n, err := reader.Read(toWrite)
if n == 0 && errors.Is(err, io.EOF) {
break
}
// a Read may return data together with io.EOF; write what was read and break on the next pass
if err != nil && !errors.Is(err, io.EOF) {
fmt.Fprintf(os.Stderr, "Failed to read response: %s\n", err)
os.Exit(1)
}
_, err = writer.Write(toWrite[:n])
if err != nil {
fmt.Fprintf(os.Stderr, "Failed to write response: %s\n", err)
os.Exit(1)
}
bytesDownloaded += n
}
fmt.Println()
}

View File

@@ -61,17 +61,18 @@ type Submission struct {
} `json:"s"`
} `json:"media_metadata"`
IsRedditMediaDomain bool `json:"is_reddit_media_domain"`
- Preview struct {
+ Preview *struct {
Images []struct {
Variants struct {
Mp4 *PreviewSource `json:"mp4"`
Gif *PreviewSource `json:"gif"`
} `json:"variants"`
- } `json:"images"`
+ Source *PreviewSource `json:"source"`
+ } `json:"images"`
} `json:"preview"`
IsSelf bool `json:"is_self"`
Title string `json:"title"`
Domain string `json:"domain"`
}
type SubmissionResponseItem struct {
@@ -81,3 +82,20 @@ type SubmissionResponseItem struct {
} `json:"children"`
} `json:"data"`
}
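// ImgurImage and ImgurResponse describe the parts of imgur's album listing response used by GetImgurAlbum.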
type ImgurImage struct {
Hash string `json:"hash"`
Ext string `json:"ext"`
}
type ImgurResponse struct {
Data struct {
Images []ImgurImage `json:"images"`
} `json:"data"`
}
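// GfycatResponse describes the part of the gfycat API response used by GetGfycat.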
type GfycatResponse struct {
GfyItem struct {
Mp4Url string `json:"mp4Url"`
} `json:"gfyItem"`
}

View File

@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"io/fs"
"mime"
"net/http"
"os"
"path/filepath"
@@ -172,3 +173,96 @@ func InteractivelyAskIndex(stdin *bufio.Reader, items []string) (int, error) {
}
return i, nil
}
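// SplitExt splits str at its final "." into the base and the extension (dot included), skipping the
// first character; strings without a "." come back unchanged with an empty extension.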
func SplitExt(str string) (string, string) {
if str == "" {
return "", ""
}
s := strings.Split(str[1:], ".")
if len(s) < 2 {
return str, ""
}
return str[:1] + strings.Join(s[:len(s)-1], "."), "." + s[len(s)-1]
}
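// GetImgurAlbum lists the images of an imgur album or gallery via imgur's ajaxalbums getimages endpoint.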
func GetImgurAlbum(client *http.Client, albumId string) ([]ImgurImage, error) {
response, err := client.Get("https://imgur.com/ajaxalbums/getimages/" + albumId + "/hit.json?all=true")
if err != nil {
return nil, fmt.Errorf("Failed to get response: %s", err)
}
contents, err := io.ReadAll(response.Body)
response.Body.Close()
if err != nil {
return nil, fmt.Errorf("Failed to read response body: %s", err)
}
if response.StatusCode != 200 {
return nil, fmt.Errorf("Response returned status code %d, body: %s", response.StatusCode, contents)
}
var imgurResponse *ImgurResponse
err = json.Unmarshal(contents, &imgurResponse)
if err != nil {
return nil, fmt.Errorf("Failed to parse response: %s", err)
}
return imgurResponse.Data.Images, nil
}
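// GetGfycat fetches a gfycat by id from the gfycat API and returns its direct mp4 URL.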
func GetGfycat(client *http.Client, gfyId string) (string, error) {
response, err := client.Get("https://api.gfycat.com/v1/gfycats/" + gfyId)
if err != nil {
return "", fmt.Errorf("Failed to get response: %s", err)
}
contents, err := io.ReadAll(response.Body)
response.Body.Close()
if err != nil {
return "", fmt.Errorf("Failed to read response body: %s", err)
}
if response.StatusCode != 200 {
return "", fmt.Errorf("Response returned status code %d, body: %s", response.StatusCode, contents)
}
var gfycatResponse *GfycatResponse
err = json.Unmarshal(contents, &gfycatResponse)
if err != nil {
return "", fmt.Errorf("Failed to parse response: %s", err)
}
return gfycatResponse.GfyItem.Mp4Url, nil
}
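// FormatBytes formats a byte count using 1024-based units, e.g. 1572864 bytes -> "1.50 MB".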
func FormatBytes(size float64) string {
label := ""
if size >= 1024 {
size /= 1024
label = "K"
}
if size >= 1024 {
size /= 1024
label = "M"
}
if size >= 1024 {
size /= 1024
label = "G"
}
if size >= 1024 {
size /= 1024
label = "T"
}
return fmt.Sprintf("%.2f %sB", size, label)
}
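// GetExtension maps a Content-Type header value to a file extension such as ".png".
// It returns an empty string (and no error) when the type is empty or no extension is known.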
func GetExtension(mimeType string) (string, error) {
if mimeType == "" {
return "", nil
}
mediaType, _, err := mime.ParseMediaType(mimeType)
if err != nil {
return "", fmt.Errorf("Failed to parse media type of %s: %s", mimeType, err)
}
exts, err := mime.ExtensionsByType(mediaType)
if err != nil {
return "", fmt.Errorf("Failed to find file extensions of %s: %s", mediaType, err)
}
if len(exts) < 1 {
return "", nil
}
return exts[0], nil
}