You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
This repo is archived. You can view files and clone it, but cannot push or open issues/pull-requests.
omordl/main.go

256 lines
7.5 KiB

package main
import (
"bufio"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"os"
"os/exec"
"sort"
"strings"
"golang.org/x/sys/unix"
)
// main downloads the media attached to a reddit submission.
//
// Usage: omordl <submission id/url> [direct url to download instead of
// inferring from the post]
//
// The submission may be given as a bare id, a redd.it short link, or a full
// permalink. Reddit-hosted video is handed off to ffmpeg via exec; galleries,
// imgur albums and gfycat links are resolved to direct media URLs, and the
// chosen URL is streamed to a file in the current directory named after the
// submission title and id.
func main() {
	if len(os.Args) != 2 && len(os.Args) != 3 {
		fmt.Fprintf(os.Stderr, "Usage: %s <submission id/url> [direct url to download instead of inferring from the post]\n", os.Args[0])
		os.Exit(1)
	}
	LoadMimetypes()
	submissionUrl, err := url.Parse(os.Args[1])
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when parsing submission url: %s\n", err)
		os.Exit(1)
	}
	// Work out the old.reddit.com .json endpoint for the submission and a
	// short id used in the output filename, accepting three input shapes.
	submissionJsonUrl := ""
	submissionId := os.Args[1]
	if submissionUrl.Hostname() == "redd.it" {
		// Short link: path is "/<id>".
		submissionJsonUrl = "https://old.reddit.com" + submissionUrl.EscapedPath() + ".json?raw_json=1&limit=1"
		submissionId = submissionUrl.EscapedPath()[1:]
	} else if submissionUrl.Hostname() == "" {
		// Bare submission id (no host in the parsed URL).
		submissionJsonUrl = "https://old.reddit.com/" + os.Args[1] + ".json?raw_json=1&limit=1"
	} else {
		// Full permalink: "/r/<subreddit>/comments/<id>/<slug>" — the id is
		// the fifth path component.
		submissionJsonUrl = "https://old.reddit.com" + submissionUrl.EscapedPath() + "/.json?raw_json=1&limit=1"
		split := strings.SplitN(submissionUrl.EscapedPath(), "/", 6)
		if len(split) < 5 {
			fmt.Fprintln(os.Stderr, "URL passed does not have enough path separators")
			os.Exit(1)
		}
		submissionId = split[4]
	}
	client := &http.Client{}
	submission, err := GetSubmission(client, submissionJsonUrl)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get submission: %s\n", err)
		os.Exit(1)
	}
	// Derive a safe filename: "/" cannot appear inside a path component and a
	// leading "." would make the file hidden. filename is never empty because
	// it always contains the "-" separator and the id.
	filename := strings.ReplaceAll(submission.Title, "/", "_") + "-" + submissionId
	if filename[0] == '.' {
		filename = "_" + filename[1:]
	}
	// For crossposts, fetch the original submission so we download the real
	// media (the filename keeps the crosspost's title and id).
	if submission.CrosspostParent != "" && len(submission.CrosspostParentList) > 0 {
		submission, err = GetSubmission(client, "https://old.reddit.com"+submission.CrosspostParentList[0].Permalink+".json?raw_json=1&limit=1")
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to get original submission: %s\n", err)
			os.Exit(1)
		}
	}
	if submission.IsSelf {
		fmt.Fprintln(os.Stderr, "Cannot download selfposts")
		os.Exit(1)
	}
	// Candidate media URLs; by default just the submission's link.
	urls := make([]string, 1)
	urls[0] = submission.Url
	if submission.IsVideo {
		// Reddit-hosted video: replace this process with ffmpeg reading the
		// HLS/DASH stream, so audio and video are muxed together.
		ffmpegPath, err := exec.LookPath("ffmpeg")
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to find ffmpeg: %s\n", err)
			os.Exit(1)
		}
		ffmpegUrl := submission.SecureMedia.RedditVideo.HlsUrl
		if ffmpegUrl == "" {
			ffmpegUrl = submission.SecureMedia.RedditVideo.DashUrl
		}
		if ffmpegUrl != "" {
			client.CloseIdleConnections()
			// unix.Exec only returns on failure.
			err = unix.Exec(ffmpegPath, []string{"ffmpeg", "-nostdin", "-i", ffmpegUrl, "-c", "copy", "--", filename + ".mp4"}, os.Environ())
			fmt.Fprintf(os.Stderr, "Failed to exec as ffmpeg: %s\n", err)
			os.Exit(1)
		}
		// No stream URL available; fall back to the (video-only) direct file.
		fallbackUrl := submission.SecureMedia.RedditVideo.FallbackUrl
		if fallbackUrl != "" {
			urls[0] = fallbackUrl
		}
	} else if submission.IsGallery {
		// Galleries: collect the media ids in display order when gallery_data
		// is present, otherwise in (random) map order from media_metadata.
		var galleryKeys []string
		if submission.GalleryData != nil {
			sort.Sort(submission.GalleryData)
			for _, i := range submission.GalleryData.Items {
				galleryKeys = append(galleryKeys, i.MediaId)
			}
		} else {
			for i := range submission.MediaMetadata {
				galleryKeys = append(galleryKeys, i)
			}
		}
		urls = nil
		for _, i := range galleryKeys {
			mediaMetadataItem := submission.MediaMetadata[i]
			if mediaMetadataItem.Status != "valid" {
				continue
			}
			// Prefer the generic source URL, then mp4, then gif.
			if mediaMetadataItem.S.U != "" {
				urls = append(urls, mediaMetadataItem.S.U)
			} else if mediaMetadataItem.S.Mp4 != "" {
				urls = append(urls, mediaMetadataItem.S.Mp4)
			} else if mediaMetadataItem.S.Gif != "" {
				urls = append(urls, mediaMetadataItem.S.Gif)
			}
		}
	}
	// First interactive choice: which gallery item (no-op for single URLs).
	// With multiple candidates the chosen index is appended to the filename.
	stdin := bufio.NewReader(os.Stdin)
	i, err := InteractivelyAskIndex(stdin, urls)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get index: %s\n", err)
		os.Exit(1)
	}
	if len(urls) > 1 {
		filename = fmt.Sprintf("%s_%03d", filename, i)
	}
	unparsedUrl := urls[i]
	parsedUrl, err := url.Parse(unparsedUrl)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to parse url: %s\n", err)
		os.Exit(1)
	}
	domain := submission.Domain
	if domain == "" {
		domain = parsedUrl.Hostname()
	}
	path := parsedUrl.EscapedPath()
	pathExtless, pathExt := SplitExt(path)
	// Host-specific rewriting of the chosen URL into direct media URLs.
	urls = make([]string, 1)
	urls[0] = unparsedUrl
	if domain == "imgur.com" || strings.HasSuffix(domain, ".imgur.com") {
		parsedUrl.Host = "i.imgur.com"
		if strings.HasPrefix(path, "/a/") || strings.HasPrefix(path, "/gallery/") {
			// Imgur album/gallery: expand to one URL per image.
			albumId := strings.SplitN(strings.SplitN(path, "/", 4)[2], ".", 2)[0]
			imgurImages, err := GetImgurAlbum(client, albumId)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Failed to get imgur album: %s\n", err)
				os.Exit(1)
			}
			urls = nil
			for _, i := range imgurImages {
				urls = append(urls, "https://i.imgur.com/"+i.Hash+i.Ext)
			}
		} else {
			if pathExt == ".gifv" {
				// .gifv is an HTML wrapper page; the real video is the .mp4.
				// Path and RawPath must be kept consistent: url.URL.String()
				// only honors RawPath when it is a valid encoding of Path.
				parsedUrl.Path = strings.TrimSuffix(parsedUrl.Path, pathExt) + ".mp4"
				parsedUrl.RawPath = pathExtless + ".mp4"
			}
			urls[0] = parsedUrl.String()
		}
	} else if domain == "gfycat.com" {
		gfyMp4, err := GetGfycat(client, pathExtless)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to get gfycat video: %s\n", err)
			os.Exit(1)
		}
		if gfyMp4 != "" {
			urls[0] = gfyMp4
		}
	} else if submission.IsRedditMediaDomain && submission.Preview != nil {
		// Reddit-hosted images: prefer the mp4/gif preview variants for gifs,
		// and the preview source for the video fallback case.
		preview := submission.Preview.Images[0]
		if pathExt == ".gif" {
			if preview.Variants.Mp4.Source != nil {
				urls[0] = preview.Variants.Mp4.Source.Url
			} else if preview.Variants.Gif.Source != nil {
				urls[0] = preview.Variants.Gif.Source.Url
			}
		} else if submission.IsVideo {
			urls[0] = preview.Source.Url
		}
	}
	// Second interactive choice: which rewritten URL (e.g. imgur album item).
	i, err = InteractivelyAskIndex(stdin, urls)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get index: %s\n", err)
		os.Exit(1)
	}
	if len(urls) > 1 {
		filename = fmt.Sprintf("%s_%03d", filename, i)
	}
	// Ask before clobbering anything whose name starts with our target name
	// (the final extension is not known until the response headers arrive).
	files, err := os.ReadDir(".")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to list files: %s\n", err)
		os.Exit(1)
	}
	for _, i := range files {
		if strings.HasPrefix(i.Name(), filename) {
			fmt.Printf("A file that starts with %s exists (%s), potentially overwrite (y/N)? ", filename, i.Name())
			b, err := stdin.ReadByte()
			if err != nil {
				fmt.Fprintf(os.Stderr, "Error when reading stdin: %s\n", err)
				os.Exit(1)
			}
			if b != 'y' && b != 'Y' {
				fmt.Println("Not overwriting")
				os.Exit(1)
			}
			break
		}
	}
	// An explicit second argument overrides whatever was inferred.
	if len(os.Args) == 3 {
		unparsedUrl = os.Args[2]
	} else {
		unparsedUrl = urls[i]
	}
	response, err := client.Get(unparsedUrl)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get response: %s\n", err)
		os.Exit(1)
	}
	defer response.Body.Close()
	// Derive the file extension from the Content-Type; a failure here is only
	// a warning and leaves the filename without an extension.
	ext, err := GetExtension(response.Header.Get("Content-Type"))
	if err != nil {
		fmt.Fprintf(os.Stderr, "Warning: Failed to get file extension: %s\n", err)
	} else {
		filename = filename + ext
	}
	fmt.Printf("Downloading to %s\n", filename)
	// O_TRUNC is required: when overwriting a larger existing file, omitting
	// it would leave the old file's trailing bytes after our data.
	file, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o664)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to open file for writing: %s\n", err)
		os.Exit(1)
	}
	writer := bufio.NewWriter(file)
	reader := bufio.NewReader(response.Body)
	lastOutputLength := 0
	bytesDownloaded := 0
	toWrite := make([]byte, 1024*1024)
	for {
		lastOutputLength = PrintStatus(float64(bytesDownloaded), float64(response.ContentLength), lastOutputLength)
		// Read may return both n > 0 and an error (including io.EOF); handle
		// the data first, then the error.
		n, err := reader.Read(toWrite)
		if n > 0 {
			_, writeErr := writer.Write(toWrite[:n])
			if writeErr != nil {
				fmt.Fprintf(os.Stderr, "Failed to write response: %s\n", writeErr)
				os.Exit(1)
			}
			bytesDownloaded += n
			lastOutputLength = PrintStatus(float64(bytesDownloaded), float64(response.ContentLength), lastOutputLength)
		}
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to read response: %s\n", err)
			os.Exit(1)
		}
	}
	// Flush and close explicitly with error checks: a deferred Flush would
	// silently drop write errors (and deferred calls never run on os.Exit).
	if err := writer.Flush(); err != nil {
		fmt.Fprintf(os.Stderr, "Failed to write response: %s\n", err)
		os.Exit(1)
	}
	if err := file.Close(); err != nil {
		fmt.Fprintf(os.Stderr, "Failed to write response: %s\n", err)
		os.Exit(1)
	}
	fmt.Println()
}