This repository has been archived on 2022-04-16. You can view files and clone it, but cannot push or open issues or pull requests.
omordl/main.go

154 lines
4.1 KiB
Go
Raw Normal View History

2021-11-17 15:38:18 +00:00
package main
import (
"bufio"
"fmt"
"net/http"
"net/url"
"os"
"os/exec"
"sort"
"strings"
"time"
"golang.org/x/sys/unix"
)
// main downloads media from a reddit submission.
//
// Usage: omordl <submission id/url>
//
// The single argument may be a bare submission id, a redd.it short
// link, or a full reddit permalink (/r/<sub>/comments/<id>/<slug>/).
// Videos are handed off to ffmpeg (replacing this process via exec);
// galleries and plain links produce a list of candidate URLs the user
// picks from interactively.
func main() {
	if len(os.Args) != 2 {
		fmt.Fprintf(os.Stderr, "Usage: %s <submission id/url>\n", os.Args[0])
		os.Exit(1)
	}
	submissionUrl, err := url.Parse(os.Args[1])
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when parsing submission url: %s\n", err)
		os.Exit(1)
	}
	// Default: treat the argument as a bare submission id.
	submissionId := os.Args[1]
	if submissionUrl.Hostname() == "redd.it" {
		// Short link: the id is the entire path, minus the leading "/".
		submissionId = submissionUrl.EscapedPath()[1:]
	} else if submissionUrl.Hostname() != "" {
		// Full permalink: "/r/<sub>/comments/<id>/<slug>/" — the id is
		// path segment 4 when split on "/" (segment 0 is empty).
		split := strings.SplitN(submissionUrl.EscapedPath(), "/", 6)
		if len(split) < 5 {
			fmt.Fprintln(os.Stderr, "URL passed does not have enough path separators")
			os.Exit(1)
		}
		submissionId = split[4]
	}
	if submissionId == "" {
		fmt.Fprintln(os.Stderr, "Submission ID is empty")
		os.Exit(1)
	}
	client := &http.Client{
		Timeout: 30 * time.Second,
	}
	err = LoadConfigAndData()
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to load config and data: %s\n", err)
		os.Exit(1)
	}
	token, err := GetToken(client)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get token: %s\n", err)
		os.Exit(1)
	}
	submission, err := GetSubmission(client, token, submissionId)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get submission: %s\n", err)
		os.Exit(1)
	}
	// Build a filesystem-safe output name: no path separators, and no
	// leading dot so the file is never hidden. filename is never empty
	// here because submissionId was checked above.
	filename := strings.ReplaceAll(submission.Title, "/", "_") + "-" + submissionId
	if filename[0] == '.' {
		filename = "_" + filename[1:]
	}
	files, err := os.ReadDir(".")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to list files: %s\n", err)
		os.Exit(1)
	}
	stdin := bufio.NewReader(os.Stdin)
	// Prefix match (not exact) because the extension is appended later.
	for _, i := range files {
		if strings.HasPrefix(i.Name(), filename) {
			fmt.Printf("A file that starts with %s exists (%s), potentially overwrite (y/N)? ", filename, i.Name())
			b, err := stdin.ReadByte()
			if err != nil {
				fmt.Fprintf(os.Stderr, "Error when reading stdin: %s\n", err)
				os.Exit(1)
			}
			if b != 'y' && b != 'Y' {
				fmt.Println("Not overwriting")
				os.Exit(1)
			}
			break
		}
	}
	if submission.CrosspostParent != "" && len(submission.CrosspostParentList) > 0 {
		// For crossposts, fetch the original. CrosspostParent is a reddit
		// fullname like "t3_<id>"; TrimPrefix extracts the bare id without
		// risking a slice panic on an unexpectedly short value.
		submission, err = GetSubmission(client, token, strings.TrimPrefix(submission.CrosspostParent, "t3_"))
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to get original submission: %s\n", err)
			os.Exit(1)
		}
	}
	if submission.IsSelf {
		fmt.Fprintln(os.Stderr, "Cannot download selfposts")
		os.Exit(1)
	}
	// Candidate download URLs; by default just the submission link.
	urls := []string{submission.Url}
	if submission.IsVideo {
		ffmpegPath, err := exec.LookPath("ffmpeg")
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to find ffmpeg: %s\n", err)
			os.Exit(1)
		}
		// Prefer the HLS playlist; fall back to DASH.
		ffmpegUrl := submission.SecureMedia.RedditVideo.HlsUrl
		if ffmpegUrl == "" {
			ffmpegUrl = submission.SecureMedia.RedditVideo.DashUrl
		}
		if ffmpegUrl != "" {
			client.CloseIdleConnections()
			// unix.Exec replaces this process with ffmpeg and only
			// returns on failure, so anything after it is error handling.
			err = unix.Exec(ffmpegPath, []string{"ffmpeg", "-nostdin", "-i", ffmpegUrl, "-c", "copy", "--", filename + ".mp4"}, os.Environ())
			fmt.Fprintf(os.Stderr, "Failed to exec as ffmpeg: %s\n", err)
			os.Exit(1)
		}
		// No streaming manifest available: use the direct fallback URL
		// (video-only, no audio track) if present.
		fallbackUrl := submission.SecureMedia.RedditVideo.FallbackUrl
		if fallbackUrl != "" {
			urls[0] = fallbackUrl
		}
	} else if submission.IsGallery {
		var galleryKeys []string
		if submission.GalleryData != nil {
			// GalleryData carries the author's intended ordering.
			sort.Sort(submission.GalleryData)
			for _, item := range submission.GalleryData.Items {
				galleryKeys = append(galleryKeys, item.MediaId)
			}
		} else {
			// No gallery data: fall back to the metadata map. Map
			// iteration order is random, so sort the keys to keep the
			// presented list deterministic across runs.
			for key := range submission.MediaMetadata {
				galleryKeys = append(galleryKeys, key)
			}
			sort.Strings(galleryKeys)
		}
		urls = nil
		for _, key := range galleryKeys {
			mediaMetadataItem := submission.MediaMetadata[key]
			if mediaMetadataItem.Status != "valid" {
				continue
			}
			// Prefer the generic source URL, then mp4, then gif.
			if mediaMetadataItem.S.U != "" {
				urls = append(urls, mediaMetadataItem.S.U)
			} else if mediaMetadataItem.S.Mp4 != "" {
				urls = append(urls, mediaMetadataItem.S.Mp4)
			} else if mediaMetadataItem.S.Gif != "" {
				urls = append(urls, mediaMetadataItem.S.Gif)
			}
		}
	}
	fmt.Println(urls)
	i, err := InteractivelyAskIndex(stdin, urls)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to get index: %s\n", err)
		os.Exit(1)
	}
	url := urls[i]
	fmt.Println(url)
}