rimgo/api/tag.go

111 lines
2.5 KiB
Go
Raw Normal View History

2022-02-18 21:56:56 +00:00
package api
import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"

	"github.com/patrickmn/go-cache"
	"github.com/tidwall/gjson"
)
2022-07-22 15:55:22 +00:00
// Tag holds everything needed to render an Imgur tag page: the tag's
// metadata, the selected sort, and the page of posts fetched for it.
type Tag struct {
	Tag          string       // normalized tag name (dots stripped, see FetchTag)
	Display      string       // human-readable display name reported by the API
	Sort         string       // effective sort applied ("newest", "best", or "popular")
	PostCount    int64        // total number of posts under this tag
	Posts        []Submission // the fetched page of posts (cover media only)
	Background   string       // relative path to the tag's background image (".webp")
	BackgroundId string       // NOTE(review): not populated by FetchTag in this file — confirm other writers exist
}
2023-01-01 20:12:03 +00:00
func (client *Client) FetchTag(tag string, sort string, page string) (Tag, error) {
// Dots are automatically removed on Imgur, so more cache hits
tag = strings.ReplaceAll(tag, ".", "")
2023-01-01 20:12:03 +00:00
cacheData, found := client.Cache.Get(tag + sort + page + "-tag")
2022-03-16 02:56:34 +00:00
if found {
2022-07-22 15:55:22 +00:00
return cacheData.(Tag), nil
2022-03-16 02:56:34 +00:00
}
2022-02-18 21:56:56 +00:00
req, err := http.NewRequest("GET", "https://api.imgur.com/post/v1/posts/t/"+tag, nil)
if err != nil {
2022-07-22 15:55:22 +00:00
return Tag{}, err
2022-02-18 21:56:56 +00:00
}
q := req.URL.Query()
2023-01-01 20:12:03 +00:00
q.Add("client_id", client.ClientID)
2022-02-18 21:56:56 +00:00
q.Add("include", "cover")
q.Add("page", page)
switch sort {
case "newest":
q.Add("filter[window]", "week")
q.Add("sort", "-time")
case "best":
q.Add("filter[window]", "all")
q.Add("sort", "-top")
case "popular":
default:
q.Add("filter[window]", "week")
q.Add("sort", "-viral")
sort = "popular"
}
req.URL.RawQuery = q.Encode()
res, err := http.DefaultClient.Do(req)
if err != nil {
2022-07-22 15:55:22 +00:00
return Tag{}, err
2022-02-18 21:56:56 +00:00
}
body, err := io.ReadAll(res.Body)
2022-02-18 21:56:56 +00:00
if err != nil {
2022-07-22 15:55:22 +00:00
return Tag{}, err
2022-02-18 21:56:56 +00:00
}
data := gjson.Parse(string(body))
2022-07-22 15:55:22 +00:00
posts := make([]Submission, 0)
2022-02-18 21:56:56 +00:00
data.Get("posts").ForEach(
func(key, value gjson.Result) bool {
url, _ := url.Parse(strings.ReplaceAll(value.Get("url").String(), "https://imgur.com", ""))
q := url.Query()
q.Add("tag", tag+"."+sort+"."+page+"."+key.String())
url.RawQuery = q.Encode()
posts = append(posts, Submission{
Id: value.Get("id").String(),
Title: value.Get("title").String(),
Link: url.String(),
Cover: Media{
Id: value.Get("cover_id").String(),
Type: value.Get("cover.type").String(),
2023-06-13 13:37:00 +00:00
Url: value.Get("cover.url").String(),
},
Points: value.Get("point_count").Int(),
Upvotes: value.Get("upvote_count").Int(),
Downvotes: value.Get("downvote_count").Int(),
Comments: value.Get("comment_count").Int(),
Views: value.Get("view_count").Int(),
IsAlbum: value.Get("is_album").Bool(),
})
2022-02-18 21:56:56 +00:00
return true
},
)
2022-07-22 15:55:22 +00:00
tagData := Tag{
Tag: tag,
Display: data.Get("display").String(),
Sort: sort,
PostCount: data.Get("post_count").Int(),
Posts: posts,
Background: "/" + data.Get("background_id").String() + ".webp",
2022-03-16 02:56:34 +00:00
}
client.Cache.Set(tag+sort+page+"-tag", tagData, 4*cache.DefaultExpiration)
2022-03-16 02:56:34 +00:00
return tagData, nil
2022-02-18 21:56:56 +00:00
}