# nitter/src/apiutils.nim
# SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, times, strutils, uri
import packedjson, zippy
import types, tokens, consts, parserutils, http_pool
const rl = "x-rate-limit-"  # prefix of the rate-limit response headers (x-rate-limit-remaining/-reset)
var pool: HttpPool  # process-wide HTTP connection pool, lazily created on first fetch
proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
                count="20"; ext=true): seq[(string, string)] =
  ## Builds the query-parameter list for a Twitter API call: starts from the
  ## shared `timelineParams`, appends the caller-supplied `pars`, then the
  ## media-stats extension flag (`ext`), the page size (`count`) and the
  ## pagination `cursor` when non-empty.
  result = timelineParams
  for p in pars:
    result &= p
  if ext:
    result &= ("ext", "mediaStats")
  if count.len > 0:
    result &= ("count", count)
  if cursor.len > 0:
    # The raw cursor often has plus signs, which sometimes get turned into
    # spaces, so we need to turn them back into a plus
    if " " in cursor:
      result &= ("cursor", cursor.replace(" ", "+"))
    else:
      result &= ("cursor", cursor)
proc genHeaders*(token: Token = nil): HttpHeaders =
  ## Default headers for Twitter API requests. When a `token` is supplied,
  ## its guest token is sent via `x-guest-token`; otherwise that field is
  ## left empty.
  let guestToken = if token == nil: "" else: token.tok
  result = newHttpHeaders({
    "connection": "keep-alive",
    "authorization": auth,
    "content-type": "application/json",
    "x-guest-token": guestToken,
    "x-twitter-active-user": "yes",
    "authority": "api.twitter.com",
    "accept-encoding": "gzip",
    "accept-language": "en-US,en;q=0.9",
    "accept": "*/*",
    "DNT": "1"
  })
proc fetch*(url: Uri; oldApi=false): Future[JsonNode] {.async.} =
  ## Performs a GET request against the Twitter API using a pooled client
  ## and a guest token.
  ##
  ## Returns the parsed JSON body, or `JNull` when the response body is not
  ## JSON (e.g. an HTML error page). Unless `oldApi` is set, rate-limit
  ## bookkeeping from the `x-rate-limit-*` headers is stored on the token.
  ## Raises via `rateLimitError()` when no token is available, when the API
  ## reports a token error, or when the request itself fails; raises
  ## `InternalError` on an HTTP 400 response.
  once:
    pool = HttpPool()

  var token = await getToken()
  if token.tok.len == 0:
    # no usable guest token available
    raise rateLimitError()

  let headers = genHeaders(token)
  try:
    var resp: AsyncResponse
    var body = pool.use(headers):
      resp = await c.get($url)
      await resp.body

    if body.len > 0:
      if resp.headers.getOrDefault("content-encoding") == "gzip":
        body = uncompress(body, dfGzip)
      else:
        echo "non-gzip body, url: ", url, ", body: ", body

    if body.startsWith('{') or body.startsWith('['):
      result = parseJson(body)
    else:
      # not JSON; log the status and fall back to a null node
      echo resp.status, ": ", body
      result = newJNull()

    # guard both headers: only "reset" was checked before, so a response
    # carrying "reset" but not "remaining" would raise KeyError
    if not oldApi and resp.headers.hasKey(rl & "reset") and
       resp.headers.hasKey(rl & "remaining"):
      token.remaining = parseInt(resp.headers[rl & "remaining"])
      token.reset = fromUnix(parseInt(resp.headers[rl & "reset"]))

    if result.getError notin {invalidToken, forbidden, badToken}:
      token.lastUse = getTime()
    else:
      # the token was rejected; retire it and signal rate limiting
      echo "fetch error: ", result.getError
      release(token, true)
      raise rateLimitError()

    if resp.status == $Http400:
      raise newException(InternalError, $url)
  except InternalError as e:
    raise e
  # NOTE(review): broad catch also swallows Defects; consider CatchableError
  except Exception as e:
    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
    # "length"/"descriptor" errors are transient connection issues, not a
    # bad token, so the token is kept in those cases
    if "length" notin e.msg and "descriptor" notin e.msg:
      release(token, true)
    raise rateLimitError()