Compare commits


66 Commits

Author SHA1 Message Date
blankie 4e0b16d2bc
Show video link in RSS feed 2024-01-18 07:46:15 +11:00
blankie cd1fd532e6
RSS: Fix URLs to point to the correct feed and page
Fixes #1131
2024-01-18 07:46:15 +11:00
blankie 58795b5f1c
Add a link to this fork 2024-01-18 07:46:15 +11:00
blankie e3d13b02b5
Add black purple theme 2024-01-18 07:46:15 +11:00
Zed 52db03b73a Fix broken video playback by forcing fmp4 2024-01-12 03:48:42 +01:00
blankie 583c858cdf
Fix search queries in user search RSS feeds (#1126)
Fixes #992
2023-12-03 09:54:24 +01:00
Zed a9740fec8b Fix compilation with old Nim again 2023-11-25 10:11:57 +00:00
Zed f8254c2f0f Add support for business and gov verification
Also improve icon rendering on Firefox
2023-11-25 10:07:28 +00:00
Zed d6be08d093 Fix jobDetails error on old Nim versions 2023-11-25 05:53:13 +00:00
Zed 4dac9f0798 Add simple job_details card support 2023-11-25 05:31:15 +00:00
Zed 06ab1ea2e7 Enable disabled tests 2023-11-15 11:11:56 +00:00
Zed c2819dab44 Fix #1106
Closes #831
2023-11-15 11:11:53 +00:00
Zed eaedd2aee7 Fix ARM64 Dockerfile versions 2023-11-08 16:38:43 +00:00
Zed 5e188647fc Bump Nim in the ARM64 Dockerfile, add nitter user 2023-11-08 14:53:35 +00:00
Zed e0d9dd0f9c Fix #670 2023-11-08 14:27:22 +00:00
Zed d17583286a Don't requests made before reset 2023-11-01 05:44:59 +00:00
Zed 209f453b79 Purge expired accounts after parsing 2023-11-01 05:09:44 +00:00
Zed e1838e0933 Move CI workflow to buildjet 2023-11-01 05:09:35 +00:00
Zed 623424f516 Fix outdated test 2023-11-01 04:52:44 +00:00
Zed 7b3fcdc622 Fix guest accounts CI setup attempt 4 2023-11-01 04:19:10 +00:00
Zed 1d20bd01cb Remove redundant "active" field from /.health 2023-11-01 04:16:33 +00:00
Zed 58e73a14c5 Fix guest accounts CI setup attempt 3 2023-11-01 04:13:22 +00:00
Zed b0b335106d Fix missing CI file argument 2023-11-01 04:06:42 +00:00
Zed 006b91c903 Prevent annoying warnings on devel 2023-11-01 04:04:45 +00:00
Zed 33bad37128 Fix guest accounts CI setup attempt 2 2023-11-01 01:25:00 +00:00
Zed b930a3d5bf Fix guest accounts CI setup 2023-10-31 23:54:11 +00:00
Zed bd0be724f0 Merge branch 'master' into guest_accounts 2023-10-31 23:47:02 +00:00
Zed 60a82563da Run tests on multiple Nim versions 2023-10-31 23:46:24 +00:00
Zed b8103cf501 Fix compilation on Nim 1.6.x 2023-10-31 23:02:45 +00:00
Émilien (perso) b62d73dbd3
nim version min require + update dockerfile arm (#1053) 2023-10-31 22:33:08 +00:00
Zed 4120558649 Replace /.tokens with /.health and /.accounts 2023-10-31 12:04:32 +00:00
Zed 089275826c Bump minimum Nim version 2023-10-31 11:33:24 +00:00
Zed edad09f4c9 Update nimcrypto and jsony 2023-10-31 08:31:51 +00:00
Zed 32e3469e3a Fix multi-user timelines 2023-10-31 05:53:55 +00:00
LS 735b30c2da
fix(nitter): add graphql user search (#1047)
* fix(nitter): add graphql user search

* fix(nitter): rm gitignore 2nd guest_accounts

* fix(nitter): keep query from user search in result. remove personal mods

* fix(nitter): remove useless line gitignore
2023-10-30 12:13:06 +00:00
Zed 537af7fd5e Improve Liberapay css for Firefox compatibility 2023-09-19 01:29:41 +00:00
Zed 7d14789910 Improve guest accounts loading, add JSONL support 2023-09-18 18:26:01 +00:00
Zed 7abcb489f4 Increase photo rail cache ttl 2023-09-18 17:15:09 +00:00
Zed 14f9a092d8
Fix crash on missing quote tweet data 2023-09-14 23:35:41 +00:00
Zed fcd74e8048 Retry rate limited requests with different account 2023-09-02 08:15:58 +02:00
Zed 4250245263 Shorten media proxy error log 2023-09-02 07:28:56 +02:00
Zed b8fe212e94 Add media proxying error logging 2023-09-01 21:39:02 +02:00
Zed 84dcf49079 Fix negative pending requests bug 2023-08-31 05:07:12 +02:00
Zed 82beb5da8c Add empty oauth token logging 2023-08-31 01:31:27 +02:00
Zed 282ce8b0e9 Add 429 logging 2023-08-31 01:29:54 +02:00
Zed 37b58a5a7e Fix accounts logging 2023-08-30 03:43:49 +02:00
Zed 898b19b92f Improve rate limit handling, minor refactor 2023-08-30 03:10:21 +02:00
Zed 986b91ac73 Handle ProtocolError and BadClientError equally 2023-08-29 23:58:03 +02:00
Zed 4ccf350dc7 Improve .tokens output 2023-08-29 23:45:18 +02:00
Zed 7630f57f17 Fix cards not being displayed 2023-08-26 05:16:38 +02:00
Zed 03794a8d4a Cleanup 2023-08-25 16:32:39 +02:00
Zed ae9fa02bf5 Switch to TweetDetail for tweets 2023-08-25 16:28:30 +02:00
Zed 88b005c9da Revert "Switch to using typeahead for user search"
This reverts commit a3e11e3272.
2023-08-23 19:31:40 +02:00
Zed a3e11e3272 Switch to using typeahead for user search 2023-08-23 10:14:44 +02:00
Zed 45808361af Fix tweetDetail stats 2023-08-22 04:45:49 +02:00
Zed 8df5256c1d Switch back to old user search endpoint 2023-08-22 04:33:14 +02:00
Zed 6e8744943f Tweak /.tokens, add amount of limited accounts 2023-08-22 03:43:18 +02:00
Zed 5c08e6a774 Fix compilation on older versions of Nim 2023-08-22 02:27:44 +02:00
Zed 30bdf3a14e Reduce max concurrent pending requests per account 2023-08-22 01:32:28 +02:00
Zed 12504bcffe
Fix compilation error 2023-08-21 18:12:06 +02:00
Zed c3d9441370
Unify some guest account logs 2023-08-21 14:49:50 +02:00
Zed 51714b5ad2
Add guest accounts variable to GitHub action 2023-08-21 11:25:27 +02:00
Zed e8b5cbef7b Add missing limitedAt assignment 2023-08-20 12:31:08 +02:00
Zed 3d8858f0d8 Track rate limits, reset after 24 hours 2023-08-20 11:56:42 +02:00
Zed bbd68e6840 Filter out account limits that already reset 2023-08-19 01:13:36 +02:00
Zed 3572dd7771 Replace tokens with guest accounts, swap endpoints 2023-08-19 00:25:14 +02:00
47 changed files with 676 additions and 640 deletions


@@ -10,25 +10,34 @@ on:
 jobs:
   test:
-    runs-on: ubuntu-latest
+    runs-on: buildjet-2vcpu-ubuntu-2204
+    strategy:
+      matrix:
+        nim:
+          - "1.6.10"
+          - "1.6.x"
+          - "2.0.x"
+          - "devel"
     steps:
       - uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Cache nimble
         id: cache-nimble
-        uses: actions/cache@v3
+        uses: buildjet/cache@v3
         with:
           path: ~/.nimble
-          key: nimble-${{ hashFiles('*.nimble') }}
-          restore-keys: "nimble-"
+          key: ${{ matrix.nim }}-nimble-${{ hashFiles('*.nimble') }}
+          restore-keys: |
+            ${{ matrix.nim }}-nimble-
       - uses: actions/setup-python@v4
         with:
           python-version: "3.10"
           cache: "pip"
       - uses: jiro4989/setup-nim-action@v1
         with:
-          nim-version: "1.x"
+          nim-version: ${{ matrix.nim }}
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
       - run: nimble build -d:release -Y
       - run: pip install seleniumbase
       - run: seleniumbase install chromedriver
@@ -37,9 +46,11 @@ jobs:
         run: |
           sudo apt install libsass-dev -y
           cp nitter.example.conf nitter.conf
+          sed -i 's/enableDebug = false/enableDebug = true/g' nitter.conf
           nimble md
           nimble scss
+          echo '${{ secrets.GUEST_ACCOUNTS }}' > ./guest_accounts.jsonl
       - name: Run tests
         run: |
           ./nitter &
-          pytest -n4 tests
+          pytest -n8 tests

.gitignore (vendored)

@@ -10,4 +10,5 @@ nitter
 /public/css/style.css
 /public/md/*.html
 nitter.conf
+guest_accounts.json*
 dump.rdb


@@ -1,7 +1,7 @@
-FROM alpine:3.17 as nim
+FROM alpine:3.18 as nim
 LABEL maintainer="setenforce@protonmail.com"
-RUN apk --no-cache add gcc git libc-dev libsass-dev "nim=1.6.8-r0" nimble pcre
+RUN apk --no-cache add libsass-dev pcre gcc git libc-dev "nim=1.6.14-r0" "nimble=0.13.1-r2"
 WORKDIR /src/nitter
@@ -13,11 +13,13 @@ RUN nimble build -d:danger -d:lto -d:strip \
     && nimble scss \
     && nimble md
-FROM alpine:3.17
+FROM alpine:3.18
 WORKDIR /src/
-RUN apk --no-cache add ca-certificates pcre openssl1.1-compat
+RUN apk --no-cache add pcre ca-certificates openssl1.1-compat
 COPY --from=nim /src/nitter/nitter ./
 COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
 COPY --from=nim /src/nitter/public ./public
 EXPOSE 8080
+RUN adduser -h /src/ -D -s /bin/sh nitter
+USER nitter
 CMD ./nitter


@@ -7,12 +7,7 @@
 # disable annoying warnings
 warning("GcUnsafe2", off)
+warning("HoleEnumConv", off)
 hint("XDeclaredButNotUsed", off)
 hint("XCannotRaiseY", off)
 hint("User", off)
-const
-  nimVersion = (major: NimMajor, minor: NimMinor, patch: NimPatch)
-when nimVersion >= (1, 6, 0):
-  warning("HoleEnumConv", off)


@@ -23,7 +23,7 @@ redisMaxConnections = 30
 hmacKey = "secretkey" # random key for cryptographic signing of video urls
 base64Media = false # use base64 encoding for proxied media urls
 enableRSS = true # set this to false to disable RSS feeds
-enableDebug = false # enable request logs and debug endpoints (/.tokens)
+enableDebug = false # enable request logs and debug endpoints (/.accounts)
 proxy = "" # http/https url, SOCKS proxies are not supported
 proxyAuth = ""
 tokenCount = 10


@@ -10,11 +10,11 @@ bin = @["nitter"]
 # Dependencies
-requires "nim >= 1.4.8"
+requires "nim >= 1.6.10"
 requires "jester#baca3f"
 requires "karax#5cf360c"
 requires "sass#7dfdd03"
-requires "nimcrypto#4014ef9"
+requires "nimcrypto#a079df9"
 requires "markdown#158efe3"
 requires "packedjson#9e6fbb6"
 requires "supersnappy#6c94198"
@@ -22,8 +22,8 @@ requires "redpool#8b7c1db"
 requires "https://github.com/zedeus/redis#d0a0e6f"
 requires "zippy#ca5989a"
 requires "flatty#e668085"
-requires "jsony#ea811be"
+requires "jsony#1de1f08"
+requires "oauth#b8c163b"
 # Tasks


@@ -33,23 +33,6 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profi
     js = await fetch(url ? params, apiId)
   result = parseGraphTimeline(js, "user", after)

-# proc getTimeline*(id: string; after=""; replies=false): Future[Profile] {.async.} =
-#   if id.len == 0: return
-#   let
-#     ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-#     url = oldUserTweets / (id & ".json") ? ps
-#   result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getUserTimeline*(id: string; after=""): Future[Profile] {.async.} =
-  var ps = genParams({"id": id})
-  if after.len > 0:
-    ps.add ("down_cursor", after)
-  let
-    url = legacyUserTweets ? ps
-    js = await fetch(url, Api.userTimeline)
-  result = parseUserTimeline(js, after)
-
 proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
@@ -112,10 +95,10 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
   if after.len > 0:
     result.replies = await getReplies(id, after)

-proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
+proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
   let q = genQueryParam(query)
   if q.len == 0 or q == emptyQuery:
-    return Profile(tweets: Timeline(query: query, beginning: true))
+    return Timeline(query: query, beginning: true)

   var
     variables = %*{
@@ -129,44 +112,29 @@ proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
   if after.len > 0:
     variables["cursor"] = % after
   let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = Profile(tweets: parseGraphSearch(await fetch(url, Api.search), after))
-  result.tweets.query = query
-
-proc getTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
-  var q = genQueryParam(query)
-  if q.len == 0 or q == emptyQuery:
-    return Timeline(query: query, beginning: true)
-
-  if after.len > 0:
-    q &= " max_id:" & after
-
-  let url = tweetSearch ? genParams({
-    "q": q ,
-    "modules": "status",
-    "result_type": "recent",
-  })
-
-  result = parseTweetSearch(await fetch(url, Api.search), after)
+  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
   result.query = query

-proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
   if query.text.len == 0:
     return Result[User](query: query, beginning: true)

-  var url = userSearch ? {
-    "q": query.text,
-    "skip_status": "1",
-    "count": "20",
-    "page": page
-  }
-
-  result = parseUsers(await fetchRaw(url, Api.userSearch))
+  var
+    variables = %*{
+      "rawQuery": query.text,
+      "count": 20,
+      "product": "People",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+    result.beginning = false
+
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch[User](await fetch(url, Api.search), after)
   result.query = query
-  if page.len == 0:
-    result.bottom = "2"
-  elif page.allCharsInSet(Digits):
-    result.bottom = $(parseInt(page) + 1)

 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return


@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import httpclient, asyncdispatch, options, strutils, uri
-import jsony, packedjson, zippy
-import types, tokens, consts, parserutils, http_pool
+import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
+import jsony, packedjson, zippy, oauth1
+import types, auth, consts, parserutils, http_pool
 import experimental/types/common

 const
@@ -29,12 +29,30 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     else:
       result &= ("cursor", cursor)

-proc genHeaders*(token: Token = nil): HttpHeaders =
+proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
+  let
+    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
+    params = OAuth1Parameters(
+      consumerKey: consumerKey,
+      signatureMethod: "HMAC-SHA1",
+      timestamp: $int(round(epochTime())),
+      nonce: "0",
+      isIncludeVersionToHeader: true,
+      token: oauthToken
+    )
+    signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
+
+  params.signature = percentEncode(signature)
+
+  return getOauth1RequestHeader(params)["authorization"]
+
+proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
+  let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
   result = newHttpHeaders({
     "connection": "keep-alive",
-    "authorization": auth,
+    "authorization": header,
     "content-type": "application/json",
-    "x-guest-token": if token == nil: "" else: token.tok,
     "x-twitter-active-user": "yes",
     "authority": "api.twitter.com",
     "accept-encoding": "gzip",
@@ -43,24 +61,18 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
     "DNT": "1"
   })

-template updateToken() =
-  if resp.headers.hasKey(rlRemaining):
-    let
-      remaining = parseInt(resp.headers[rlRemaining])
-      reset = parseInt(resp.headers[rlReset])
-    token.setRateLimit(api, remaining, reset)
-
 template fetchImpl(result, fetchBody) {.dirty.} =
   once:
     pool = HttpPool()

-  var token = await getToken(api)
-  if token.tok.len == 0:
+  var account = await getGuestAccount(api)
+  if account.oauthToken.len == 0:
+    echo "[accounts] Empty oauth token, account: ", account.id
     raise rateLimitError()

   try:
     var resp: AsyncResponse
-    pool.use(genHeaders(token)):
+    pool.use(genHeaders($url, account.oauthToken, account.oauthSecret)):
       template getContent =
         resp = await c.get($url)
         result = await resp.body
@@ -71,57 +83,75 @@ template fetchImpl(result, fetchBody) {.dirty.} =
         badClient = true
         raise newException(BadClientError, "Bad client")

+      if resp.headers.hasKey(rlRemaining):
+        let
+          remaining = parseInt(resp.headers[rlRemaining])
+          reset = parseInt(resp.headers[rlReset])
+        account.setRateLimit(api, remaining, reset)
+
       if result.len > 0:
         if resp.headers.getOrDefault("content-encoding") == "gzip":
           result = uncompress(result, dfGzip)
-        else:
-          echo "non-gzip body, url: ", url, ", body: ", result
+
+        if result.startsWith("{\"errors"):
+          let errors = result.fromJson(Errors)
+          if errors in {expiredToken, badToken}:
+            echo "fetch error: ", errors
+            invalidate(account)
+            raise rateLimitError()
+          elif errors in {rateLimited}:
+            # rate limit hit, resets after 24 hours
+            setLimited(account, api)
+            raise rateLimitError()
+        elif result.startsWith("429 Too Many Requests"):
+          echo "[accounts] 429 error, API: ", api, ", account: ", account.id
+          account.apis[api].remaining = 0
+          # rate limit hit, resets after the 15 minute window
+          raise rateLimitError()

     fetchBody
-    release(token, used=true)

     if resp.status == $Http400:
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
   except BadClientError as e:
-    release(token, used=true)
     raise e
+  except OSError as e:
+    raise e
   except Exception as e:
-    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
-    if "length" notin e.msg and "descriptor" notin e.msg:
-      release(token, invalid=true)
+    let id = if account.isNil: "null" else: $account.id
+    echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", id, ", url: ", url
     raise rateLimitError()
+  finally:
+    release(account)
+
+template retry(bod) =
+  try:
+    bod
+  except RateLimitError:
+    echo "[accounts] Rate limited, retrying ", api, " request..."
+    bod

 proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
-  var body: string
-  fetchImpl body:
-    if body.startsWith('{') or body.startsWith('['):
-      result = parseJson(body)
-    else:
-      echo resp.status, ": ", body, " --- url: ", url
-      result = newJNull()
-
-    updateToken()
-
-    let error = result.getError
-    if error in {invalidToken, badToken}:
-      echo "fetch error: ", result.getError
-      release(token, invalid=true)
-      raise rateLimitError()
+  retry:
+    var body: string
+    fetchImpl body:
+      if body.startsWith('{') or body.startsWith('['):
+        result = parseJson(body)
+      else:
+        echo resp.status, ": ", body, " --- url: ", url
+        result = newJNull()
+
+      let error = result.getError
+      if error in {expiredToken, badToken}:
+        echo "fetchBody error: ", error
+        invalidate(account)
+        raise rateLimitError()

 proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
-  fetchImpl result:
-    if not (result.startsWith('{') or result.startsWith('[')):
-      echo resp.status, ": ", result, " --- url: ", url
-      result.setLen(0)
-
-    updateToken()
-
-    if result.startsWith("{\"errors"):
-      let errors = result.fromJson(Errors)
-      if errors in {invalidToken, badToken}:
-        echo "fetch error: ", errors
-        release(token, invalid=true)
-        raise rateLimitError()
+  retry:
+    fetchImpl result:
+      if not (result.startsWith('{') or result.startsWith('[')):
+        echo resp.status, ": ", result, " --- url: ", url
+        result.setLen(0)

src/auth.nim (new file, 209 lines)

@@ -0,0 +1,209 @@
#SPDX-License-Identifier: AGPL-3.0-only
import std/[asyncdispatch, times, json, random, sequtils, strutils, tables, packedsets, os]
import types
import experimental/parser/guestaccount
# max requests at a time per account to avoid race conditions
const
maxConcurrentReqs = 2
dayInSeconds = 24 * 60 * 60
apiMaxReqs: Table[Api, int] = {
Api.search: 50,
Api.tweetDetail: 150,
Api.photoRail: 180,
Api.userTweets: 500,
Api.userTweetsAndReplies: 500,
Api.userMedia: 500,
Api.userRestId: 500,
Api.userScreenName: 500,
Api.tweetResult: 500,
Api.list: 500,
Api.listTweets: 500,
Api.listMembers: 500,
Api.listBySlug: 500
}.toTable
var
accountPool: seq[GuestAccount]
enableLogging = false
template log(str: varargs[string, `$`]) =
if enableLogging: echo "[accounts] ", str.join("")
proc snowflakeToEpoch(flake: int64): int64 =
int64(((flake shr 22) + 1288834974657) div 1000)
proc hasExpired(account: GuestAccount): bool =
let
created = snowflakeToEpoch(account.id)
now = epochTime().int64
daysOld = int(now - created) div dayInSeconds
return daysOld > 30
proc getAccountPoolHealth*(): JsonNode =
let now = epochTime().int
var
totalReqs = 0
limited: PackedSet[int64]
reqsPerApi: Table[string, int]
oldest = now.int64
newest = 0'i64
average = 0'i64
for account in accountPool:
let created = snowflakeToEpoch(account.id)
if created > newest:
newest = created
if created < oldest:
oldest = created
average += created
for api in account.apis.keys:
let
apiStatus = account.apis[api]
reqs = apiMaxReqs[api] - apiStatus.remaining
if apiStatus.limited:
limited.incl account.id
# no requests made with this account and endpoint since the limit reset
if apiStatus.reset < now:
continue
reqsPerApi.mgetOrPut($api, 0).inc reqs
totalReqs.inc reqs
if accountPool.len > 0:
average = average div accountPool.len
else:
oldest = 0
average = 0
return %*{
"accounts": %*{
"total": accountPool.len,
"limited": limited.card,
"oldest": $fromUnix(oldest),
"newest": $fromUnix(newest),
"average": $fromUnix(average)
},
"requests": %*{
"total": totalReqs,
"apis": reqsPerApi
}
}
proc getAccountPoolDebug*(): JsonNode =
let now = epochTime().int
var list = newJObject()
for account in accountPool:
let accountJson = %*{
"apis": newJObject(),
"pending": account.pending,
}
for api in account.apis.keys:
let
apiStatus = account.apis[api]
obj = %*{}
if apiStatus.reset > now.int:
obj["remaining"] = %apiStatus.remaining
if "remaining" notin obj and not apiStatus.limited:
continue
if apiStatus.limited:
obj["limited"] = %true
accountJson{"apis", $api} = obj
list[$account.id] = accountJson
return %list
proc rateLimitError*(): ref RateLimitError =
newException(RateLimitError, "rate limited")
proc isLimited(account: GuestAccount; api: Api): bool =
if account.isNil:
return true
if api in account.apis:
let limit = account.apis[api]
if limit.limited and (epochTime().int - limit.limitedAt) > dayInSeconds:
account.apis[api].limited = false
log "resetting limit, api: ", api, ", id: ", account.id
return limit.limited or (limit.remaining <= 10 and limit.reset > epochTime().int)
else:
return false
proc isReady(account: GuestAccount; api: Api): bool =
not (account.isNil or account.pending > maxConcurrentReqs or account.isLimited(api))
proc invalidate*(account: var GuestAccount) =
if account.isNil: return
log "invalidating expired account: ", account.id
# TODO: This isn't sufficient, but it works for now
let idx = accountPool.find(account)
if idx > -1: accountPool.delete(idx)
account = nil
proc release*(account: GuestAccount) =
if account.isNil: return
dec account.pending
proc getGuestAccount*(api: Api): Future[GuestAccount] {.async.} =
for i in 0 ..< accountPool.len:
if result.isReady(api): break
result = accountPool.sample()
if not result.isNil and result.isReady(api):
inc result.pending
else:
log "no accounts available for API: ", api
raise rateLimitError()
proc setLimited*(account: GuestAccount; api: Api) =
account.apis[api].limited = true
account.apis[api].limitedAt = epochTime().int
log "rate limited, api: ", api, ", reqs left: ", account.apis[api].remaining, ", id: ", account.id
proc setRateLimit*(account: GuestAccount; api: Api; remaining, reset: int) =
# avoid undefined behavior in race conditions
if api in account.apis:
let limit = account.apis[api]
if limit.reset >= reset and limit.remaining < remaining:
return
if limit.reset == reset and limit.remaining >= remaining:
account.apis[api].remaining = remaining
return
account.apis[api] = RateLimit(remaining: remaining, reset: reset)
proc initAccountPool*(cfg: Config; path: string) =
enableLogging = cfg.enableDebug
let jsonlPath = if path.endsWith(".json"): (path & 'l') else: path
if fileExists(jsonlPath):
log "Parsing JSONL guest accounts file: ", jsonlPath
for line in jsonlPath.lines:
accountPool.add parseGuestAccount(line)
elif fileExists(path):
log "Parsing JSON guest accounts file: ", path
accountPool = parseGuestAccounts(path)
else:
echo "[accounts] ERROR: ", path, " not found. This file is required to authenticate API requests."
quit 1
let accountsPrePurge = accountPool.len
accountPool.keepItIf(not it.hasExpired)
log "Successfully added ", accountPool.len, " valid accounts."
if accountsPrePurge > accountPool.len:
log "Purged ", accountsPrePurge - accountPool.len, " expired accounts."


@@ -2,17 +2,13 @@
 import uri, sequtils, strutils

 const
-  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
+  consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
+  consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"

   api = parseUri("https://api.twitter.com")
   activate* = $(api / "1.1/guest/activate.json")

-  legacyUserTweets* = api / "1.1/timeline/user.json"
   photoRail* = api / "1.1/statuses/media_timeline.json"
-  userSearch* = api / "1.1/users/search.json"
-  tweetSearch* = api / "1.1/search/universal.json"
-  # oldUserTweets* = api / "2/timeline/profile"

   graphql = api / "graphql"
   graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"
@@ -20,7 +16,7 @@ const
   graphUserTweets* = graphql / "3JNH4e9dq1BifLxAa3UMWg/UserWithProfileTweetsQueryV2"
   graphUserTweetsAndReplies* = graphql / "8IS8MaO-2EN6GZZZb8jF0g/UserWithProfileTweetsAndRepliesQueryV2"
   graphUserMedia* = graphql / "PDfFf8hGeJvUCiTyWtw4wQ/MediaTimelineV2"
-  graphTweet* = graphql / "83h5UyHZ9wEKBVzALX8R_g/ConversationTimelineV2"
+  graphTweet* = graphql / "q94uRCEn65LZThakYcPT6g/TweetDetail"
   graphTweetResult* = graphql / "sITyJdhRPpvpEjg4waUmTA/TweetResultByIdQuery"
   graphSearchTimeline* = graphql / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
   graphListById* = graphql / "iTpgCtbdxrsJfyx0cFjHqg/ListByRestId"
@@ -33,11 +29,10 @@ const
     "include_cards": "1",
     "include_entities": "1",
     "include_profile_interstitial_type": "0",
-    "include_quote_count": "1",
-    "include_reply_count": "1",
-    "include_user_entities": "1",
-    "include_ext_reply_count": "1",
-    "include_ext_is_blue_verified": "1",
+    "include_quote_count": "0",
+    "include_reply_count": "0",
+    "include_user_entities": "0",
+    "include_ext_reply_count": "0",
     "include_ext_media_color": "0",
     "cards_platform": "Web-13",
     "tweet_mode": "extended",
@@ -91,8 +86,12 @@ const
   tweetVariables* = """{
     "focalTweetId": "$1",
     $2
-    "includeHasBirdwatchNotes": false
-  }"""
+    "includeHasBirdwatchNotes": false,
+    "includePromotedContent": false,
+    "withBirdwatchNotes": false,
+    "withVoice": false,
+    "withV2Timeline": true
+  }""".replace(" ", "").replace("\n", "")

   # oldUserTweetsVariables* = """{
   #   "userId": "$1", $2


@@ -1,7 +1,7 @@
 import options
 import jsony
 import user, ../types/[graphuser, graphlistmembers]
-from ../../types import User, Result, Query, QueryKind
+from ../../types import User, VerifiedType, Result, Query, QueryKind

 proc parseGraphUser*(json: string): User =
   if json.len == 0 or json[0] != '{':
@@ -12,9 +12,10 @@ proc parseGraphUser*(json: string): User =
   if raw.data.userResult.result.unavailableReason.get("") == "Suspended":
     return User(suspended: true)

-  result = toUser raw.data.userResult.result.legacy
+  result = raw.data.userResult.result.legacy
   result.id = raw.data.userResult.result.restId
-  result.verified = result.verified or raw.data.userResult.result.isBlueVerified
+  if result.verifiedType == VerifiedType.none and raw.data.userResult.result.isBlueVerified:
+    result.verifiedType = blue

 proc parseGraphListMembers*(json, cursor: string): Result[User] =
   result = Result[User](
@@ -30,7 +31,7 @@ proc parseGraphListMembers*(json, cursor: string): Result[User] =
       of TimelineTimelineItem:
         let userResult = entry.content.itemContent.userResults.result
         if userResult.restId.len > 0:
-          result.content.add toUser userResult.legacy
+          result.content.add userResult.legacy
       of TimelineTimelineCursor:
         if entry.content.cursorType == "Bottom":
           result.bottom = entry.content.value


@@ -0,0 +1,21 @@
import std/strutils
import jsony
import ../types/guestaccount
from ../../types import GuestAccount
proc toGuestAccount(account: RawAccount): GuestAccount =
let id = account.oauthToken[0 ..< account.oauthToken.find('-')]
result = GuestAccount(
id: parseBiggestInt(id),
oauthToken: account.oauthToken,
oauthSecret: account.oauthTokenSecret
)
proc parseGuestAccount*(raw: string): GuestAccount =
let rawAccount = raw.fromJson(RawAccount)
result = rawAccount.toGuestAccount
proc parseGuestAccounts*(path: string): seq[GuestAccount] =
let rawAccounts = readFile(path).fromJson(seq[RawAccount])
for account in rawAccounts:
result.add account.toGuestAccount


@@ -1,6 +1,6 @@
 import std/[options, tables, strutils, strformat, sugar]
 import jsony
-import ../types/unifiedcard
+import user, ../types/unifiedcard
 from ../../types import Card, CardKind, Video
 from ../../utils import twimg, https
@@ -27,6 +27,14 @@ proc parseMediaDetails(data: ComponentData; card: UnifiedCard; result: var Card)
   result.text = data.topicDetail.title
   result.dest = "Topic"

+proc parseJobDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
+  data.destination.parseDestination(card, result)
+
+  result.kind = CardKind.jobDetails
+  result.title = data.title
+  result.text = data.shortDescriptionText
+  result.dest = &"@{data.profileUser.username} · {data.location}"
+
 proc parseAppDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
   let app = card.appStoreData[data.appId][0]
@@ -84,6 +92,8 @@ proc parseUnifiedCard*(json: string): Card =
       component.parseMedia(card, result)
     of buttonGroup:
       discard
+    of ComponentType.jobDetails:
+      component.data.parseJobDetails(card, result)
     of ComponentType.hidden:
       result.kind = CardKind.hidden
     of ComponentType.unknown:


@@ -56,7 +56,7 @@ proc toUser*(raw: RawUser): User =
     tweets: raw.statusesCount,
     likes: raw.favouritesCount,
     media: raw.mediaCount,
-    verified: raw.verified,
+    verifiedType: raw.verifiedType,
     protected: raw.protected,
     joinDate: parseTwitterDate(raw.createdAt),
     banner: getBanner(raw),
@@ -68,6 +68,11 @@ proc toUser*(raw: RawUser): User =

   result.expandUserEntities(raw)

+proc parseHook*(s: string; i: var int; v: var User) =
+  var u: RawUser
+  parseHook(s, i, u)
+  v = toUser u
+
 proc parseUser*(json: string; username=""): User =
   handleErrors:
     case error.code
@@ -75,7 +80,7 @@ proc parseUser*(json: string; username=""): User =
     of userNotFound: return
     else: echo "[error - parseUser]: ", error

-  result = toUser json.fromJson(RawUser)
+  result = json.fromJson(User)

 proc parseUsers*(json: string; after=""): Result[User] =
   result = Result[User](beginning: after.len == 0)


@@ -1,5 +1,5 @@
 import options
-import user
+from ../../types import User

 type
   GraphUser* = object
@@ -9,7 +9,7 @@ type
       result*: UserResult

   UserResult = object
-    legacy*: RawUser
+    legacy*: User
     restId*: string
     isBlueVerified*: bool
     unavailableReason*: Option[string]


@@ -0,0 +1,4 @@
type
RawAccount* = object
oauthToken*: string
oauthTokenSecret*: string


@@ -1,5 +1,5 @@
 import std/tables
-import user
+from ../../types import User

 type
   Search* = object
@@ -7,7 +7,7 @@
     timeline*: Timeline

   GlobalObjects = object
-    users*: Table[string, RawUser]
+    users*: Table[string, User]

   Timeline = object
     instructions*: seq[Instructions]


@@ -1,7 +1,10 @@
-import options, tables
-from ../../types import VideoType, VideoVariant
+import std/[options, tables, times]
+import jsony
+from ../../types import VideoType, VideoVariant, User

 type
+  Text* = distinct string
+
   UnifiedCard* = object
     componentObjects*: Table[string, Component]
     destinationObjects*: Table[string, Destination]
@@ -13,6 +16,7 @@ type
     media
     swipeableMedia
     buttonGroup
+    jobDetails
     appStoreDetails
     twitterListDetails
     communityDetails
@@ -29,12 +33,15 @@
     appId*: string
     mediaId*: string
     destination*: string
+    location*: string
     title*: Text
     subtitle*: Text
     name*: Text
     memberCount*: int
     mediaList*: seq[MediaItem]
     topicDetail*: tuple[title: Text]
+    profileUser*: User
+    shortDescriptionText*: string

   MediaItem* = object
     id*: string
@@ -69,12 +76,9 @@ type
     title*: Text
     category*: Text

-  Text = object
-    content: string
-
   TypeField = Component | Destination | MediaEntity | AppStoreData

-converter fromText*(text: Text): string = text.content
+converter fromText*(text: Text): string = string(text)

 proc renameHook*(v: var TypeField; fieldName: var string) =
   if fieldName == "type":
@@ -86,6 +90,7 @@ proc enumHook*(s: string; v: var ComponentType) =
     of "media": media
     of "swipeable_media": swipeableMedia
     of "button_group": buttonGroup
+    of "job_details": jobDetails
     of "app_store_details": appStoreDetails
     of "twitter_list_details": twitterListDetails
     of "community_details": communityDetails
@@ -106,3 +111,18 @@ proc enumHook*(s: string; v: var MediaType) =
     of "photo": photo
     of "model3d": model3d
     else: echo "ERROR: Unknown enum value (MediaType): ", s; photo
+
+proc parseHook*(s: string; i: var int; v: var DateTime) =
+  var str: string
+  parseHook(s, i, str)
+  v = parse(str, "yyyy-MM-dd hh:mm:ss")
+
+proc parseHook*(s: string; i: var int; v: var Text) =
+  if s[i] == '"':
+    var str: string
+    parseHook(s, i, str)
+    v = Text(str)
+  else:
+    var t: tuple[content: string]
+    parseHook(s, i, t)
+    v = Text(t.content)


@@ -1,5 +1,6 @@
 import options
 import common
+from ../../types import VerifiedType

 type
   RawUser* = object
@@ -15,7 +16,7 @@ type
     favouritesCount*: int
     statusesCount*: int
     mediaCount*: int
-    verified*: bool
+    verifiedType*: VerifiedType
     protected*: bool
     profileLinkColor*: string
     profileBannerUrl*: string


@@ -39,11 +39,8 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =

   try:
     body
-  except ProtocolError:
-    # Twitter closed the connection, retry
-    body
-  except BadClientError:
-    # Twitter returned 503, we need a new client
+  except BadClientError, ProtocolError:
+    # Twitter returned 503 or closed the connection, we need a new client
     pool.release(c, true)
     badClient = false
     c = pool.acquire(heads)


@@ -6,7 +6,7 @@ from os import getEnv

 import jester

-import types, config, prefs, formatters, redis_cache, http_pool, tokens
+import types, config, prefs, formatters, redis_cache, http_pool, auth
 import views/[general, about]
 import routes/[
   preferences, timeline, status, media, search, rss, list, debug,
@@ -15,8 +15,13 @@ import routes/[
 const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
 const issuesUrl = "https://github.com/zedeus/nitter/issues"

-let configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
-let (cfg, fullCfg) = getConfig(configPath)
+let
+  configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
+  (cfg, fullCfg) = getConfig(configPath)
+  accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
+
+initAccountPool(cfg, accountsPath)

 if not cfg.enableDebug:
   # Silence Jester's query warning
@@ -38,8 +43,6 @@ waitFor initRedisPool(cfg)
 stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
 stdout.flushFile

-asyncCheck initTokenPool(cfg)
-
 createUnsupportedRouter(cfg)
 createResolverRouter(cfg)
 createPrefRouter(cfg)


@@ -1,10 +1,10 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import strutils, options, times, math, tables
+import strutils, options, times, math
 import packedjson, packedjson/deserialiser
 import types, parserutils, utils
 import experimental/parser/unifiedcard

-proc parseGraphTweet(js: JsonNode): Tweet
+proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet

 proc parseUser(js: JsonNode; id=""): User =
   if js.isNull: return
@@ -21,7 +21,7 @@ proc parseUser(js: JsonNode; id=""): User =
     tweets: js{"statuses_count"}.getInt,
     likes: js{"favourites_count"}.getInt,
     media: js{"media_count"}.getInt,
-    verified: js{"verified"}.getBool or js{"ext_is_blue_verified"}.getBool,
+    verifiedType: parseEnum[VerifiedType](js{"verified_type"}.getStr("None")),
     protected: js{"protected"}.getBool,
     joinDate: js{"created_at"}.getTime
   )
@@ -29,11 +29,13 @@ proc parseUser(js: JsonNode; id=""): User =
   result.expandUserEntities(js)

 proc parseGraphUser(js: JsonNode): User =
-  let user = ? js{"user_result", "result"}
-  result = parseUser(user{"legacy"})
+  var user = js{"user_result", "result"}
+  if user.isNull:
+    user = ? js{"user_results", "result"}
+  result = parseUser(user{"legacy"}, user{"rest_id"}.getStr)

-  if "is_blue_verified" in user:
-    result.verified = user{"is_blue_verified"}.getBool()
+  if result.verifiedType == VerifiedType.none and user{"is_blue_verified"}.getBool(false):
+    result.verifiedType = blue

 proc parseGraphList*(js: JsonNode): List =
   if js.isNull: return
@@ -217,8 +219,6 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
     )
   )

-  result.expandTweetEntities(js)
-
   # fix for pinned threads
   if result.hasThread and result.threadId == 0:
     result.threadId = js{"self_thread", "id_str"}.getId
@@ -252,6 +252,8 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
     else:
       result.card = some parseCard(jsCard, js{"entities", "urls"})

+  result.expandTweetEntities(js)
+
   with jsMedia, js{"extended_entities", "media"}:
     for m in jsMedia:
       case m{"type"}.getStr
@@ -287,169 +289,6 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
       result.text.removeSuffix(" Learn more.")
       result.available = false
proc parseLegacyTweet(js: JsonNode): Tweet =
result = parseTweet(js, js{"card"})
if not result.isNil and result.available:
result.user = parseUser(js{"user"})
if result.quote.isSome:
result.quote = some parseLegacyTweet(js{"quoted_status"})
proc parseTweetSearch*(js: JsonNode; after=""): Timeline =
result.beginning = after.len == 0
if js.kind == JNull or "modules" notin js or js{"modules"}.len == 0:
return
for item in js{"modules"}:
with tweet, item{"status", "data"}:
let parsed = parseLegacyTweet(tweet)
if parsed.retweet.isSome:
parsed.retweet = some parseLegacyTweet(tweet{"retweeted_status"})
result.content.add @[parsed]
if result.content.len > 0:
result.bottom = $(result.content[^1][0].id - 1)
proc parseUserTimelineTweet(tweet: JsonNode; users: TableRef[string, User]): Tweet =
result = parseTweet(tweet, tweet{"card"})
if result.isNil or not result.available:
return
with user, tweet{"user"}:
let userId = user{"id_str"}.getStr
if user{"ext_is_blue_verified"}.getBool(false):
users[userId].verified = users[userId].verified or true
result.user = users[userId]
proc parseUserTimeline*(js: JsonNode; after=""): Profile =
result = Profile(tweets: Timeline(beginning: after.len == 0))
if js.kind == JNull or "response" notin js or "twitter_objects" notin js:
return
var users = newTable[string, User]()
for userId, user in js{"twitter_objects", "users"}:
users[userId] = parseUser(user)
for entity in js{"response", "timeline"}:
let
tweetId = entity{"tweet", "id"}.getId
isPinned = entity{"tweet", "is_pinned"}.getBool(false)
with tweet, js{"twitter_objects", "tweets", $tweetId}:
var parsed = parseUserTimelineTweet(tweet, users)
if not parsed.isNil and parsed.available:
if parsed.quote.isSome:
parsed.quote = some parseUserTimelineTweet(tweet{"quoted_status"}, users)
if parsed.retweet.isSome:
let retweet = parseUserTimelineTweet(tweet{"retweeted_status"}, users)
if retweet.quote.isSome:
retweet.quote = some parseUserTimelineTweet(tweet{"retweeted_status", "quoted_status"}, users)
parsed.retweet = some retweet
if isPinned:
parsed.pinned = true
result.pinned = some parsed
else:
result.tweets.content.add parsed
result.tweets.bottom = js{"response", "cursor", "bottom"}.getStr
# proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
# let intId = if id.len > 0: parseBiggestInt(id) else: 0
# result = global.tweets.getOrDefault(id, Tweet(id: intId))
# if result.quote.isSome:
# let quote = get(result.quote).id
# if $quote in global.tweets:
# result.quote = some global.tweets[$quote]
# else:
# result.quote = some Tweet()
# if result.retweet.isSome:
# let rt = get(result.retweet).id
# if $rt in global.tweets:
# result.retweet = some finalizeTweet(global, $rt)
# else:
# result.retweet = some Tweet()
# proc parsePin(js: JsonNode; global: GlobalObjects): Tweet =
# let pin = js{"pinEntry", "entry", "entryId"}.getStr
# if pin.len == 0: return
# let id = pin.getId
# if id notin global.tweets: return
# global.tweets[id].pinned = true
# return finalizeTweet(global, id)
# proc parseGlobalObjects(js: JsonNode): GlobalObjects =
# result = GlobalObjects()
# let
# tweets = ? js{"globalObjects", "tweets"}
# users = ? js{"globalObjects", "users"}
# for k, v in users:
# result.users[k] = parseUser(v, k)
# for k, v in tweets:
# var tweet = parseTweet(v, v{"card"})
# if tweet.user.id in result.users:
# tweet.user = result.users[tweet.user.id]
# result.tweets[k] = tweet
# proc parseInstructions(res: var Profile; global: GlobalObjects; js: JsonNode) =
# if js.kind != JArray or js.len == 0:
# return
# for i in js:
# if res.tweets.beginning and i{"pinEntry"}.notNull:
# with pin, parsePin(i, global):
# res.pinned = some pin
# with r, i{"replaceEntry", "entry"}:
# if "top" in r{"entryId"}.getStr:
# res.tweets.top = r.getCursor
# elif "bottom" in r{"entryId"}.getStr:
# res.tweets.bottom = r.getCursor
# proc parseTimeline*(js: JsonNode; after=""): Profile =
# result = Profile(tweets: Timeline(beginning: after.len == 0))
# let global = parseGlobalObjects(? js)
# let instructions = ? js{"timeline", "instructions"}
# if instructions.len == 0: return
# result.parseInstructions(global, instructions)
# var entries: JsonNode
# for i in instructions:
# if "addEntries" in i:
# entries = i{"addEntries", "entries"}
# for e in ? entries:
# let entry = e{"entryId"}.getStr
# if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry:
# let tweet = finalizeTweet(global, e.getEntryId)
# if not tweet.available: continue
# result.tweets.content.add tweet
# elif "cursor-top" in entry:
# result.tweets.top = e.getCursor
# elif "cursor-bottom" in entry:
# result.tweets.bottom = e.getCursor
# elif entry.startsWith("sq-cursor"):
# with cursor, e{"content", "operation", "cursor"}:
# if cursor{"cursorType"}.getStr == "Bottom":
# result.tweets.bottom = cursor{"value"}.getStr
# else:
# result.tweets.top = cursor{"value"}.getStr
proc parsePhotoRail*(js: JsonNode): PhotoRail = proc parsePhotoRail*(js: JsonNode): PhotoRail =
with error, js{"error"}: with error, js{"error"}:
if error.getStr == "Not authorized.": if error.getStr == "Not authorized.":
@ -467,7 +306,7 @@ proc parsePhotoRail*(js: JsonNode): PhotoRail =
if url.len == 0: continue if url.len == 0: continue
result.add GalleryPhoto(url: url, tweetId: $t.id) result.add GalleryPhoto(url: url, tweetId: $t.id)
proc parseGraphTweet(js: JsonNode): Tweet = proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet =
if js.kind == JNull: if js.kind == JNull:
return Tweet() return Tweet()
@ -483,9 +322,14 @@ proc parseGraphTweet(js: JsonNode): Tweet =
of "TweetPreviewDisplay": of "TweetPreviewDisplay":
return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.") return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
of "TweetWithVisibilityResults": of "TweetWithVisibilityResults":
return parseGraphTweet(js{"tweet"}) return parseGraphTweet(js{"tweet"}, isLegacy)
else:
discard
var jsCard = copy(js{"tweet_card", "legacy"}) if not js.hasKey("legacy"):
return Tweet()
var jsCard = copy(js{if isLegacy: "card" else: "tweet_card", "legacy"})
if jsCard.kind != JNull: if jsCard.kind != JNull:
var values = newJObject() var values = newJObject()
for val in jsCard["binding_values"]: for val in jsCard["binding_values"]:
@ -500,10 +344,9 @@ proc parseGraphTweet(js: JsonNode): Tweet =
result.expandNoteTweetEntities(noteTweet) result.expandNoteTweetEntities(noteTweet)
if result.quote.isSome: if result.quote.isSome:
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"})) result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}, isLegacy))
proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] = proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
let thread = js{"content", "items"}
for t in js{"content", "items"}: for t in js{"content", "items"}:
let entryId = t{"entryId"}.getStr let entryId = t{"entryId"}.getStr
if "cursor-showmore" in entryId: if "cursor-showmore" in entryId:
@ -511,28 +354,33 @@ proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
result.thread.cursor = cursor.getStr result.thread.cursor = cursor.getStr
result.thread.hasMore = true result.thread.hasMore = true
elif "tweet" in entryId: elif "tweet" in entryId:
let tweet = parseGraphTweet(t{"item", "content", "tweetResult", "result"}) let
result.thread.content.add tweet isLegacy = t{"item"}.hasKey("itemContent")
(contentKey, resultKey) = if isLegacy: ("itemContent", "tweet_results")
else: ("content", "tweetResult")
if t{"item", "content", "tweetDisplayType"}.getStr == "SelfThread": with content, t{"item", contentKey}:
result.self = true result.thread.content.add parseGraphTweet(content{resultKey, "result"}, isLegacy)
if content{"tweetDisplayType"}.getStr == "SelfThread":
result.self = true
proc parseGraphTweetResult*(js: JsonNode): Tweet = proc parseGraphTweetResult*(js: JsonNode): Tweet =
with tweet, js{"data", "tweet_result", "result"}: with tweet, js{"data", "tweet_result", "result"}:
result = parseGraphTweet(tweet) result = parseGraphTweet(tweet, false)
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation = proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true)) result = Conversation(replies: Result[Chain](beginning: true))
let instructions = ? js{"data", "timeline_response", "instructions"} let instructions = ? js{"data", "threaded_conversation_with_injections_v2", "instructions"}
if instructions.len == 0: if instructions.len == 0:
return return
for e in instructions[0]{"entries"}: for e in instructions[0]{"entries"}:
let entryId = e{"entryId"}.getStr let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"): if entryId.startsWith("tweet"):
with tweetResult, e{"content", "content", "tweetResult", "result"}: with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult) let tweet = parseGraphTweet(tweetResult, true)
if not tweet.available: if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId()) tweet.id = parseBiggestInt(entryId.getId())
@ -546,7 +394,7 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
      let tweet = Tweet(
        id: parseBiggestInt(id),
        available: false,
-       text: e{"content", "content", "tombstoneInfo", "richText"}.getTombstone
+       text: e{"content", "itemContent", "tombstoneInfo", "richText"}.getTombstone
      )
      if id == tweetId:
@@ -560,7 +408,7 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
      else:
        result.replies.content.add thread
    elif entryId.startsWith("cursor-bottom"):
-     result.replies.bottom = e{"content", "content", "value"}.getStr
+     result.replies.bottom = e{"content", "itemContent", "value"}.getStr

proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
  result = Profile(tweets: Timeline(beginning: after.len == 0))
@@ -578,7 +426,7 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
        let entryId = e{"entryId"}.getStr
        if entryId.startsWith("tweet"):
          with tweetResult, e{"content", "content", "tweetResult", "result"}:
-           let tweet = parseGraphTweet(tweetResult)
+           let tweet = parseGraphTweet(tweetResult, false)
            if not tweet.available:
              tweet.id = parseBiggestInt(entryId.getId())
            result.tweets.content.add tweet
@@ -589,7 +437,7 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
          result.tweets.bottom = e{"content", "value"}.getStr
    if after.len == 0 and i{"__typename"}.getStr == "TimelinePinEntry":
      with tweetResult, i{"entry", "content", "content", "tweetResult", "result"}:
-       let tweet = parseGraphTweet(tweetResult)
+       let tweet = parseGraphTweet(tweetResult, false)
        tweet.pinned = true
        if not tweet.available and tweet.tombstone.len == 0:
          let entryId = i{"entry", "entryId"}.getEntryId
@@ -597,8 +445,8 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
            tweet.id = parseBiggestInt(entryId)
        result.pinned = some tweet

-proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
-  result = Timeline(beginning: after.len == 0)
+proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
+  result = Result[T](beginning: after.len == 0)

  let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
  if instructions.len == 0:
@@ -607,15 +455,21 @@ proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
  for instruction in instructions:
    let typ = instruction{"type"}.getStr
    if typ == "TimelineAddEntries":
-     for e in instructions[0]{"entries"}:
+     for e in instruction{"entries"}:
        let entryId = e{"entryId"}.getStr
-       if entryId.startsWith("tweet"):
-         with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
-           let tweet = parseGraphTweet(tweetResult)
-           if not tweet.available:
-             tweet.id = parseBiggestInt(entryId.getId())
-           result.content.add tweet
-       elif entryId.startsWith("cursor-bottom"):
+       when T is Tweets:
+         if entryId.startsWith("tweet"):
+           with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
+             let tweet = parseGraphTweet(tweetRes)
+             if not tweet.available:
+               tweet.id = parseBiggestInt(entryId.getId())
+             result.content.add tweet
+       elif T is User:
+         if entryId.startsWith("user"):
+           with userRes, e{"content", "itemContent"}:
+             result.content.add parseGraphUser(userRes)
+       if entryId.startsWith("cursor-bottom"):
          result.bottom = e{"content", "value"}.getStr
    elif typ == "TimelineReplaceEntry":
      if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
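Note: the generic parseGraphSearch replaces the old tweet-only parser; instantiated with Tweets it produces a Timeline (an alias for Result[Tweets] in types.nim), and with User a Result[User] for the user-search tab. A minimal sketch of both instantiations, assuming the parser and types modules are imported and the file name is only a placeholder for a fetched response body:

    # sketch only; module layout and the input file are assumptions
    import packedjson
    import types, parser

    let js = parseJson(readFile("search_response.json"))
    let tweets = parseGraphSearch[Tweets](js)   # Result[Tweets], i.e. a Timeline
    let users  = parseGraphSearch[User](js)     # Result[User], used by the user-search tab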


@@ -1,9 +1,17 @@
# SPDX-License-Identifier: AGPL-3.0-only
-import std/[strutils, times, macros, htmlgen, options, algorithm, re]
+import std/[times, macros, htmlgen, options, algorithm, re]
+import std/strutils except escape
import std/unicode except strip
+from xmltree import escape
import packedjson
import types, utils, formatters

+const
+  unicodeOpen = "\uFFFA"
+  unicodeClose = "\uFFFB"
+  xmlOpen = escape("<")
+  xmlClose = escape(">")
+
let
  unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
  unReplace = "$1<a href=\"/$2\">@$2</a>"
@@ -36,7 +44,8 @@ template with*(ident, value, body): untyped =
template with*(ident; value: JsonNode; body): untyped =
  if true:
    let ident {.inject.} = value
-   if value.notNull: body
+   # value.notNull causes a compilation error for versions < 1.6.14
+   if notNull(value): body

template getCursor*(js: JsonNode): string =
  js{"content", "operation", "cursor", "value"}.getStr
@@ -237,7 +246,7 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
    .replacef(htRegex, htReplace)

proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlice: Slice[int];
-                       replyTo=""; hasQuote=false) =
+                       replyTo=""; hasRedundantLink=false) =
  let hasCard = tweet.card.isSome
  var replacements = newSeq[ReplaceSlice]()
@@ -248,7 +257,7 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
      if urlStr.len == 0 or urlStr notin text:
        continue
-     replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote)
+     replacements.extractUrls(u, textSlice.b, hideTwitter = hasRedundantLink)
      if hasCard and u{"url"}.getStr == get(tweet.card).url:
        get(tweet.card).url = u{"expanded_url"}.getStr
@@ -288,9 +297,10 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
  let
    entities = ? js{"entities"}
-   hasQuote = js{"is_quote_status"}.getBool
    textRange = js{"display_text_range"}
    textSlice = textRange{0}.getInt .. textRange{1}.getInt
+   hasQuote = js{"is_quote_status"}.getBool
+   hasJobCard = tweet.card.isSome and get(tweet.card).kind == jobDetails

  var replyTo = ""
  if tweet.replyId != 0:
@@ -298,12 +308,14 @@ proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
      replyTo = reply.getStr
      tweet.reply.add replyTo

- tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote)
+ tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote or hasJobCard)

proc expandNoteTweetEntities*(tweet: Tweet; js: JsonNode) =
  let
    entities = ? js{"entity_set"}
-   text = js{"text"}.getStr
+   text = js{"text"}.getStr.multiReplace(("<", unicodeOpen), (">", unicodeClose))
    textSlice = 0..text.runeLen

  tweet.expandTextEntities(entities, text, textSlice)
+ tweet.text = tweet.text.multiReplace((unicodeOpen, xmlOpen), (unicodeClose, xmlClose))
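Note: the constants added at the top of this file drive the note-tweet escaping above. Raw "<" and ">" in long-form tweet text are first mapped to the noncharacters U+FFFA/U+FFFB, apparently so the text survives entity expansion and HTML generation without being treated as markup and without changing the rune count used for the text slice, then swapped for their XML entities at the end. A self-contained sketch of that round trip (the example string is invented for illustration):

    import std/strutils except escape
    from xmltree import escape

    const
      unicodeOpen = "\uFFFA"
      unicodeClose = "\uFFFB"
      xmlOpen = escape("<")    # "&lt;"
      xmlClose = escape(">")   # "&gt;"

    let raw = "the first 18 HTML tags: <title>, <a>"
    # mask before expansion so the angle brackets cannot be parsed as HTML
    let masked = raw.multiReplace(("<", unicodeOpen), (">", unicodeClose))
    # ... entity expansion runs on the masked text ...
    let rendered = masked.multiReplace((unicodeOpen, xmlOpen), (unicodeClose, xmlClose))
    assert rendered == "the first 18 HTML tags: &lt;title&gt;, &lt;a&gt;"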


@@ -60,7 +60,7 @@ proc genQueryParam*(query: Query): string =
      param &= "OR "

  if query.fromUser.len > 0 and query.kind in {posts, media}:
-   param &= "filter:self_threads OR-filter:replies "
+   param &= "filter:self_threads OR -filter:replies "

  if "nativeretweets" notin query.excludes:
    param &= "include:nativeretweets "


@@ -52,6 +52,7 @@ proc initRedisPool*(cfg: Config) {.async.} =
      await migrate("profileDates", "p:*")
      await migrate("profileStats", "p:*")
      await migrate("userType", "p:*")
+     await migrate("verifiedType", "p:*")

    pool.withAcquire(r):
      # optimize memory usage for user ID buckets
@@ -85,7 +86,7 @@ proc cache*(data: List) {.async.} =
  await setEx(data.listKey, listCacheTime, compress(toFlatty(data)))

proc cache*(data: PhotoRail; name: string) {.async.} =
- await setEx("pr:" & toLower(name), baseCacheTime, compress(toFlatty(data)))
+ await setEx("pr:" & toLower(name), baseCacheTime * 2, compress(toFlatty(data)))

proc cache*(data: User) {.async.} =
  if data.username.len == 0: return
@@ -147,15 +148,15 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
    if result.len > 0 and user.id.len > 0:
      await all(cacheUserId(result, user.id), cache(user))

-proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
-  if id == 0: return
-  let tweet = await get(id.tweetKey)
-  if tweet != redisNil:
-    tweet.deserialize(Tweet)
-  else:
-    result = await getGraphTweetResult($id)
-    if not result.isNil:
-      await cache(result)
+# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
+#   if id == 0: return
+#   let tweet = await get(id.tweetKey)
+#   if tweet != redisNil:
+#     tweet.deserialize(Tweet)
+#   else:
+#     result = await getGraphTweetResult($id)
+#     if not result.isNil:
+#       await cache(result)

proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
  if name.len == 0: return


@@ -1,10 +1,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
import jester
import router_utils
-import ".."/[tokens, types]
+import ".."/[auth, types]

proc createDebugRouter*(cfg: Config) =
  router debug:
-   get "/.tokens":
+   get "/.health":
+     respJson getAccountPoolHealth()
+
+   get "/.accounts":
      cond cfg.enableDebug
-     respJson getPoolJson()
+     respJson getAccountPoolDebug()


@@ -37,6 +37,7 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
  try:
    let res = await client.get(url)
    if res.status != "200 OK":
+     echo "[media] Proxying failed, status: $1, url: $2" % [res.status, url]
      return Http404

    let hashed = $hash(url)
@@ -65,6 +66,7 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
        await request.client.send(data)
        data.setLen 0
  except HttpRequestError, ProtocolError, OSError:
+   echo "[media] Proxying exception, error: $1, url: $2" % [getCurrentExceptionMsg(), url]
    result = Http404
  finally:
    client.close()


@@ -15,7 +15,7 @@ proc redisKey*(page, name, cursor: string): string =
  if cursor.len > 0:
    result &= ":" & cursor

-proc timelineRss*(req: Request; cfg: Config; query: Query; prefs: Prefs): Future[Rss] {.async.} =
+proc timelineRss*(req: Request; cfg: Config; query: Query; prefs: Prefs; tab, param: string): Future[Rss] {.async.} =
  var profile: Profile
  let
    name = req.params.getOrDefault("name")
@@ -27,7 +27,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query; prefs: Prefs): Future
  else:
    var q = query
    q.fromUser = names
-   profile.tweets = await getTweetSearch(q, after)
+   profile.tweets = await getGraphTweetSearch(q, after)
    # this is kinda dumb
    profile.user = User(
      username: name,
@@ -39,7 +39,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query; prefs: Prefs): Future
    return Rss(feed: profile.user.username, cursor: "suspended")

  if profile.user.fullname.len > 0:
-   let rss = renderTimelineRss(profile, cfg, prefs, multi=(names.len > 1))
+   let rss = renderTimelineRss(profile, cfg, prefs, tab, param, multi=(names.len > 1))
    return Rss(feed: rss, cursor: profile.tweets.bottom)

template respRss*(rss, page) =
@@ -76,7 +76,7 @@ proc createRssRouter*(cfg: Config) =
      if rss.cursor.len > 0:
        respRss(rss, "Search")

-     let tweets = await getTweetSearch(query, cursor)
+     let tweets = await getGraphTweetSearch(query, cursor)
      rss.cursor = tweets.bottom
      rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg, cookiePrefs())
@@ -94,7 +94,7 @@ proc createRssRouter*(cfg: Config) =
      if rss.cursor.len > 0:
        respRss(rss, "User")

-     rss = await timelineRss(request, cfg, Query(fromUser: @[name]), cookiePrefs())
+     rss = await timelineRss(request, cfg, Query(fromUser: @[name]), cookiePrefs(), "", "")

      await cacheRss(key, rss)
      respRss(rss, "User")
@@ -110,11 +110,13 @@ proc createRssRouter*(cfg: Config) =
        case tab
        of "with_replies": getReplyQuery(name)
        of "media": getMediaQuery(name)
-       # of "search": initQuery(params(request), name=name)
+       of "search": initQuery(params(request), name=name)
        else: Query(fromUser: @[name])

+     let param = if tab != "search": ""
+                 else: genQueryUrl(query)
      let searchKey = if tab != "search": ""
-                     else: ":" & $hash(genQueryUrl(query))
+                     else: ":" & $hash(param)

      let key = redisKey(tab, name & searchKey, getCursor())
@@ -122,7 +124,7 @@ proc createRssRouter*(cfg: Config) =
      if rss.cursor.len > 0:
        respRss(rss, "User")

-     rss = await timelineRss(request, cfg, query, cookiePrefs())
+     rss = await timelineRss(request, cfg, query, cookiePrefs(), tab, param)

      await cacheRss(key, rss)
      respRss(rss, "User")
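Note: with the search tab re-enabled above, the RSS cache key for a user-search feed hashes the generated query string instead of rebuilding it twice. A rough sketch of how the pieces combine inside that route (values are invented for illustration; this is a fragment, not runnable on its own):

    # e.g. tab = "search", name = "nim_lang"
    let param = genQueryUrl(query)                        # something like "q=metaprogramming"
    let searchKey = ":" & $hash(param)
    let key = redisKey(tab, name & searchKey, getCursor())
    # yields roughly "search:nim_lang:<hash>", plus ":<cursor>" when paginating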


@@ -29,13 +29,13 @@ proc createSearchRouter*(cfg: Config) =
        redirect("/" & q)
      var users: Result[User]
      try:
-       users = await getUserSearch(query, getCursor())
+       users = await getGraphUserSearch(query, getCursor())
      except InternalError:
        users = Result[User](beginning: true, query: query)
      resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
    of tweets:
      let
-       tweets = await getTweetSearch(query, getCursor())
+       tweets = await getGraphTweetSearch(query, getCursor())
        rss = "/search/rss?" & genQueryUrl(query)
      resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
                      request, cfg, prefs, title, rss=rss)


@@ -53,10 +53,10 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
  result =
    case query.kind
-   of posts: await getUserTimeline(userId, after)
+   of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after)
    of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
    of media: await getGraphUserTweets(userId, TimelineKind.media, after)
-   else: Profile(tweets: await getTweetSearch(query, after))
+   else: Profile(tweets: await getGraphTweetSearch(query, after))

  result.user = await user
  result.photoRail = await rail
@@ -67,7 +67,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
                   rss, after: string): Future[string] {.async.} =
  if query.fromUser.len != 1:
    let
-     timeline = await getTweetSearch(query, after)
+     timeline = await getGraphTweetSearch(query, after)
      html = renderTweetSearch(timeline, prefs, getPath())
    return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@@ -122,7 +122,7 @@ proc createTimelineRouter*(cfg: Config) =
      # used for the infinite scroll feature
      if @"scroll".len > 0:
        if query.fromUser.len != 1:
-         var timeline = await getTweetSearch(query, after)
+         var timeline = await getGraphTweetSearch(query, after)
          if timeline.content.len == 0: resp Http404
          timeline.beginning = true
          resp $renderTweetSearch(timeline, prefs, getPath())


@@ -28,6 +28,8 @@ $more_replies_dots: #AD433B;
$error_red: #420A05;

$verified_blue: #1DA1F2;
+$verified_business: #FAC82B;
+$verified_government: #C1B6A4;

$icon_text: $fg_color;
$tab: $fg_color;


@@ -39,6 +39,8 @@ body {
  --error_red: #{$error_red};

  --verified_blue: #{$verified_blue};
+ --verified_business: #{$verified_business};
+ --verified_government: #{$verified_government};

  --icon_text: #{$icon_text};
  --tab: #{$fg_color};
@@ -141,17 +143,30 @@ ul {

.verified-icon {
  color: var(--icon_text);
- background-color: var(--verified_blue);
  border-radius: 50%;
  flex-shrink: 0;
  margin: 2px 0 3px 3px;
- padding-top: 2px;
- height: 12px;
+ padding-top: 3px;
+ height: 11px;
  width: 14px;
  font-size: 8px;
  display: inline-block;
  text-align: center;
  vertical-align: middle;
+
+ &.blue {
+   background-color: var(--verified_blue);
+ }
+
+ &.business {
+   color: var(--bg_panel);
+   background-color: var(--verified_business);
+ }
+
+ &.government {
+   color: var(--bg_panel);
+   background-color: var(--verified_government);
+ }
}

@media(max-width: 600px) {


@@ -70,8 +70,9 @@ nav {
.lp {
  height: 14px;
- margin-top: 2px;
- display: block;
+ display: inline-block;
+ position: relative;
+ top: 2px;
  fill: var(--fg_nav);

  &:hover {


@@ -115,7 +115,7 @@
}

.profile-card-tabs-name {
- @include breakable;
+ flex-shrink: 100;
}

.profile-card-avatar {


@@ -14,6 +14,8 @@
button {
  margin: 0 2px 0 0;
  height: 23px;
+ display: flex;
+ align-items: center;
}

.pref-input {


@@ -1,166 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, httpclient, times, sequtils, json, random
-import strutils, tables
-import types, consts
-
-const
-  maxConcurrentReqs = 5  # max requests at a time per token, to avoid race conditions
-  maxLastUse = 1.hours  # if a token is unused for 60 minutes, it expires
-  maxAge = 2.hours + 55.minutes  # tokens expire after 3 hours
-  failDelay = initDuration(minutes=30)
-
-var
-  tokenPool: seq[Token]
-  lastFailed: Time
-  enableLogging = false
-
-let headers = newHttpHeaders({"authorization": auth})
-
-template log(str) =
-  if enableLogging: echo "[tokens] ", str
-
-proc getPoolJson*(): JsonNode =
-  var
-    list = newJObject()
-    totalReqs = 0
-    totalPending = 0
-    reqsPerApi: Table[string, int]
-
-  for token in tokenPool:
-    totalPending.inc(token.pending)
-    list[token.tok] = %*{
-      "apis": newJObject(),
-      "pending": token.pending,
-      "init": $token.init,
-      "lastUse": $token.lastUse
-    }
-
-    for api in token.apis.keys:
-      list[token.tok]["apis"][$api] = %token.apis[api]
-
-      let
-        maxReqs =
-          case api
-          of Api.search: 100000
-          of Api.photoRail: 180
-          of Api.timeline: 187
-          of Api.userTweets, Api.userTimeline: 300
-          of Api.userTweetsAndReplies, Api.userRestId,
-             Api.userScreenName, Api.tweetDetail, Api.tweetResult,
-             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia: 500
-          of Api.userSearch: 900
-        reqs = maxReqs - token.apis[api].remaining
-
-      reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
-      totalReqs.inc(reqs)
-
-  return %*{
-    "amount": tokenPool.len,
-    "requests": totalReqs,
-    "pending": totalPending,
-    "apis": reqsPerApi,
-    "tokens": list
-  }
-
-proc rateLimitError*(): ref RateLimitError =
-  newException(RateLimitError, "rate limited")
-
-proc fetchToken(): Future[Token] {.async.} =
-  if getTime() - lastFailed < failDelay:
-    raise rateLimitError()
-
-  let client = newAsyncHttpClient(headers=headers)
-
-  try:
-    let
-      resp = await client.postContent(activate)
-      tokNode = parseJson(resp)["guest_token"]
-      tok = tokNode.getStr($(tokNode.getInt))
-      time = getTime()
-
-    return Token(tok: tok, init: time, lastUse: time)
-  except Exception as e:
-    echo "[tokens] fetching token failed: ", e.msg
-    if "Try again" notin e.msg:
-      echo "[tokens] fetching tokens paused, resuming in 30 minutes"
-      lastFailed = getTime()
-  finally:
-    client.close()
-
-proc expired(token: Token): bool =
-  let time = getTime()
-  token.init < time - maxAge or token.lastUse < time - maxLastUse
-
-proc isLimited(token: Token; api: Api): bool =
-  if token.isNil or token.expired:
-    return true
-
-  if api in token.apis:
-    let limit = token.apis[api]
-    return (limit.remaining <= 10 and limit.reset > epochTime().int)
-  else:
-    return false
-
-proc isReady(token: Token; api: Api): bool =
-  not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
-
-proc release*(token: Token; used=false; invalid=false) =
-  if token.isNil: return
-  if invalid or token.expired:
-    if invalid: log "discarding invalid token"
-    elif token.expired: log "discarding expired token"
-
-    let idx = tokenPool.find(token)
-    if idx > -1: tokenPool.delete(idx)
-  elif used:
-    dec token.pending
-    token.lastUse = getTime()
-
-proc getToken*(api: Api): Future[Token] {.async.} =
-  for i in 0 ..< tokenPool.len:
-    if result.isReady(api): break
-    release(result)
-    result = tokenPool.sample()
-
-  if not result.isReady(api):
-    release(result)
-    result = await fetchToken()
-    log "added new token to pool"
-    tokenPool.add result
-
-  if not result.isNil:
-    inc result.pending
-  else:
-    raise rateLimitError()
-
-proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
-  # avoid undefined behavior in race conditions
-  if api in token.apis:
-    let limit = token.apis[api]
-    if limit.reset >= reset and limit.remaining < remaining:
-      return
-
-  token.apis[api] = RateLimit(remaining: remaining, reset: reset)
-
-proc poolTokens*(amount: int) {.async.} =
-  var futs: seq[Future[Token]]
-  for i in 0 ..< amount:
-    futs.add fetchToken()
-
-  for token in futs:
-    var newToken: Token
-
-    try: newToken = await token
-    except: discard
-
-    if not newToken.isNil:
-      log "added new token to pool"
-      tokenPool.add newToken
-
-proc initTokenPool*(cfg: Config) {.async.} =
-  enableLogging = cfg.enableDebug
-
-  while true:
-    if tokenPool.countIt(not it.isLimited(Api.userTimeline)) < cfg.minTokens:
-      await poolTokens(min(4, cfg.minTokens - tokenPool.len))
-    await sleepAsync(2000)


@@ -10,18 +10,13 @@ type
  BadClientError* = object of CatchableError

  TimelineKind* {.pure.} = enum
-   tweets
-   replies
-   media
+   tweets, replies, media

  Api* {.pure.} = enum
    tweetDetail
    tweetResult
-   timeline
-   userTimeline
    photoRail
    search
-   userSearch
    list
    listBySlug
    listMembers
@@ -35,11 +30,13 @@ type
  RateLimit* = object
    remaining*: int
    reset*: int
+   limited*: bool
+   limitedAt*: int

- Token* = ref object
-   tok*: string
-   init*: Time
-   lastUse*: Time
+ GuestAccount* = ref object
+   id*: int64
+   oauthToken*: string
+   oauthSecret*: string
    pending*: int
    apis*: Table[Api, RateLimit]
@@ -54,7 +51,7 @@ type
    userNotFound = 50
    suspended = 63
    rateLimited = 88
-   invalidToken = 89
+   expiredToken = 89
    listIdOrSlug = 112
    tweetNotFound = 144
    tweetNotAuthorized = 179
@@ -64,6 +61,12 @@ type
    tweetUnavailable = 421
    tweetCensored = 422

+ VerifiedType* = enum
+   none = "None"
+   blue = "Blue"
+   business = "Business"
+   government = "Government"
+
  User* = object
    id*: string
    username*: string
@@ -79,7 +82,7 @@ type
    tweets*: int
    likes*: int
    media*: int
-   verified*: bool
+   verifiedType*: VerifiedType
    protected*: bool
    suspended*: bool
    joinDate*: DateTime
@@ -163,6 +166,7 @@ type
    imageDirectMessage = "image_direct_message"
    audiospace = "audiospace"
    newsletterPublication = "newsletter_publication"
+   jobDetails = "job_details"
    hidden
    unknown
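
Note: because each VerifiedType value carries its API string ("None", "Blue", "Business", "Government"), the parser side can map the raw verified_type field onto the enum with strutils.parseEnum. A small self-contained sketch of that mapping; this is an assumption about how the parser consumes the type, not shown in this hunk:

    import std/strutils

    type VerifiedType = enum   # mirrors the enum added above
      none = "None", blue = "Blue", business = "Business", government = "Government"

    let raw = "Business"       # e.g. js{"verified_type"}.getStr("None")
    let kind = parseEnum[VerifiedType](raw, none)
    assert kind == business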


@@ -16,7 +16,8 @@ const
    "twimg.com",
    "abs.twimg.com",
    "pbs.twimg.com",
-   "video.twimg.com"
+   "video.twimg.com",
+   "x.com"
  ]

proc setHmacKey*(key: string) =
@@ -30,7 +31,9 @@ proc getHmac*(data: string): string =
proc getVidUrl*(link: string): string =
  if link.len == 0: return
- let sig = getHmac(link)
+ let
+   link = link.replace("cmaf", "fmp4")
+   sig = getHmac(link)
  if base64Media:
    &"/video/enc/{sig}/{encode(link, safe=true)}"
  else:
@@ -57,4 +60,4 @@ proc isTwitterUrl*(uri: Uri): bool =
  uri.hostname in twitterDomains

proc isTwitterUrl*(url: string): bool =
- parseUri(url).hostname in twitterDomains
+ isTwitterUrl(parseUri(url))
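Note: forcing fmp4 above is a plain substring rewrite of the playlist URL before it is signed for the proxy route. A trivial standalone sketch with an invented URL:

    import std/strutils

    let cmaf = "https://video.twimg.com/ext_tw_video/123/pu/pl/abc.m3u8?container=cmaf"
    let forced = cmaf.replace("cmaf", "fmp4")
    assert forced.endsWith("container=fmp4")
    # the forced URL is what getHmac signs and what the /video/ proxy route ultimately fetches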


@@ -52,7 +52,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
  let opensearchUrl = getUrlPrefix(cfg) & "/opensearch"

  buildHtml(head):
-   link(rel="stylesheet", type="text/css", href="/css/style.css?v=18")
+   link(rel="stylesheet", type="text/css", href="/css/style.css?v=19")
    link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2")

    if theme.len > 0:


@@ -23,6 +23,13 @@ proc icon*(icon: string; text=""; title=""; class=""; href=""): VNode =
    if text.len > 0:
      text " " & text

+template verifiedIcon*(user: User): untyped {.dirty.} =
+  if user.verifiedType != VerifiedType.none:
+    let lower = ($user.verifiedType).toLowerAscii()
+    icon "ok", class=(&"verified-icon {lower}"), title=(&"Verified {lower} account")
+  else:
+    text ""
+
proc linkUser*(user: User, class=""): VNode =
  let
    isName = "username" notin class
@@ -32,11 +39,11 @@ proc linkUser*(user: User, class=""): VNode =
  buildHtml(a(href=href, class=class, title=nameText)):
    text nameText
-   if isName and user.verified:
-     icon "ok", class="verified-icon", title="Verified account"
-   if isName and user.protected:
+   if isName:
+     verifiedIcon(user)
+   if user.protected:
      text " "
      icon "lock", title="Protected account"

proc linkText*(text: string; class=""): VNode =
  let url = if "http" notin text: https & text else: text


@@ -132,8 +132,20 @@ Twitter feed for: ${desc}. Generated by ${cfg.hostname}
#end for
#end proc
#
-#proc renderTimelineRss*(profile: Profile; cfg: Config; prefs: Prefs; multi=false): string =
+#proc renderTimelineRss*(profile: Profile; cfg: Config; prefs: Prefs; tab, param: string; multi=false): string =
#let urlPrefix = getUrlPrefix(cfg)
+#var atomLink = &"{urlPrefix}/{profile.user.username}"
+#var link = &"{urlPrefix}/{profile.user.username}"
+#if tab != "":
+#  atomLink &= "/" & tab
+#  link &= "/" & tab
+#end if
+#atomLink &= "/rss"
+#if param != "":
+#  let escParam = xmltree.escape(param)
+#  atomLink &= "?" & escParam
+#  link &= "?" & escParam
+#end if
#result = ""
#let handle = (if multi: "" else: "@") & profile.user.username
#var title = profile.user.fullname
@@ -143,9 +155,9 @@ Twitter feed for: ${desc}. Generated by ${cfg.hostname}
<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
-  <atom:link href="${urlPrefix}/${profile.user.username}/rss" rel="self" type="application/rss+xml" />
+  <atom:link href="${atomLink}" rel="self" type="application/rss+xml" />
  <title>${title}</title>
-  <link>${urlPrefix}/${profile.user.username}</link>
+  <link>${link}</link>
  <description>${getDescription(handle, cfg)}</description>
  <language>en-us</language>
  <ttl>40</ttl>
@@ -181,15 +193,16 @@ ${renderRssTweets(tweets, cfg, prefs)}
#end proc
#
#proc renderSearchRss*(tweets: seq[Tweets]; name, param: string; cfg: Config; prefs: Prefs): string =
-#let link = &"{getUrlPrefix(cfg)}/search"
+#let urlPrefix = getUrlPrefix(cfg)
#let escName = xmltree.escape(name)
+#let escParam = xmltree.escape(param)
#result = ""
<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
-  <atom:link href="${link}" rel="self" type="application/rss+xml" />
+  <atom:link href="${urlPrefix}/search/rss?${escParam}" rel="self" type="application/rss+xml" />
  <title>Search results for "${escName}"</title>
-  <link>${link}</link>
+  <link>${urlPrefix}/search?${escParam}</link>
  <description>${getDescription(&"Search \"{escName}\"", cfg)}</description>
  <language>en-us</language>
  <ttl>40</ttl>


@@ -200,8 +200,7 @@ proc renderAttribution(user: User; prefs: Prefs): VNode =
  buildHtml(a(class="attribution", href=("/" & user.username))):
    renderMiniAvatar(user, prefs)
    strong: text user.fullname
-   if user.verified:
-     icon "ok", class="verified-icon", title="Verified account"
+   verifiedIcon(user)

proc renderMediaTags(tags: seq[User]): VNode =
  buildHtml(tdiv(class="media-tag-block")):


@@ -13,11 +13,6 @@ card = [
     'Basic OBS Studio plugin, written in nim, supporting C++ (C fine too) - obsplugin.nim',
     'gist.github.com', True],

-    ['FluentAI/status/1116417904831029248',
-     'Amazons Alexa isnt just AI — thousands of humans are listening',
-     'One of the only ways to improve Alexa is to have human beings check it for errors',
-     'theverge.com', True],
-
    ['nim_lang/status/1082989146040340480',
     'Nim in 2018: A short recap',
     'There were several big news in the Nim world in 2018 two new major releases, partnership with Status, and much more. But let us go chronologically.',
@@ -25,6 +20,11 @@ card = [
]

no_thumb = [
+    ['FluentAI/status/1116417904831029248',
+     'LinkedIn',
+     'This link will take you to a page thats not on LinkedIn',
+     'lnkd.in'],
+
    ['Thom_Wolf/status/1122466524860702729',
     'facebookresearch/fairseq',
     'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - facebookresearch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',


@@ -9,7 +9,7 @@ text = [
What are we doing wrong? reuters.com/article/us-norwa"""],

    ['nim_lang/status/1491461266849808397#m',
-     'Nim language', '@nim_lang',
+     'Nim', '@nim_lang',
     """What's better than Nim 1.6.0?

Nim 1.6.2 :)


@@ -6,7 +6,7 @@ normal = [['jack'], ['elonmusk']]
after = [['jack', '1681686036294803456'],
         ['elonmusk', '1681686036294803456']]

-no_more = [['mobile_test_8?cursor=1000']]
+no_more = [['mobile_test_8?cursor=DAABCgABF4YVAqN___kKAAICNn_4msIQAAgAAwAAAAIAAA']]

empty = [['emptyuser'], ['mobile_test_10']]


@@ -1,4 +1,4 @@
-from base import BaseTestCase, Tweet, get_timeline_tweet
+from base import BaseTestCase, Tweet, Conversation, get_timeline_tweet
from parameterized import parameterized

# image = tweet + 'div.attachments.media-body > div > div > a > div > img'
@@ -35,7 +35,16 @@ multiline = [
CALM
AND
CLICHÉ
-ON"""]
+ON"""],
+    [1718660434457239868, 'WebDesignMuseum',
+     """
+Happy 32nd Birthday HTML tags!
+On October 29, 1991, the internet pioneer, Tim Berners-Lee, published a document entitled HTML Tags.
+The document contained a description of the first 18 HTML tags: <title>, <nextid>, <a>, <isindex>, <plaintext>, <listing>, <p>, <h1><h6>, <address>, <hp1>, <hp2>, <dl>, <dt>, <dd>, <ul>, <li>,<menu> and <dir>. The design of the first version of HTML language was influenced by the SGML universal markup language.
+#WebDesignHistory"""]
]

link = [
@@ -74,22 +83,18 @@ retweet = [
    [3, 'mobile_test_8', 'mobile test 8', 'jack', '@jack', 'twttr']
]

-# reply = [
-#     ['mobile_test/with_replies', 15]
-# ]
-
class TweetTest(BaseTestCase):
-    # @parameterized.expand(timeline)
-    # def test_timeline(self, index, fullname, username, date, tid, text):
-    #     self.open_nitter(username)
-    #     tweet = get_timeline_tweet(index)
-    #     self.assert_exact_text(fullname, tweet.fullname)
-    #     self.assert_exact_text('@' + username, tweet.username)
-    #     self.assert_exact_text(date, tweet.date)
-    #     self.assert_text(text, tweet.text)
-    #     permalink = self.find_element(tweet.date + ' a')
-    #     self.assertIn(tid, permalink.get_attribute('href'))
+    @parameterized.expand(timeline)
+    def test_timeline(self, index, fullname, username, date, tid, text):
+        self.open_nitter(username)
+        tweet = get_timeline_tweet(index)
+        self.assert_exact_text(fullname, tweet.fullname)
+        self.assert_exact_text('@' + username, tweet.username)
+        self.assert_exact_text(date, tweet.date)
+        self.assert_text(text, tweet.text)
+        permalink = self.find_element(tweet.date + ' a')
+        self.assertIn(tid, permalink.get_attribute('href'))

    @parameterized.expand(status)
    def test_status(self, tid, fullname, username, date, text):
@@ -103,18 +108,18 @@ class TweetTest(BaseTestCase):
    @parameterized.expand(multiline)
    def test_multiline_formatting(self, tid, username, text):
        self.open_nitter(f'{username}/status/{tid}')
-        self.assert_text(text.strip('\n'), '.main-tweet')
+        self.assert_text(text.strip('\n'), Conversation.main)

    @parameterized.expand(emoji)
    def test_emoji(self, tweet, text):
        self.open_nitter(tweet)
-        self.assert_text(text, '.main-tweet')
+        self.assert_text(text, Conversation.main)

    @parameterized.expand(link)
    def test_link(self, tweet, links):
        self.open_nitter(tweet)
        for link in links:
-            self.assert_text(link, '.main-tweet')
+            self.assert_text(link, Conversation.main)

    @parameterized.expand(username)
    def test_username(self, tweet, usernames):
@@ -123,22 +128,22 @@ class TweetTest(BaseTestCase):
            link = self.find_link_text(f'@{un}')
            self.assertIn(f'/{un}', link.get_property('href'))

-    # @parameterized.expand(retweet)
-    # def test_retweet(self, index, url, retweet_by, fullname, username, text):
-    #     self.open_nitter(url)
-    #     tweet = get_timeline_tweet(index)
-    #     self.assert_text(f'{retweet_by} retweeted', tweet.retweet)
-    #     self.assert_text(text, tweet.text)
-    #     self.assert_exact_text(fullname, tweet.fullname)
-    #     self.assert_exact_text(username, tweet.username)
+    @parameterized.expand(retweet)
+    def test_retweet(self, index, url, retweet_by, fullname, username, text):
+        self.open_nitter(url)
+        tweet = get_timeline_tweet(index)
+        self.assert_text(f'{retweet_by} retweeted', tweet.retweet)
+        self.assert_text(text, tweet.text)
+        self.assert_exact_text(fullname, tweet.fullname)
+        self.assert_exact_text(username, tweet.username)

    @parameterized.expand(invalid)
    def test_invalid_id(self, tweet):
        self.open_nitter(tweet)
        self.assert_text('Tweet not found', '.error-panel')

-    # @parameterized.expand(reply)
-    # def test_thread(self, tweet, num):
-    #     self.open_nitter(tweet)
-    #     thread = self.find_element(f'.timeline > div:nth-child({num})')
-    #     self.assertIn(thread.get_attribute('class'), 'thread-line')
+    #@parameterized.expand(reply)
+    #def test_thread(self, tweet, num):
+    #self.open_nitter(tweet)
+    #thread = self.find_element(f'.timeline > div:nth-child({num})')
+    #self.assertIn(thread.get_attribute('class'), 'thread-line')


@@ -14,7 +14,7 @@ poll = [
image = [
    ['mobile_test/status/519364660823207936', 'BzUnaDFCUAAmrjs'],
-    ['mobile_test_2/status/324619691039543297', 'BIFH45vCUAAQecj']
+    #['mobile_test_2/status/324619691039543297', 'BIFH45vCUAAQecj']
]

gif = [