Optimize profile fetching and caching
parent d38b63f5a9
commit ff61d97a1d

src/api.nim | 31
@@ -4,11 +4,22 @@ import packedjson
 import types, query, formatters, consts, apiutils, parser
 import experimental/parser as newParser
 
-proc getGraphUser*(id: string): Future[User] {.async.} =
+proc getGraphUser*(username: string): Future[User] {.async.} =
+  if username.len == 0: return
+  let
+    variables = """{
+      "screen_name": "$1",
+      "withSafetyModeUserFields": false,
+      "withSuperFollowsUserFields": false
+    }""" % [username]
+    js = await fetchRaw(graphUser ? {"variables": variables}, Api.userScreenName)
+  result = parseGraphUser(js)
+
+proc getGraphUserById*(id: string): Future[User] {.async.} =
   if id.len == 0 or id.any(c => not c.isDigit): return
   let
-    variables = %*{"userId": id, "withSuperFollowsUserFields": true}
-    js = await fetchRaw(graphUser ? {"variables": $variables}, Api.userRestId)
+    variables = """{"userId": "$1", "withSuperFollowsUserFields": true}""" % [id]
+    js = await fetchRaw(graphUserById ? {"variables": variables}, Api.userRestId)
   result = parseGraphUser(js)
 
 proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
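A standalone sketch (not part of the commit) of the variable-building approach the new getGraphUser uses: the GraphQL variables payload is a triple-quoted JSON template filled with strutils `%` substitution rather than a packedjson `%*` node. The helper name userVariables is illustrative only.

import strutils

proc userVariables(username: string): string =
  ## Illustrative helper; the field names are the ones shown in the hunk above.
  result = """{
    "screen_name": "$1",
    "withSafetyModeUserFields": false,
    "withSuperFollowsUserFields": false
  }""" % [username]

when isMainModule:
  echo userVariables("jack")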
@@ -47,20 +58,6 @@ proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
     url = listTimeline ? ps
   result = parseTimeline(await fetch(url, Api.timeline), after)
 
-proc getUser*(username: string): Future[User] {.async.} =
-  if username.len == 0: return
-  let
-    ps = genParams({"screen_name": username})
-    json = await fetchRaw(userShow ? ps, Api.userShow)
-  result = parseUser(json, username)
-
-proc getUserById*(userId: string): Future[User] {.async.} =
-  if userId.len == 0: return
-  let
-    ps = genParams({"user_id": userId})
-    json = await fetchRaw(userShow ? ps, Api.userShow)
-  result = parseUser(json)
-
 proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
@@ -19,7 +19,8 @@ const
   tweet* = timelineApi / "conversation"
 
   graphql = api / "graphql"
-  graphUser* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
+  graphUser* = graphql / "7mjxD3-C6BxitPMVQ6w0-Q/UserByScreenName"
+  graphUserById* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
   graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
   graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
   graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
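For context, a standalone sketch of how one of these endpoint constants becomes a request URL. The base host and the std/uri calls are assumptions for illustration, not code from this commit; the query-ID path segment is copied from the hunk above.

import std/uri

let
  api = parseUri("https://api.twitter.com")   # assumed base host, for illustration
  graphql = api / "graphql"
  graphUserById = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
  variables = """{"userId": "12", "withSuperFollowsUserFields": true}"""
  url = $graphUserById & "?" & encodeQuery({"variables": variables})

echo url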
@@ -1,9 +1,14 @@
+import options
 import jsony
 import user, ../types/[graphuser, graphlistmembers]
 from ../../types import User, Result, Query, QueryKind
 
 proc parseGraphUser*(json: string): User =
   let raw = json.fromJson(GraphUser)
+
+  if raw.data.user.result.reason.get("") == "Suspended":
+    return User(suspended: true)
+
   result = toUser raw.data.user.result.legacy
   result.id = raw.data.user.result.restId
 
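A standalone sketch (not Nitter code) of the jsony behaviour the new suspension check relies on: an absent "reason" field deserializes to none, so reason.get("") only equals "Suspended" when the API actually returned that value. The MiniResult type here is a trimmed stand-in for UserResult.

import std/options
import jsony   # external package: nimble install jsony

type
  MiniResult = object
    restId: string
    reason: Option[string]

let
  suspended = """{"restId": "123", "reason": "Suspended"}""".fromJson(MiniResult)
  active = """{"restId": "456"}""".fromJson(MiniResult)

assert suspended.reason.get("") == "Suspended"
assert active.reason.get("") == ""   # missing field -> none -> default ""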
@@ -1,3 +1,4 @@
+import options
 import user
 
 type
@@ -10,3 +11,4 @@ type
   UserResult = object
     legacy*: RawUser
     restId*: string
+    reason*: Option[string]
@@ -118,11 +118,11 @@ proc getUserId*(username: string): Future[string] {.async.} =
   pool.withAcquire(r):
     result = await r.hGet(name.uidKey, name)
     if result == redisNil:
-      let user = await getUser(username)
+      let user = await getGraphUser(username)
       if user.suspended:
         return "suspended"
       else:
-        await cacheUserId(name, user.id)
+        await all(cacheUserId(name, user.id), cache(user))
         return user.id
 
 proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
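A standalone sketch (not Nitter code) of the await all(...) pattern introduced here, which starts both cache writes and waits for them together; the two procs below are stand-ins for cacheUserId and cache.

import std/asyncdispatch

proc cacheUserId(name, id: string) {.async.} =
  echo "uid cached: ", name, " -> ", id

proc cacheProfile(name: string) {.async.} =
  echo "profile cached: ", name

proc demo() {.async.} =
  # Both futures are started immediately; `all` completes when both are done.
  await all(cacheUserId("jack", "12"), cacheProfile("jack"))

waitFor demo()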
@@ -130,8 +130,7 @@ proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
   if prof != redisNil:
     prof.deserialize(User)
   elif fetch:
-    let userId = await getUserId(username)
-    result = await getGraphUser(userId)
+    result = await getGraphUser(username)
     await cache(result)
 
 proc getCachedUsername*(userId: string): Future[string] {.async.} =
@@ -142,9 +141,11 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
   if username != redisNil:
     result = username
   else:
-    let user = await getUserById(userId)
+    let user = await getGraphUserById(userId)
     result = user.username
     await setEx(key, baseCacheTime, result)
+    if result.len > 0 and user.id.len > 0:
+      await all(cacheUserId(result, user.id), cache(user))
 
 proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
   if id == 0: return
@@ -41,7 +41,7 @@ proc getPoolJson*(): JsonNode =
       let
         maxReqs =
           case api
-          of Api.listMembers, Api.listBySlug, Api.list, Api.userRestId: 500
+          of Api.listMembers, Api.listBySlug, Api.list, Api.userRestId, Api.userScreenName: 500
          of Api.timeline: 187
          else: 180
        reqs = maxReqs - token.apis[api].remaining
@@ -17,6 +17,7 @@ type
     listBySlug
     listMembers
     userRestId
+    userScreenName
     status
 
   RateLimit* = object
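A standalone sketch restating the request-budget logic from the tokens hunk over a local copy of the Api enum, so it runs on its own; the per-endpoint limits are the ones shown above.

type
  Api = enum
    timeline, list, listBySlug, listMembers, userRestId, userScreenName, status

proc maxReqs(api: Api): int =
  case api
  of listMembers, listBySlug, list, userRestId, userScreenName: 500
  of timeline: 187
  else: 180

when isMainModule:
  assert maxReqs(userScreenName) == 500
  assert maxReqs(timeline) == 187
  assert maxReqs(status) == 180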