2020-06-05 14:27:48 +00:00
|
|
|
import asyncdispatch, times, strutils, tables
|
2020-06-07 06:50:40 +00:00
|
|
|
import redis, redpool, frosty, snappy
|
2020-06-01 00:16:24 +00:00
|
|
|
|
|
|
|
import types, api
|
|
|
|
|
|
|
|
const redisNil = "\0\0"
|
|
|
|
|
|
|
|
var
  pool: RedisPool           # shared connection pool; initialized in initRedisPool
  baseCacheTime = 60 * 60   # default TTL in seconds (1 hour)
  rssCacheTime: int         # TTL for RSS entries; set via setCacheTimes
  listCacheTime*: int       # TTL for list entries; set via setCacheTimes
|
|
|
|
|
|
|
|
proc setCacheTimes*(cfg: Config) =
  ## Derives the Redis TTLs (in seconds) from the configured minute values.
  listCacheTime = cfg.listCacheTime * 60
  rssCacheTime = cfg.rssCacheTime * 60
|
|
|
|
|
2020-06-07 06:50:40 +00:00
|
|
|
proc migrate*(key, match: string) {.async.} =
  ## One-shot cache migration. If the marker `key` is not yet set, every
  ## Redis key matching `match` is deleted (dropping entries serialized in
  ## an older format), then the marker is written so the purge runs only
  ## once per deployment.
  pool.withAcquire(r):
    let hasKey = await r.get(key)
    if hasKey == redisNil:
      # Marker absent: collect matching keys in a single scan.
      # NOTE(review): assumes one SCAN with count 100000 covers the whole
      # matching keyspace — confirm against redpool's scan semantics.
      let list = await r.scan(newCursor(0), match, 100000)
      r.startPipelining()
      for item in list:
        # Preserve the "p:" hash (username -> id map); it needs no migration.
        if item != "p:":
          discard await r.del(item)
      await r.setk(key, "true")
      discard await r.flushPipeline()
|
|
|
|
|
2020-06-06 07:27:25 +00:00
|
|
|
proc initRedisPool*(cfg: Config) {.async.} =
  ## Connects the global Redis pool using the configured host, port and
  ## connection limits, then runs the one-shot cache migrations.
  ## Terminates the process with exit status 1 when Redis is unreachable.
  try:
    pool = await newRedisPool(cfg.redisConns, maxConns=cfg.redisMaxConns,
                              host=cfg.redisHost, port=cfg.redisPort)

    # Purge caches written by older serialization formats (each runs once).
    await migrate("snappyRss", "rss:*")
    await migrate("frosty", "*")
  except OSError:
    # Fix: report the connection failure on stderr instead of stdout so the
    # diagnostic is not mixed into (or lost from) redirected program output.
    stderr.write "Failed to connect to Redis.\n"
    stderr.flushFile
    quit(1)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
|
|
|
# Cache key for a profile: "p:" prefix plus the lowercased username.
template toKey(p: Profile): string = "p:" & toLower(p.username)

# Cache key for a list: "l:" prefix plus lowercased "username/listname".
template toKey(l: List): string = toLower("l:" & l.username & '/' & l.name)
|
|
|
|
|
|
|
|
proc get(query: string): Future[string] {.async.} =
  ## Fetches the raw value stored under `query`; yields the redisNil
  ## sentinel when the key is absent.
  pool.withAcquire(conn):
    result = await conn.get(query)
|
|
|
|
|
2020-06-02 20:36:02 +00:00
|
|
|
proc setex(key: string; time: int; data: string) {.async.} =
  ## Stores `data` under `key` with a TTL of `time` seconds.
  pool.withAcquire(conn):
    discard await conn.setex(key, time, data)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
2020-06-02 20:36:02 +00:00
|
|
|
proc cache*(data: List) {.async.} =
  ## Serializes, compresses and stores the list under its derived key.
  let payload = compress(freeze(data))
  await setex(data.toKey, listCacheTime, payload)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
|
|
|
proc cache*(data: PhotoRail; id: string) {.async.} =
  ## Caches a photo rail under "pr:<id>" for the base cache duration.
  let payload = compress(freeze(data))
  await setex("pr:" & id, baseCacheTime, payload)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
2020-06-02 20:36:02 +00:00
|
|
|
proc cache*(data: Profile) {.async.} =
  ## Caches a profile and records its username -> id mapping in the "p:"
  ## hash. Profiles without a username are skipped.
  if data.username.len == 0: return
  let
    payload = compress(freeze(data))
    lowered = toLower(data.username)
  pool.withAcquire(conn):
    conn.startPipelining()
    discard await conn.setex(data.toKey, baseCacheTime, payload)
    discard await conn.hset("p:", lowered, data.id)
    discard await conn.flushPipeline()
|
|
|
|
|
2020-06-02 22:03:41 +00:00
|
|
|
proc cacheProfileId*(username, id: string) {.async.} =
  ## Records a username -> id mapping; no-op when either value is empty.
  if username.len == 0 or id.len == 0: return
  let lowered = toLower(username)
  pool.withAcquire(conn):
    discard await conn.hset("p:", lowered, id)
|
|
|
|
|
2020-06-06 07:27:25 +00:00
|
|
|
proc cacheRss*(query: string; rss: Rss) {.async.} =
  ## Stores an RSS feed and its pagination cursor in a hash keyed by the
  ## query, refreshing the entry's TTL in the same pipeline.
  let key = "rss:" & query
  pool.withAcquire(conn):
    conn.startPipelining()
    discard await conn.hset(key, "rss", rss.feed)
    discard await conn.hset(key, "min", rss.cursor)
    discard await conn.expire(key, rssCacheTime)
    discard await conn.flushPipeline()
|
|
|
|
|
|
|
|
proc getProfileId*(username: string): Future[string] {.async.} =
  ## Looks up the cached numeric id for `username`; returns "" when the
  ## username is unknown.
  pool.withAcquire(conn):
    let id = await conn.hget("p:", toLower(username))
    if id != redisNil:
      result = id
|
|
|
|
|
2020-06-09 16:19:20 +00:00
|
|
|
proc getCachedProfile*(username: string; fetch=true;
                       cache=false): Future[Profile] {.async.} =
  ## Returns the profile for `username` from Redis when present. On a
  ## cache miss: if `fetch` is true the profile is retrieved from the API,
  ## and if `cache` is also true the fetched result is written back;
  ## otherwise a default-initialized Profile is returned.
  let prof = await get("p:" & toLower(username))
  if prof != redisNil:
    uncompress(prof).thaw(result)
  elif fetch:
    result = await getProfile(username)
    # The bool parameter `cache` shadows the `cache` proc by name; Nim's
    # overload resolution still selects the proc for the call below.
    if cache: await cache(result)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
|
|
|
proc getCachedPhotoRail*(id: string): Future[PhotoRail] {.async.} =
  ## Returns the photo rail for `id`, fetching from the API and caching
  ## the result on a miss. An empty id yields an empty rail.
  if id.len == 0: return
  let cached = await get("pr:" & toLower(id))
  if cached == redisNil:
    result = await getPhotoRail(id)
    await cache(result, id)
  else:
    uncompress(cached).thaw(result)
|
|
|
|
|
|
|
|
proc getCachedList*(username=""; name=""; id=""): Future[List] {.async.} =
  ## Returns a list, preferring the cache. Lookups by `id` always go to
  ## the API; username/name lookups consult Redis first. Any freshly
  ## fetched list is written back to the cache.
  let cached =
    if id.len == 0: await get(toLower("l:" & username & '/' & name))
    else: redisNil
  if cached == redisNil:
    if id.len > 0:
      result = await getGraphListById(id)
    else:
      result = await getGraphList(username, name)
    await cache(result)
  else:
    uncompress(cached).thaw(result)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
2020-06-06 07:27:25 +00:00
|
|
|
proc getCachedRss*(key: string): Future[Rss] {.async.} =
  ## Looks up a cached RSS feed by key. The cursor field doubles as the
  ## presence check: hget yields the redisNil sentinel ("\0\0", length 2)
  ## when the hash is missing, so a cursor longer than 2 characters marks
  ## a valid cached entry.
  let k = "rss:" & key
  pool.withAcquire(r):
    result.cursor = await r.hget(k, "min")
    if result.cursor.len > 2:
      result.feed = await r.hget(k, "rss")
    else:
      # Missing or sentinel value: clear the cursor so callers see "".
      # NOTE(review): this also discards legitimate cursors of length <= 2
      # — confirm cursors are always longer than the sentinel.
      result.cursor.setLen 0
|