Add template to make Redis usage cleaner
parent 2aa07e7395
commit 23f87c115a
@@ -13,6 +13,9 @@ var
   rssCacheTime: int
   listCacheTime*: int

+template dawait(future) =
+  discard await future
+
 # flatty can't serialize DateTime, so we need to define this
 proc toFlatty*(s: var string, x: DateTime) =
   s.toFlatty(x.toTime().toUnix())
@@ -33,9 +36,9 @@ proc migrate*(key, match: string) {.async.} =
       let list = await r.scan(newCursor(0), match, 100000)
       r.startPipelining()
       for item in list:
-        discard await r.del(item)
+        dawait r.del(item)
       await r.setk(key, "true")
-      discard await r.flushPipeline()
+      dawait r.flushPipeline()

 proc initRedisPool*(cfg: Config) {.async.} =
   try:
@@ -68,7 +71,7 @@ proc get(query: string): Future[string] {.async.} =

 proc setEx(key: string; time: int; data: string) {.async.} =
   pool.withAcquire(r):
-    discard await r.setEx(key, time, data)
+    dawait r.setEx(key, time, data)

 proc cache*(data: List) {.async.} =
   await setEx(data.listKey, listCacheTime, compress(toFlatty(data)))
@@ -81,25 +84,25 @@ proc cache*(data: Profile) {.async.} =
   let name = toLower(data.username)
   pool.withAcquire(r):
     r.startPipelining()
-    discard await r.setEx(name.profileKey, baseCacheTime, compress(toFlatty(data)))
-    discard await r.setEx("i:" & data.id , baseCacheTime, data.username)
-    discard await r.hSet(name.pidKey, name, data.id)
-    discard await r.flushPipeline()
+    dawait r.setEx(name.profileKey, baseCacheTime, compress(toFlatty(data)))
+    dawait r.setEx("i:" & data.id , baseCacheTime, data.username)
+    dawait r.hSet(name.pidKey, name, data.id)
+    dawait r.flushPipeline()

 proc cacheProfileId*(username, id: string) {.async.} =
   if username.len == 0 or id.len == 0: return
   let name = toLower(username)
   pool.withAcquire(r):
-    discard await r.hSet(name.pidKey, name, id)
+    dawait r.hSet(name.pidKey, name, id)

 proc cacheRss*(query: string; rss: Rss) {.async.} =
   let key = "rss:" & query
   pool.withAcquire(r):
     r.startPipelining()
-    discard await r.hSet(key, "rss", rss.feed)
-    discard await r.hSet(key, "min", rss.cursor)
-    discard await r.expire(key, rssCacheTime)
-    discard await r.flushPipeline()
+    dawait r.hSet(key, "rss", rss.feed)
+    dawait r.hSet(key, "min", rss.cursor)
+    dawait r.expire(key, rssCacheTime)
+    dawait r.flushPipeline()

 proc getProfileId*(username: string): Future[string] {.async.} =
   let name = toLower(username)
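
For reference, a minimal standalone sketch of what the new dawait template buys: it expands to discard await, so fire-and-forget redis calls read as one word. Only the dawait definition is taken from the diff above; the proc names below (fakeRedisCall, demo) are hypothetical stand-ins, not part of this codebase.

import std/asyncdispatch

# Same definition as in the diff: discard the result of an awaited future.
template dawait(future) =
  discard await future

# Hypothetical stand-in for a redis command (del, setEx, hSet, ...) whose
# return value the caller does not need.
proc fakeRedisCall(): Future[int] {.async.} =
  return 1

proc demo() {.async.} =
  discard await fakeRedisCall()  # before this commit
  dawait fakeRedisCall()         # after this commit, same behavior

waitFor demo()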