# nitter/src/api/list.nim
import httpclient, asyncdispatch, htmlparser
import sequtils, strutils, json, uri
import ".."/[types, parser, parserutils, query]
import utils, consts, timeline, search
proc getListTimeline*(username, list, after, agent: string; media=true): Future[Timeline] {.async.} =
  ## Fetches one page of a list's tweet timeline and parses it into a
  ## `Timeline`. `after` is the pagination cursor ("" for the first page);
  ## `media` is forwarded to `finishTimeline` unchanged.
  let reqUrl = base / (listUrl % [username, list])

  # Query string shared by all timeline requests; the cursor is only
  # appended when paginating.
  var query = @[
    ("include_available_features", "1"),
    ("include_entities", "1"),
    ("reset_error_state", "false")
  ]
  if after.len > 0:
    query.add ("max_position", after)

  let payload = await fetchJson(reqUrl ? query, genHeaders(agent, reqUrl))
  result = await finishTimeline(payload, Query(), after, agent, media)

  # An empty page has no last tweet to derive a cursor from.
  if result.content.len == 0:
    return

  result.minId = getLastId(result)

proc getListMembersSearch(username, list, after, agent: string): Future[Result[Profile]] {.async.} =
  ## Fetches a page of list members through the AJAX "timeline" endpoint,
  ## used when paginating past the first page (i.e. `after` holds a cursor).
  ## Returns a `Result[Profile]`; `hasMore`/`content` are left at their
  ## `getResult` defaults when the response carries no "items_html".
  let
    referer = base / (listMembersUrl % [username, list])
    url = referer / "timeline"
    # The endpoint expects the standard AJAX marker header. The header name
    # is "x-requested-with" — the previous "x-push-with" is not a real
    # header and would not be recognized by the server.
    headers = genHeaders({"x-requested-with": "XMLHttpRequest"}, agent, referer, xml=true)

  var params = toSeq({
    "include_available_features": "1",
    "include_entities": "1",
    "reset_error_state": "false"
  })

  if after.len > 0:
    params.add {"max_position": after}

  let json = await fetchJson(url ? params, headers)
  result = getResult[Profile](json, Query(kind: userList), after)

  # Nothing to parse if the request failed or the payload has no HTML chunk.
  if json == nil or not json.hasKey("items_html"): return

  let html = json["items_html"].to(string)
  # The endpoint signals "no more pages" with a bare newline payload.
  result.hasMore = html != "\n"

  for p in parseHtml(html).selectAll(".account"):
    result.content.add parseListProfile(p)

proc getListMembers*(username, list, after, agent: string): Future[Result[Profile]] {.async.} =
  ## Returns the members of a list as a `Result[Profile]`. The first page is
  ## scraped from the regular HTML list page; when a pagination cursor is
  ## supplied in `after`, the AJAX search endpoint is used instead.
  if after.len > 0:
    return await getListMembersSearch(username, list, after, agent)

  let membersUrl = base / (listMembersUrl % [username, list])
  let page = await fetchHtml(membersUrl, genHeaders(agent, membersUrl))

  result = Result[Profile](
    minId: page.selectAttr(".stream-container", "data-min-position"),
    hasMore: page.select(".has-more-items") != nil,
    beginning: true,
    query: Query(kind: userList),
    content: page.selectAll(".account").map(parseListProfile)
  )