# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, uri, strutils
import packedjson
import types, query, formatters, consts, apiutils, parser
import experimental/parser/user

proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
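  ## Fetches a List via the GraphQL listBySlug endpoint, identified by the
  ## owner's screen name and the list slug.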
  let
    variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
    url = graphListBySlug ? {"variables": $variables}
  result = parseGraphList(await fetch(url, Api.listBySlug))

proc getGraphList*(id: string): Future[List] {.async.} =
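  ## Fetches a List by its id via the GraphQL list endpoint.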
  let
    variables = %*{"listId": id, "withHighlightedLabel": false}
    url = graphList ? {"variables": $variables}
  result = parseGraphList(await fetch(url, Api.list))

proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
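  ## Fetches a list's tweets in reverse-chronological order; returns early
  ## when `id` is empty.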
|
2021-12-29 07:03:00 +00:00
|
|
|
if id.len == 0: return
|
2020-06-01 00:16:24 +00:00
|
|
|
let
|
|
|
|
ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
|
|
|
|
url = listTimeline ? ps
|
2022-01-05 21:48:45 +00:00
|
|
|
result = parseTimeline(await fetch(url, Api.timeline), after)
|
2020-06-01 00:16:24 +00:00
|
|
|
|
|
|
|
proc getListMembers*(list: List; after=""): Future[Result[Profile]] {.async.} =
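  ## Fetches a page of the list's members, paginated with the `after` cursor;
  ## returns early when the list has no id.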
  if list.id.len == 0: return
  let
    ps = genParams({"list_id": list.id}, after)
    url = listMembers ? ps
  result = parseListMembers(await fetch(url, Api.listMembers), after)

proc getProfile*(username: string): Future[Profile] {.async.} =
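  ## Fetches a profile by screen name via the userShow endpoint and parses
  ## the raw JSON response.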
  let
    ps = genParams({"screen_name": username})
    json = await fetchRaw(userShow ? ps, Api.userShow)
  result = parseUser(json)

proc getProfileById*(userId: string): Future[Profile] {.async.} =
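  ## Fetches a profile by numeric user id via the userShow endpoint.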
  let
    ps = genParams({"user_id": userId})
    json = await fetchRaw(userShow ? ps, Api.userShow)
  result = parseUser(json)

proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
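  ## Fetches a user's tweet timeline; `replies` toggles the
  ## include_tweet_replies parameter.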
  let
    ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
    url = timeline / (id & ".json") ? ps
  result = parseTimeline(await fetch(url, Api.timeline), after)

proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
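  ## Fetches a user's media timeline, paginated with the `after` cursor.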
  let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
  result = parseTimeline(await fetch(url, Api.timeline), after)

proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
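  ## Fetches items for the profile's photo rail, requesting 18 entries
  ## without extended metadata.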
  let
    ps = genParams({"screen_name": name, "trim_user": "true"},
                   count="18", ext=false)
    url = photoRail ? ps
  result = parsePhotoRail(await fetch(url, Api.photoRail))

proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
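  ## Generic search helper: when `T` is Profile it performs a user search
  ## parsed with parseUsers, otherwise a live tweet search parsed with
  ## parseTimeline. An empty or invalid query yields an empty Result.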
  when T is Profile:
    const
      searchMode = ("result_filter", "user")
      parse = parseUsers
  else:
    const
      searchMode = ("tweet_search_mode", "live")
      parse = parseTimeline

  let q = genQueryParam(query)
  if q.len == 0 or q == emptyQuery:
    return Result[T](beginning: true, query: query)

  let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
  try:
    result = parse(await fetch(url, Api.search), after)
    result.query = query
  except InternalError:
    return Result[T](beginning: true, query: query)

proc getTweetImpl(id: string; after=""): Future[Conversation] {.async.} =
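  ## Fetches the conversation around a tweet; the `after` cursor selects a
  ## page of replies.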
  let url = tweet / (id & ".json") ? genParams(cursor=after)
  result = parseConversation(await fetch(url, Api.tweet), id)

proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
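  ## Fetches the reply chain for a tweet, marking `beginning` when no cursor
  ## was given.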
  result = (await getTweetImpl(id, after)).replies
  result.beginning = after.len == 0

proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
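  ## Fetches a tweet's conversation and, when an `after` cursor is given,
  ## loads that page of replies.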
  result = await getTweetImpl(id)
  if after.len > 0:
    result.replies = await getReplies(id, after)

proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
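  ## Resolves a redirect by issuing a HEAD request and returns the Location
  ## header rewritten with replaceUrls per `prefs`; failures are ignored,
  ## leaving an empty result.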
  let client = newAsyncHttpClient(maxRedirects=0)
  try:
    let resp = await client.request(url, HttpHead)
    result = resp.headers["location"].replaceUrls(prefs)
  except:
    discard
  finally:
    client.close()