# nitter/src/apiutils.nim
# SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, times, strutils, uri
import packedjson, zippy
import types, tokens, consts, parserutils, http_pool
const
  # Twitter API response headers carrying guest-token rate-limit state;
  # read in fetch() to update the token's per-endpoint limits.
  rlRemaining = "x-rate-limit-remaining"
  rlReset = "x-rate-limit-reset"
# Shared keep-alive HTTP connection pool; lazily initialized on the first
# fetch() call (see the `once:` block there).
var pool: HttpPool
2022-01-06 02:57:14 +00:00
proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
                count="20"; ext=true): seq[(string, string)] =
  ## Builds the query-parameter list for a Twitter API call: the shared
  ## `timelineParams` baseline plus any caller-supplied pairs, and optionally
  ## the `ext`, `count` and `cursor` parameters.
  result = timelineParams
  for p in pars:
    result &= p
  if ext:
    result &= ("ext", "mediaStats")
  if count.len > 0:
    result &= ("count", count)
  if cursor.len > 0:
    # The raw cursor often has plus signs, which sometimes get turned into
    # spaces (e.g. by URL decoding), so we need to turn them back into a plus
    if " " in cursor:
      result &= ("cursor", cursor.replace(" ", "+"))
    else:
      result &= ("cursor", cursor)
2020-06-01 00:16:24 +00:00
proc genHeaders*(token: Token = nil): HttpHeaders =
  ## Builds the standard header set for Twitter API requests. When a guest
  ## `token` is supplied, its value is sent as `x-guest-token`; with a nil
  ## token the header is left empty.
  result = newHttpHeaders({
    "connection": "keep-alive",
    "authorization": auth,
    "content-type": "application/json",
    "x-guest-token": if token == nil: "" else: token.tok,
    "x-twitter-active-user": "yes",
    "authority": "api.twitter.com",
    "accept-encoding": "gzip",
    "accept-language": "en-US,en;q=0.9",
    "accept": "*/*",
    "DNT": "1"
  })
2022-01-05 21:48:45 +00:00
proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
  ## Performs a GET against `url` using a pooled connection and a guest token
  ## for `api`. Returns the parsed JSON body, or JNull when the response is
  ## not JSON. Updates the token's rate-limit state from the response headers,
  ## releases (or invalidates) the token, and raises `rateLimitError` when no
  ## token is available or the response indicates a token problem;
  ## raises `InternalError` on HTTP 400.
  once:
    pool = HttpPool()

  var token = await getToken(api)
  if token.tok.len == 0:
    # No usable guest token — treat as rate-limited.
    raise rateLimitError()

  let headers = genHeaders(token)
  try:
    var resp: AsyncResponse
    var body = pool.use(headers):
      resp = await c.get($url)
      await resp.body

    if body.len > 0:
      if resp.headers.getOrDefault("content-encoding") == "gzip":
        body = uncompress(body, dfGzip)
      else:
        # We always request gzip; log the anomaly for debugging.
        echo "non-gzip body, url: ", url, ", body: ", body

    if body.startsWith('{') or body.startsWith('['):
      result = parseJson(body)
    else:
      echo resp.status, ": ", body
      result = newJNull()

    # Search responses don't carry per-token limits we track here.
    if api != Api.search and resp.headers.hasKey(rlRemaining):
      let
        remaining = parseInt(resp.headers[rlRemaining])
        reset = parseInt(resp.headers[rlReset])
      token.setRateLimit(api, remaining, reset)

    if result.getError notin {invalidToken, forbidden, badToken}:
      release(token, used=true)
    else:
      # Token-level failure: discard the token and surface as rate-limited.
      echo "fetch error: ", result.getError
      release(token, invalid=true)
      raise rateLimitError()

    if resp.status == $Http400:
      raise newException(InternalError, $url)
  except InternalError as e:
    raise e
  except Exception as e:
    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
    # "length"/"descriptor" errors look like transient connection issues,
    # so the token itself is kept valid in those cases.
    if "length" notin e.msg and "descriptor" notin e.msg:
      release(token, invalid=true)
    raise rateLimitError()