Replace search endpoints, switch Bearer token

Zed committed 2023-04-21 03:47:21 +02:00
parent 34363a2b99
commit 5dd85c63d7
13 changed files with 125 additions and 114 deletions

View file

@@ -69,6 +69,24 @@ proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.}
let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
proc getGraphSearch*(query: Query; after=""): Future[Result[Tweet]] {.async.} =
let q = genQueryParam(query)
if q.len == 0 or q == emptyQuery: return
var
variables = %*{
"rawQuery": q,
"count": 20,
"product": "Latest",
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false
}
if after.len > 0:
variables["cursor"] = % after
let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
result = parseGraphSearch(await fetch(url, Api.search), after)
result.query = query
proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
if id.len == 0: return
let
@@ -99,32 +117,24 @@ proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return
let
ps = genParams({"screen_name": name, "trim_user": "true"},
count="18", ext=false)
count="18", ext=false)
url = photoRail ? ps
result = parsePhotoRail(await fetch(url, Api.timeline))
proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
when T is User:
const
searchMode = ("result_filter", "user")
parse = parseUsers
fetchFunc = fetchRaw
else:
const
searchMode = ("tweet_search_mode", "live")
parse = parseTimeline
fetchFunc = fetch
proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
var url = userSearch ? {
"q": query.text,
"skip_status": "1",
"count": "20",
"page": page
}
let q = genQueryParam(query)
if q.len == 0 or q == emptyQuery:
return Result[T](beginning: true, query: query)
let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
try:
result = parse(await fetchFunc(url, Api.search), after)
result.query = query
except InternalError:
return Result[T](beginning: true, query: query)
result = parseUsers(await fetchRaw(url, Api.userSearch))
result.query = query
if page.len == 0:
result.bottom = "2"
elif page.allCharsInSet(Digits):
result.bottom = $(parseInt(page) + 1)
proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
let client = newAsyncHttpClient(maxRedirects=0)
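
A minimal sketch of how the new getGraphSearch request body is assembled, using only std/json; the query string and cursor value are made-up placeholders, and the codebase's fetch/URL plumbing is omitted:

import std/json

let q = "from:nim_lang nitter"      # hypothetical raw search query
var variables = %*{
  "rawQuery": q,
  "count": 20,
  "product": "Latest",
  "withDownvotePerspective": false,
  "withReactionsMetadata": false,
  "withReactionsPerspective": false
}

let after = "placeholder-cursor"    # hypothetical pagination cursor
if after.len > 0:
  variables["cursor"] = % after     # same `%` conversion used above

# $variables becomes the value of the ?variables= query parameter
echo $variables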

View file

@@ -17,8 +17,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
result &= p
if ext:
result &= ("ext", "mediaStats")
result &= ("include_ext_alt_text", "true")
result &= ("include_ext_media_availability", "true")
result &= ("include_ext_alt_text", "1")
result &= ("include_ext_media_availability", "1")
if count.len > 0:
result &= ("count", count)
if cursor.len > 0:
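
As a side note, the ext flags above now use "1" instead of "true". A small sketch of how such a parameter list ends up in a request query string, assuming std/uri's encodeQuery (the exact URL-building helper in the codebase may differ):

import std/uri

let pars = @[("ext", "mediaStats"),
             ("include_ext_alt_text", "1"),
             ("include_ext_media_availability", "1"),
             ("count", "18")]
# ext=mediaStats&include_ext_alt_text=1&include_ext_media_availability=1&count=18
echo encodeQuery(pars)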

View file

@@ -1,15 +1,14 @@
# SPDX-License-Identifier: AGPL-3.0-only
import uri, sequtils
import uri, sequtils, strutils
const
auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw"
auth* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
api = parseUri("https://api.twitter.com")
activate* = $(api / "1.1/guest/activate.json")
photoRail* = api / "1.1/statuses/media_timeline.json"
status* = api / "1.1/statuses/show"
search* = api / "2/search/adaptive.json"
userSearch* = api / "1.1/users/search.json"
graphql = api / "graphql"
graphUser* = graphql / "8mPfHBetXOg-EHAyeVxUoA/UserByScreenName"
@@ -19,6 +18,7 @@ const
graphUserMedia* = graphql / "MA_EP2a21zpzNWKRkaPBMg/UserMedia"
graphTweet* = graphql / "6I7Hm635Q6ftv69L8VrSeQ/TweetDetail"
graphTweetResult* = graphql / "rt-rHeSJ-2H9O9gxWQcPcg/TweetResultByRestId"
graphSearchTimeline* = graphql / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
graphListById* = graphql / "iTpgCtbdxrsJfyx0cFjHqg/ListByRestId"
graphListBySlug* = graphql / "-kmqNvm5Y-cVrfvBy6docg/ListBySlug"
graphListMembers* = graphql / "P4NpVZDqUD_7MEM84L-8nw/ListMembers"
@@ -33,53 +33,46 @@ const
"include_mute_edge": "0",
"include_can_dm": "0",
"include_can_media_tag": "1",
"include_ext_is_blue_verified": "true",
"include_ext_is_blue_verified": "1",
"skip_status": "1",
"cards_platform": "Web-12",
"include_cards": "1",
"include_composer_source": "false",
"include_composer_source": "0",
"include_reply_count": "1",
"tweet_mode": "extended",
"include_entities": "true",
"include_user_entities": "true",
"include_ext_media_color": "false",
"send_error_codes": "true",
"simple_quoted_tweet": "true",
"include_quote_count": "true"
"include_entities": "1",
"include_user_entities": "1",
"include_ext_media_color": "0",
"send_error_codes": "1",
"simple_quoted_tweet": "1",
"include_quote_count": "1"
}.toSeq
searchParams* = {
"query_source": "typed_query",
"pc": "1",
"spelling_corrections": "1"
}.toSeq
## top: nothing
## latest: "tweet_search_mode: live"
## user: "result_filter: user"
## photos: "result_filter: photos"
## videos: "result_filter: videos"
gqlFeatures* = """{
"longform_notetweets_consumption_enabled": true,
"longform_notetweets_richtext_consumption_enabled": true,
"responsive_web_twitter_blue_verified_badge_is_enabled": true,
"responsive_web_graphql_exclude_directive_enabled": true,
"blue_business_profile_image_shape_enabled": false,
"freedom_of_speech_not_reach_fetch_enabled": false,
"graphql_is_translatable_rweb_tweet_is_translatable_enabled": false,
"interactive_text_enabled": false,
"longform_notetweets_consumption_enabled": true,
"longform_notetweets_richtext_consumption_enabled": true,
"longform_notetweets_rich_text_read_enabled": false,
"responsive_web_edit_tweet_api_enabled": false,
"responsive_web_enhance_cards_enabled": false,
"responsive_web_graphql_exclude_directive_enabled": true,
"responsive_web_graphql_skip_user_profile_image_extensions_enabled": false,
"responsive_web_graphql_timeline_navigation_enabled": false,
"responsive_web_text_conversations_enabled": false,
"responsive_web_twitter_blue_verified_badge_is_enabled": true,
"spaces_2022_h2_clipping": true,
"spaces_2022_h2_spaces_communities": true,
"standardized_nudges_misinfo": false,
"tweet_awards_web_tipping_enabled": false,
"tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false,
"tweetypie_unmention_optimization_enabled": false,
"view_counts_everywhere_api_enabled": false,
"verified_phone_label_enabled": false,
"vibe_api_enabled": false,
"verified_phone_label_enabled": false
}"""
"view_counts_everywhere_api_enabled": false
}""".replace(" ", "").replace("\n", "")
tweetVariables* = """{
"focalTweetId": "$1",
@@ -109,7 +102,8 @@ const
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false,
"withVoice": false
"withVoice": false,
"withV2Timeline": true
}"""
listTweetsVariables* = """{
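
For context, the gqlFeatures constant above is minified with strutils.replace so the JSON fits into a single query parameter. A self-contained sketch, assuming std/[json, strutils, uri] and a hand-built URL join standing in for the `?` operator used elsewhere in the diff; the feature set is trimmed to two entries:

import std/[json, strutils, uri]

const gqlFeatures = """{
  "longform_notetweets_consumption_enabled": true,
  "interactive_text_enabled": false
}""".replace(" ", "").replace("\n", "")   # strip whitespace at compile time

let
  api = parseUri("https://api.twitter.com")
  graphSearchTimeline = api / "graphql" / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
  variables = %*{"rawQuery": "nitter", "count": 20, "product": "Latest"}
  url = $graphSearchTimeline & "?" &
        encodeQuery({"variables": $variables, "features": gqlFeatures})

echo url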

View file

@@ -1,2 +1,2 @@
import parser/[user, graphql, timeline]
export user, graphql, timeline
import parser/[user, graphql]
export user, graphql

View file

@@ -1,30 +0,0 @@
import std/[strutils, tables]
import jsony
import user, ../types/timeline
from ../../types import Result, User
proc getId(id: string): string {.inline.} =
let start = id.rfind("-")
if start < 0: return id
id[start + 1 ..< id.len]
proc parseUsers*(json: string; after=""): Result[User] =
result = Result[User](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
for i in raw.timeline.instructions:
if i.addEntries.entries.len > 0:
for e in i.addEntries.entries:
let id = e.entryId.getId
if e.entryId.startsWith("user"):
if id in raw.globalObjects.users:
result.content.add toUser raw.globalObjects.users[id]
elif e.entryId.startsWith("cursor"):
let cursor = e.content.operation.cursor
if cursor.cursorType == "Top":
result.top = cursor.value
elif cursor.cursorType == "Bottom":
result.bottom = cursor.value

View file

@@ -2,7 +2,7 @@ import std/[algorithm, unicode, re, strutils, strformat, options, nre]
import jsony
import utils, slices
import ../types/user as userType
from ../../types import User, Error
from ../../types import Result, User, Error
let
unRegex = re.re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
@@ -76,3 +76,10 @@ proc parseUser*(json: string; username=""): User =
else: echo "[error - parseUser]: ", error
result = toUser json.fromJson(RawUser)
proc parseUsers*(json: string; after=""): Result[User] =
result = Result[User](beginning: after.len == 0)
let raw = json.fromJson(seq[RawUser])
for user in raw:
result.content.add user.toUser
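
The new parseUsers above relies on jsony decoding a whole seq in one call. A minimal sketch of that pattern with a hypothetical stand-in type (the real RawUser carries more fields):

import jsony

type Person = object
  id_str: string
  screen_name: string

let payload = """[{"id_str": "1", "screen_name": "jack"},
                  {"id_str": "2", "screen_name": "biz"}]"""

# fromJson maps each array element onto the target object type
for user in payload.fromJson(seq[Person]):
  echo user.screen_name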

View file

@@ -359,7 +359,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
result.top = e.getCursor
elif "cursor-bottom" in entry:
result.bottom = e.getCursor
elif entry.startsWith("sq-C"):
elif entry.startsWith("sq-cursor"):
with cursor, e{"content", "operation", "cursor"}:
if cursor{"cursorType"}.getStr == "Bottom":
result.bottom = cursor{"value"}.getStr
@@ -383,6 +383,12 @@ proc parseGraphTweet(js: JsonNode): Tweet =
if js.kind == JNull or js{"__typename"}.getStr == "TweetUnavailable":
return Tweet(available: false)
if js{"__typename"}.getStr == "TweetTombstone":
return Tweet(
available: false,
text: js{"tombstone", "text"}.getTombstone
)
var jsCard = copy(js{"card", "legacy"})
if jsCard.kind != JNull:
var values = newJObject()
@@ -444,7 +450,7 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
let tweet = Tweet(
id: parseBiggestInt(id),
available: false,
text: e{"content", "itemContent"}.getTombstone
text: e{"content", "itemContent", "tombstoneInfo", "richText"}.getTombstone
)
if id == tweetId:
@@ -465,18 +471,44 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Timeline =
let instructions =
if root == "list": ? js{"data", "list", "tweets_timeline", "timeline", "instructions"}
else: ? js{"data", "user", "result", "timeline", "timeline", "instructions"}
else: ? js{"data", "user", "result", "timeline_v2", "timeline", "instructions"}
if instructions.len == 0:
return
for e in instructions[instructions.len - 1]{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
for i in instructions:
if i{"type"}.getStr == "TimelineAddEntries":
for e in i{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
if instructions.len == 0:
return
for instruction in instructions:
let typ = instruction{"type"}.getStr
if typ == "TimelineAddEntries":
for e in instructions[0]{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
elif typ == "TimelineReplaceEntry":
if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
result.bottom = instruction{"entry", "content", "value"}.getStr
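
To make the shape parseGraphSearch expects concrete, here is a sketch that walks a trimmed, hand-written SearchTimeline payload with std/json only (real responses contain many more fields, and the tweet result object is left empty):

import std/[json, strutils]

let js = parseJson("""{
  "data": {"search_by_raw_query": {"search_timeline": {"timeline": {
    "instructions": [{
      "type": "TimelineAddEntries",
      "entries": [
        {"entryId": "tweet-123", "content": {"itemContent": {"tweet_results": {"result": {}}}}},
        {"entryId": "cursor-bottom-0", "content": {"value": "scroll:placeholder"}}
      ]
    }]
  }}}}
}""")

for instruction in js{"data", "search_by_raw_query", "search_timeline",
                      "timeline", "instructions"}:
  if instruction{"type"}.getStr == "TimelineAddEntries":
    for e in instruction{"entries"}:
      let entryId = e{"entryId"}.getStr
      if entryId.startsWith("tweet"):
        echo "tweet entry: ", entryId        # parseGraphTweet would run here
      elif entryId.startsWith("cursor-bottom"):
        echo "next cursor: ", e{"content", "value"}.getStr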

View file

@@ -130,12 +130,9 @@ proc getBanner*(js: JsonNode): string =
return
proc getTombstone*(js: JsonNode): string =
result = js{"tombstoneInfo", "richText", "text"}.getStr
result = js{"text"}.getStr
result.removeSuffix(" Learn more")
if result.len == 0:
result = js{"tombstoneInfo", "text"}.getStr
proc getMp4Resolution*(url: string): int =
# parses the height out of a URL like this one:
# https://video.twimg.com/ext_tw_video/<tweet-id>/pu/vid/720x1280/<random>.mp4
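
A quick sketch of the simplified tombstone handling above, assuming std/json and std/strutils (the input object is hand-written):

import std/[json, strutils]

let js = parseJson("""{"text": "This Tweet is unavailable. Learn more"}""")
var text = js{"text"}.getStr
text.removeSuffix(" Learn more")
echo text   # "This Tweet is unavailable."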

View file

@@ -28,7 +28,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
var q = query
q.fromUser = names
profile = Profile(
tweets: await getSearch[Tweet](q, after),
tweets: await getGraphSearch(q, after),
# this is kinda dumb
user: User(
username: name,
@@ -78,7 +78,7 @@ proc createRssRouter*(cfg: Config) =
if rss.cursor.len > 0:
respRss(rss, "Search")
let tweets = await getSearch[Tweet](query, cursor)
let tweets = await getGraphSearch(query, cursor)
rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)

View file

@@ -27,11 +27,11 @@ proc createSearchRouter*(cfg: Config) =
of users:
if "," in q:
redirect("/" & q)
let users = await getSearch[User](query, getCursor())
let users = await getUserSearch(query, getCursor())
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
of tweets:
let
tweets = await getSearch[Tweet](query, getCursor())
tweets = await getGraphSearch(query, getCursor())
rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
request, cfg, prefs, title, rss=rss)
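
The cursor passed to getUserSearch here appears to be a plain page number. A small sketch of the paging arithmetic from getUserSearch, assuming std/strutils (nextPage is a hypothetical helper name):

import std/strutils

proc nextPage(page: string): string =
  # mirrors how getUserSearch derives the "bottom" cursor
  if page.len == 0:
    result = "2"
  elif page.allCharsInSet(Digits):
    result = $(parseInt(page) + 1)

echo nextPage("")    # "2"
echo nextPage("7")   # "8"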

View file

@@ -50,7 +50,7 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
of posts: getGraphUserTweets(userId, TimelineKind.tweets, after)
of replies: getGraphUserTweets(userId, TimelineKind.replies, after)
of media: getGraphUserTweets(userId, TimelineKind.media, after)
else: getSearch[Tweet](query, after)
else: getGraphSearch(query, after)
rail =
skipIf(skipRail or query.kind == media, @[]):
@@ -83,7 +83,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1:
let
timeline = await getSearch[Tweet](query, after)
timeline = await getGraphSearch(query, after)
html = renderTweetSearch(timeline, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@@ -138,7 +138,7 @@ proc createTimelineRouter*(cfg: Config) =
# used for the infinite scroll feature
if @"scroll".len > 0:
if query.fromUser.len != 1:
var timeline = await getSearch[Tweet](query, after)
var timeline = await getGraphSearch(query, after)
if timeline.content.len == 0: resp Http404
timeline.beginning = true
resp $renderTweetSearch(timeline, prefs, getPath())

View file

@@ -41,12 +41,12 @@ proc getPoolJson*(): JsonNode =
let
maxReqs =
case api
of Api.search: 250
of Api.timeline: 187
of Api.listMembers, Api.listBySlug, Api.list, Api.listTweets,
Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
Api.userRestId, Api.userScreenName,
Api.tweetDetail, Api.tweetResult: 500
Api.tweetDetail, Api.tweetResult, Api.search: 500
of Api.userSearch: 900
reqs = maxReqs - token.apis[api].remaining
reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
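
For reference, the bookkeeping above is simple subtraction; a tiny sketch with hypothetical numbers (the 500 cap for Api.search comes from this change, the remaining count is made up):

let
  maxReqs = 500        # per-token cap for Api.search after this change
  remaining = 463      # hypothetical value reported for the token
  reqs = maxReqs - remaining
echo reqs              # 37 requests already used on this endpoint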

View file

@@ -19,6 +19,7 @@ type
tweetResult
timeline
search
userSearch
list
listBySlug
listMembers