Mirror of https://github.com/zedeus/nitter.git, synced 2024-10-31 22:08:50 +00:00
Use legacy timeline/user endpoint for Tweets tab
This commit is contained in:
  parent 5725780c99
  commit 624394430c

8 changed files with 81 additions and 38 deletions

src/api.nim (10 changed lines)
@@ -40,6 +40,16 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profi
   # url = oldUserTweets / (id & ".json") ? ps
   # result = parseTimeline(await fetch(url, Api.timeline), after)
 
+proc getUserTimeline*(id: string; after=""): Future[Profile] {.async.} =
+  var ps = genParams({"id": id})
+  if after.len > 0:
+    ps.add ("down_cursor", after)
+
+  let
+    url = legacyUserTweets ? ps
+    js = await fetch(url, Api.userTimeline)
+  result = parseUserTimeline(js, after)
+
 proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
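For reference, a minimal sketch of the request the new getUserTimeline path issues, built with only Nim's std/uri. The userTimelineUrl helper and the id/cursor values are invented for illustration; Nitter itself goes through genParams, its own `?` helper, and also appends the shared timelineParams.

import std/uri

let
  api = parseUri("https://api.twitter.com")
  legacyUserTweets = api / "1.1/timeline/user.json"

proc userTimelineUrl(id: string; after = ""): string =
  # mirrors getUserTimeline above: an "id" plus an optional "down_cursor"
  var ps = @[("id", id)]
  if after.len > 0:
    ps.add ("down_cursor", after)
  result = $legacyUserTweets & "?" & encodeQuery(ps)

when isMainModule:
  echo userTimelineUrl("783214", after = "98765")
  # https://api.twitter.com/1.1/timeline/user.json?id=783214&down_cursor=98765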
@@ -16,8 +16,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
   for p in pars:
     result &= p
   if ext:
-    result &= ("ext", "mediaStats,isBlueVerified,isVerified,blue,blueVerified")
     result &= ("include_ext_alt_text", "1")
+    result &= ("include_ext_media_stats", "1")
     result &= ("include_ext_media_availability", "1")
   if count.len > 0:
     result &= ("count", count)
@@ -7,6 +7,7 @@ const
   api = parseUri("https://api.twitter.com")
   activate* = $(api / "1.1/guest/activate.json")
 
+  legacyUserTweets* = api / "1.1/timeline/user.json"
   photoRail* = api / "1.1/statuses/media_timeline.json"
   userSearch* = api / "1.1/users/search.json"
   tweetSearch* = api / "1.1/search/universal.json"
@@ -28,28 +29,20 @@ const
   graphListTweets* = graphql / "BbGLL1ZfMibdFNWlk7a0Pw/ListTimeline"
 
   timelineParams* = {
-    "cards_platform": "Web-13",
-    "tweet_mode": "extended",
-    "ui_lang": "en-US",
-    "send_error_codes": "1",
-    "simple_quoted_tweet": "1",
-    "skip_status": "1",
-    "include_blocked_by": "0",
-    "include_blocking": "0",
-    "include_can_dm": "0",
     "include_can_media_tag": "1",
     "include_cards": "1",
-    "include_composer_source": "0",
     "include_entities": "1",
-    "include_ext_is_blue_verified": "1",
-    "include_ext_media_color": "0",
-    "include_followed_by": "0",
-    "include_mute_edge": "0",
     "include_profile_interstitial_type": "0",
     "include_quote_count": "1",
     "include_reply_count": "1",
     "include_user_entities": "1",
-    "include_want_retweets": "0",
+    "include_ext_reply_count": "1",
+    "include_ext_is_blue_verified": "1",
+    "include_ext_media_color": "0",
+    "cards_platform": "Web-13",
+    "tweet_mode": "extended",
+    "send_error_codes": "1",
+    "simple_quoted_tweet": "1"
   }.toSeq
 
   gqlFeatures* = """{
@@ -1,5 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import strutils, options, times, math
+import strutils, options, times, math, tables
 import packedjson, packedjson/deserialiser
 import types, parserutils, utils
 import experimental/parser/unifiedcard
@@ -81,7 +81,7 @@ proc parseGif(js: JsonNode): Gif =
 proc parseVideo(js: JsonNode): Video =
   result = Video(
     thumb: js{"media_url_https"}.getImageStr,
-    views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr($js{"mediaStats", "viewCount"}.getInt),
+    views: getVideoViewCount(js),
     available: true,
     title: js{"ext_alt_text"}.getStr,
     durationMs: js{"video_info", "duration_millis"}.getInt
@@ -313,6 +313,54 @@ proc parseTweetSearch*(js: JsonNode; after=""): Timeline =
   if result.content.len > 0:
     result.bottom = $(result.content[^1][0].id - 1)
 
+proc parseUserTimelineTweet(tweet: JsonNode; users: TableRef[string, User]): Tweet =
+  result = parseTweet(tweet, tweet{"card"})
+
+  if result.isNil or not result.available:
+    return
+
+  with user, tweet{"user"}:
+    let userId = user{"id_str"}.getStr
+    if user{"ext_is_blue_verified"}.getBool(false):
+      users[userId].verified = users[userId].verified or true
+    result.user = users[userId]
+
+proc parseUserTimeline*(js: JsonNode; after=""): Profile =
+  result = Profile(tweets: Timeline(beginning: after.len == 0))
+
+  if js.kind == JNull or "response" notin js or "twitter_objects" notin js:
+    return
+
+  var users = newTable[string, User]()
+  for userId, user in js{"twitter_objects", "users"}:
+    users[userId] = parseUser(user)
+
+  for entity in js{"response", "timeline"}:
+    let
+      tweetId = entity{"tweet", "id"}.getId
+      isPinned = entity{"tweet", "is_pinned"}.getBool(false)
+
+    with tweet, js{"twitter_objects", "tweets", $tweetId}:
+      var parsed = parseUserTimelineTweet(tweet, users)
+
+      if not parsed.isNil and parsed.available:
+        if parsed.quote.isSome:
+          parsed.quote = some parseUserTimelineTweet(tweet{"quoted_status"}, users)
+
+        if parsed.retweet.isSome:
+          let retweet = parseUserTimelineTweet(tweet{"retweeted_status"}, users)
+          if retweet.quote.isSome:
+            retweet.quote = some parseUserTimelineTweet(tweet{"retweeted_status", "quoted_status"}, users)
+          parsed.retweet = some retweet
+
+        if isPinned:
+          parsed.pinned = true
+          result.pinned = some parsed
+        else:
+          result.tweets.content.add parsed
+
+  result.tweets.bottom = js{"response", "cursor", "bottom"}.getStr
+
 # proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
 # let intId = if id.len > 0: parseBiggestInt(id) else: 0
 # result = global.tweets.getOrDefault(id, Tweet(id: intId))
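As a rough illustration of the payload shape parseUserTimeline walks, here is a self-contained sketch using std/json (the parser above operates on packedjson nodes). The keys mirror the ones read in the diff; the ids, text, screen name, and cursor value are made up.

import std/json

let sample = parseJson("""
{
  "response": {
    "timeline": [
      {"tweet": {"id": 1001, "is_pinned": true}},
      {"tweet": {"id": 1002, "is_pinned": false}}
    ],
    "cursor": {"bottom": "example-bottom-cursor"}
  },
  "twitter_objects": {
    "tweets": {
      "1001": {"id_str": "1001", "full_text": "pinned tweet"},
      "1002": {"id_str": "1002", "full_text": "regular tweet"}
    },
    "users": {
      "12": {"id_str": "12", "screen_name": "example"}
    }
  }
}""")

# join each timeline entry with its tweet object, the same lookup
# parseUserTimeline performs via twitter_objects/tweets
for entity in sample["response"]["timeline"]:
  let id = entity["tweet"]["id"].getInt
  echo entity["tweet"]["is_pinned"].getBool, ": ",
    sample["twitter_objects"]["tweets"][$id]["full_text"].getStr

echo "next cursor: ", sample["response"]["cursor"]["bottom"].getStr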
@@ -148,6 +148,12 @@ proc getMp4Resolution*(url: string): int =
   # cannot determine resolution (e.g. m3u8/non-mp4 video)
   return 0
 
+proc getVideoViewCount*(js: JsonNode): string =
+  with stats, js{"ext_media_stats"}:
+    return stats{"view_count"}.getStr($stats{"viewCount"}.getInt)
+
+  return $js{"mediaStats", "viewCount"}.getInt(0)
+
 proc extractSlice(js: JsonNode): Slice[int] =
   result = js["indices"][0].getInt ..< js["indices"][1].getInt
 
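getVideoViewCount prefers the legacy ext_media_stats object and otherwise falls back to the GraphQL-style mediaStats integer. Below is a small stand-alone approximation using std/json; the proc above works on packedjson nodes and this module's `with` template, and the videoViews name and sample payloads are only illustrative.

import std/json

proc videoViews(js: JsonNode): string =
  # legacy timeline payloads: {"ext_media_stats": {"view_count": "1234"}}
  if js.hasKey("ext_media_stats"):
    let stats = js["ext_media_stats"]
    return stats{"view_count"}.getStr($stats{"viewCount"}.getInt)
  # GraphQL payloads: {"mediaStats": {"viewCount": 99}}
  result = $js{"mediaStats", "viewCount"}.getInt(0)

echo videoViews(parseJson("""{"ext_media_stats": {"view_count": "1234"}}"""))  # 1234
echo videoViews(parseJson("""{"mediaStats": {"viewCount": 99}}"""))            # 99
echo videoViews(parseJson("{}"))                                               # 0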
@@ -53,7 +53,7 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
 
   result =
     case query.kind
-    # of posts: await getTimeline(userId, after)
+    of posts: await getUserTimeline(userId, after)
     of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
     of media: await getGraphUserTweets(userId, TimelineKind.media, after)
     else: Profile(tweets: await getTweetSearch(query, after))
@@ -63,21 +63,6 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
 
   result.tweets.query = query
 
-  if result.user.protected or result.user.suspended:
-    return
-
-  if query.kind == posts:
-    if result.user.verified:
-      for chain in result.tweets.content:
-        if chain[0].user.id == result.user.id:
-          chain[0].user.verified = true
-    if not skipPinned and result.user.pinnedTweet > 0 and after.len == 0:
-      let tweet = await getCachedTweet(result.user.pinnedTweet)
-      if not tweet.isNil:
-        tweet.pinned = true
-        tweet.user = result.user
-        result.pinned = some tweet
-
 proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
                    rss, after: string): Future[string] {.async.} =
   if query.fromUser.len != 1:
@@ -44,10 +44,10 @@ proc getPoolJson*(): JsonNode =
           of Api.search: 100000
           of Api.photoRail: 180
           of Api.timeline: 187
-          of Api.userTweets: 300
+          of Api.userTweets, Api.userTimeline: 300
           of Api.userTweetsAndReplies, Api.userRestId,
-             Api.userScreenName, Api.tweetDetail, Api.tweetResult: 500
-          of Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia: 500
+             Api.userScreenName, Api.tweetDetail, Api.tweetResult,
+             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia: 500
          of Api.userSearch: 900
        reqs = maxReqs - token.apis[api].remaining
 
@@ -161,6 +161,6 @@ proc initTokenPool*(cfg: Config) {.async.} =
   enableLogging = cfg.enableDebug
 
   while true:
-    if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:
+    if tokenPool.countIt(not it.isLimited(Api.userTimeline)) < cfg.minTokens:
       await poolTokens(min(4, cfg.minTokens - tokenPool.len))
     await sleepAsync(2000)
@@ -18,6 +18,7 @@ type
     tweetDetail
     tweetResult
     timeline
+    userTimeline
     photoRail
     search
     userSearch