Mirror of https://github.com/zedeus/nitter.git, synced 2024-12-12 02:56:29 +00:00
Refactor header code
This commit is contained in:
parent 1871cebf1f
commit bafe5fb550
9 changed files with 79 additions and 151 deletions
@@ -7,15 +7,6 @@ import utils, consts, timeline, search
 proc getListTimeline*(username, list, agent, after: string): Future[Timeline] {.async.} =
   let url = base / (listUrl % [username, list])

-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $url,
-    "User-Agent": agent,
-    "X-Twitter-Active-User": "yes",
-    "X-Requested-With": "XMLHttpRequest",
-    "Accept-Language": lang
-  })
-
   var params = toSeq({
     "include_available_features": "1",
     "include_entities": "1",
@@ -25,7 +16,7 @@ proc getListTimeline*(username, list, agent, after: string): Future[Timeline] {.
   if after.len > 0:
     params.add {"max_position": after}

-  let json = await fetchJson(url ? params, headers)
+  let json = await fetchJson(url ? params, genHeaders(agent, url))
   result = await finishTimeline(json, Query(), after, agent)
   if result.content.len == 0:
     return
@@ -36,16 +27,10 @@ proc getListTimeline*(username, list, agent, after: string): Future[Timeline] {.
     else: get(last.retweet).id

 proc getListMembers*(username, list, agent: string): Future[Result[Profile]] {.async.} =
-  let url = base / (listMembersUrl % [username, list])
-
-  let headers = newHttpHeaders({
-    "Accept": htmlAccept,
-    "Referer": $(base / &"{username}/lists/{list}/members"),
-    "User-Agent": agent,
-    "Accept-Language": lang
-  })
-
-  let html = await fetchHtml(url, headers)
+  let
+    url = base / (listMembersUrl % [username, list])
+    referer = base / &"{username}/lists/{list}/members"
+    html = await fetchHtml(url, genHeaders(agent, referer))

   result = Result[Profile](
     minId: html.selectAttr(".stream-container", "data-min-position"),
@@ -56,17 +41,10 @@ proc getListMembers*(username, list, agent: string): Future[Result[Profile]] {.a
   )

 proc getListMembersSearch*(username, list, agent, after: string): Future[Result[Profile]] {.async.} =
-  let url = base / ((listMembersUrl & "/timeline") % [username, list])
-
-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $(base / &"{username}/lists/{list}/members"),
-    "User-Agent": agent,
-    "X-Twitter-Active-User": "yes",
-    "X-Requested-With": "XMLHttpRequest",
-    "X-Push-With": "XMLHttpRequest",
-    "Accept-Language": lang
-  })
+  let
+    url = base / ((listMembersUrl & "/timeline") % [username, list])
+    referer = base / &"{username}/lists/{list}/members"
+    headers = genHeaders({"x-push-with": "XMLHttpRequest"}, agent, referer, xml=true)

   var params = toSeq({
     "include_available_features": "1",

@@ -51,35 +51,23 @@ proc getGuestToken(agent: string; force=false): Future[string] {.async.} =
   tokenUpdated = getTime()
   tokenUses = 0

-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $base,
-    "User-Agent": agent,
-    "Authorization": auth
-  })
-
-  newClient()
-
   let
+    headers = genHeaders({"authorization": auth}, agent, base, lang=false)
     url = apiBase / tokenUrl
-    json = parseJson(await client.postContent($url))
+    json = await fetchJson(url, headers)

-  result = json["guest_token"].to(string)
-  guestToken = result
+  if json != nil:
+    result = json["guest_token"].to(string)
+    guestToken = result

 proc getVideoFetch(tweet: Tweet; agent, token: string) {.async.} =
   if tweet.video.isNone(): return

-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $(base / getLink(tweet)),
-    "User-Agent": agent,
-    "Authorization": auth,
-    "x-guest-token": token
-  })
-
-  let url = apiBase / (videoUrl % tweet.id)
-  let json = await fetchJson(url, headers)
+  let
+    headers = genHeaders({"authorization": auth, "x-guest-token": token},
+                         agent, base / getLink(tweet), lang=false)
+    url = apiBase / (videoUrl % tweet.id)
+    json = await fetchJson(url, headers)

   if json == nil:
     if getTime() - tokenUpdated > initDuration(seconds=1):
@@ -114,52 +102,31 @@ proc getVideo*(tweet: Tweet; agent, token: string; force=false) {.async.} =
 proc getPoll*(tweet: Tweet; agent: string) {.async.} =
   if tweet.poll.isNone(): return

-  let headers = newHttpHeaders({
-    "Accept": htmlAccept,
-    "Referer": $(base / getLink(tweet)),
-    "User-Agent": agent,
-    "Authority": "twitter.com",
-    "Accept-Language": lang,
-  })
-
-  let url = base / (pollUrl % tweet.id)
-  let html = await fetchHtml(url, headers)
+  let
+    headers = genHeaders(agent, base / getLink(tweet), auth=true)
+    url = base / (pollUrl % tweet.id)
+    html = await fetchHtml(url, headers)
   if html == nil: return

   tweet.poll = some parsePoll(html)

 proc getCard*(tweet: Tweet; agent: string) {.async.} =
   if tweet.card.isNone(): return

-  let headers = newHttpHeaders({
-    "Accept": htmlAccept,
-    "Referer": $(base / getLink(tweet)),
-    "User-Agent": agent,
-    "Authority": "twitter.com",
-    "Accept-Language": lang,
-  })
-
-  let query = get(tweet.card).query.replace("sensitive=true", "sensitive=false")
-  let html = await fetchHtml(base / query, headers)
+  let
+    headers = genHeaders(agent, base / getLink(tweet), auth=true)
+    query = get(tweet.card).query.replace("sensitive=true", "sensitive=false")
+    html = await fetchHtml(base / query, headers)
   if html == nil: return

   parseCard(get(tweet.card), html)

 proc getPhotoRail*(username, agent: string): Future[seq[GalleryPhoto]] {.async.} =
-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $(base / username),
-    "User-Agent": agent,
-    "X-Requested-With": "XMLHttpRequest"
-  })
-
-  let params = {
-    "for_photo_rail": "true",
-    "oldest_unread_id": "0"
-  }
-
-  let url = base / (timelineMediaUrl % username) ? params
-  let html = await fetchHtml(url, headers, jsonKey="items_html")
+  let
+    headers = genHeaders({"x-requested-with": "XMLHttpRequest"}, agent, base / username)
+    params = {"for_photo_rail": "true", "oldest_unread_id": "0"}
+    url = base / (timelineMediaUrl % username) ? params
+    html = await fetchHtml(url, headers, jsonKey="items_html")

   result = parsePhotoRail(html)

@@ -12,21 +12,15 @@ proc getProfileFallback(username: string; headers: HttpHeaders): Future[Profile]
   result = parseIntentProfile(html)

 proc getProfile*(username, agent: string): Future[Profile] {.async.} =
-  let headers = newHttpHeaders({
-    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9",
-    "Referer": $(base / username),
-    "User-Agent": agent,
-    "X-Twitter-Active-User": "yes",
-    "X-Requested-With": "XMLHttpRequest",
-    "Accept-Language": lang
-  })
-
   let
+    headers = genHeaders(agent, base / username, xml=true)
+
     params = {
       "screen_name": username,
       "wants_hovercard": "true",
       "_": $(epochTime().int)
     }
+
     url = base / profilePopupUrl ? params
     html = await fetchHtml(url, headers, jsonKey="html")

@@ -37,14 +31,11 @@ proc getProfile*(username, agent: string): Future[Profile] {.async.} =

   result = parsePopupProfile(html)

-proc getProfileFull*(username: string): Future[Profile] {.async.} =
-  let headers = newHttpHeaders({
-    "authority": "twitter.com",
-    "accept": htmlAccept,
-    "referer": "https://twitter.com/" & username,
-    "accept-language": lang
-  })
-
-  let html = await fetchHtml(base / username, headers)
+proc getProfileFull*(username, agent: string): Future[Profile] {.async.} =
+  let
+    url = base / username
+    headers = genHeaders(agent, url, auth=true)
+    html = await fetchHtml(url, headers)
+
   if html == nil: return
   result = parseTimelineProfile(html)

@@ -22,14 +22,8 @@ proc getSearch*[T](query: Query; after, agent: string): Future[Result[T]] {.asyn
     param = genQueryParam(query)
     encoded = encodeUrl(param, usePlus=false)

-    headers = newHttpHeaders({
-      "Accept": jsonAccept,
-      "Referer": $(base / ("search?f=$1&q=$2&src=typd" % [kind, encoded])),
-      "User-Agent": agent,
-      "X-Requested-With": "XMLHttpRequest",
-      "Authority": "twitter.com",
-      "Accept-Language": lang
-    })
+    referer = base / ("search?f=$1&q=$2&src=typd" % [kind, encoded])
+    headers = genHeaders(agent, referer, auth=true, xml=true)

     params = {
       "f": kind,

@@ -22,14 +22,7 @@ proc finishTimeline*(json: JsonNode; query: Query; after, agent: string): Future
     result.content = thread.content

 proc getTimeline*(username, after, agent: string): Future[Timeline] {.async.} =
-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $(base / username),
-    "User-Agent": agent,
-    "X-Twitter-Active-User": "yes",
-    "X-Requested-With": "XMLHttpRequest",
-    "Accept-Language": lang
-  })
+  let headers = genHeaders(agent, base / username, xml=true)

   var params = toSeq({
     "include_available_features": "1",
@@ -45,18 +38,12 @@ proc getTimeline*(username, after, agent: string): Future[Timeline] {.async.} =
   result = await finishTimeline(json, Query(), after, agent)

 proc getProfileAndTimeline*(username, agent, after: string): Future[(Profile, Timeline)] {.async.} =
-  let headers = newHttpHeaders({
-    "authority": "twitter.com",
-    "accept": htmlAccept,
-    "referer": "https://twitter.com/" & username,
-    "accept-language": lang
-  })
-
   var url = base / username
   if after.len > 0:
     url = url ? {"max_position": after}

   let
+    headers = genHeaders(agent, base / username, auth=true)
     html = await fetchHtml(url, headers)
     timeline = parseTimeline(html.select("#timeline > .stream-container"), after)
     profile = parseTimelineProfile(html)

@@ -4,18 +4,12 @@ import ".."/[types, parser]
 import utils, consts, media

 proc getTweet*(username, id, after, agent: string): Future[Conversation] {.async.} =
-  let headers = newHttpHeaders({
-    "Accept": jsonAccept,
-    "Referer": $base,
-    "User-Agent": agent,
-    "X-Twitter-Active-User": "yes",
-    "X-Requested-With": "XMLHttpRequest",
-    "Accept-Language": lang,
-    "Pragma": "no-cache",
-    "X-Previous-Page-Name": "profile"
-  })
-
   let
+    headers = genHeaders({
+      "pragma": "no-cache",
+      "x-previous-page-name": "profile"
+    }, agent, base, xml=true)
+
     url = base / username / tweetUrl / id ? {"max_position": after}
     html = await fetchHtml(url, headers)

@@ -1,12 +1,35 @@
 import httpclient, asyncdispatch, htmlparser
 import strutils, json, xmltree, uri

+import consts
+
+proc genHeaders*(headers: openArray[tuple[key: string, val: string]];
+                 agent: string; referer: Uri; lang=true;
+                 auth=false; xml=false): HttpHeaders =
+  result = newHttpHeaders({
+    "referer": $referer,
+    "user-agent": agent,
+    "x-twitter-active-user": "yes",
+  })
+
+  if auth: result["authority"] = "twitter.com"
+  if lang: result["accept-language"] = consts.lang
+  if xml: result["x-requested-with"] = "XMLHttpRequest"
+
+  for (key, val) in headers:
+    result[key] = val
+
+proc genHeaders*(agent: string; referer: Uri; lang=true;
+                 auth=false; xml=false): HttpHeaders =
+  genHeaders([], agent, referer, lang, auth, xml)
+
 template newClient*() {.dirty.} =
   var client = newAsyncHttpClient()
   defer: client.close()
   client.headers = headers

 proc fetchHtml*(url: Uri; headers: HttpHeaders; jsonKey = ""): Future[XmlNode] {.async.} =
+  headers["accept"] = htmlAccept
   newClient()

   var resp = ""
@@ -16,12 +39,11 @@ proc fetchHtml*(url: Uri; headers: HttpHeaders; jsonKey = ""): Future[XmlNode] {
     return nil

   if jsonKey.len > 0:
-    let json = parseJson(resp)[jsonKey].str
-    return parseHtml(json)
-  else:
-    return parseHtml(resp)
+    resp = parseJson(resp)[jsonKey].str
+  return parseHtml(resp)

 proc fetchJson*(url: Uri; headers: HttpHeaders): Future[JsonNode] {.async.} =
+  headers["accept"] = jsonAccept
   newClient()

   var resp = ""

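For reference, a minimal usage sketch of the two genHeaders overloads added above (not part of the commit). It mirrors the call patterns in the refactored procs; base, auth and the fetch procs are the existing consts/utils symbols used throughout this diff, while exampleRequest and the endpoint path are hypothetical.

# Sketch only, assuming the api module layout shown in this diff.
import asyncdispatch, json, uri
import utils, consts          # genHeaders, fetchJson, base, auth, ...

proc exampleRequest(agent: string): Future[JsonNode] {.async.} =
  let url = base / "example/endpoint"   # hypothetical path

  # plain overload: referer + user-agent (+ accept-language by default)
  var headers = genHeaders(agent, url)

  # overload with extra headers plus the auth/xml switches, as in getGuestToken
  headers = genHeaders({"x-guest-token": "abc"}, agent, url,
                       lang=false, auth=true, xml=true)

  # fetchJson/fetchHtml set the "accept" header themselves before the request
  result = await fetchJson(url, headers)
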
@@ -39,7 +39,7 @@ proc getCachedProfile*(username, agent: string; force=false): Future[Profile] {.
       result.getOne("lower(username) = ?", toLower(username))
       doAssert not result.isOutdated
     except AssertionError, KeyError:
-      result = await getProfileFull(username)
+      result = await getProfileFull(username, agent)
       cache(result)

 proc setProfileCacheTime*(minutes: int) =

@@ -9,8 +9,8 @@ card = [
      'github.com', False],

     ['lorenlugosch/status/1115440394148487168',
-     'lorenlugosch/pretrain_speech_model',
-     'Speech Model Pre-training for End-to-End Spoken Language Understanding - lorenlugosch/pretrain_speech_model',
+     'lorenlugosch/end-to-end-SLU',
+     'PyTorch code for end-to-end spoken language understanding (SLU) with ASR-based transfer learning - lorenlugosch/end-to-end-SLU',
      'github.com', False],

     ['PyTorch/status/1123379369672450051',
@@ -85,12 +85,7 @@ playable = [
     ['nim_lang/status/1121090879823986688',
      'Nim - First natively compiled language w/ hot code-reloading at...',
      '#nim #c++ #ACCUConf Nim is a statically typed systems and applications programming language which offers perhaps some of the most powerful metaprogramming ca...',
-     'youtube.com'],
-
-    ['lele/status/819930645145288704',
-     'Eurocrash presents Open Decks - emerging dj #4: E-Musik',
-     "OPEN DECKS is Eurocrash's new project about discovering new and emerging dj talents. Every selected dj will have the chance to perform the first dj-set in front of an actual audience. The best dj...",
-     'mixcloud.com']
+     'youtube.com']
 ]

 promo = [