mirror of https://github.com/zedeus/nitter synced 2024-11-22 01:45:22 +01:00

Track token rate limits per endpoint

Zed 2022-01-05 22:48:45 +01:00
parent d726894555
commit dd71e60f35
5 changed files with 79 additions and 58 deletions
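
In short: instead of a single remaining/reset counter per guest token, each token now carries a table of rate limits keyed by API endpoint (the new Api enum), so a token that has exhausted, say, its search quota can still be used for timelines. A minimal self-contained sketch of the idea, using only the shapes visible in the diff below (checkLimited is an illustrative stand-in for the new isLimited proc, not code from the commit):

import tables, times

type
  Api = enum
    userShow, photoRail, timeline, search, tweet, list, listBySlug, listMembers
  RateLimit = object
    remaining: int
    reset: Time
  Token = ref object
    tok: string
    apis: Table[Api, RateLimit]

# A token only counts as limited for the endpoint whose quota is used up.
proc checkLimited(token: Token; api: Api): bool =
  api in token.apis and
    token.apis[api].remaining <= 5 and
    token.apis[api].reset > getTime()

when isMainModule:
  let token = Token(tok: "guest")
  token.apis[Api.search] = RateLimit(remaining: 3, reset: getTime() + 15.minutes)
  echo checkLimited(token, Api.search)    # true: search quota nearly exhausted
  echo checkLimited(token, Api.timeline)  # false: other endpoints still usable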

View File

@@ -6,57 +6,57 @@ import types, query, formatters, consts, apiutils, parser
 proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
   let
     variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
-    js = await fetch(graphList ? {"variables": $variables})
-  result = parseGraphList(js)
+    url = graphListBySlug ? {"variables": $variables}
+  result = parseGraphList(await fetch(url, Api.listBySlug))
 
 proc getGraphList*(id: string): Future[List] {.async.} =
   let
     variables = %*{"listId": id, "withHighlightedLabel": false}
-    js = await fetch(graphListId ? {"variables": $variables})
-  result = parseGraphList(js)
+    url = graphList ? {"variables": $variables}
+  result = parseGraphList(await fetch(url, Api.list))
 
 proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
     ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
     url = listTimeline ? ps
-  result = parseTimeline(await fetch(url), after)
+  result = parseTimeline(await fetch(url, Api.timeline), after)
 
 proc getListMembers*(list: List; after=""): Future[Result[Profile]] {.async.} =
   if list.id.len == 0: return
   let
     ps = genParams({"list_id": list.id}, after)
     url = listMembers ? ps
-  result = parseListMembers(await fetch(url, oldApi=true), after)
+  result = parseListMembers(await fetch(url, Api.listMembers), after)
 
 proc getProfile*(username: string): Future[Profile] {.async.} =
   let
     ps = genParams({"screen_name": username})
-    js = await fetch(userShow ? ps, oldApi=true)
+    js = await fetch(userShow ? ps, Api.userShow)
   result = parseUserShow(js, username=username)
 
 proc getProfileById*(userId: string): Future[Profile] {.async.} =
   let
     ps = genParams({"user_id": userId})
-    js = await fetch(userShow ? ps, oldApi=true)
+    js = await fetch(userShow ? ps, Api.userShow)
   result = parseUserShow(js, id=userId)
 
 proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
   let
     ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
     url = timeline / (id & ".json") ? ps
-  result = parseTimeline(await fetch(url), after)
+  result = parseTimeline(await fetch(url, Api.timeline), after)
 
 proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
   let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
-  result = parseTimeline(await fetch(url), after)
+  result = parseTimeline(await fetch(url, Api.timeline), after)
 
 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   let
     ps = genParams({"screen_name": name, "trim_user": "true"},
                    count="18", ext=false)
     url = photoRail ? ps
-  result = parsePhotoRail(await fetch(url, oldApi=true))
+  result = parsePhotoRail(await fetch(url, Api.photoRail))
 
 proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
   when T is Profile:
@@ -74,14 +74,14 @@ proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
   let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
 
   try:
-    result = parse(await fetch(url), after)
+    result = parse(await fetch(url, Api.search), after)
     result.query = query
   except InternalError:
     return Result[T](beginning: true, query: query)
 
 proc getTweetImpl(id: string; after=""): Future[Conversation] {.async.} =
   let url = tweet / (id & ".json") ? genParams(cursor=after)
-  result = parseConversation(await fetch(url), id)
+  result = parseConversation(await fetch(url, Api.tweet), id)
 
 proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
   result = (await getTweetImpl(id, after)).replies

View File

@@ -3,7 +3,9 @@ import httpclient, asyncdispatch, options, times, strutils, uri
 import packedjson, zippy
 import types, tokens, consts, parserutils, http_pool
 
-const rl = "x-rate-limit-"
+const
+  rlRemaining = "x-rate-limit-remaining"
+  rlReset = "x-rate-limit-reset"
 
 var pool: HttpPool
@@ -38,11 +40,11 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
     "DNT": "1"
   })
 
-proc fetch*(url: Uri; oldApi=false): Future[JsonNode] {.async.} =
+proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
   once:
     pool = HttpPool()
 
-  var token = await getToken()
+  var token = await getToken(api)
   if token.tok.len == 0:
     raise rateLimitError()
@@ -65,9 +67,14 @@ proc fetch*(url: Uri; oldApi=false): Future[JsonNode] {.async.} =
       echo resp.status, ": ", body
       result = newJNull()
 
-    if not oldApi and resp.headers.hasKey(rl & "reset"):
-      token.remaining = parseInt(resp.headers[rl & "remaining"])
-      token.reset = fromUnix(parseInt(resp.headers[rl & "reset"]))
+    if api != Api.search and resp.headers.hasKey(rlRemaining):
+      let
+        remaining = parseInt(resp.headers[rlRemaining])
+        reset = parseInt(resp.headers[rlReset])
+      token.setRateLimit(api, remaining, reset)
+      echo api, " ", remaining, " ", url.path
+    else:
+      echo api, " ", url.path
 
     if result.getError notin {invalidToken, forbidden, badToken}:
       token.lastUse = getTime()
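
Since search responses carry no usable rate-limit headers, only the other endpoints get booked. The header handling can be exercised on its own; a small self-contained sketch (recordLimit and the header values are made up for illustration; in the real code, fetch calls token.setRateLimit instead):

import httpclient, strutils, tables, times

type
  Api = enum timeline, search
  RateLimit = object
    remaining: int
    reset: Time

var apis: Table[Api, RateLimit]

# Hypothetical response headers; real values come from Twitter's API.
let headers = newHttpHeaders({
  "x-rate-limit-remaining": "42",
  "x-rate-limit-reset": "1641420000"
})

proc recordLimit(api: Api; headers: HttpHeaders) =
  # Mirrors the branch in fetch: skip search, otherwise store the quota
  # for exactly this endpoint.
  if api != search and headers.hasKey("x-rate-limit-remaining"):
    apis[api] = RateLimit(
      remaining: parseInt(headers["x-rate-limit-remaining"]),
      reset: fromUnix(parseInt(headers["x-rate-limit-reset"])))

recordLimit(timeline, headers)
echo apis[timeline].remaining, " requests left until ", apis[timeline].reset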

View File

@@ -19,8 +19,8 @@ const
   tweet* = timelineApi / "conversation"
 
   graphql = api / "graphql"
-  graphList* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
-  graphListId* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
+  graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
+  graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
 
   timelineParams* = {
     "include_profile_interstitial_type": "0",

View File

@@ -1,13 +1,12 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, httpclient, times, sequtils, json, math, random
-import strutils, strformat
+import asyncdispatch, httpclient, times, sequtils, json, random
+import strutils, tables
 import zippy
 import types, agents, consts, http_pool
 
 const
-  expirationTime = 3.hours
-  maxLastUse = 1.hours
-  resetPeriod = 15.minutes
+  maxAge = 3.hours # tokens expire after 3 hours
+  maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
   failDelay = initDuration(minutes=30)
 
 var
@@ -15,14 +14,9 @@ var
   tokenPool: seq[Token]
   lastFailed: Time
 
-proc getPoolInfo*: string =
-  if tokenPool.len == 0: return "token pool empty"
-  let avg = tokenPool.mapIt(it.remaining).sum() div tokenPool.len
-  return &"{tokenPool.len} tokens, average remaining: {avg}"
-
 proc rateLimitError*(): ref RateLimitError =
-  newException(RateLimitError, "rate limited with " & getPoolInfo())
+  newException(RateLimitError, "rate limited")
 
 proc fetchToken(): Future[Token] {.async.} =
   if getTime() - lastFailed < failDelay:
     raise rateLimitError()
@@ -37,51 +31,58 @@ proc fetchToken(): Future[Token] {.async.} =
     "authorization": auth
   })
 
-  var
-    resp: string
-    tokNode: JsonNode
-    tok: string
-
   try:
-    resp = clientPool.use(headers): await c.postContent(activate)
-    tokNode = parseJson(uncompress(resp))["guest_token"]
-    tok = tokNode.getStr($(tokNode.getInt))
-
-    let time = getTime()
-    result = Token(tok: tok, remaining: 187, reset: time + resetPeriod,
-                   init: time, lastUse: time)
+    let
+      resp = clientPool.use(headers): await c.postContent(activate)
+      tokNode = parseJson(uncompress(resp))["guest_token"]
+      tok = tokNode.getStr($(tokNode.getInt))
+      time = getTime()
+
+    return Token(tok: tok, init: time, lastUse: time)
   except Exception as e:
     lastFailed = getTime()
     echo "fetching token failed: ", e.msg
 
-template expired(token: Token): untyped =
+proc expired(token: Token): bool =
   let time = getTime()
-  token.init < time - expirationTime or
-    token.lastUse < time - maxLastUse
+  token.init < time - maxAge or token.lastUse < time - maxLastUse
 
-template isLimited(token: Token): untyped =
-  token == nil or (token.remaining <= 5 and token.reset > getTime()) or
-    token.expired
+proc isLimited(token: Token; api: Api): bool =
+  if token.isNil or token.expired:
+    return true
+
+  if api in token.apis:
+    let limit = token.apis[api]
+    return (limit.remaining <= 5 and limit.reset > getTime())
+  else:
+    return false
 
 proc release*(token: Token; invalid=false) =
-  if token != nil and (invalid or token.expired):
+  if not token.isNil and (invalid or token.expired):
     let idx = tokenPool.find(token)
     if idx > -1: tokenPool.delete(idx)
 
-proc getToken*(): Future[Token] {.async.} =
+proc getToken*(api: Api): Future[Token] {.async.} =
   for i in 0 ..< tokenPool.len:
-    if not result.isLimited: break
+    if not (result.isNil or result.isLimited(api)):
+      break
     release(result)
     result = tokenPool.sample()
 
-  if result.isLimited:
+  if result.isNil or result.isLimited(api):
     release(result)
     result = await fetchToken()
     tokenPool.add result
 
-  if result == nil:
+  if result.isNil:
     raise rateLimitError()
 
+proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
+  token.apis[api] = RateLimit(
+    remaining: remaining,
+    reset: fromUnix(reset)
+  )
+
 proc poolTokens*(amount: int) {.async.} =
   var futs: seq[Future[Token]]
   for i in 0 ..< amount:
@@ -93,13 +94,13 @@ proc poolTokens*(amount: int) {.async.} =
     try: newToken = await token
     except: discard
 
-    if newToken != nil:
+    if not newToken.isNil:
       tokenPool.add newToken
 
 proc initTokenPool*(cfg: Config) {.async.} =
   clientPool = HttpPool()
 
   while true:
-    if tokenPool.countIt(not it.isLimited) < cfg.minTokens:
+    if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:
       await poolTokens(min(4, cfg.minTokens - tokenPool.len))
     await sleepAsync(2000)
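
Independent of the per-endpoint limits, tokens still age out globally: after maxAge (3 hours) since creation, or after maxLastUse (1 hour) without use. A tiny sketch of that check, using the constant and field names from the diff (the Token shape is trimmed down and the example times are illustrative):

import times

type
  Token = ref object
    init, lastUse: Time

const
  maxAge = 3.hours     # tokens expire 3 hours after creation
  maxLastUse = 1.hours # or after an hour without being used

proc expired(token: Token): bool =
  let time = getTime()
  token.init < time - maxAge or token.lastUse < time - maxLastUse

when isMainModule:
  let now = getTime()
  let stale = Token(init: now - 2.hours, lastUse: now - 90.minutes)
  let fresh = Token(init: now - 2.hours, lastUse: now - 5.minutes)
  echo expired(stale)  # true: unused for more than an hour
  echo expired(fresh)  # false: recently used and under 3 hours old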

View File

@@ -8,12 +8,25 @@
   RateLimitError* = object of CatchableError
   InternalError* = object of CatchableError
 
-  Token* = ref object
-    tok*: string
+  Api* {.pure.} = enum
+    userShow
+    photoRail
+    timeline
+    search
+    tweet
+    list
+    listBySlug
+    listMembers
+
+  RateLimit* = object
     remaining*: int
     reset*: Time
+
+  Token* = ref object
+    tok*: string
     init*: Time
     lastUse*: Time
+    apis*: Table[Api, RateLimit]
 
   Error* = enum
     null = 0