Update httpx and friends to 0.21.3 (#3121)

Author: Andy Jones, 2022-01-15 10:16:10 -08:00 (committed by GitHub)
parent 321ddc91bc
commit 3ddd0f8944
9 changed files with 28 additions and 27 deletions

.gitignore

@@ -26,3 +26,5 @@ dist/
 local/
 gh-pages/
 searx.egg-info/
+.env
+geckodriver.log


@@ -171,7 +171,7 @@ headers set HTTP header information
 data                 set HTTP data information
 cookies              set HTTP cookies
 verify               bool Performing SSL-Validity check
-allow_redirects      bool Follow redirects
+follow_redirects     bool Follow redirects
 max_redirects        int maximum redirects, hard limit
 soft_max_redirects   int maximum redirects, soft limit. Record an error but don't stop the engine
 raise_for_httperror  bool True by default: raise an exception if the HTTP code of response is >= 300
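
For orientation, a hypothetical engine request() that sets the parameters documented above; 'search_url' and every value below are made up for this sketch, only the parameter names come from the table:

    def request(query, params):
        params['url'] = search_url.format(query=query)
        params['verify'] = True               # perform the SSL-validity check
        params['follow_redirects'] = True     # renamed from allow_redirects
        params['max_redirects'] = 5           # hard limit
        params['soft_max_redirects'] = 3      # record an error but keep the engine running
        params['raise_for_httperror'] = True  # raise if HTTP status >= 300
        return params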


@@ -7,10 +7,10 @@ lxml==4.6.3
 pygments==2.8.0
 python-dateutil==2.8.2
 pyyaml==6.0
-httpx[http2]==0.19.0
+httpx[http2]==0.21.3
 Brotli==1.0.9
 uvloop==0.16.0; python_version >= '3.7'
 uvloop==0.14.0; python_version < '3.7'
-httpx-socks[asyncio]==0.4.1
+httpx-socks[asyncio]==0.7.2
 langdetect==1.0.9
 setproctitle==1.2.2
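
Note: the jump from httpx 0.19.0 to 0.21.3 crosses the 0.20 release, where httpx renamed allow_redirects to follow_redirects and stopped following redirects by default; httpx-socks 0.7.2 matches the reworked transport API. A minimal sketch of what the new pin accepts (the URL is illustrative):

    import httpx

    # httpx >= 0.20 no longer follows redirects unless told to:
    resp = httpx.get('https://example.org/old-path', follow_redirects=True)

    # The old keyword is gone; this line would raise a TypeError:
    # httpx.get('https://example.org/old-path', allow_redirects=True)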


@@ -39,7 +39,7 @@ cookies = dict()
 def init(engine_settings=None):
     global cookies
     # initial cookies
-    resp = http_get(url, allow_redirects=False)
+    resp = http_get(url, follow_redirects=False)
     if resp.ok:
         for r in resp.history:
             cookies.update(r.cookies)


@@ -122,17 +122,17 @@ def request(method, url, **kwargs):
 def get(url, **kwargs):
-    kwargs.setdefault('allow_redirects', True)
+    kwargs.setdefault('follow_redirects', True)
     return request('get', url, **kwargs)


 def options(url, **kwargs):
-    kwargs.setdefault('allow_redirects', True)
+    kwargs.setdefault('follow_redirects', True)
     return request('options', url, **kwargs)


 def head(url, **kwargs):
-    kwargs.setdefault('allow_redirects', False)
+    kwargs.setdefault('follow_redirects', False)
     return request('head', url, **kwargs)
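
These wrappers keep their requests-style defaults while forwarding httpx's new keyword: get() and options() follow redirects by default, head() does not. A hypothetical use (the import path is an assumption, it is not shown in this diff):

    # Import path assumed for this sketch; adjust to the actual module.
    from searx import network

    resp = network.get('https://example.org')   # follows redirects by default
    resp = network.head('https://example.org')  # HEAD does not follow redirects
    resp = network.get('https://example.org', follow_redirects=False)  # explicit override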


@@ -30,17 +30,15 @@ logger = logger.getChild('searx.http.client')
 LOOP = None
 SSLCONTEXTS = {}
 TRANSPORT_KWARGS = {
-    'backend': 'asyncio',
     'trust_env': False,
 }


-async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL):
-    origin = httpcore._utils.url_to_origin(url)
-    logger.debug('Drop connections for %r', origin)
-    connections_to_close = connection_pool._connections_for_origin(origin)
+async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool, url: httpx._models.URL):
+    logger.debug('Drop connections for %r', url.host)
+    connections_to_close = [conn for conn in connection_pool._pool if conn._origin == url.host]
     for connection in connections_to_close:
-        await connection_pool._remove_from_pool(connection)
+        connection_pool._pool.remove(connection)
         try:
             await connection.aclose()
         except httpx.NetworkError as e:

@@ -76,12 +74,12 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
     Note: AsyncProxyTransport inherit from AsyncConnectionPool
     """

-    async def handle_async_request(self, method, url, headers=None, stream=None, extensions=None):
+    async def handle_async_request(self, request: httpx.Request):
         retry = 2
         while retry > 0:
             retry -= 1
             try:
-                return await super().handle_async_request(method, url, headers, stream, extensions)
+                return await super().handle_async_request(request)
             except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
                 raise httpx.ProxyError(e)
             except OSError as e:

@@ -89,25 +87,25 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
                 raise httpx.NetworkError(e)
             except httpx.RemoteProtocolError as e:
                 # in case of httpx.RemoteProtocolError: Server disconnected
-                await close_connections_for_url(self, url)
+                await close_connections_for_url(self, request.url)
                 logger.warning('httpx.RemoteProtocolError: retry', exc_info=e)
                 # retry
             except (httpx.NetworkError, httpx.ProtocolError) as e:
                 # httpx.WriteError on HTTP/2 connection leaves a new opened stream
                 # then each new request creates a new stream and raise the same WriteError
-                await close_connections_for_url(self, url)
+                await close_connections_for_url(self, request.url)
                 raise e


 class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
     """Fix httpx.AsyncHTTPTransport"""

-    async def handle_async_request(self, method, url, headers=None, stream=None, extensions=None):
+    async def handle_async_request(self, request: httpx.Request):
         retry = 2
         while retry > 0:
             retry -= 1
             try:
-                return await super().handle_async_request(method, url, headers, stream, extensions)
+                return await super().handle_async_request(request)
             except OSError as e:
                 # socket.gaierror when DNS resolution fails
                 raise httpx.ConnectError(e)

@@ -115,16 +113,16 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
                 # httpx.CloseError: [Errno 104] Connection reset by peer
                 # raised by _keepalive_sweep()
                 # from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198  # noqa
-                await close_connections_for_url(self._pool, url)
+                await close_connections_for_url(self._pool, request.url)
                 logger.warning('httpx.CloseError: retry', exc_info=e)
                 # retry
             except httpx.RemoteProtocolError as e:
                 # in case of httpx.RemoteProtocolError: Server disconnected
-                await close_connections_for_url(self._pool, url)
+                await close_connections_for_url(self._pool, request.url)
                 logger.warning('httpx.RemoteProtocolError: retry', exc_info=e)
                 # retry
             except (httpx.ProtocolError, httpx.NetworkError) as e:
-                await close_connections_for_url(self._pool, url)
+                await close_connections_for_url(self._pool, request.url)
                 raise e
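
Both overrides change shape because httpx 0.21 collapsed the old handle_async_request(method, url, headers, stream, extensions) signature into a single httpx.Request, which is why the retry paths above now pass request.url. A self-contained sketch of a retrying transport against the new signature (class name and retry policy are illustrative, not this project's code):

    import httpx

    class RetryOnDisconnectTransport(httpx.AsyncHTTPTransport):
        # Sketch only: since httpx 0.21, a transport takes one httpx.Request
        # and returns one httpx.Response.
        async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
            retry = 2
            while True:
                retry -= 1
                try:
                    return await super().handle_async_request(request)
                except httpx.RemoteProtocolError:
                    # server disconnected mid-request: retry, then give up
                    if retry <= 0:
                        raise

    # usage: httpx.AsyncClient(transport=RetryOnDisconnectTransport())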


@@ -76,7 +76,7 @@ def _is_url_image(image_url):
     a = time()
     try:
         network.set_timeout_for_thread(10.0, time())
-        r = network.get(image_url, timeout=10.0, allow_redirects=True, headers={
+        r = network.get(image_url, timeout=10.0, follow_redirects=True, headers={
             'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0',
             'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
             'Accept-Language': 'en-US;q=0.5,en;q=0.3',


@@ -69,9 +69,10 @@ class OnlineProcessor(EngineProcessor):
         if max_redirects:
             request_args['max_redirects'] = max_redirects

-        # allow_redirects
-        if 'allow_redirects' in params:
-            request_args['allow_redirects'] = params['allow_redirects']
+        # follow_redirects
+        if 'follow_redirects' in params:
+            # httpx has renamed this parameter to 'follow_redirects'
+            request_args['follow_redirects'] = params['follow_redirects']

         # soft_max_redirects
         soft_max_redirects = params.get('soft_max_redirects', max_redirects or 0)


@@ -941,7 +941,7 @@ def image_proxy():
         url=url,
         headers=headers,
         timeout=settings['outgoing']['request_timeout'],
-        allow_redirects=True,
+        follow_redirects=True,
         max_redirects=20)
     resp = next(stream)
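
The stream() generator here is searx's own helper; for comparison, a sketch of the same call through httpx's native streaming API with the renamed flag (the function name and structure are made up):

    import httpx

    async def proxy_image(url: str) -> bytes:
        # follow_redirects / max_redirects mirror the arguments passed above
        async with httpx.AsyncClient(follow_redirects=True, max_redirects=20) as client:
            async with client.stream('GET', url) as resp:
                return b''.join([chunk async for chunk in resp.aiter_bytes()])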