[mod] upgrade httpx 0.21.2

httpx 0.21.2 and httpcore 0.14.4 fix multiple issues:
* https://github.com/encode/httpx/releases/tag/0.21.2
* https://github.com/encode/httpcore/releases/tag/0.14.4

so most of the workarounds in searx.network have been removed.
Author: Alexandre Flament, 2021-12-11 13:17:04 +01:00 (committed by Noémi Ványi)
Parent: 0248777f95
Commit: 27d58993c2
3 changed files with 74 additions and 115 deletions
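For context, the key API change this upgrade tracks is visible in the first hunk below: a custom transport's handle_async_request() now receives a single httpx.Request instead of the old (method, url, headers, stream, extensions) arguments, which is what lets AsyncHTTPTransportNoHttp and AsyncProxyTransportFixed shrink to the few lines shown. A minimal sketch of a custom transport against the new API (the class name and the scheme check are illustrative only, not part of this commit):

import httpx

class BlockPlainHTTPTransport(httpx.AsyncHTTPTransport):
    """Illustrative transport: reject plain-HTTP requests, pass everything else through."""

    async def handle_async_request(self, request):
        # httpx >= 0.21: the whole request arrives as one httpx.Request object.
        if request.url.scheme == 'http':
            raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
        return await super().handle_async_request(request)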


@@ -5,7 +5,6 @@ import logging
import threading
import uvloop
import httpcore
import httpx
from httpx_socks import AsyncProxyTransport
from python_socks import (
@@ -27,17 +26,6 @@ TRANSPORT_KWARGS = {
}
async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool, url: httpx._models.URL):
logger.debug('Drop connections for %r', url.host)
connections_to_close = [conn for conn in connection_pool._pool if conn._origin == url.host]
for connection in connections_to_close:
connection_pool._pool.remove(connection)
try:
await connection.aclose()
except httpx.NetworkError as e:
logger.warning('Error closing an existing connection', exc_info=e)
def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
global SSLCONTEXTS
key = (proxy_url, cert, verify, trust_env, http2)
@@ -49,74 +37,25 @@ def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http
class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
"""Block HTTP request"""
async def handle_async_request(self, method, url, headers=None, stream=None, extensions=None):
raise httpx.UnsupportedProtocol("HTTP protocol is disabled")
async def handle_async_request(self, request):
raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
class AsyncProxyTransportFixed(AsyncProxyTransport):
"""Fix httpx_socks.AsyncProxyTransport
Map python_socks exceptions to httpx.ProxyError
Map socket.gaierror to httpx.ConnectError
Note: keepalive_expiry is ignored, AsyncProxyTransport should call:
* self._keepalive_sweep()
* self._response_closed(self, connection)
Note: AsyncProxyTransport inherit from AsyncConnectionPool
Map python_socks exceptions to httpx.ProxyError exceptions
"""
async def handle_async_request(self, request: httpx.Request):
retry = 2
while retry > 0:
retry -= 1
try:
return await super().handle_async_request(request)
except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
raise httpx.ProxyError(e)
except OSError as e:
# socket.gaierror when DNS resolution fails
raise httpx.NetworkError(e)
except httpx.RemoteProtocolError as e:
# in case of httpx.RemoteProtocolError: Server disconnected
await close_connections_for_url(self, request.url)
logger.warning('httpx.RemoteProtocolError: retry', exc_info=e)
# retry
except (httpx.NetworkError, httpx.ProtocolError) as e:
# httpx.WriteError on HTTP/2 connection leaves a new opened stream
# then each new request creates a new stream and raise the same WriteError
await close_connections_for_url(self, request.url)
raise e
class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
"""Fix httpx.AsyncHTTPTransport"""
async def handle_async_request(self, request: httpx.Request):
retry = 2
while retry > 0:
retry -= 1
try:
return await super().handle_async_request(request)
except OSError as e:
# socket.gaierror when DNS resolution fails
raise httpx.ConnectError(e)
except httpx.CloseError as e:
# httpx.CloseError: [Errno 104] Connection reset by peer
# raised by _keepalive_sweep()
# from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198 # noqa
await close_connections_for_url(self._pool, request.url)
logger.warning('httpx.CloseError: retry', exc_info=e)
# retry
except httpx.RemoteProtocolError as e:
# in case of httpx.RemoteProtocolError: Server disconnected
await close_connections_for_url(self._pool, request.url)
logger.warning('httpx.RemoteProtocolError: retry', exc_info=e)
# retry
except (httpx.ProtocolError, httpx.NetworkError) as e:
await close_connections_for_url(self._pool, request.url)
raise e
async def handle_async_request(self, request):
try:
return await super().handle_async_request(request)
except ProxyConnectionError as e:
raise httpx.ProxyError("ProxyConnectionError: " + e.strerror, request=request) from e
except ProxyTimeoutError as e:
raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
except ProxyError as e:
raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e
def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
@@ -132,29 +71,35 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit
proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
verify = get_sslcontexts(proxy_url, None, True, False, http2) if verify is True else verify
return AsyncProxyTransportFixed(proxy_type=proxy_type, proxy_host=proxy_host, proxy_port=proxy_port,
username=proxy_username, password=proxy_password,
rdns=rdns,
loop=get_loop(),
verify=verify,
http2=http2,
local_address=local_address,
max_connections=limit.max_connections,
max_keepalive_connections=limit.max_keepalive_connections,
keepalive_expiry=limit.keepalive_expiry,
retries=retries,
**TRANSPORT_KWARGS)
return AsyncProxyTransportFixed(
proxy_type=proxy_type,
proxy_host=proxy_host,
proxy_port=proxy_port,
username=proxy_username,
password=proxy_password,
rdns=rdns,
loop=get_loop(),
verify=verify,
http2=http2,
local_address=local_address,
limits=limit,
retries=retries,
**TRANSPORT_KWARGS,
)
def get_transport(verify, http2, local_address, proxy_url, limit, retries):
verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
return AsyncHTTPTransportFixed(verify=verify,
http2=http2,
local_address=local_address,
proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
limits=limit,
retries=retries,
**TRANSPORT_KWARGS)
return httpx.AsyncHTTPTransport(
# pylint: disable=protected-access
verify=verify,
http2=http2,
limits=limit,
proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
local_address=local_address,
retries=retries,
**TRANSPORT_KWARGS,
)
def iter_proxies(proxies):
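In the new transport code above, AsyncProxyTransportFixed boils down to one pattern: re-raise the proxy library's exceptions as httpx.ProxyError so callers only ever deal with httpx's exception hierarchy. A minimal self-contained sketch of that pattern, with SomeProxyLibError as a made-up stand-in for the python_socks exceptions handled in the real code:

import httpx

class SomeProxyLibError(Exception):
    """Hypothetical stand-in for a third-party proxy error (python_socks in the real code)."""

class ExceptionMappingTransport(httpx.AsyncHTTPTransport):
    async def handle_async_request(self, request):
        try:
            return await super().handle_async_request(request)
        except SomeProxyLibError as e:
            # Surface third-party failures through httpx's own exception type,
            # keeping the originating request attached for debugging.
            raise httpx.ProxyError(str(e), request=request) from e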


@@ -179,48 +179,60 @@ class Network:
await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)
@staticmethod
def get_kwargs_clients(kwargs):
def extract_kwargs_clients(kwargs):
kwargs_clients = {}
if 'verify' in kwargs:
kwargs_clients['verify'] = kwargs.pop('verify')
if 'max_redirects' in kwargs:
kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
if 'allow_redirects' in kwargs:
# see https://github.com/encode/httpx/pull/1808
kwargs['follow_redirects'] = kwargs.pop('allow_redirects')
return kwargs_clients
def is_valid_respones(self, response):
if (self.retry_on_http_error is True and 400 <= response.status_code <= 599) \
or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error) \
or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error):
def is_valid_response(self, response):
# pylint: disable=too-many-boolean-expressions
if (
(self.retry_on_http_error is True and 400 <= response.status_code <= 599)
or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
):
return False
return True
async def request(self, method, url, **kwargs):
async def call_client(self, stream, method, url, **kwargs):
retries = self.retries
was_disconnected = False
kwargs_clients = Network.extract_kwargs_clients(kwargs)
while retries >= 0: # pragma: no cover
kwargs_clients = Network.get_kwargs_clients(kwargs)
client = await self.get_client(**kwargs_clients)
try:
response = await client.request(method, url, **kwargs)
if self.is_valid_respones(response) or retries <= 0:
if stream:
response = client.stream(method, url, **kwargs)
else:
response = await client.request(method, url, **kwargs)
if self.is_valid_response(response) or retries <= 0:
return response
except httpx.RemoteProtocolError as e:
if not was_disconnected:
# the server has closed the connection:
# try again without decreasing the retries variable & with a new HTTP client
was_disconnected = True
await client.aclose()
self._logger.warning('httpx.RemoteProtocolError: the server has disconnected, retrying')
continue
if retries <= 0:
raise e
except (httpx.RequestError, httpx.HTTPStatusError) as e:
if retries <= 0:
raise e
retries -= 1
async def request(self, method, url, **kwargs):
return await self.call_client(False, method, url, **kwargs)
async def stream(self, method, url, **kwargs):
retries = self.retries
while retries >= 0: # pragma: no cover
kwargs_clients = Network.get_kwargs_clients(kwargs)
client = await self.get_client(**kwargs_clients)
try:
response = client.stream(method, url, **kwargs)
if self.is_valid_respones(response) or retries <= 0:
return response
except (httpx.RequestError, httpx.HTTPStatusError) as e:
if retries <= 0:
raise e
retries -= 1
return await self.call_client(True, method, url, **kwargs)
@classmethod
async def aclose_all(cls):


@@ -77,13 +77,15 @@ class TestNetwork(SearxTestCase):
'verify': True,
'max_redirects': 5,
'timeout': 2,
'allow_redirects': True,
}
kwargs_client = Network.get_kwargs_clients(kwargs)
kwargs_client = Network.extract_kwargs_clients(kwargs)
self.assertEqual(len(kwargs_client), 2)
self.assertEqual(len(kwargs), 1)
self.assertEqual(len(kwargs), 2)
self.assertEqual(kwargs['timeout'], 2)
self.assertEqual(kwargs['follow_redirects'], True)
self.assertTrue(kwargs_client['verify'])
self.assertEqual(kwargs_client['max_redirects'], 5)
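The test above covers the allow_redirects handling added to extract_kwargs_clients: httpx renamed the flag to follow_redirects (see the encode/httpx#1808 link in the diff), so the Network class keeps accepting allow_redirects from existing call sites and rewrites it to the new per-request keyword. A minimal sketch of that keyword on the httpx side, with example.org as a placeholder URL:

import asyncio
import httpx

async def main():
    async with httpx.AsyncClient() as client:
        # follow_redirects is the per-request keyword in current httpx;
        # extract_kwargs_clients() rewrites allow_redirects to it so existing
        # searx call sites keep working unchanged.
        response = await client.get('https://example.org', follow_redirects=True)
        print(response.status_code, response.url)

asyncio.run(main())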