forked from zaclys/searxng
		
	[upd] upgrade httpx 0.19.0
Adjust the searx.network module to the new internal API; see https://github.com/encode/httpx/pull/1522
This commit is contained in:
		
							parent
							
								
									602cbc2c99
								
							
						
					
					
						commit
						8e73438cbe
					
				
					 4 changed files with 53 additions and 53 deletions
				
			
		|  | @ -7,10 +7,10 @@ lxml==4.6.3 | ||||||
| pygments==2.10.0 | pygments==2.10.0 | ||||||
| python-dateutil==2.8.2 | python-dateutil==2.8.2 | ||||||
| pyyaml==5.4.1 | pyyaml==5.4.1 | ||||||
| httpx[http2]==0.17.1 | httpx[http2]==0.19.0 | ||||||
| Brotli==1.0.9 | Brotli==1.0.9 | ||||||
| uvloop==0.16.0; python_version >= '3.7' | uvloop==0.16.0; python_version >= '3.7' | ||||||
| uvloop==0.14.0; python_version < '3.7' | uvloop==0.14.0; python_version < '3.7' | ||||||
| httpx-socks[asyncio]==0.3.1 | httpx-socks[asyncio]==0.4.1 | ||||||
| langdetect==1.0.9 | langdetect==1.0.9 | ||||||
| setproctitle==1.2.2 | setproctitle==1.2.2 | ||||||
|  |  | ||||||
|  | @ -172,7 +172,7 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs): | ||||||
|             async for chunk in response.aiter_raw(65536): |             async for chunk in response.aiter_raw(65536): | ||||||
|                 if len(chunk) > 0: |                 if len(chunk) > 0: | ||||||
|                     queue.put(chunk) |                     queue.put(chunk) | ||||||
|     except httpx.ResponseClosed: |     except httpx.StreamClosed: | ||||||
|         # the response was queued before the exception. |         # the response was queued before the exception. | ||||||
|         # the exception was raised on aiter_raw. |         # the exception was raised on aiter_raw. | ||||||
|         # we do nothing here: in the finally block, None will be queued |         # we do nothing here: in the finally block, None will be queued | ||||||
|  |  | ||||||
|  | @ -5,6 +5,7 @@ | ||||||
| import asyncio | import asyncio | ||||||
| import logging | import logging | ||||||
| import threading | import threading | ||||||
|  | 
 | ||||||
| import httpcore | import httpcore | ||||||
| import httpx | import httpx | ||||||
| from httpx_socks import AsyncProxyTransport | from httpx_socks import AsyncProxyTransport | ||||||
|  | @ -26,19 +27,22 @@ else: | ||||||
|     uvloop.install() |     uvloop.install() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| logger = logger.getChild('searx.http.client') | logger = logger.getChild('searx.network.client') | ||||||
| LOOP = None | LOOP = None | ||||||
| SSLCONTEXTS = {} | SSLCONTEXTS = {} | ||||||
| TRANSPORT_KWARGS = { | TRANSPORT_KWARGS = { | ||||||
|     'backend': 'asyncio', |     # use anyio : | ||||||
|  |     # * https://github.com/encode/httpcore/issues/344 | ||||||
|  |     # * https://github.com/encode/httpx/discussions/1511 | ||||||
|  |     'backend': 'anyio', | ||||||
|     'trust_env': False, |     'trust_env': False, | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # pylint: disable=protected-access | # pylint: disable=protected-access | ||||||
| async def close_connections_for_url( | async def close_connections_for_url( | ||||||
|         connection_pool: httpcore.AsyncConnectionPool, |     connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL | ||||||
|         url: httpcore._utils.URL ): | ): | ||||||
| 
 | 
 | ||||||
|     origin = httpcore._utils.url_to_origin(url) |     origin = httpcore._utils.url_to_origin(url) | ||||||
|     logger.debug('Drop connections for %r', origin) |     logger.debug('Drop connections for %r', origin) | ||||||
|  | @ -47,7 +51,7 @@ async def close_connections_for_url( | ||||||
|         await connection_pool._remove_from_pool(connection) |         await connection_pool._remove_from_pool(connection) | ||||||
|         try: |         try: | ||||||
|             await connection.aclose() |             await connection.aclose() | ||||||
|         except httpcore.NetworkError as e: |         except httpx.NetworkError as e: | ||||||
|             logger.warning('Error closing an existing connection', exc_info=e) |             logger.warning('Error closing an existing connection', exc_info=e) | ||||||
| # pylint: enable=protected-access | # pylint: enable=protected-access | ||||||
| 
 | 
 | ||||||
|  | @ -60,80 +64,78 @@ def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http | ||||||
|     return SSLCONTEXTS[key] |     return SSLCONTEXTS[key] | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class AsyncHTTPTransportNoHttp(httpcore.AsyncHTTPTransport): | class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport): | ||||||
|     """Block HTTP request""" |     """Block HTTP request""" | ||||||
| 
 | 
 | ||||||
|     async def arequest(self, method, url, headers=None, stream=None, ext=None): |     async def handle_async_request( | ||||||
|         raise httpcore.UnsupportedProtocol("HTTP protocol is disabled") |         self, method, url, headers=None, stream=None, extensions=None | ||||||
|  |     ): | ||||||
|  |         raise httpx.UnsupportedProtocol('HTTP protocol is disabled') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class AsyncProxyTransportFixed(AsyncProxyTransport): | class AsyncProxyTransportFixed(AsyncProxyTransport): | ||||||
|     """Fix httpx_socks.AsyncProxyTransport |     """Fix httpx_socks.AsyncProxyTransport | ||||||
| 
 | 
 | ||||||
|     Map python_socks exceptions to httpcore.ProxyError |     Map python_socks exceptions to httpx.ProxyError / httpx.ConnectError | ||||||
| 
 | 
 | ||||||
|     Map socket.gaierror to httpcore.ConnectError |     Map socket.gaierror to httpx.ConnectError | ||||||
| 
 |  | ||||||
|     Note: keepalive_expiry is ignored, AsyncProxyTransport should call: |  | ||||||
|     * self._keepalive_sweep() |  | ||||||
|     * self._response_closed(self, connection) |  | ||||||
| 
 | 
 | ||||||
|     Note: AsyncProxyTransport inherit from AsyncConnectionPool |     Note: AsyncProxyTransport inherit from AsyncConnectionPool | ||||||
| 
 |  | ||||||
|     Note: the API is going to change on httpx 0.18.0 |  | ||||||
|     see https://github.com/encode/httpx/pull/1522 |  | ||||||
|     """ |     """ | ||||||
| 
 | 
 | ||||||
|     async def arequest(self, method, url, headers=None, stream=None, ext=None): |     async def handle_async_request( | ||||||
|  |         self, method, url, headers=None, stream=None, extensions=None | ||||||
|  |     ): | ||||||
|         retry = 2 |         retry = 2 | ||||||
|         while retry > 0: |         while retry > 0: | ||||||
|             retry -= 1 |             retry -= 1 | ||||||
|             try: |             try: | ||||||
|                 return await super().arequest(method, url, headers, stream, ext) |                 return await super().handle_async_request( | ||||||
|  |                     method, url, headers=headers, stream=stream, extensions=extensions | ||||||
|  |                 ) | ||||||
|             except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e: |             except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e: | ||||||
|                 raise httpcore.ProxyError(e) |                 raise httpx.ProxyError from e | ||||||
|             except OSError as e: |             except OSError as e: | ||||||
|                 # socket.gaierror when DNS resolution fails |                 # socket.gaierror when DNS resolution fails | ||||||
|                 raise httpcore.NetworkError(e) |                 raise httpx.ConnectError from e | ||||||
|             except httpcore.RemoteProtocolError as e: |             except httpx.NetworkError as e: | ||||||
|                 # in case of httpcore.RemoteProtocolError: Server disconnected |                 # httpx.WriteError on HTTP/2 connection leaves a new opened stream | ||||||
|                 await close_connections_for_url(self, url) |  | ||||||
|                 logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e) |  | ||||||
|                 # retry |  | ||||||
|             except (httpcore.NetworkError, httpcore.ProtocolError) as e: |  | ||||||
|                 # httpcore.WriteError on HTTP/2 connection leaves a new opened stream |  | ||||||
|                 # then each new request creates a new stream and raise the same WriteError |                 # then each new request creates a new stream and raise the same WriteError | ||||||
|                 await close_connections_for_url(self, url) |                 await close_connections_for_url(self, url) | ||||||
|                 raise e |                 raise e | ||||||
|  |             except httpx.RemoteProtocolError as e: | ||||||
|  |                 # in case of httpx.RemoteProtocolError: Server disconnected | ||||||
|  |                 await close_connections_for_url(self, url) | ||||||
|  |                 logger.warning('httpx.RemoteProtocolError: retry', exc_info=e) | ||||||
|  |                 # retry | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport): | class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport): | ||||||
|     """Fix httpx.AsyncHTTPTransport""" |     """Fix httpx.AsyncHTTPTransport""" | ||||||
| 
 | 
 | ||||||
|     async def arequest(self, method, url, headers=None, stream=None, ext=None): |     async def handle_async_request( | ||||||
|  |         self, method, url, headers=None, stream=None, extensions=None | ||||||
|  |     ): | ||||||
|         retry = 2 |         retry = 2 | ||||||
|         while retry > 0: |         while retry > 0: | ||||||
|             retry -= 1 |             retry -= 1 | ||||||
|             try: |             try: | ||||||
|                 return await super().arequest(method, url, headers, stream, ext) |                 return await super().handle_async_request( | ||||||
|  |                     method, url, headers=headers, stream=stream, extensions=extensions | ||||||
|  |                 ) | ||||||
|             except OSError as e: |             except OSError as e: | ||||||
|                 # socket.gaierror when DNS resolution fails |                 # socket.gaierror when DNS resolution fails | ||||||
|                 raise httpcore.ConnectError(e) |                 raise httpx.ConnectError from e | ||||||
|             except httpcore.CloseError as e: |             except httpx.NetworkError as e: | ||||||
|                 # httpcore.CloseError: [Errno 104] Connection reset by peer |                 # httpx.WriteError on HTTP/2 connection leaves a new opened stream | ||||||
|                 # raised by _keepalive_sweep() |                 # then each new request creates a new stream and raise the same WriteError | ||||||
|                 #   from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198  # pylint: disable=line-too-long |  | ||||||
|                 await close_connections_for_url(self._pool, url) |  | ||||||
|                 logger.warning('httpcore.CloseError: retry', exc_info=e) |  | ||||||
|                 # retry |  | ||||||
|             except httpcore.RemoteProtocolError as e: |  | ||||||
|                 # in case of httpcore.RemoteProtocolError: Server disconnected |  | ||||||
|                 await close_connections_for_url(self._pool, url) |  | ||||||
|                 logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e) |  | ||||||
|                 # retry |  | ||||||
|             except (httpcore.ProtocolError, httpcore.NetworkError) as e: |  | ||||||
|                 await close_connections_for_url(self._pool, url) |                 await close_connections_for_url(self._pool, url) | ||||||
|                 raise e |                 raise e | ||||||
|  |             except httpx.RemoteProtocolError as e: | ||||||
|  |                 # in case of httpx.RemoteProtocolError: Server disconnected | ||||||
|  |                 await close_connections_for_url(self._pool, url) | ||||||
|  |                 logger.warning('httpx.RemoteProtocolError: retry', exc_info=e) | ||||||
|  |                 # retry | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries): | def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries): | ||||||
|  |  | ||||||
|  | @ -138,12 +138,10 @@ class Network: | ||||||
|         request = response.request |         request = response.request | ||||||
|         status = f"{response.status_code} {response.reason_phrase}" |         status = f"{response.status_code} {response.reason_phrase}" | ||||||
|         response_line = f"{response.http_version} {status}" |         response_line = f"{response.http_version} {status}" | ||||||
|         if hasattr(response, "_elapsed"): |         content_type = response.headers.get("Content-Type") | ||||||
|             elapsed_time = f"{response.elapsed.total_seconds()} sec" |         content_type = f' ({content_type})' if content_type else '' | ||||||
|         else: |  | ||||||
|             elapsed_time = "stream" |  | ||||||
|         self._logger.debug( |         self._logger.debug( | ||||||
|             f'HTTP Request: {request.method} {request.url} "{response_line}" ({elapsed_time})' |             f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     def get_client(self, verify=None, max_redirects=None): |     def get_client(self, verify=None, max_redirects=None): | ||||||
|  |  | ||||||
		Loading…
	
	Add table
		
		Reference in a new issue
	
	 Alexandre Flament
						Alexandre Flament