forked from Icycoide/searxng
[format.python] initial formatting of the python code
This patch was generated by black [1]::

    make format.python

[1] https://github.com/psf/black

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
parent fcdc2c2cd2
commit 3d96a9839a

184 changed files with 2800 additions and 2836 deletions
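Every hunk below is mechanical: black rewrites layout, never semantics. The two changes that dominate the diff are comment-spacing normalization and collapsing multi-line calls that fit within the line length. A minimal sketch of reproducing both through black's Python API (this assumes the `black` package is installed; the 120-column limit and the preserved single quotes in the hunks suggest the project runs black with a line length of 120 and string normalization disabled):

    import black

    # two changes seen throughout this diff: inline comments get
    # normalized spacing, and calls that fit in the line length are
    # collapsed onto one line
    SOURCE = (
        "x = 1 #comment\n"
        "future = run(\n"
        "    coro(network, queue, method, url),\n"
        "    loop\n"
        ")\n"
    )

    # assumed settings, inferred from the hunks below
    MODE = black.Mode(line_length=120, string_normalization=False)
    print(black.format_str(SOURCE, mode=MODE))
    # x = 1  # comment
    # future = run(coro(network, queue, method, url), loop)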
@@ -35,7 +35,7 @@ except ImportError:
             self._count.release()

         def get(self):
-            if not self._count.acquire(True): #pylint: disable=consider-using-with
+            if not self._count.acquire(True):  # pylint: disable=consider-using-with
                 raise Empty
             return self._queue.popleft()

@@ -43,6 +43,7 @@ except ImportError:
 THREADLOCAL = threading.local()
+"""Thread-local data is data for thread specific values."""


 def reset_time_for_thread():
     THREADLOCAL.total_time = 0

@@ -187,10 +188,7 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
 def _stream_generator(method, url, **kwargs):
     queue = SimpleQueue()
     network = get_context_network()
-    future = asyncio.run_coroutine_threadsafe(
-        stream_chunk_to_queue(network, queue, method, url, **kwargs),
-        get_loop()
-    )
+    future = asyncio.run_coroutine_threadsafe(stream_chunk_to_queue(network, queue, method, url, **kwargs), get_loop())

     # yield chunks
     obj_or_exception = queue.get()
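The collapsed call above is the bridge between the synchronous request path and the shared asyncio loop: `_stream_generator` runs in a worker thread and hands the coroutine to the loop thread, getting a `concurrent.futures.Future` back. A minimal standalone sketch of the same pattern (the names `loop` and `job` are illustrative, not SearXNG's):

    import asyncio
    import threading

    # run an event loop in a background thread, as searx.network does
    loop = asyncio.new_event_loop()
    threading.Thread(target=loop.run_forever, daemon=True).start()

    async def job():
        await asyncio.sleep(0.1)
        return 'chunk'

    # schedule the coroutine from a non-loop thread, then block on the result
    future = asyncio.run_coroutine_threadsafe(job(), loop)
    print(future.result())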
@@ -203,10 +201,7 @@ def _stream_generator(method, url, **kwargs):


 def _close_response_method(self):
-    asyncio.run_coroutine_threadsafe(
-        self.aclose(),
-        get_loop()
-    )
+    asyncio.run_coroutine_threadsafe(self.aclose(), get_loop())
     # reach the end of _self.generator ( _stream_generator ) to an avoid memory leak.
     # it makes sure that :
     # * the httpx response is closed (see the stream_chunk_to_queue function)
@@ -10,12 +10,7 @@ import anyio
 import httpcore
 import httpx
 from httpx_socks import AsyncProxyTransport
-from python_socks import (
-    parse_proxy_url,
-    ProxyConnectionError,
-    ProxyTimeoutError,
-    ProxyError
-)
+from python_socks import parse_proxy_url, ProxyConnectionError, ProxyTimeoutError, ProxyError

 from searx import logger

@@ -41,9 +36,7 @@ TRANSPORT_KWARGS = {


 # pylint: disable=protected-access
-async def close_connections_for_url(
-    connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL
-):
+async def close_connections_for_url(connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL):

     origin = httpcore._utils.url_to_origin(url)
     logger.debug('Drop connections for %r', origin)
@@ -54,6 +47,8 @@ async def close_connections_for_url(
             await connection.aclose()
         except httpx.NetworkError as e:
             logger.warning('Error closing an existing connection', exc_info=e)

+
 # pylint: enable=protected-access

+
@@ -67,9 +62,7 @@ def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http
 class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
     """Block HTTP request"""

-    async def handle_async_request(
-        self, method, url, headers=None, stream=None, extensions=None
-    ):
+    async def handle_async_request(self, method, url, headers=None, stream=None, extensions=None):
         raise httpx.UnsupportedProtocol('HTTP protocol is disabled')

@@ -83,9 +76,7 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
     Note: AsyncProxyTransport inherit from AsyncConnectionPool
     """

-    async def handle_async_request(
-        self, method, url, headers=None, stream=None, extensions=None
-    ):
+    async def handle_async_request(self, method, url, headers=None, stream=None, extensions=None):
         retry = 2
         while retry > 0:
             retry -= 1
@@ -116,9 +107,7 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
 class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
     """Fix httpx.AsyncHTTPTransport"""

-    async def handle_async_request(
-        self, method, url, headers=None, stream=None, extensions=None
-    ):
+    async def handle_async_request(self, method, url, headers=None, stream=None, extensions=None):
         retry = 2
         while retry > 0:
             retry -= 1
@@ -152,14 +141,17 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit
     rdns = False
     socks5h = 'socks5h://'
     if proxy_url.startswith(socks5h):
-        proxy_url = 'socks5://' + proxy_url[len(socks5h):]
+        proxy_url = 'socks5://' + proxy_url[len(socks5h) :]
         rdns = True

     proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
     verify = get_sslcontexts(proxy_url, None, True, False, http2) if verify is True else verify
     return AsyncProxyTransportFixed(
-        proxy_type=proxy_type, proxy_host=proxy_host, proxy_port=proxy_port,
-        username=proxy_username, password=proxy_password,
+        proxy_type=proxy_type,
+        proxy_host=proxy_host,
+        proxy_port=proxy_port,
+        username=proxy_username,
+        password=proxy_password,
         rdns=rdns,
         loop=get_loop(),
         verify=verify,
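The only change to the slice above is the space before the colon. PEP 8 treats a slice colon like a binary operator with equal spacing on both sides, and since `len(socks5h)` is an expression rather than a simple name or number, black spaces it symmetrically. A demonstration (assuming `black` is installed; `prefix` is an illustrative name):

    import black

    # `len(prefix)` counts as a complex slice bound, so black spaces the colon
    SOURCE = "url = 'socks5://' + proxy_url[len(prefix):]\n"
    MODE = black.Mode(line_length=120, string_normalization=False)
    print(black.format_str(SOURCE, mode=MODE), end='')
    # url = 'socks5://' + proxy_url[len(prefix) :]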
@@ -169,7 +161,7 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit
         max_keepalive_connections=limit.max_keepalive_connections,
         keepalive_expiry=limit.keepalive_expiry,
         retries=retries,
-        **TRANSPORT_KWARGS
+        **TRANSPORT_KWARGS,
     )

@@ -183,36 +175,40 @@ def get_transport(verify, http2, local_address, proxy_url, limit, retries):
         proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
         limits=limit,
         retries=retries,
-        **TRANSPORT_KWARGS
+        **TRANSPORT_KWARGS,
     )


 def new_client(
-        # pylint: disable=too-many-arguments
-        enable_http, verify, enable_http2,
-        max_connections, max_keepalive_connections, keepalive_expiry,
-        proxies, local_address, retries, max_redirects, hook_log_response ):
+    # pylint: disable=too-many-arguments
+    enable_http,
+    verify,
+    enable_http2,
+    max_connections,
+    max_keepalive_connections,
+    keepalive_expiry,
+    proxies,
+    local_address,
+    retries,
+    max_redirects,
+    hook_log_response,
+):
     limit = httpx.Limits(
         max_connections=max_connections,
         max_keepalive_connections=max_keepalive_connections,
-        keepalive_expiry=keepalive_expiry
+        keepalive_expiry=keepalive_expiry,
     )
     # See https://www.python-httpx.org/advanced/#routing
     mounts = {}
     for pattern, proxy_url in proxies.items():
         if not enable_http and pattern.startswith('http://'):
             continue
-        if (proxy_url.startswith('socks4://')
-            or proxy_url.startswith('socks5://')
-            or proxy_url.startswith('socks5h://')
-        ):
+        if proxy_url.startswith('socks4://') or proxy_url.startswith('socks5://') or proxy_url.startswith('socks5h://'):
             mounts[pattern] = get_transport_for_socks_proxy(
                 verify, enable_http2, local_address, proxy_url, limit, retries
             )
         else:
-            mounts[pattern] = get_transport(
-                verify, enable_http2, local_address, proxy_url, limit, retries
-            )
+            mounts[pattern] = get_transport(verify, enable_http2, local_address, proxy_url, limit, retries)

     if not enable_http:
         mounts['http://'] = AsyncHTTPTransportNoHttp()
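The exploded `new_client` signature above shows black's handling of lines that cannot fit even after collapsing: it puts one parameter per line and appends a trailing comma, and that "magic trailing comma" keeps the list exploded on every future run. A reproduction sketch (assuming `black` is installed):

    import black

    SOURCE = (
        "def new_client(enable_http, verify, enable_http2, max_connections,"
        " max_keepalive_connections, keepalive_expiry, proxies, local_address,"
        " retries, max_redirects, hook_log_response):\n"
        "    pass\n"
    )

    # too long even for 120 columns -> one parameter per line, trailing comma
    print(black.format_str(SOURCE, mode=black.Mode(line_length=120)))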
@@ -221,7 +217,7 @@ def new_client(

     event_hooks = None
     if hook_log_response:
-        event_hooks = {'response': [ hook_log_response ]}
+        event_hooks = {'response': [hook_log_response]}

     return httpx.AsyncClient(
         transport=transport,
@@ -31,39 +31,49 @@ PROXY_PATTERN_MAPPING = {
     'socks5h:': 'socks5h://',
 }

-ADDRESS_MAPPING = {
-    'ipv4': '0.0.0.0',
-    'ipv6': '::'
-}
+ADDRESS_MAPPING = {'ipv4': '0.0.0.0', 'ipv6': '::'}


 class Network:

     __slots__ = (
-        'enable_http', 'verify', 'enable_http2',
-        'max_connections', 'max_keepalive_connections', 'keepalive_expiry',
-        'local_addresses', 'proxies', 'using_tor_proxy', 'max_redirects', 'retries', 'retry_on_http_error',
-        '_local_addresses_cycle', '_proxies_cycle', '_clients', '_logger'
+        'enable_http',
+        'verify',
+        'enable_http2',
+        'max_connections',
+        'max_keepalive_connections',
+        'keepalive_expiry',
+        'local_addresses',
+        'proxies',
+        'using_tor_proxy',
+        'max_redirects',
+        'retries',
+        'retry_on_http_error',
+        '_local_addresses_cycle',
+        '_proxies_cycle',
+        '_clients',
+        '_logger',
     )

     _TOR_CHECK_RESULT = {}

     def __init__(
-            # pylint: disable=too-many-arguments
-            self,
-            enable_http=True,
-            verify=True,
-            enable_http2=False,
-            max_connections=None,
-            max_keepalive_connections=None,
-            keepalive_expiry=None,
-            proxies=None,
-            using_tor_proxy=False,
-            local_addresses=None,
-            retries=0,
-            retry_on_http_error=None,
-            max_redirects=30,
-            logger_name=None):
+        # pylint: disable=too-many-arguments
+        self,
+        enable_http=True,
+        verify=True,
+        enable_http2=False,
+        max_connections=None,
+        max_keepalive_connections=None,
+        keepalive_expiry=None,
+        proxies=None,
+        using_tor_proxy=False,
+        local_addresses=None,
+        retries=0,
+        retry_on_http_error=None,
+        max_redirects=30,
+        logger_name=None,
+    ):

         self.enable_http = enable_http
         self.verify = verify
@@ -144,9 +154,7 @@ class Network:
         response_line = f"{response.http_version} {status}"
         content_type = response.headers.get("Content-Type")
         content_type = f' ({content_type})' if content_type else ''
-        self._logger.debug(
-            f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}'
-        )
+        self._logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}')

     @staticmethod
     async def check_tor_proxy(client: httpx.AsyncClient, proxies) -> bool:
@@ -187,7 +195,7 @@ class Network:
                 local_address,
                 0,
                 max_redirects,
-                hook_log_response
+                hook_log_response,
             )
             if self.using_tor_proxy and not await self.check_tor_proxy(client, proxies):
                 await client.aclose()
@@ -201,6 +209,7 @@ class Network:
                 await client.aclose()
             except httpx.HTTPError:
                 pass
+
         await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)

     @staticmethod
@@ -214,7 +223,8 @@ class Network:

     def is_valid_respones(self, response):
         # pylint: disable=too-many-boolean-expressions
-        if ((self.retry_on_http_error is True and 400 <= response.status_code <= 599)
+        if (
+            (self.retry_on_http_error is True and 400 <= response.status_code <= 599)
             or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
             or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
         ):
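The rewrapped condition accepts three shapes for the `retry_on_http_error` setting: `True` retries on any 4xx/5xx status, a list retries on the listed codes, and an int retries on that single code. The same predicate, lifted into a standalone form (`should_retry` is an illustrative helper, not SearXNG API):

    def should_retry(retry_on_http_error, status_code):
        # True -> any 4xx/5xx; list -> listed codes; int -> exactly that code
        return (
            (retry_on_http_error is True and 400 <= status_code <= 599)
            or (isinstance(retry_on_http_error, list) and status_code in retry_on_http_error)
            or (isinstance(retry_on_http_error, int) and status_code == retry_on_http_error)
        )

    assert should_retry(True, 503)
    assert should_retry([403, 429], 429)
    assert should_retry(429, 429)
    assert not should_retry(None, 500)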
@@ -269,6 +279,7 @@ def check_network_configuration():
                    network._logger.exception('Error')  # pylint: disable=protected-access
                    exception_count += 1
        return exception_count
+
    future = asyncio.run_coroutine_threadsafe(check(), get_loop())
    exception_count = future.result()
    if exception_count > 0:
@@ -279,6 +290,7 @@ def initialize(settings_engines=None, settings_outgoing=None):
     # pylint: disable=import-outside-toplevel)
     from searx.engines import engines
     from searx import settings

     # pylint: enable=import-outside-toplevel)
+
     settings_engines = settings_engines or settings['engines']
@@ -10,13 +10,14 @@ from searx.exceptions import (
     SearxEngineAccessDeniedException,
 )

+
 def is_cloudflare_challenge(resp):
     if resp.status_code in [429, 503]:
-        if (('__cf_chl_jschl_tk__=' in resp.text)
-            or ('/cdn-cgi/challenge-platform/' in resp.text
-                and 'orchestrate/jsch/v1' in resp.text
-                and 'window._cf_chl_enter(' in resp.text
-            )):
+        if ('__cf_chl_jschl_tk__=' in resp.text) or (
+            '/cdn-cgi/challenge-platform/' in resp.text
+            and 'orchestrate/jsch/v1' in resp.text
+            and 'window._cf_chl_enter(' in resp.text
+        ):
             return True
     if resp.status_code == 403 and '__cf_chl_captcha_tk__=' in resp.text:
         return True
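For a condition that is still too long after collapsing, black splits inside the parentheses and keeps each boolean operator at the start of its continuation line, which is what happened to the Cloudflare check above. The reformatted predicate is easy to exercise standalone (the `Resp` stub is illustrative; the real code receives an httpx response, and the original function implicitly returns None where the sketch returns an explicit False):

    class Resp:
        # stub carrying only the two fields the check reads
        def __init__(self, status_code, text):
            self.status_code = status_code
            self.text = text

    def is_cloudflare_challenge(resp):
        if resp.status_code in [429, 503]:
            if ('__cf_chl_jschl_tk__=' in resp.text) or (
                '/cdn-cgi/challenge-platform/' in resp.text
                and 'orchestrate/jsch/v1' in resp.text
                and 'window._cf_chl_enter(' in resp.text
            ):
                return True
        if resp.status_code == 403 and '__cf_chl_captcha_tk__=' in resp.text:
            return True
        return False

    assert is_cloudflare_challenge(Resp(503, 'body with __cf_chl_jschl_tk__=x'))
    assert not is_cloudflare_challenge(Resp(200, 'plain page'))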
@@ -32,21 +33,14 @@ def raise_for_cloudflare_captcha(resp):
     if is_cloudflare_challenge(resp):
         # https://support.cloudflare.com/hc/en-us/articles/200170136-Understanding-Cloudflare-Challenge-Passage-Captcha-
         # suspend for 2 weeks
-        raise SearxEngineCaptchaException(
-            message='Cloudflare CAPTCHA',
-            suspended_time=3600 * 24 * 15
-        )
+        raise SearxEngineCaptchaException(message='Cloudflare CAPTCHA', suspended_time=3600 * 24 * 15)

     if is_cloudflare_firewall(resp):
-        raise SearxEngineAccessDeniedException(
-            message='Cloudflare Firewall', suspended_time=3600 * 24
-        )
+        raise SearxEngineAccessDeniedException(message='Cloudflare Firewall', suspended_time=3600 * 24)


 def raise_for_recaptcha(resp):
-    if (resp.status_code == 503
-        and '"https://www.google.com/recaptcha/' in resp.text
-    ):
+    if resp.status_code == 503 and '"https://www.google.com/recaptcha/' in resp.text:
         raise SearxEngineCaptchaException(message='ReCAPTCHA', suspended_time=3600 * 24 * 7)

@@ -71,8 +65,7 @@ def raise_for_httperror(resp):
     raise_for_captcha(resp)
     if resp.status_code in (402, 403):
         raise SearxEngineAccessDeniedException(
-            message='HTTP error ' + str(resp.status_code),
-            suspended_time=3600 * 24
+            message='HTTP error ' + str(resp.status_code), suspended_time=3600 * 24
         )
     if resp.status_code == 429:
         raise SearxEngineTooManyRequestsException()