[httpx] replace searx.poolrequests by searx.network

settings.yml:

* outgoing.networks:
   * can contain network definitions
   * properties: enable_http, verify, http2, max_connections, max_keepalive_connections,
     keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries
   * retries: 0 by default, the number of times searx retries the HTTP request (using a different IP & proxy each time)
   * local_addresses can be "192.168.0.1/24" (IPv6 addresses are supported too)
   * support_ipv4 & support_ipv6: both True by default
     see https://github.com/searx/searx/pull/1034
* each engine can define a "network" section (see the sketch after this list):
   * either a full network definition
   * or a reference to an existing network

* all HTTP requests of an engine use the same HTTP configuration (this was not the case before; see the proxy configuration in master)
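For illustration, a minimal settings.yml sketch tying both parts together. The network name "local_proxy", the engine names, and the concrete values are hypothetical examples, not recommended defaults:

outgoing:
  networks:
    local_proxy:
      enable_http: false                  # refuse plain-HTTP upstreams
      http2: true
      retries: 2                          # each retry uses a different IP & proxy
      local_addresses: "192.168.0.1/24"   # source addresses to rotate over
      proxies: "socks5://127.0.0.1:9050"

engines:
  - name: example engine one
    network: local_proxy                  # reference an existing network by name

  - name: example engine two
    network:                              # or define a full network inline
      http2: false
      max_redirects: 5
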
Alexandre Flament 2021-04-05 10:43:33 +02:00
parent eaa694fb7d
commit d14994dc73
31 changed files with 1036 additions and 677 deletions

@@ -10,7 +10,7 @@ from searx.engines.wikidata import send_wikidata_query
 from searx.utils import extract_text
 import searx
 import searx.search
-import searx.poolrequests
+import searx.network
 
 SPARQL_WIKIPEDIA_ARTICLE = """
 SELECT DISTINCT ?item ?name
@@ -59,7 +59,7 @@ def get_wikipedia_summary(language, pageid):
     search_url = 'https://{language}.wikipedia.org/api/rest_v1/page/summary/{title}'
     url = search_url.format(title=quote(pageid), language=language)
     try:
-        response = searx.poolrequests.get(url)
+        response = searx.network.get(url)
         response.raise_for_status()
         api_result = json.loads(response.text)
         return api_result.get('extract')
@@ -89,7 +89,7 @@ def get_website_description(url, lang1, lang2=None):
            lang_list.append(lang2)
        headers['Accept-Language'] = f'{",".join(lang_list)};q=0.8'
    try:
-        response = searx.poolrequests.get(url, headers=headers, timeout=10)
+        response = searx.network.get(url, headers=headers, timeout=10)
         response.raise_for_status()
     except Exception:
         return (None, None)
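
Taken together, the hunks show that the migration is mechanical: the requests-style call signature stays the same, only the module changes. A minimal sketch of the new call pattern (the URL is illustrative):

import json

import searx.network

# searx.network.get mirrors the requests-style helper used above;
# keyword arguments such as headers= and timeout= pass through unchanged.
response = searx.network.get('https://example.org/api', timeout=10)
response.raise_for_status()
data = json.loads(response.text)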