mirror of
https://github.com/searxng/searxng.git
synced 2025-08-03 02:22:22 +02:00
[httpx] replace searx.poolrequests by searx.network
settings.yml: * outgoing.networks: * can contain network definitions * properties: enable_http, verify, http2, max_connections, max_keepalive_connections, keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries * retries: 0 by default, the number of times searx retries to send the HTTP request (using a different IP & proxy each time) * local_addresses can be "192.168.0.1/24" (IPv6 is also supported) * support_ipv4 & support_ipv6: both True by default; see https://github.com/searx/searx/pull/1034 * each engine can define a "network" section: * either a full network description * or a reference to an existing network * all HTTP requests of an engine use the same HTTP configuration (this was not the case before; see the proxy configuration in master)
This commit is contained in:
parent
eaa694fb7d
commit
d14994dc73
31 changed files with 1036 additions and 677 deletions
|
@ -6,7 +6,7 @@ import asyncio
|
|||
|
||||
import httpx
|
||||
|
||||
import searx.poolrequests as poolrequests
|
||||
import searx.network
|
||||
from searx.engines import settings
|
||||
from searx import logger
|
||||
from searx.utils import gen_useragent
|
||||
|
@ -64,10 +64,6 @@ class OnlineProcessor(EngineProcessor):
|
|||
auth=params['auth']
|
||||
)
|
||||
|
||||
# setting engine based proxies
|
||||
if hasattr(self.engine, 'proxies'):
|
||||
request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies)
|
||||
|
||||
# max_redirects
|
||||
max_redirects = params.get('max_redirects')
|
||||
if max_redirects:
|
||||
|
@ -85,9 +81,9 @@ class OnlineProcessor(EngineProcessor):
|
|||
|
||||
# specific type of request (GET or POST)
|
||||
if params['method'] == 'GET':
|
||||
req = poolrequests.get
|
||||
req = searx.network.get
|
||||
else:
|
||||
req = poolrequests.post
|
||||
req = searx.network.post
|
||||
|
||||
request_args['data'] = params['data']
|
||||
|
||||
|
@ -128,11 +124,11 @@ class OnlineProcessor(EngineProcessor):
|
|||
|
||||
def search(self, query, params, result_container, start_time, timeout_limit):
|
||||
# set timeout for all HTTP requests
|
||||
poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time)
|
||||
searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
|
||||
# reset the HTTP total time
|
||||
poolrequests.reset_time_for_thread()
|
||||
# enable HTTP only if explicitly enabled
|
||||
poolrequests.set_enable_http_protocol(self.engine.enable_http)
|
||||
searx.network.reset_time_for_thread()
|
||||
# set the network
|
||||
searx.network.set_context_network_name(self.engine_name)
|
||||
|
||||
# suppose everything will be alright
|
||||
http_exception = False
|
||||
|
@ -149,7 +145,7 @@ class OnlineProcessor(EngineProcessor):
|
|||
|
||||
# update engine time when there is no exception
|
||||
engine_time = time() - start_time
|
||||
page_load_time = poolrequests.get_time_for_thread()
|
||||
page_load_time = searx.network.get_time_for_thread()
|
||||
result_container.add_timing(self.engine_name, engine_time, page_load_time)
|
||||
with threading.RLock():
|
||||
self.engine.stats['engine_time'] += engine_time
|
||||
|
@ -162,7 +158,7 @@ class OnlineProcessor(EngineProcessor):
|
|||
|
||||
# Timing
|
||||
engine_time = time() - start_time
|
||||
page_load_time = poolrequests.get_time_for_thread()
|
||||
page_load_time = searx.network.get_time_for_thread()
|
||||
result_container.add_timing(self.engine_name, engine_time, page_load_time)
|
||||
|
||||
# Record the errors
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue