[chore] engines: remove redundant usages of utils#gen_useragent
These engines set the User-Agent header manually via `gen_useragent`, but the online preprocessor that runs before the engine's `request(query, params)` method already does this, so the call is redundant.

Related:
- https://github.com/searxng/searxng/pull/4990#discussion_r2195142838
parent 2fe8540903
commit d24e489850
3 changed files with 10 additions and 13 deletions
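For context on the mechanism the message refers to: the online preprocessor builds the request params and fills in a generated User-Agent before handing them to the engine's request() hook, which is why a second gen_useragent() call inside request() only duplicates work. The sketch below is a rough approximation of that flow; the names OnlineProcessor and default_request_params loosely mirror SearXNG's processor code and are not verbatim.

# Hedged sketch of the "online preprocessor" flow described above.
# OnlineProcessor / default_request_params are approximations, not the
# exact SearXNG implementation.
from searx.utils import gen_useragent


def default_request_params():
    # Baseline request params every online engine starts from.
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'auth': None,
    }


class OnlineProcessor:
    def __init__(self, engine):
        self.engine = engine

    def get_params(self, search_query):
        params = default_request_params()
        # The processor already assigns a generated User-Agent here ...
        params['headers']['User-Agent'] = gen_useragent()
        return params

    def search(self, query, params):
        # ... so by the time the engine's request() hook runs, the header is
        # already set and calling gen_useragent() again is redundant.
        params = self.engine.request(query, params)
        # (sending the HTTP request is omitted in this sketch)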
@@ -6,7 +6,7 @@ from datetime import datetime
 import re
 import json

-from searx.utils import html_to_text, gen_useragent
+from searx.utils import html_to_text
 from searx.exceptions import SearxEngineAPIException, SearxEngineCaptchaException

 # Metadata
@@ -77,9 +77,6 @@ def request(query, params):
         query_params["tl_request"] = time_range_dict.get(params['time_range'])

     params["url"] = f"{query_url}?{urlencode(query_params)}"
-    params["headers"] = {
-        "User-Agent": gen_useragent(),
-    }
     return params

@@ -7,7 +7,7 @@ from lxml import html

 from flask_babel import gettext
 from searx.network import get
-from searx.utils import eval_xpath_getindex, gen_useragent, html_to_text
+from searx.utils import eval_xpath_getindex, html_to_text


 about = {
@@ -41,12 +41,13 @@ def _get_ui_version():
 def request(query, params):
     params['url'] = search_url
     params['method'] = 'POST'
-    params['headers'] = {
-        'Content-Type': 'application/json',
-        'X-S2-UI-Version': _get_ui_version(),
-        'X-S2-Client': "webapp-browser",
-        'User-Agent': gen_useragent(),
-    }
+    params['headers'].update(
+        {
+            'Content-Type': 'application/json',
+            'X-S2-UI-Version': _get_ui_version(),
+            'X-S2-Client': "webapp-browser",
+        }
+    )
     params['data'] = dumps(
         {
             "queryString": query,
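A side note on the hunk above: switching from assigning a brand-new dict to `params['headers']` to calling `.update()` on the existing one means headers the preprocessor already populated (including its default User-Agent, per the commit message) are kept rather than discarded and regenerated. A minimal, standalone illustration of that dict behaviour, with a hypothetical pre-set header value:

# Not engine code; just the dict semantics behind the .update() change.
preset = {'User-Agent': 'set-by-online-preprocessor'}  # hypothetical pre-set header

# Reassigning replaces the whole dict: the pre-set 'User-Agent' is gone.
replaced = {'X-S2-Client': 'webapp-browser'}

# Updating merges into it: the pre-set 'User-Agent' survives.
merged = dict(preset)
merged.update({'X-S2-Client': 'webapp-browser'})

print('User-Agent' in replaced, 'User-Agent' in merged)  # False True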
@@ -4,7 +4,7 @@
 from urllib.parse import quote_plus
 from lxml import html

-from searx.utils import eval_xpath, eval_xpath_list, extract_text, gen_useragent
+from searx.utils import eval_xpath, eval_xpath_list, extract_text

 about = {
     "website": 'https://uxwing.com',
@@ -22,7 +22,6 @@ enable_http2 = False

 def request(query, params):
     params['url'] = f"{base_url}/?s={quote_plus(query)}"
-    params['headers'] = {'User-Agent': gen_useragent()}
     return params
