Mirror of https://github.com/searxng/searxng.git (synced 2025-08-17 01:06:44 +02:00)
Replaces the `x_for` functionality with `trusted_proxies`. This allows defining which IPs / ranges to trust when extracting the client IP address from the X-Forwarded-For and X-Real-IP headers.

We don't know whether the proxy chain gives us the proper client address (REMOTE_ADDR in the WSGI environment), so we rely on reading the headers set by the proxy in front of SearXNG (if there is one, it must be added to `trusted_proxies`), hoping it has done the proper checks. If a proxy in the chain does not check the client address correctly, integrity is compromised; that should be fixed by whoever manages the proxy, not by us.

Closes:

- https://github.com/searxng/searxng/issues/4940
- https://github.com/searxng/searxng/issues/4939
- https://github.com/searxng/searxng/issues/4907
- https://github.com/searxng/searxng/issues/3632
- https://github.com/searxng/searxng/issues/3191
- https://github.com/searxng/searxng/issues/1237

Related:

- https://github.com/searxng/searxng-docker/issues/386
- https://github.com/inetol-infrastructure/searxng-container/issues/81
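A minimal sketch of the header walk described above, assuming a hypothetical `client_ip_from_headers` helper and a hard-coded trusted list; none of these names come from the SearXNG codebase, and the real `trusted_proxies` option is read from `settings.yml`:

```python
from __future__ import annotations

from ipaddress import ip_address, ip_network

# Hypothetical stand-in for the `trusted_proxies` setting (illustration only).
TRUSTED_PROXIES = [ip_network("127.0.0.0/8"), ip_network("10.0.0.0/8")]


def client_ip_from_headers(remote_addr: str, x_forwarded_for: str | None) -> str:
    """Walk the proxy chain from right to left, skipping trusted proxies.

    The first hop that is *not* a trusted proxy is taken as the client
    address; if every hop is trusted, the leftmost X-Forwarded-For entry
    is the best remaining guess.
    """
    if not x_forwarded_for:
        return remote_addr
    # The rightmost hop is the peer that actually connected (REMOTE_ADDR).
    # Real code must also reject malformed entries: a spoofed header can
    # contain garbage, and ip_address() raises ValueError on it.
    hops = [hop.strip() for hop in x_forwarded_for.split(",")] + [remote_addr]
    for hop in reversed(hops):
        if not any(ip_address(hop) in net for net in TRUSTED_PROXIES):
            return hop
    return hops[0]
```

If the proxy directly in front of SearXNG is missing from the trusted list, the walk stops at it immediately and REMOTE_ADDR is returned: trust ends at the first unverified hop, which mirrors the behaviour described above.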
65 lines · 1.9 KiB · Python
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Method ``http_user_agent``
--------------------------

The ``http_user_agent`` method evaluates a request as the request of a bot if
the User-Agent_ header is unset or matches the regular expression
:py:obj:`USER_AGENT`.

.. _User-Agent:
   https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent

"""

from __future__ import annotations

import re
from ipaddress import (
    IPv4Network,
    IPv6Network,
)

import werkzeug
import flask

from . import config
from ._helpers import too_many_requests

USER_AGENT = (
    r'('
    + r'unknown'
    + r'|[Cc][Uu][Rr][Ll]|[wW]get|Scrapy|splash|JavaFX|FeedFetcher|python-requests|Go-http-client|Java|Jakarta|okhttp'
    + r'|HttpClient|Jersey|Python|libwww-perl|Ruby|SynHttpClient|UniversalFeedParser|Googlebot|GoogleImageProxy'
    + r'|bingbot|Baiduspider|yacybot|YandexMobileBot|YandexBot|Yahoo! Slurp|MJ12bot|AhrefsBot|archive.org_bot|msnbot'
    + r'|SeznamBot|linkdexbot|Netvibes|SMTBot|zgrab|James BOT|Sogou|Abonti|Pixray|Spinn3r|SemrushBot|Exabot'
    + r'|ZmEu|BLEXBot|bitlybot|HeadlessChrome'
    # unmaintained Farside instances
    + r'|'
    + re.escape(r'Mozilla/5.0 (compatible; Farside/0.1.0; +https://farside.link)')
    # other bots and clients to block
    + '|.*PetalBot.*'
    + r')'
)
"""Regular expression that matches the User-Agent_ of known *bots*."""

_regexp = None


def regexp_user_agent():
    """Return the compiled :py:obj:`USER_AGENT` regular expression (compiled
    lazily on first use and cached in :py:obj:`_regexp`)."""
    global _regexp  # pylint: disable=global-statement
    if not _regexp:
        _regexp = re.compile(USER_AGENT)
    return _regexp


def filter_request(
    network: IPv4Network | IPv6Network,
    request: flask.Request,
    cfg: config.Config,  # pylint: disable=unused-argument
) -> werkzeug.Response | None:

    user_agent = request.headers.get('User-Agent', 'unknown')
    if regexp_user_agent().match(user_agent):
        return too_many_requests(network, f"bot detected, HTTP header User-Agent: {user_agent}")
    return None
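A quick usage sketch (not part of the module): the regular expression can be exercised directly, and `filter_request` can be driven from a Flask test request context. The import path and the `cfg=None` shortcut are assumptions; `cfg` is unused by this particular method, and `too_many_requests` is assumed to need only an active request context:

```python
import flask
from ipaddress import IPv4Network

from searx.botdetection import http_user_agent  # module path assumed

app = flask.Flask(__name__)

# The regular expression alone needs no request machinery:
assert http_user_agent.regexp_user_agent().match("python-requests/2.32.0")
assert http_user_agent.regexp_user_agent().match("curl/8.5.0")
assert not http_user_agent.regexp_user_agent().match(
    "Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0"
)

# filter_request() expects a flask.Request; a test request context provides one.
with app.test_request_context(headers={"User-Agent": "curl/8.5.0"}):
    response = http_user_agent.filter_request(
        IPv4Network("127.0.0.0/8"), flask.request, cfg=None  # cfg unused here
    )
    assert response is not None  # curl is rejected as a bot (HTTP 429)
```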