searxng/searx/engines/stract.py

# SPDX-License-Identifier: AGPL-3.0-or-later
"""Stract is an independent open source search engine. At this state, it's
still in beta and hence this implementation will need to be updated once beta
ends.
"""

from json import dumps

from searx.utils import searxng_useragent
from searx.enginelib.traits import EngineTraits

about = {
    "website": "https://stract.com/",
    "use_official_api": True,
    "official_api_documentation": "https://stract.com/beta/api/docs/#/search/api",
    "require_api_key": False,
    "results": "JSON",
}
categories = ['general']
paging = True

base_url = "https://stract.com/beta/api"
search_url = base_url + "/search"


def request(query, params):
    params['url'] = search_url
    params['method'] = "POST"
    params['headers'] = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'User-Agent': searxng_useragent(),
    }
    region = traits.get_region(params["searxng_locale"], default=traits.all_locale)
    params['data'] = dumps(
        {
            'query': query,
            'page': params['pageno'] - 1,
            'selectedRegion': region,
        }
    )

    return params


def response(resp):
    results = []

    for result in resp.json()["webpages"]:
        results.append(
            {
                'url': result['url'],
                'title': result['title'],
                'content': ''.join(fragment['text'] for fragment in result['snippet']['text']['fragments']),
            }
        )

    return results


def fetch_traits(engine_traits: EngineTraits):
    # pylint: disable=import-outside-toplevel
    from searx import network
    from babel import Locale, languages
    from searx.locales import region_tag

    territories = Locale("en").territories

    json = network.get(base_url + "/docs/openapi.json").json()
    regions = json['components']['schemas']['Region']['enum']

    engine_traits.all_locale = regions[0]

    for region in regions[1:]:
        for code, name in territories.items():
            if region not in (code, name):
                continue
            for lang in languages.get_official_languages(code, de_facto=True):
                engine_traits.regions[region_tag(Locale(lang, code))] = region
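

# ---------------------------------------------------------------------------
# Illustrative sketch: a minimal, hedged example of how the `request` and
# `response` hooks above could be exercised in isolation.  Assumption: in a
# running SearXNG instance the engine loader injects the module-global
# `traits` (an EngineTraits instance); here a SimpleNamespace stand-in and a
# stubbed HTTP response object are used instead of real engine machinery.
if __name__ == "__main__":
    from types import SimpleNamespace

    # stand-in for the EngineTraits object the engine loader would inject
    traits = SimpleNamespace(all_locale="All", get_region=lambda locale, default: default)

    # build the outgoing request parameters for page 1 of a query
    params = request("searxng", {"searxng_locale": "en-US", "pageno": 1})
    print(params["url"])
    print(params["data"])

    # stubbed response mimicking the JSON shape parsed by response()
    fake_resp = SimpleNamespace(
        json=lambda: {
            "webpages": [
                {
                    "url": "https://example.org/",
                    "title": "Example",
                    "snippet": {"text": {"fragments": [{"text": "Example snippet"}]}},
                }
            ]
        }
    )
    print(response(fake_resp))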