mirror of
https://github.com/searxng/searxng.git
synced 2025-07-23 13:19:17 +02:00
[feat] plugins: add new time/timezone search plugin
This commit is contained in:
parent
6ca8db5e67
commit
8669aafd4b
4 changed files with 150 additions and 1 deletions
|
@ -25,6 +25,7 @@ ENGINE_DESCRIPTIONS: dict[str, typing.Any]
|
|||
ENGINE_TRAITS: dict[str, typing.Any]
|
||||
LOCALES: dict[str, typing.Any]
|
||||
TRACKER_PATTERNS: TrackerPatternsDB
|
||||
TIMEZONES: dict[str, typing.Any]
|
||||
|
||||
lazy_globals = {
|
||||
"CURRENCIES": CurrenciesDB(),
|
||||
|
@ -37,6 +38,7 @@ lazy_globals = {
|
|||
"ENGINE_TRAITS": None,
|
||||
"LOCALES": None,
|
||||
"TRACKER_PATTERNS": TrackerPatternsDB(),
|
||||
"TIMEZONES": None,
|
||||
}
|
||||
|
||||
data_json_files = {
|
||||
|
@ -48,6 +50,7 @@ data_json_files = {
|
|||
"ENGINE_DESCRIPTIONS": "engine_descriptions.json",
|
||||
"ENGINE_TRAITS": "engine_traits.json",
|
||||
"LOCALES": "locales.json",
|
||||
"TIMEZONES": "timezones.json",
|
||||
}
|
||||
|
||||
|
||||
|
|
61
searx/plugins/time_zone.py
Normal file
61
searx/plugins/time_zone.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring, missing-class-docstring
|
||||
import zoneinfo
|
||||
import datetime
|
||||
|
||||
from flask_babel import gettext
|
||||
from searx.result_types import EngineResults
|
||||
from searx.data import TIMEZONES
|
||||
|
||||
from . import Plugin, PluginInfo
|
||||
|
||||
|
||||
# strftime layout used for every answer, e.g. "14:30 - Monday, 21/07/25"
datetime_format = "%H:%M - %A, %d/%m/%y"
|
||||
|
||||
|
||||
class SXNGPlugin(Plugin):
    """Answer plugin that shows the current time in a queried city or country.

    The plugin is triggered by one of the :py:obj:`keywords`; the remainder of
    the query is treated as a location and substring-matched against the city
    and country names in :py:obj:`searx.data.TIMEZONES`.
    """

    id = "time_zone"
    keywords = ["time", "timezone", "now", "clock", "timezones"]

    def __init__(self, plg_cfg: "PluginCfg"):
        super().__init__(plg_cfg)

        self.info = PluginInfo(
            id=self.id,
            name=gettext("Timezones plugin"),
            description=gettext("Display the current time on different time zones."),
            preference_section="query",
            examples=["time Berlin", "clock Los Angeles"],
        )

    def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> EngineResults:
        results = EngineResults()

        # remove the trigger keywords from the query; what remains is the
        # location the user asked about
        query = search.search_query.query
        query_parts = filter(lambda part: part.lower() not in self.keywords, query.split(" "))
        # BUG FIX: the TIMEZONES keys are lower-case and space-separated (see
        # searxng_extra/update/update_timezones.py), so the location must be
        # lower-cased and joined with spaces.  Joining with "_" and keeping
        # the user's casing ("Los_Angeles") could never match "los angeles".
        location = " ".join(query_parts).lower()

        # no location given --> answer with the server's local time
        if not location:
            results.add(results.types.Answer(answer=datetime.datetime.now().strftime(datetime_format)))
            return results

        # location is too short for proper matching
        if len(location) <= 3:
            return results

        # countries are merged over cities, so on a name clash the country's
        # capital timezone wins
        zones = TIMEZONES["cities"].copy()
        zones.update(TIMEZONES["countries"])
        for key, tz_name in zones.items():
            if location in key.lower():
                zone = zoneinfo.ZoneInfo(tz_name)
                now = datetime.datetime.now(tz=zone)

                results.add(
                    results.types.Answer(
                        answer=f"{now.strftime(datetime_format)} at {tz_name.replace('_', ' ')} ({now.strftime('%Z')})"
                    )
                )

        return results
|
|
@ -238,6 +238,9 @@ plugins:
|
|||
searx.plugins.hostnames.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
searx.plugins.time_zone.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
searx.plugins.oa_doi_rewrite.SXNGPlugin:
|
||||
active: false
|
||||
|
||||
|
|
82
searxng_extra/update/update_timezones.py
Normal file
82
searxng_extra/update/update_timezones.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
#!/usr/bin/env python
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Fetch user query --> timezone mapping"""
|
||||
|
||||
import json
|
||||
import collections
|
||||
import zoneinfo
|
||||
|
||||
from searx.locales import LOCALE_NAMES, locales_initialize
|
||||
from searx.network import set_timeout_for_thread
|
||||
from searx.engines import wikidata, set_loggers
|
||||
from searx.data import data_dir
|
||||
|
||||
# destination of the generated user-query --> timezone mapping
DATA_FILE = data_dir / 'timezones.json'

set_loggers(wikidata, 'wikidata')
locales_initialize()


# SPARQL query: for every current sovereign state fetch its (localized) label
# and the English label of one of its capitals.  The %LANGUAGES_SPARQL%
# placeholder is substituted at request time (see
# wikidata_request_result_iterator).
SPARQL_TAGS_REQUEST = """
SELECT
?label # country name
?capitalLabel # one (arbitrary “first”) capital
WHERE {
?item wdt:P36 ?capital ; # capital(s)
wdt:P31 wd:Q3624078 ; # sovereign state
rdfs:label ?label .
?capital rdfs:label ?capitalLabel .
FILTER ( LANG(?capitalLabel) = "en" ).
FILTER ( LANG(?label) IN (%LANGUAGES_SPARQL%)).

MINUS { # exclude defunct states
?item wdt:P31 wd:Q3024240 .
}
}
GROUP BY ?label ?capitalLabel
ORDER BY ?item ?label
"""


# language parts of the UI locales, quoted for SPARQL (e.g. "'en', 'de'")
LANGUAGES = LOCALE_NAMES.keys()
LANGUAGES_SPARQL = ', '.join(set(map(lambda l: repr(l.split('_')[0]), LANGUAGES)))
|
||||
|
||||
|
||||
def wikidata_request_result_iterator(request):  # pylint: disable=invalid-name
    """Yield the result bindings of the Wikidata SPARQL *request*.

    The ``%LANGUAGES_SPARQL%`` placeholder in *request* is substituted with
    :py:obj:`LANGUAGES_SPARQL` before the query is sent.  Yields nothing when
    the query fails (``None`` response).
    """
    sparql = request.replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
    response = wikidata.send_wikidata_query(sparql, timeout=30)
    if response is None:
        return
    yield from response['results']['bindings']
|
||||
|
||||
|
||||
def get_countries(cities: dict[str, str]):
    """Map lower-cased country names to the timezone of their capital.

    Only capitals that appear as keys in *cities* are considered; when a
    country is reported more than once (several labels/capitals), the first
    mapping found wins.
    """
    results = collections.OrderedDict()
    for binding in wikidata_request_result_iterator(SPARQL_TAGS_REQUEST):
        country = binding['label']['value'].lower()
        capital = binding['capitalLabel']['value'].lower()
        if capital not in cities:
            # capital has no known timezone --> country can't be mapped
            print("ignore", capital)
            continue
        if country not in results:
            # keep only the first mapping
            results[country] = cities[capital]
    return results
|
||||
|
||||
|
||||
def get_zoneinfo_cities():
    """Map lower-cased city names to their IANA timezone name.

    Built from :py:obj:`zoneinfo.available_timezones`; ``Etc/`` pseudo-zones
    and zones without a region part are skipped.

    NOTE(review): for three-level zones such as ``America/Indiana/Knox`` the
    key is the *middle* component (``indiana``), not the city — confirm this
    is intended.
    """
    cities = {}
    for tz_name in zoneinfo.available_timezones():
        if "/" not in tz_name or tz_name.startswith("Etc/"):
            continue
        city = tz_name.split("/")[1].replace("_", " ").lower()
        cities[city] = tz_name
    return cities
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Wikidata queries can be slow; allow up to a minute per request.
    set_timeout_for_thread(60)
    tz_cities = get_zoneinfo_cities()
    # Build the full mapping *before* opening the file, so a failed Wikidata
    # query does not truncate an existing timezones.json.
    data = {
        'countries': get_countries(tz_cities),
        'cities': tz_cities,
    }
    with DATA_FILE.open('w', encoding="utf8") as f:
        json.dump(data, f, indent=4, sort_keys=True, ensure_ascii=False)
|
Loading…
Add table
Add a link
Reference in a new issue