Merge https://github.com/asciimoo/searx into template_oscar

Conflicts:
	searx/translations/de/LC_MESSAGES/messages.po
	searx/translations/en/LC_MESSAGES/messages.po
	searx/translations/es/LC_MESSAGES/messages.po
	searx/translations/fr/LC_MESSAGES/messages.po
	searx/translations/hu/LC_MESSAGES/messages.po
	searx/translations/it/LC_MESSAGES/messages.po
	searx/translations/nl/LC_MESSAGES/messages.po
	searx/webapp.py

commit 0e1035eac1
70 changed files with 5606 additions and 328 deletions
@@ -41,7 +41,7 @@ def load_module(filename):
     module.name = modname
     return module
 
-if not 'engines' in settings or not settings['engines']:
+if 'engines' not in settings or not settings['engines']:
     print '[E] Error no engines found. Edit your settings.yml'
     exit(2)
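The hunk above switches to the idiomatic membership test. A minimal, self-contained sketch of the same validation, assuming `settings` is loaded from settings.yml with PyYAML (the loading code here is illustrative, not the searx loader):

    import sys
    import yaml

    with open('settings.yml') as config_file:
        settings = yaml.safe_load(config_file)

    # 'engines' not in settings is equivalent to not 'engines' in settings,
    # but reads better and satisfies pep8/flake8 (E713)
    if 'engines' not in settings or not settings['engines']:
        sys.stderr.write('[E] Error no engines found. Edit your settings.yml\n')
        sys.exit(2)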
@@ -68,15 +68,15 @@ for engine_data in settings['engines']:
         engine.categories = ['general']
 
     if not hasattr(engine, 'language_support'):
-        #engine.language_support = False
+        # engine.language_support = False
         engine.language_support = True
 
     if not hasattr(engine, 'timeout'):
-        #engine.language_support = False
+        # engine.language_support = False
         engine.timeout = settings['server']['request_timeout']
 
     if not hasattr(engine, 'shortcut'):
-        #engine.shortcut = '''
+        # engine.shortcut = '''
         engine.shortcut = ''
 
     # checking required variables
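The three comment fixes are pep8 cosmetics (space after `#`); the surrounding loop back-fills optional engine attributes. A hedged sketch of that default-filling pattern (`apply_defaults` and `DEFAULTS` are illustrative names, not searx code):

    # back-fill optional attributes on an engine module object;
    # the timeout default would really come from
    # settings['server']['request_timeout']
    DEFAULTS = {
        'language_support': True,
        'timeout': 2.0,
        'shortcut': '',
    }

    def apply_defaults(engine):
        for attr, value in DEFAULTS.items():
            if not hasattr(engine, attr):
                setattr(engine, attr, value)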
@@ -161,7 +161,8 @@ def get_engines_stats():
 
     for engine in scores_per_result:
         if max_score_per_result:
-            engine['percentage'] = int(engine['avg'] / max_score_per_result * 100)
+            engine['percentage'] = int(engine['avg']
+                                       / max_score_per_result * 100)
         else:
             engine['percentage'] = 0
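Splitting the line keeps it under 79 characters; the `if max_score_per_result:` guard matters because it avoids a ZeroDivisionError when no engine has scored yet. A runnable sketch with made-up scores:

    scores_per_result = [{'avg': 1.5}, {'avg': 0.5}]
    max_score_per_result = 1.5  # highest 'avg' above; 0 when the list is empty

    for engine in scores_per_result:
        if max_score_per_result:
            engine['percentage'] = int(engine['avg']
                                       / max_score_per_result * 100)
        else:
            engine['percentage'] = 0
    # -> percentages 100 and 33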
@@ -116,15 +116,22 @@ def response(resp):
 
     if len(heading)>0:
         # TODO get infobox.meta.value where .label='article_title'
-        results.append({
-               'infobox': heading,
-               'id': infobox_id,
-               'entity': entity,
-               'content': content,
-               'img_src' : image,
-               'attributes': attributes,
-               'urls': urls,
-               'relatedTopics': relatedTopics
-               })
+        if image==None and len(attributes)==0 and len(urls)==1 and len(relatedTopics)==0 and len(content)==0:
+            results.append({
+                   'url': urls[0]['url'],
+                   'title': heading,
+                   'content': content
+                   })
+        else:
+            results.append({
+                   'infobox': heading,
+                   'id': infobox_id,
+                   'entity': entity,
+                   'content': content,
+                   'img_src' : image,
+                   'attributes': attributes,
+                   'urls': urls,
+                   'relatedTopics': relatedTopics
+                   })
 
     return results
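This branch, and the matching wikidata change further down, collapses a degenerate infobox, one that carries nothing but a single URL, into an ordinary link result. A sketch of just that decision with illustrative values (in the engine they come from the parsed DuckDuckGo answer):

    heading = 'Example'
    image = None
    content = ''
    attributes = []
    relatedTopics = []
    urls = [{'title': 'Official site', 'url': 'https://example.org'}]

    results = []
    if image == None and len(attributes) == 0 and len(urls) == 1 \
            and len(relatedTopics) == 0 and len(content) == 0:
        # nothing worth an infobox: emit a plain link result instead
        results.append({'url': urls[0]['url'],
                        'title': heading,
                        'content': content})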
searx/engines/faroo.py (new file, 108 lines)
@@ -0,0 +1,108 @@
## Faroo (Web, News)
#
# @website http://www.faroo.com
# @provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
#
# @using-api yes
# @results JSON
# @stable yes
# @parse url, title, content, publishedDate, img_src

from urllib import urlencode
from json import loads
import datetime
from searx.utils import searx_useragent

# engine dependent config
categories = ['general', 'news']
paging = True
language_support = True
number_of_results = 10
api_key = None

# search-url
url = 'http://www.faroo.com/'
search_url = url + 'api?{query}&start={offset}&length={number_of_results}&l={language}&src={categorie}&i=false&f=json&key={api_key}'

search_category = {'general': 'web',
                   'news': 'news'}


# do search-request
def request(query, params):
    offset = (params['pageno']-1) * number_of_results + 1
    categorie = search_category.get(params['category'], 'web')

    if params['language'] == 'all':
        language = 'en'
    else:
        language = params['language'].split('_')[0]

    # skip, if language is not supported
    if language != 'en' and\
       language != 'de' and\
       language != 'zh':
        return params

    params['url'] = search_url.format(offset=offset,
                                      number_of_results=number_of_results,
                                      query=urlencode({'q': query}),
                                      language=language,
                                      categorie=categorie,
                                      api_key=api_key)

    # using searx User-Agent
    params['headers']['User-Agent'] = searx_useragent()

    return params


# get response from search-request
def response(resp):
    # HTTP-Code 401: api-key is not valide
    if resp.status_code == 401:
        raise Exception("API key is not valide")
        return []

    # HTTP-Code 429: rate limit exceeded
    if resp.status_code == 429:
        raise Exception("rate limit has been exceeded!")
        return []

    results = []

    search_res = loads(resp.text)

    # return empty array if there are no results
    if not search_res.get('results', {}):
        return []

    # parse results
    for result in search_res['results']:
        if result['news']:
            # timestamp (how many milliseconds have passed between now and the beginning of 1970)
            publishedDate = datetime.datetime.fromtimestamp(result['date']/1000.0)

            # append news result
            results.append({'url': result['url'],
                            'title': result['title'],
                            'publishedDate': publishedDate,
                            'content': result['kwic']})

        else:
            # append general result
            # TODO, publishedDate correct?
            results.append({'url': result['url'],
                            'title': result['title'],
                            'content': result['kwic']})

        # append image result if image url is set
        # TODO, show results with an image like in faroo
        if result['iurl']:
            results.append({'template': 'images.html',
                            'url': result['url'],
                            'title': result['title'],
                            'content': result['kwic'],
                            'img_src': result['iurl']})

    # return results
    return results
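A hedged sketch of the round trip searx makes through this new engine; `FakeResponse` and the exact `params` shape are illustrative stand-ins for what searx actually passes in:

    import json

    params = {'pageno': 1, 'category': 'general', 'language': 'en_US',
              'headers': {}}
    params = request('searx', params)  # fills params['url'] and the User-Agent

    class FakeResponse(object):
        status_code = 200
        text = json.dumps({'results': [{'news': False,
                                        'iurl': None,
                                        'url': 'http://example.org',
                                        'title': 'Example',
                                        'kwic': 'matched snippet'}]})

    print response(FakeResponse())  # -> list with one general result dict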
@@ -2,7 +2,7 @@ import json
 from requests import get
 from urllib import urlencode
 
-resultCount=2
+resultCount=1
 urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
 urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'
 urlMap = 'https://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
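resultCount now requests a single hit. A sketch of how urlSearch is meant to be filled in (srsearch and srlimit are the MediaWiki list=search parameters; the query string is illustrative):

    from urllib import urlencode  # Python 2, as in this module

    query = urlencode({'srsearch': 'Douglas Adams', 'srlimit': resultCount})
    search_request_url = urlSearch.format(query=query)
    # fetching that URL returns JSON hits whose titles are wikidata ids (e.g. Q42)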
@@ -33,17 +33,20 @@ def response(resp):
     return results
 
 def getDetail(jsonresponse, wikidata_id, language):
-    result = jsonresponse.get('entities', {}).get(wikidata_id, {})
-
-    title = result.get('labels', {}).get(language, {}).get('value', None)
-    if title == None:
-        title = result.get('labels', {}).get('en', {}).get('value', wikidata_id)
     results = []
     urls = []
     attributes = []
 
-    description = result.get('descriptions', {}).get(language, {}).get('value', '')
-    if description == '':
+    result = jsonresponse.get('entities', {}).get(wikidata_id, {})
+
+    title = result.get('labels', {}).get(language, {}).get('value', None)
+    if title == None:
+        title = result.get('labels', {}).get('en', {}).get('value', None)
+    if title == None:
+        return results
+
+    description = result.get('descriptions', {}).get(language, {}).get('value', None)
+    if description == None:
         description = result.get('descriptions', {}).get('en', {}).get('value', '')
 
     claims = result.get('claims', {})
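The reordered lookups now fall back requested language -> English -> give up, instead of showing the raw wikidata id as a title. A toy sketch (the dict shape mirrors a wbgetentities entity):

    entity = {'labels': {'en': {'value': 'Douglas Adams'}},
              'descriptions': {'en': {'value': 'English writer'}}}

    title = entity.get('labels', {}).get('fr', {}).get('value', None)
    if title == None:
        title = entity.get('labels', {}).get('en', {}).get('value', None)
    if title == None:
        pass  # the refactored getDetail returns an empty result list here
    # title == 'Douglas Adams'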
@@ -52,10 +55,15 @@ def getDetail(jsonresponse, wikidata_id, language):
         urls.append({ 'title' : 'Official site', 'url': official_website })
         results.append({ 'title': title, 'url' : official_website })
 
+    wikipedia_link_count = 0
     if language != 'en':
-        add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
+        wikipedia_link_count += add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
     wikipedia_en_link = get_wikilink(result, 'enwiki')
-    add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
+    wikipedia_link_count += add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
+    if wikipedia_link_count == 0:
+        misc_language = get_wiki_firstlanguage(result, 'wiki')
+        if misc_language != None:
+            add_url(urls, 'Wikipedia (' + misc_language + ')', get_wikilink(result, misc_language + 'wiki'))
 
     if language != 'en':
         add_url(urls, 'Wiki voyage (' + language + ')', get_wikilink(result, language + 'wikivoyage'))
@@ -105,14 +113,20 @@ def getDetail(jsonresponse, wikidata_id, language):
     if date_of_death != None:
         attributes.append({'label' : 'Date of death', 'value' : date_of_death})
 
-
-    results.append({
-            'infobox' : title,
-            'id' : wikipedia_en_link,
-            'content' : description,
-            'attributes' : attributes,
-            'urls' : urls
-            })
+    if len(attributes)==0 and len(urls)==2 and len(description)==0:
+        results.append({
+                'url': urls[0]['url'],
+                'title': title,
+                'content': description
+                })
+    else:
+        results.append({
+                'infobox' : title,
+                'id' : wikipedia_en_link,
+                'content' : description,
+                'attributes' : attributes,
+                'urls' : urls
+                })
 
     return results
@@ -120,7 +134,9 @@ def getDetail(jsonresponse, wikidata_id, language):
 def add_url(urls, title, url):
     if url != None:
         urls.append({'title' : title, 'url' : url})
-
+        return 1
+    else:
+        return 0
 
 def get_mainsnak(claims, propertyName):
     propValue = claims.get(propertyName, {})
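Because add_url now returns 1 or 0, callers can count how many links actually landed, which is what the wikipedia_link_count hunk above relies on. Toy illustration:

    urls = []
    wikipedia_link_count = 0
    wikipedia_link_count += add_url(urls, 'Wikipedia (en)',
                                    'https://en.wikipedia.org/wiki/Example')
    wikipedia_link_count += add_url(urls, 'Wikipedia (de)', None)  # not appended
    # wikipedia_link_count == 1, so the first-language fallback is skipped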
@@ -147,7 +163,8 @@ def get_string(claims, propertyName, defaultValue=None):
     if len(result) == 0:
         return defaultValue
     else:
-        return ', '.join(result)
+        #TODO handle multiple urls
+        return result[0]
 
 
 def get_time(claims, propertyName, defaultValue=None):
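The change narrows get_string for URL-valued claims: joining several URLs with ', ' produced one unusable string, so it now returns only the first value (the TODO marks the remainder). Illustrative values:

    result = ['https://example.org', 'https://example.net']
    # before: ', '.join(result) -> 'https://example.org, https://example.net'
    # after:  result[0]         -> 'https://example.org'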
@@ -213,3 +230,9 @@ def get_wikilink(result, wikiid):
     elif url.startswith('//'):
         url = 'https:' + url
     return url
+
+def get_wiki_firstlanguage(result, wikipatternid):
+    for k in result.get('sitelinks', {}).keys():
+        if k.endswith(wikipatternid) and len(k)==(2+len(wikipatternid)):
+            return k[0:2]
+    return None
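get_wiki_firstlanguage scans sitelink keys such as 'enwiki' or 'frwiki' and returns the two-letter prefix of the first key matching the pattern. Toy payload (shape mirrors wbgetentities sitelinks):

    result = {'sitelinks': {'frwiki': {}, 'commonswiki': {}}}
    print get_wiki_firstlanguage(result, 'wiki')  # -> 'fr'
    # 'commonswiki' also ends with 'wiki', but fails the length check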
@@ -1,8 +1,9 @@
-## Yahoo (News)
+# Yahoo (News)
 #
 # @website https://news.yahoo.com
-# @provide-api yes (https://developer.yahoo.com/boss/search/), $0.80/1000 queries
+# @provide-api yes (https://developer.yahoo.com/boss/search/)
+#              $0.80/1000 queries
 #
 # @using-api no (because pricing)
 # @results HTML (using search portal)
 # @stable no (HTML can change)
@@ -22,7 +23,7 @@ paging = True
 language_support = True
 
 # search-url
-search_url = 'https://news.search.yahoo.com/search?{query}&b={offset}&fl=1&vl=lang_{lang}'
+search_url = 'https://news.search.yahoo.com/search?{query}&b={offset}&fl=1&vl=lang_{lang}'  # noqa
 
 # specific xpath variables
 results_xpath = '//div[@class="res"]'
@@ -41,7 +42,7 @@ def request(query, params):
         language = 'en'
     else:
         language = params['language'].split('_')[0]
 
     params['url'] = search_url.format(offset=offset,
                                       query=urlencode({'p': query}),
                                       lang=language)
@@ -13,7 +13,7 @@ from urllib import urlencode
 from dateutil import parser
 
 # engine dependent config
-categories = ['videos']
+categories = ['videos', 'music']
 paging = True
 language_support = True
 