forked from Icycoide/searxng

commit 6bfd566353 (parent e39d9fe542)
[enh] add infoboxes and answers

10 changed files with 525 additions and 130 deletions

@@ -38,16 +38,14 @@ def response(resp):
     except:
         return results

-    title = '{0} {1} in {2} is {3}'.format(
+    answer = '{0} {1} = {2} {3} (1 {1} = {4} {3})'.format(
         resp.search_params['ammount'],
         resp.search_params['from'],
+        resp.search_params['ammount'] * conversion_rate,
         resp.search_params['to'],
-        resp.search_params['ammount'] * conversion_rate
+        conversion_rate
     )

-    content = '1 {0} is {1} {2}'.format(resp.search_params['from'],
-                                        conversion_rate,
-                                        resp.search_params['to'])
     now_date = datetime.now().strftime('%Y%m%d')
     url = 'http://finance.yahoo.com/currency/converter-results/{0}/{1}-{2}-to-{3}.html'  # noqa
     url = url.format(
@@ -56,6 +54,7 @@ def response(resp):
         resp.search_params['from'].lower(),
         resp.search_params['to'].lower()
     )
-    results.append({'title': title, 'content': content, 'url': url})
+
+    results.append({'answer' : answer, 'url': url})

     return results
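As a worked sketch of the new answer string (hypothetical values; 'ammount' is the key the engine really uses, misspelling included):

    # hypothetical inputs, not part of the commit
    search_params = {'ammount': 100.0, 'from': 'USD', 'to': 'EUR'}
    conversion_rate = 0.5
    answer = '{0} {1} = {2} {3} (1 {1} = {4} {3})'.format(
        search_params['ammount'],
        search_params['from'],
        search_params['ammount'] * conversion_rate,
        search_params['to'],
        conversion_rate
    )
    # answer == '100.0 USD = 50.0 EUR (1 USD = 0.5 EUR)'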

@@ -1,10 +1,25 @@
 import json
 from urllib import urlencode
+from lxml import html
+from searx.engines.xpath import extract_text

-url = 'http://api.duckduckgo.com/?{query}&format=json&pretty=0&no_redirect=1'
+url = 'https://api.duckduckgo.com/?{query}&format=json&pretty=0&no_redirect=1&d=1'

+def result_to_text(url, text, htmlResult):
+    # TODO : remove result ending with "Meaning" or "Category"
+    dom = html.fromstring(htmlResult)
+    a = dom.xpath('//a')
+    if len(a)>=1:
+        return extract_text(a[0])
+    else:
+        return text
+
+def html_to_text(htmlFragment):
+    dom = html.fromstring(htmlFragment)
+    return extract_text(dom)
+
 def request(query, params):
+    # TODO add kl={locale}
     params['url'] = url.format(query=urlencode({'q': query}))
     return params
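A minimal usage sketch for the two new helpers (the HTML fragment is invented to match the shape of DuckDuckGo's 'Result' field, not taken from the commit):

    # result_to_text prefers the text of the first <a> in the HTML fragment
    # and falls back to the plain-text argument
    fragment = '<a href="https://duckduckgo.com/Python">Python</a> A programming language.'
    print result_to_text('https://duckduckgo.com/Python',
                         'Python A programming language.', fragment)
    # -> Python

    print html_to_text('<b>bold</b> and plain')
    # -> bold and plain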

@@ -12,12 +27,104 @@ def request(query, params):
 def response(resp):
     search_res = json.loads(resp.text)
     results = []

+    content = ''
+    heading = search_res.get('Heading', '')
+    attributes = []
+    urls = []
+    infobox_id = None
+    relatedTopics = []
+
+    # add answer if there is one
+    answer = search_res.get('Answer', '')
+    if answer != '':
+        results.append({ 'answer' : html_to_text(answer) })
+
+    # add infobox
     if 'Definition' in search_res:
         if search_res.get('AbstractURL'):
             res = {'title': search_res.get('Heading', ''),
                    'content': search_res.get('Definition', ''),
                    'url': search_res.get('AbstractURL', ''),
                    'class': 'definition_result'}
             results.append(res)
+        content = content + search_res.get('Definition', '')
+
+    if 'Abstract' in search_res:
+        content = content + search_res.get('Abstract', '')
+
+    # image
+    image = search_res.get('Image', '')
+    image = None if image == '' else image
+
+    # attributes
+    if 'Infobox' in search_res:
+        infobox = search_res.get('Infobox', None)
+        if 'content' in infobox:
+            for info in infobox.get('content'):
+                attributes.append({'label': info.get('label'), 'value': info.get('value')})
+
+    # urls
+    for ddg_result in search_res.get('Results', []):
+        if 'FirstURL' in ddg_result:
+            firstURL = ddg_result.get('FirstURL', '')
+            text = ddg_result.get('Text', '')
+            urls.append({'title':text, 'url':firstURL})
+            results.append({'title':heading, 'url': firstURL})
+
+    # related topics
+    for ddg_result in search_res.get('RelatedTopics', None):
+        if 'FirstURL' in ddg_result:
+            suggestion = result_to_text(ddg_result.get('FirstURL', None), ddg_result.get('Text', None), ddg_result.get('Result', None))
+            if suggestion != heading:
+                results.append({'suggestion': suggestion})
+        elif 'Topics' in ddg_result:
+            suggestions = []
+            relatedTopics.append({ 'name' : ddg_result.get('Name', ''), 'suggestions': suggestions })
+            for topic_result in ddg_result.get('Topics', []):
+                suggestion = result_to_text(topic_result.get('FirstURL', None), topic_result.get('Text', None), topic_result.get('Result', None))
+                if suggestion != heading:
+                    suggestions.append(suggestion)
+
+    # abstract
+    abstractURL = search_res.get('AbstractURL', '')
+    if abstractURL != '':
+        # add as result ? problem always in english
+        infobox_id = abstractURL
+        urls.append({'title': search_res.get('AbstractSource'), 'url': abstractURL})
+
+    # definition
+    definitionURL = search_res.get('DefinitionURL', '')
+    if definitionURL != '':
+        # add as result ? as answer ? problem always in english
+        infobox_id = definitionURL
+        urls.append({'title': search_res.get('DefinitionSource'), 'url': definitionURL})
+
+    # entity
+    entity = search_res.get('Entity', None)
+    # TODO continent / country / department / location / waterfall / mountain range : link to map search, get weather, near by locations
+    # TODO musician : link to music search
+    # TODO concert tour : ??
+    # TODO film / actor / television / media franchise : links to IMDB / rottentomatoes (or scrap result)
+    # TODO music : link tu musicbrainz / last.fm
+    # TODO book : ??
+    # TODO artist / playwright : ??
+    # TODO compagny : ??
+    # TODO software / os : ??
+    # TODO software engineer : ??
+    # TODO prepared food : ??
+    # TODO website : ??
+    # TODO performing art : ??
+    # TODO prepared food : ??
+    # TODO programming language : ??
+    # TODO file format : ??
+
+    if len(heading)>0:
+        # TODO get infobox.meta.value where .label='article_title'
+        results.append({
+            'infobox': heading,
+            'id': infobox_id,
+            'entity': entity,
+            'content': content,
+            'img_src' : image,
+            'attributes': attributes,
+            'urls': urls,
+            'relatedTopics': relatedTopics
+        })
+
     return results
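The field names the rewritten response() reads come straight from DuckDuckGo's Instant Answer JSON. A heavily trimmed, invented payload and what the parser would make of it (illustrative only):

    search_res = {
        'Heading': 'Example Topic',
        'Abstract': 'A short abstract of the topic.',
        'AbstractURL': 'https://en.wikipedia.org/wiki/Example',
        'AbstractSource': 'Wikipedia',
        'Infobox': {'content': [{'label': 'Type', 'value': 'example'}]},
        'Results': [{'FirstURL': 'https://example.org/', 'Text': 'Official site'}],
        'RelatedTopics': [],
    }
    # response() would emit a {'title': 'Example Topic', 'url': ...} result for
    # the official site, plus one {'infobox': ...} result whose content is the
    # Abstract, whose attributes come from Infobox.content, and whose urls list
    # holds the official site and the Wikipedia abstract link.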

searx/engines/wikidata.py (new file, 193 lines)
@@ -0,0 +1,193 @@
+import json
+from datetime import datetime
+from requests import get
+from urllib import urlencode
+
+resultCount=2
+urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectionsnippet&{query}'
+urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'
+# find the right URL for urlMap
+urlMap = 'http://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
+
+def request(query, params):
+    params['url'] = urlSearch.format(query=urlencode({'srsearch': query, 'srlimit': resultCount}))
+    print params['url']
+    return params
+
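For a query like "Paris", request() builds and prints a URL of this shape (illustrative; urlencode may order the two parameters either way):

    https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectionsnippet&srsearch=Paris&srlimit=2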
+def response(resp):
+    results = []
+    search_res = json.loads(resp.text)
+    # TODO parallel http queries
+    before = datetime.now()
+    for r in search_res.get('query', {}).get('search', {}):
+        wikidata_id = r.get('title', '')
+        results = results + getDetail(wikidata_id)
+    after = datetime.now()
+    print str(after - before) + " second(s)"
+
+    return results
+
+def getDetail(wikidata_id):
+    language = 'fr'
+
+    url = urlDetail.format(query=urlencode({'ids': wikidata_id, 'languages': language + '|en'}))
+    print url
+    response = get(url)
+    result = json.loads(response.content)
+    result = result.get('entities', {}).get(wikidata_id, {})
+
+    title = result.get('labels', {}).get(language, {}).get('value', None)
+    if title == None:
+        title = result.get('labels', {}).get('en', {}).get('value', wikidata_id)
+    results = []
+    urls = []
+    attributes = []
+
+    description = result.get('descriptions', {}).get(language, {}).get('value', '')
+    if description == '':
+        description = result.get('descriptions', {}).get('en', {}).get('value', '')
+
+    claims = result.get('claims', {})
+    official_website = get_string(claims, 'P856', None)
+    print official_website
+    if official_website != None:
+        urls.append({ 'title' : 'Official site', 'url': official_website })
+        results.append({ 'title': title, 'url' : official_website })
+
+    if language != 'en':
+        add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
+    wikipedia_en_link = get_wikilink(result, 'enwiki')
+    add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
+
+    if language != 'en':
+        add_url(urls, 'Wiki voyage (' + language + ')', get_wikilink(result, language + 'wikivoyage'))
+    add_url(urls, 'Wiki voyage (en)', get_wikilink(result, 'enwikivoyage'))
+
+    if language != 'en':
+        add_url(urls, 'Wikiquote (' + language + ')', get_wikilink(result, language + 'wikiquote'))
+    add_url(urls, 'Wikiquote (en)', get_wikilink(result, 'enwikiquote'))
+
+    add_url(urls, 'Commons wiki', get_wikilink(result, 'commonswiki'))
+
+    add_url(urls, 'Location', get_geolink(claims, 'P625', None))
+
+    add_url(urls, 'Wikidata', 'https://www.wikidata.org/wiki/' + wikidata_id + '?uselang='+ language)
+
+    postal_code = get_string(claims, 'P281', None)
+    if postal_code != None:
+        attributes.append({'label' : 'Postal code(s)', 'value' : postal_code})
+
+    date_of_birth = get_time(claims, 'P569', None)
+    if date_of_birth != None:
+        attributes.append({'label' : 'Date of birth', 'value' : date_of_birth})
+
+    date_of_death = get_time(claims, 'P570', None)
+    if date_of_death != None:
+        attributes.append({'label' : 'Date of death', 'value' : date_of_death})
+
+    results.append({
+        'infobox' : title,
+        'id' : wikipedia_en_link,
+        'content' : description,
+        'attributes' : attributes,
+        'urls' : urls
+    })
+
+    return results
+
+def add_url(urls, title, url):
+    if url != None:
+        urls.append({'title' : title, 'url' : url})
+
+def get_mainsnak(claims, propertyName):
+    propValue = claims.get(propertyName, {})
+    if len(propValue) == 0:
+        return None
+
+    propValue = propValue[0].get('mainsnak', None)
+    return propValue
+
+def get_string(claims, propertyName, defaultValue=None):
+    propValue = claims.get(propertyName, {})
+    if len(propValue) == 0:
+        return defaultValue
+
+    result = []
+    for e in propValue:
+        mainsnak = e.get('mainsnak', {})
+
+        datatype = mainsnak.get('datatype', '')
+        datavalue = mainsnak.get('datavalue', {})
+        if datavalue != None:
+            result.append(datavalue.get('value', ''))
+
+    if len(result) == 0:
+        return defaultValue
+    else:
+        return ', '.join(result)
+
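The claim helpers walk Wikidata's JSON claim structure. A trimmed, illustrative example of the shape get_string expects for P856 (official website):

    claims = {
        'P856': [
            {'mainsnak': {'datatype': 'url',
                          'datavalue': {'value': 'https://www.example.org/'}}}
        ]
    }
    print get_string(claims, 'P856', None)
    # -> https://www.example.org/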
+def get_time(claims, propertyName, defaultValue=None):
+    propValue = claims.get(propertyName, {})
+    if len(propValue) == 0:
+        return defaultValue
+
+    result = []
+    for e in propValue:
+        mainsnak = e.get('mainsnak', {})
+
+        datatype = mainsnak.get('datatype', '')
+        datavalue = mainsnak.get('datavalue', {})
+        if datavalue != None:
+            value = datavalue.get('value', '')
+            result.append(value.get('time', ''))
+
+    if len(result) == 0:
+        return defaultValue
+    else:
+        return ', '.join(result)
+
+def get_geolink(claims, propertyName, defaultValue=''):
+    mainsnak = get_mainsnak(claims, propertyName)
+
+    if mainsnak == None:
+        return defaultValue
+
+    datatype = mainsnak.get('datatype', '')
+    datavalue = mainsnak.get('datavalue', {})
+
+    if datatype != 'globe-coordinate':
+        return defaultValue
+
+    value = datavalue.get('value', {})
+
+    precision = value.get('precision', 0.0002)
+
+    # there is no zoom information, deduce from precision (error prone)
+    # samples :
+    # 13 --> 5
+    # 1 --> 6
+    # 0.016666666666667 --> 9
+    # 0.00027777777777778 --> 19
+    # wolframalpha : quadratic fit { {13, 5}, {1, 6}, {0.0166666, 9}, {0.0002777777,19}}
+    # 14.1186-8.8322 x+0.625447 x^2
+    if precision < 0.0003:
+        zoom = 19
+    else:
+        zoom = int(15 - precision*8.8322 + precision*precision*0.625447)
+
+    url = urlMap.replace('{latitude}', str(value.get('latitude',0))).replace('{longitude}', str(value.get('longitude',0))).replace('{zoom}', str(zoom))
+
+    return url
+
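A worked check of the precision-to-zoom heuristic against the comment's samples (arithmetic only, no new behavior; note the code uses a constant of 15 where the fitted quadratic in the comment says 14.1186):

    precision = 1.0  # one degree of coordinate precision
    zoom = int(15 - precision*8.8322 + precision*precision*0.625447)
    # zoom == 6, matching the '1 --> 6' sample; precision = 13.0 gives 5,
    # and anything finer than 0.0003 is clamped to zoom 19 by the branch above.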
+def get_wikilink(result, wikiid):
+    url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
+    if url == None:
+        return url
+    elif url.startswith('http://'):
+        url = url.replace('http://', 'https://')
+    elif url.startswith('//'):
+        url = 'https:' + url
+    return url
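A small sketch of the sitelinks shape get_wikilink reads, and the protocol normalization it applies (entity data invented for illustration):

    result = {'sitelinks': {'enwiki': {'url': 'http://en.wikipedia.org/wiki/Example'}}}
    print get_wikilink(result, 'enwiki')
    # -> https://en.wikipedia.org/wiki/Example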