Mirror of https://github.com/searxng/searxng.git (synced 2025-08-03 18:42:33 +02:00)
update versions.cfg to use the current up-to-date packages
parent bbd83f5a51
commit 4689fe341c
40 changed files with 486 additions and 398 deletions
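Most of the hunks below apply the same mechanical cleanup across the engine modules: the "## Engine (Category)" comment header at the top of each file becomes a module docstring, and membership tests written as "not x in y" become the idiomatic "x not in y". As a rough sketch of what an engine module looks like after this cleanup (the engine name, URL, metadata and result keys are invented for illustration and are not taken from any file in this commit):

"""
Example (Web)

@website      https://example.org
@provide-api  no
@results      JSON
@parse        url, title
"""

from json import loads


def response(resp):
    search_res = loads(resp.text)

    # return empty array if there are no results
    if 'results' not in search_res:
        return []

    # parse results
    return [{'url': r['url'], 'title': r['title']}
            for r in search_res['results']]
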
@@ -86,7 +86,7 @@ def load_engine(engine_data):
 continue
 if getattr(engine, engine_attr) is None:
 logger.error('Missing engine config attribute: "{0}.{1}"'
-.format(engine.name, engine_attr))
+.format(engine.name, engine_attr))
 sys.exit(1)

 engine.stats = {
@@ -106,7 +106,7 @@ def load_engine(engine_data):
 if engine.shortcut:
 if engine.shortcut in engine_shortcuts:
 logger.error('Engine config error: ambigious shortcut: {0}'
-.format(engine.shortcut))
+.format(engine.shortcut))
 sys.exit(1)
 engine_shortcuts[engine.shortcut] = engine.name
 return engine

@@ -1,15 +1,17 @@
-## Bing (Web)
-#
-# @website https://www.bing.com
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
-# max. 5000 query/month
-#
-# @using-api no (because of query limit)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo publishedDate
+"""
+Bing (Web)
+
+@website https://www.bing.com
+@provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+max. 5000 query/month
+
+@using-api no (because of query limit)
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content
+
+@todo publishedDate
+"""

 from urllib import urlencode
 from cgi import escape

@@ -1,17 +1,19 @@
-## Bing (Images)
-#
-# @website https://www.bing.com/images
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
-# max. 5000 query/month
-#
-# @using-api no (because of query limit)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, img_src
-#
-# @todo currently there are up to 35 images receive per page,
-# because bing does not parse count=10.
-# limited response to 10 images
+"""
+Bing (Images)
+
+@website https://www.bing.com/images
+@provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+max. 5000 query/month
+
+@using-api no (because of query limit)
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, img_src
+
+@todo currently there are up to 35 images receive per page,
+because bing does not parse count=10.
+limited response to 10 images
+"""

 from urllib import urlencode
 from lxml import html
@@ -76,7 +78,7 @@ def response(resp):
 title = link.attrib.get('t1')
 ihk = link.attrib.get('ihk')

-#url = 'http://' + link.attrib.get('t3')
+# url = 'http://' + link.attrib.get('t3')
 url = yaml_data.get('surl')
 img_src = yaml_data.get('imgurl')

@@ -1,13 +1,15 @@
-## Bing (News)
-#
-# @website https://www.bing.com/news
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
-# max. 5000 query/month
-#
-# @using-api no (because of query limit)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, publishedDate
+"""
+Bing (News)
+
+@website https://www.bing.com/news
+@provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+max. 5000 query/month
+
+@using-api no (because of query limit)
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content, publishedDate
+"""

 from urllib import urlencode
 from cgi import escape
@@ -87,6 +89,8 @@ def response(resp):
 publishedDate = parser.parse(publishedDate, dayfirst=False)
 except TypeError:
 publishedDate = datetime.now()
+except ValueError:
+publishedDate = datetime.now()

 # append result
 results.append({'url': url,

@@ -1,12 +1,14 @@
-## Blekko (Images)
-#
-# @website https://blekko.com
-# @provide-api yes (inofficial)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, img_src
+"""
+Blekko (Images)
+
+@website https://blekko.com
+@provide-api yes (inofficial)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, img_src
+"""

 from json import loads
 from urllib import urlencode

@@ -1,12 +1,14 @@
-## BTDigg (Videos, Music, Files)
-#
-# @website https://btdigg.org
-# @provide-api yes (on demand)
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, seed, leech, magnetlink
+"""
+BTDigg (Videos, Music, Files)
+
+@website https://btdigg.org
+@provide-api yes (on demand)
+
+@using-api no
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content, seed, leech, magnetlink
+"""

 from urlparse import urljoin
 from cgi import escape

@@ -1,14 +1,16 @@
-## Dailymotion (Videos)
-#
-# @website https://www.dailymotion.com
-# @provide-api yes (http://www.dailymotion.com/developer)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, thumbnail, publishedDate, embedded
-#
-# @todo set content-parameter with correct data
+"""
+Dailymotion (Videos)
+
+@website https://www.dailymotion.com
+@provide-api yes (http://www.dailymotion.com/developer)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, thumbnail, publishedDate, embedded
+
+@todo set content-parameter with correct data
+"""

 from urllib import urlencode
 from json import loads
@@ -48,7 +50,7 @@ def response(resp):
 search_res = loads(resp.text)

 # return empty array if there are no results
-if not 'list' in search_res:
+if 'list' not in search_res:
 return []

 # parse results

@@ -1,12 +1,14 @@
-## Deezer (Music)
-#
-# @website https://deezer.com
-# @provide-api yes (http://developers.deezer.com/api/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, embedded
+"""
+Deezer (Music)
+
+@website https://deezer.com
+@provide-api yes (http://developers.deezer.com/api/)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content, embedded
+"""

 from json import loads
 from urllib import urlencode

@@ -1,14 +1,16 @@
-## Deviantart (Images)
-#
-# @website https://www.deviantart.com/
-# @provide-api yes (https://www.deviantart.com/developers/) (RSS)
-#
-# @using-api no (TODO, rewrite to api)
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, thumbnail_src, img_src
-#
-# @todo rewrite to api
+"""
+Deviantart (Images)
+
+@website https://www.deviantart.com/
+@provide-api yes (https://www.deviantart.com/developers/) (RSS)
+
+@using-api no (TODO, rewrite to api)
+@results HTML
+@stable no (HTML can change)
+@parse url, title, thumbnail_src, img_src
+
+@todo rewrite to api
+"""

 from urllib import urlencode
 from urlparse import urljoin

@@ -1,12 +1,14 @@
-## Digg (News, Social media)
-#
-# @website https://digg.com/
-# @provide-api no
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, publishedDate, thumbnail
+"""
+Digg (News, Social media)
+
+@website https://digg.com/
+@provide-api no
+
+@using-api no
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content, publishedDate, thumbnail
+"""

 from urllib import quote_plus
 from json import loads

@@ -1,17 +1,19 @@
-## DuckDuckGo (Web)
-#
-# @website https://duckduckgo.com/
-# @provide-api yes (https://duckduckgo.com/api),
-# but not all results from search-site
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo rewrite to api
-# @todo language support
-# (the current used site does not support language-change)
+"""
+DuckDuckGo (Web)
+
+@website https://duckduckgo.com/
+@provide-api yes (https://duckduckgo.com/api),
+but not all results from search-site
+
+@using-api no
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content
+
+@todo rewrite to api
+@todo language support
+(the current used site does not support language-change)
+"""

 from urllib import urlencode
 from lxml.html import fromstring

@@ -1,7 +1,9 @@
-## Dummy
-#
-# @results empty array
-# @stable yes
+"""
+Dummy
+
+@results empty array
+@stable yes
+"""


 # do search-request

@@ -1,12 +1,14 @@
-## Faroo (Web, News)
-#
-# @website http://www.faroo.com
-# @provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, publishedDate, img_src
+"""
+Faroo (Web, News)
+
+@website http://www.faroo.com
+@provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content, publishedDate, img_src
+"""

 from urllib import urlencode
 from json import loads

@@ -1,15 +1,17 @@
 #!/usr/bin/env python

-## Flickr (Images)
-#
-# @website https://www.flickr.com
-# @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, thumbnail, img_src
-#More info on api-key : https://www.flickr.com/services/apps/create/
+"""
+Flickr (Images)
+
+@website https://www.flickr.com
+@provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, thumbnail, img_src
+More info on api-key : https://www.flickr.com/services/apps/create/
+"""

 from urllib import urlencode
 from json import loads
@@ -48,10 +50,10 @@ def response(resp):
 search_results = loads(resp.text)

 # return empty array if there are no results
-if not 'photos' in search_results:
+if 'photos' not in search_results:
 return []

-if not 'photo' in search_results['photos']:
+if 'photo' not in search_results['photos']:
 return []

 photos = search_results['photos']['photo']

@@ -1,14 +1,16 @@
 #!/usr/bin/env python

-# Flickr (Images)
-#
-# @website https://www.flickr.com
-# @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
-#
-# @using-api no
-# @results HTML
-# @stable no
-# @parse url, title, thumbnail, img_src
+"""
+Flickr (Images)
+
+@website https://www.flickr.com
+@provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
+
+@using-api no
+@results HTML
+@stable no
+@parse url, title, thumbnail, img_src
+"""

 from urllib import urlencode
 from json import loads
@@ -20,8 +22,8 @@ logger = logger.getChild('flickr-noapi')

 categories = ['images']

-url = 'https://secure.flickr.com/'
-search_url = url + 'search/?{query}&page={page}'
+url = 'https://www.flickr.com/'
+search_url = url + 'search?{query}&page={page}'
 photo_url = 'https://www.flickr.com/photos/{userid}/{photoid}'
 regex = re.compile(r"\"search-photos-models\",\"photos\":(.*}),\"totalItems\":", re.DOTALL)
 image_sizes = ('o', 'k', 'h', 'b', 'c', 'z', 'n', 'm', 't', 'q', 's')

@@ -1,14 +1,16 @@
-## General Files (Files)
-#
-# @website http://www.general-files.org
-# @provide-api no (nothing found)
-#
-# @using-api no (because nothing found)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo detect torrents?
+"""
+General Files (Files)
+
+@website http://www.general-files.org
+@provide-api no (nothing found)
+
+@using-api no (because nothing found)
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content
+
+@todo detect torrents?
+"""

 from lxml import html


@@ -1,12 +1,14 @@
-## Gigablast (Web)
-#
-# @website http://gigablast.com
-# @provide-api yes (http://gigablast.com/api.html)
-#
-# @using-api yes
-# @results XML
-# @stable yes
-# @parse url, title, content
+"""
+Gigablast (Web)
+
+@website http://gigablast.com
+@provide-api yes (http://gigablast.com/api.html)
+
+@using-api yes
+@results XML
+@stable yes
+@parse url, title, content
+"""

 from urllib import urlencode
 from cgi import escape

@@ -1,12 +1,14 @@
-## Github (It)
-#
-# @website https://github.com/
-# @provide-api yes (https://developer.github.com/v3/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes (using api)
-# @parse url, title, content
+"""
+Github (It)
+
+@website https://github.com/
+@provide-api yes (https://developer.github.com/v3/)
+
+@using-api yes
+@results JSON
+@stable yes (using api)
+@parse url, title, content
+"""

 from urllib import urlencode
 from json import loads
@@ -37,7 +39,7 @@ def response(resp):
 search_res = loads(resp.text)

 # check if items are recieved
-if not 'items' in search_res:
+if 'items' not in search_res:
 return []

 # parse results

@@ -1,13 +1,15 @@
-## Google (Images)
-#
-# @website https://www.google.com
-# @provide-api yes (https://developers.google.com/web-search/docs/),
-# deprecated!
-#
-# @using-api yes
-# @results JSON
-# @stable yes (but deprecated)
-# @parse url, title, img_src
+"""
+Google (Images)
+
+@website https://www.google.com
+@provide-api yes (https://developers.google.com/web-search/docs/),
+deprecated!
+
+@using-api yes
+@results JSON
+@stable yes (but deprecated)
+@parse url, title, img_src
+"""

 from urllib import urlencode, unquote
 from json import loads

@@ -1,13 +1,15 @@
-## Google (News)
-#
-# @website https://www.google.com
-# @provide-api yes (https://developers.google.com/web-search/docs/),
-# deprecated!
-#
-# @using-api yes
-# @results JSON
-# @stable yes (but deprecated)
-# @parse url, title, content, publishedDate
+"""
+Google (News)
+
+@website https://www.google.com
+@provide-api yes (https://developers.google.com/web-search/docs/),
+deprecated!
+
+@using-api yes
+@results JSON
+@stable yes (but deprecated)
+@parse url, title, content, publishedDate
+"""

 from urllib import urlencode
 from json import loads

@@ -6,7 +6,7 @@ search_url = None
 url_query = None
 content_query = None
 title_query = None
-#suggestion_xpath = ''
+# suggestion_xpath = ''


 def iterate(iterable):

@@ -1,12 +1,14 @@
-## Kickass Torrent (Videos, Music, Files)
-#
-# @website https://kickass.so
-# @provide-api no (nothing found)
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable yes (HTML can change)
-# @parse url, title, content, seed, leech, magnetlink
+"""
+Kickass Torrent (Videos, Music, Files)
+
+@website https://kickass.so
+@provide-api no (nothing found)
+
+@using-api no
+@results HTML (using search portal)
+@stable yes (HTML can change)
+@parse url, title, content, seed, leech, magnetlink
+"""

 from urlparse import urljoin
 from cgi import escape

@@ -1,14 +1,16 @@
-## general mediawiki-engine (Web)
-#
-# @website websites built on mediawiki (https://www.mediawiki.org)
-# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
-#
-# @todo content
+"""
+general mediawiki-engine (Web)
+
+@website websites built on mediawiki (https://www.mediawiki.org)
+@provide-api yes (http://www.mediawiki.org/wiki/API:Search)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title
+
+@todo content
+"""

 from json import loads
 from string import Formatter

@@ -1,12 +1,14 @@
-## Mixcloud (Music)
-#
-# @website https://http://www.mixcloud.com/
-# @provide-api yes (http://www.mixcloud.com/developers/
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, embedded, publishedDate
+"""
+Mixcloud (Music)
+
+@website https://http://www.mixcloud.com/
+@provide-api yes (http://www.mixcloud.com/developers/
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content, embedded, publishedDate
+"""

 from json import loads
 from urllib import urlencode

@@ -1,12 +1,14 @@
-## OpenStreetMap (Map)
-#
-# @website https://openstreetmap.org/
-# @provide-api yes (http://wiki.openstreetmap.org/wiki/Nominatim)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
+"""
+OpenStreetMap (Map)
+
+@website https://openstreetmap.org/
+@provide-api yes (http://wiki.openstreetmap.org/wiki/Nominatim)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title
+"""

 from json import loads
 from searx.utils import searx_useragent

@@ -1,12 +1,14 @@
-## Photon (Map)
-#
-# @website https://photon.komoot.de
-# @provide-api yes (https://photon.komoot.de/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
+"""
+Photon (Map)
+
+@website https://photon.komoot.de
+@provide-api yes (https://photon.komoot.de/)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title
+"""

 from urllib import urlencode
 from json import loads

@@ -1,12 +1,14 @@
-## Searchcode (It)
-#
-# @website https://searchcode.com/
-# @provide-api yes (https://searchcode.com/api/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content
+"""
+Searchcode (It)
+
+@website https://searchcode.com/
+@provide-api yes (https://searchcode.com/api/)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content
+"""

 from urllib import urlencode
 from json import loads

@@ -1,12 +1,14 @@
-## Searchcode (It)
-#
-# @website https://searchcode.com/
-# @provide-api yes (https://searchcode.com/api/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content
+"""
+Searchcode (It)
+
+@website https://searchcode.com/
+@provide-api yes (https://searchcode.com/api/)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content
+"""

 from urllib import urlencode
 from json import loads

@@ -1,12 +1,14 @@
-## Soundcloud (Music)
-#
-# @website https://soundcloud.com
-# @provide-api yes (https://developers.soundcloud.com/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, publishedDate, embedded
+"""
+Soundcloud (Music)
+
+@website https://soundcloud.com
+@provide-api yes (https://developers.soundcloud.com/)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content, publishedDate, embedded
+"""

 from json import loads
 from urllib import urlencode, quote_plus

@@ -1,12 +1,14 @@
-## Spotify (Music)
-#
-# @website https://spotify.com
-# @provide-api yes (https://developer.spotify.com/web-api/search-item/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, embedded
+"""
+Spotify (Music)
+
+@website https://spotify.com
+@provide-api yes (https://developer.spotify.com/web-api/search-item/)
+
+@using-api yes
+@results JSON
+@stable yes
+@parse url, title, content, embedded
+"""

 from json import loads
 from urllib import urlencode

@@ -1,12 +1,14 @@
-## Stackoverflow (It)
-#
-# @website https://stackoverflow.com/
-# @provide-api not clear (https://api.stackexchange.com/docs/advanced-search)
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, content
+"""
+Stackoverflow (It)
+
+@website https://stackoverflow.com/
+@provide-api not clear (https://api.stackexchange.com/docs/advanced-search)
+
+@using-api no
+@results HTML
+@stable no (HTML can change)
+@parse url, title, content
+"""

 from urlparse import urljoin
 from cgi import escape

@@ -1,12 +1,14 @@
-## Subtitleseeker (Video)
-#
-# @website http://www.subtitleseeker.com
-# @provide-api no
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, content
+"""
+Subtitleseeker (Video)
+
+@website http://www.subtitleseeker.com
+@provide-api no
+
+@using-api no
+@results HTML
+@stable no (HTML can change)
+@parse url, title, content
+"""

 from cgi import escape
 from urllib import quote_plus

@@ -1,14 +1,16 @@
-## Twitter (Social media)
-#
-# @website https://twitter.com/
-# @provide-api yes (https://dev.twitter.com/docs/using-search)
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo publishedDate
+"""
+Twitter (Social media)
+
+@website https://twitter.com/
+@provide-api yes (https://dev.twitter.com/docs/using-search)
+
+@using-api no
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content
+
+@todo publishedDate
+"""

 from urlparse import urljoin
 from urllib import urlencode

@@ -1,13 +1,14 @@
-## 1x (Images)
-#
-# @website http://1x.com/
-# @provide-api no
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, thumbnail, img_src, content
+"""
+1x (Images)
+
+@website http://1x.com/
+@provide-api no
+
+@using-api no
+@results HTML
+@stable no (HTML can change)
+@parse url, title, thumbnail, img_src, content
+"""

 from urllib import urlencode
 from urlparse import urljoin

@@ -1,14 +1,16 @@
-## 500px (Images)
-#
-# @website https://500px.com
-# @provide-api yes (https://developers.500px.com/)
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, thumbnail, img_src, content
-#
-# @todo rewrite to api
+"""
+500px (Images)
+
+@website https://500px.com
+@provide-api yes (https://developers.500px.com/)
+
+@using-api no
+@results HTML
+@stable no (HTML can change)
+@parse url, title, thumbnail, img_src, content
+
+@todo rewrite to api
+"""


 from urllib import urlencode

@@ -1,4 +1,4 @@
-## Yacy (Web, Images, Videos, Music, Files)
+# Yacy (Web, Images, Videos, Music, Files)
 #
 # @website http://yacy.net
 # @provide-api yes

@@ -1,13 +1,15 @@
-## Yahoo (Web)
-#
-# @website https://search.yahoo.com/web
-# @provide-api yes (https://developer.yahoo.com/boss/search/),
-# $0.80/1000 queries
-#
-# @using-api no (because pricing)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, suggestion
+"""
+Yahoo (Web)
+
+@website https://search.yahoo.com/web
+@provide-api yes (https://developer.yahoo.com/boss/search/),
+$0.80/1000 queries
+
+@using-api no (because pricing)
+@results HTML (using search portal)
+@stable no (HTML can change)
+@parse url, title, content, suggestion
+"""

 from urllib import urlencode
 from urlparse import unquote

@@ -1,4 +1,4 @@
-## Youtube (Videos)
+# Youtube (Videos)
 #
 # @website https://www.youtube.com/
 # @provide-api yes (http://gdata-samples-youtube-search-py.appspot.com/)
@@ -47,7 +47,7 @@ def response(resp):
 search_results = loads(resp.text)

 # return empty array if there are no results
-if not 'feed' in search_results:
+if 'feed' not in search_results:
 return []

 feed = search_results['feed']