commit: 93fd1e4c76b2eb3d219a8b146ae9e36b103ae5cf
parent: 81850dd603631b7e4b0d3b22d85e9eb3ae5b1e58
Author: Adam Tauber <asciimoo@gmail.com>
Date: Sat, 2 May 2015 14:58:32 -0400
Merge pull request #308 from dalf/versions_upgrade
update versions.cfg to use the current up-to-date packages
Diffstat:
40 files changed, 481 insertions(+), 393 deletions(-)
diff --git a/searx/autocomplete.py b/searx/autocomplete.py
@@ -28,7 +28,7 @@ from searx.poolrequests import get as http_get
def get(*args, **kwargs):
- if not 'timeout' in kwargs:
+ if 'timeout' not in kwargs:
kwargs['timeout'] = settings['server']['request_timeout']
return http_get(*args, **kwargs)
diff --git a/searx/engines/__init__.py b/searx/engines/__init__.py
@@ -86,7 +86,7 @@ def load_engine(engine_data):
continue
if getattr(engine, engine_attr) is None:
logger.error('Missing engine config attribute: "{0}.{1}"'
- .format(engine.name, engine_attr))
+ .format(engine.name, engine_attr))
sys.exit(1)
engine.stats = {
@@ -106,7 +106,7 @@ def load_engine(engine_data):
if engine.shortcut:
if engine.shortcut in engine_shortcuts:
logger.error('Engine config error: ambigious shortcut: {0}'
- .format(engine.shortcut))
+ .format(engine.shortcut))
sys.exit(1)
engine_shortcuts[engine.shortcut] = engine.name
return engine
diff --git a/searx/engines/bing.py b/searx/engines/bing.py
@@ -1,15 +1,17 @@
-## Bing (Web)
-#
-# @website https://www.bing.com
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
-# max. 5000 query/month
-#
-# @using-api no (because of query limit)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo publishedDate
+"""
+ Bing (Web)
+
+ @website https://www.bing.com
+ @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+ max. 5000 query/month
+
+ @using-api no (because of query limit)
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content
+
+ @todo publishedDate
+"""
from urllib import urlencode
from cgi import escape
diff --git a/searx/engines/bing_images.py b/searx/engines/bing_images.py
@@ -1,17 +1,19 @@
-## Bing (Images)
-#
-# @website https://www.bing.com/images
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
-# max. 5000 query/month
-#
-# @using-api no (because of query limit)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, img_src
-#
-# @todo currently there are up to 35 images receive per page,
-# because bing does not parse count=10.
-# limited response to 10 images
+"""
+ Bing (Images)
+
+ @website https://www.bing.com/images
+ @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+ max. 5000 query/month
+
+ @using-api no (because of query limit)
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, img_src
+
+ @todo currently up to 35 images are received per page,
+       because bing does not parse count=10.
+       limited response to 10 images
+"""
from urllib import urlencode
from lxml import html
@@ -76,7 +78,7 @@ def response(resp):
title = link.attrib.get('t1')
ihk = link.attrib.get('ihk')
- #url = 'http://' + link.attrib.get('t3')
+ # url = 'http://' + link.attrib.get('t3')
url = yaml_data.get('surl')
img_src = yaml_data.get('imgurl')
diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py
@@ -1,13 +1,15 @@
-## Bing (News)
-#
-# @website https://www.bing.com/news
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
-# max. 5000 query/month
-#
-# @using-api no (because of query limit)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, publishedDate
+"""
+ Bing (News)
+
+ @website https://www.bing.com/news
+ @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+ max. 5000 query/month
+
+ @using-api no (because of query limit)
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content, publishedDate
+"""
from urllib import urlencode
from cgi import escape
@@ -87,6 +89,8 @@ def response(resp):
publishedDate = parser.parse(publishedDate, dayfirst=False)
except TypeError:
publishedDate = datetime.now()
+ except ValueError:
+ publishedDate = datetime.now()
# append result
results.append({'url': url,
diff --git a/searx/engines/blekko_images.py b/searx/engines/blekko_images.py
@@ -1,12 +1,14 @@
-## Blekko (Images)
-#
-# @website https://blekko.com
-# @provide-api yes (inofficial)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, img_src
+"""
+ Blekko (Images)
+
+ @website https://blekko.com
+ @provide-api yes (unofficial)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, img_src
+"""
from json import loads
from urllib import urlencode
diff --git a/searx/engines/btdigg.py b/searx/engines/btdigg.py
@@ -1,12 +1,14 @@
-## BTDigg (Videos, Music, Files)
-#
-# @website https://btdigg.org
-# @provide-api yes (on demand)
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, seed, leech, magnetlink
+"""
+ BTDigg (Videos, Music, Files)
+
+ @website https://btdigg.org
+ @provide-api yes (on demand)
+
+ @using-api no
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content, seed, leech, magnetlink
+"""
from urlparse import urljoin
from cgi import escape
diff --git a/searx/engines/dailymotion.py b/searx/engines/dailymotion.py
@@ -1,14 +1,16 @@
-## Dailymotion (Videos)
-#
-# @website https://www.dailymotion.com
-# @provide-api yes (http://www.dailymotion.com/developer)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, thumbnail, publishedDate, embedded
-#
-# @todo set content-parameter with correct data
+"""
+ Dailymotion (Videos)
+
+ @website https://www.dailymotion.com
+ @provide-api yes (http://www.dailymotion.com/developer)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, thumbnail, publishedDate, embedded
+
+ @todo set content-parameter with correct data
+"""
from urllib import urlencode
from json import loads
@@ -48,7 +50,7 @@ def response(resp):
search_res = loads(resp.text)
# return empty array if there are no results
- if not 'list' in search_res:
+ if 'list' not in search_res:
return []
# parse results
diff --git a/searx/engines/deezer.py b/searx/engines/deezer.py
@@ -1,12 +1,14 @@
-## Deezer (Music)
-#
-# @website https://deezer.com
-# @provide-api yes (http://developers.deezer.com/api/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, embedded
+"""
+ Deezer (Music)
+
+ @website https://deezer.com
+ @provide-api yes (http://developers.deezer.com/api/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content, embedded
+"""
from json import loads
from urllib import urlencode
diff --git a/searx/engines/deviantart.py b/searx/engines/deviantart.py
@@ -1,14 +1,16 @@
-## Deviantart (Images)
-#
-# @website https://www.deviantart.com/
-# @provide-api yes (https://www.deviantart.com/developers/) (RSS)
-#
-# @using-api no (TODO, rewrite to api)
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, thumbnail_src, img_src
-#
-# @todo rewrite to api
+"""
+ Deviantart (Images)
+
+ @website https://www.deviantart.com/
+ @provide-api yes (https://www.deviantart.com/developers/) (RSS)
+
+ @using-api no (TODO, rewrite to api)
+ @results HTML
+ @stable no (HTML can change)
+ @parse url, title, thumbnail_src, img_src
+
+ @todo rewrite to api
+"""
from urllib import urlencode
from urlparse import urljoin
diff --git a/searx/engines/digg.py b/searx/engines/digg.py
@@ -1,12 +1,14 @@
-## Digg (News, Social media)
-#
-# @website https://digg.com/
-# @provide-api no
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, publishedDate, thumbnail
+"""
+ Digg (News, Social media)
+
+ @website https://digg.com/
+ @provide-api no
+
+ @using-api no
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content, publishedDate, thumbnail
+"""
from urllib import quote_plus
from json import loads
diff --git a/searx/engines/duckduckgo.py b/searx/engines/duckduckgo.py
@@ -1,17 +1,19 @@
-## DuckDuckGo (Web)
-#
-# @website https://duckduckgo.com/
-# @provide-api yes (https://duckduckgo.com/api),
-# but not all results from search-site
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo rewrite to api
-# @todo language support
-# (the current used site does not support language-change)
+"""
+ DuckDuckGo (Web)
+
+ @website https://duckduckgo.com/
+ @provide-api yes (https://duckduckgo.com/api),
+ but not all results from search-site
+
+ @using-api no
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content
+
+ @todo rewrite to api
+ @todo language support
+ (the current used site does not support language-change)
+"""
from urllib import urlencode
from lxml.html import fromstring
diff --git a/searx/engines/dummy.py b/searx/engines/dummy.py
@@ -1,7 +1,9 @@
-## Dummy
-#
-# @results empty array
-# @stable yes
+"""
+ Dummy
+
+ @results empty array
+ @stable yes
+"""
# do search-request
diff --git a/searx/engines/faroo.py b/searx/engines/faroo.py
@@ -1,12 +1,14 @@
-## Faroo (Web, News)
-#
-# @website http://www.faroo.com
-# @provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, publishedDate, img_src
+"""
+ Faroo (Web, News)
+
+ @website http://www.faroo.com
+ @provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content, publishedDate, img_src
+"""
from urllib import urlencode
from json import loads
diff --git a/searx/engines/flickr.py b/searx/engines/flickr.py
@@ -1,15 +1,17 @@
#!/usr/bin/env python
-## Flickr (Images)
-#
-# @website https://www.flickr.com
-# @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, thumbnail, img_src
-#More info on api-key : https://www.flickr.com/services/apps/create/
+"""
+ Flickr (Images)
+
+ @website https://www.flickr.com
+ @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, thumbnail, img_src
+ More info on api-key: https://www.flickr.com/services/apps/create/
+"""
from urllib import urlencode
from json import loads
@@ -48,10 +50,10 @@ def response(resp):
search_results = loads(resp.text)
# return empty array if there are no results
- if not 'photos' in search_results:
+ if 'photos' not in search_results:
return []
- if not 'photo' in search_results['photos']:
+ if 'photo' not in search_results['photos']:
return []
photos = search_results['photos']['photo']
diff --git a/searx/engines/flickr_noapi.py b/searx/engines/flickr_noapi.py
@@ -1,14 +1,16 @@
#!/usr/bin/env python
-# Flickr (Images)
-#
-# @website https://www.flickr.com
-# @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
-#
-# @using-api no
-# @results HTML
-# @stable no
-# @parse url, title, thumbnail, img_src
+"""
+ Flickr (Images)
+
+ @website https://www.flickr.com
+ @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)
+
+ @using-api no
+ @results HTML
+ @stable no
+ @parse url, title, thumbnail, img_src
+"""
from urllib import urlencode
from json import loads
diff --git a/searx/engines/generalfile.py b/searx/engines/generalfile.py
@@ -1,14 +1,16 @@
-## General Files (Files)
-#
-# @website http://www.general-files.org
-# @provide-api no (nothing found)
-#
-# @using-api no (because nothing found)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo detect torrents?
+"""
+ General Files (Files)
+
+ @website http://www.general-files.org
+ @provide-api no (nothing found)
+
+ @using-api no (because nothing found)
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content
+
+ @todo detect torrents?
+"""
from lxml import html
diff --git a/searx/engines/gigablast.py b/searx/engines/gigablast.py
@@ -1,12 +1,14 @@
-## Gigablast (Web)
-#
-# @website http://gigablast.com
-# @provide-api yes (http://gigablast.com/api.html)
-#
-# @using-api yes
-# @results XML
-# @stable yes
-# @parse url, title, content
+"""
+ Gigablast (Web)
+
+ @website http://gigablast.com
+ @provide-api yes (http://gigablast.com/api.html)
+
+ @using-api yes
+ @results XML
+ @stable yes
+ @parse url, title, content
+"""
from urllib import urlencode
from cgi import escape
diff --git a/searx/engines/github.py b/searx/engines/github.py
@@ -1,12 +1,14 @@
-## Github (It)
-#
-# @website https://github.com/
-# @provide-api yes (https://developer.github.com/v3/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes (using api)
-# @parse url, title, content
+"""
+ Github (It)
+
+ @website https://github.com/
+ @provide-api yes (https://developer.github.com/v3/)
+
+ @using-api yes
+ @results JSON
+ @stable yes (using api)
+ @parse url, title, content
+"""
from urllib import urlencode
from json import loads
@@ -37,7 +39,7 @@ def response(resp):
search_res = loads(resp.text)
# check if items are recieved
- if not 'items' in search_res:
+ if 'items' not in search_res:
return []
# parse results
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
@@ -1,13 +1,15 @@
-## Google (Images)
-#
-# @website https://www.google.com
-# @provide-api yes (https://developers.google.com/web-search/docs/),
-# deprecated!
-#
-# @using-api yes
-# @results JSON
-# @stable yes (but deprecated)
-# @parse url, title, img_src
+"""
+ Google (Images)
+
+ @website https://www.google.com
+ @provide-api yes (https://developers.google.com/web-search/docs/),
+ deprecated!
+
+ @using-api yes
+ @results JSON
+ @stable yes (but deprecated)
+ @parse url, title, img_src
+"""
from urllib import urlencode, unquote
from json import loads
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
@@ -1,13 +1,15 @@
-## Google (News)
-#
-# @website https://www.google.com
-# @provide-api yes (https://developers.google.com/web-search/docs/),
-# deprecated!
-#
-# @using-api yes
-# @results JSON
-# @stable yes (but deprecated)
-# @parse url, title, content, publishedDate
+"""
+ Google (News)
+
+ @website https://www.google.com
+ @provide-api yes (https://developers.google.com/web-search/docs/),
+ deprecated!
+
+ @using-api yes
+ @results JSON
+ @stable yes (but deprecated)
+ @parse url, title, content, publishedDate
+"""
from urllib import urlencode
from json import loads
diff --git a/searx/engines/json_engine.py b/searx/engines/json_engine.py
@@ -6,7 +6,7 @@ search_url = None
url_query = None
content_query = None
title_query = None
-#suggestion_xpath = ''
+# suggestion_xpath = ''
def iterate(iterable):
diff --git a/searx/engines/kickass.py b/searx/engines/kickass.py
@@ -1,12 +1,14 @@
-## Kickass Torrent (Videos, Music, Files)
-#
-# @website https://kickass.so
-# @provide-api no (nothing found)
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable yes (HTML can change)
-# @parse url, title, content, seed, leech, magnetlink
+"""
+ Kickass Torrent (Videos, Music, Files)
+
+ @website https://kickass.so
+ @provide-api no (nothing found)
+
+ @using-api no
+ @results HTML (using search portal)
+ @stable yes (HTML can change)
+ @parse url, title, content, seed, leech, magnetlink
+"""
from urlparse import urljoin
from cgi import escape
diff --git a/searx/engines/mediawiki.py b/searx/engines/mediawiki.py
@@ -1,14 +1,16 @@
-## general mediawiki-engine (Web)
-#
-# @website websites built on mediawiki (https://www.mediawiki.org)
-# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
-#
-# @todo content
+"""
+ general mediawiki-engine (Web)
+
+ @website websites built on mediawiki (https://www.mediawiki.org)
+ @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title
+
+ @todo content
+"""
from json import loads
from string import Formatter
diff --git a/searx/engines/mixcloud.py b/searx/engines/mixcloud.py
@@ -1,12 +1,14 @@
-## Mixcloud (Music)
-#
-# @website https://http://www.mixcloud.com/
-# @provide-api yes (http://www.mixcloud.com/developers/
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, embedded, publishedDate
+"""
+ Mixcloud (Music)
+
+ @website https://www.mixcloud.com/
+ @provide-api yes (http://www.mixcloud.com/developers/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content, embedded, publishedDate
+"""
from json import loads
from urllib import urlencode
diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py
@@ -1,12 +1,14 @@
-## OpenStreetMap (Map)
-#
-# @website https://openstreetmap.org/
-# @provide-api yes (http://wiki.openstreetmap.org/wiki/Nominatim)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
+"""
+ OpenStreetMap (Map)
+
+ @website https://openstreetmap.org/
+ @provide-api yes (http://wiki.openstreetmap.org/wiki/Nominatim)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title
+"""
from json import loads
from searx.utils import searx_useragent
diff --git a/searx/engines/photon.py b/searx/engines/photon.py
@@ -1,12 +1,14 @@
-## Photon (Map)
-#
-# @website https://photon.komoot.de
-# @provide-api yes (https://photon.komoot.de/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
+"""
+ Photon (Map)
+
+ @website https://photon.komoot.de
+ @provide-api yes (https://photon.komoot.de/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title
+"""
from urllib import urlencode
from json import loads
diff --git a/searx/engines/searchcode_code.py b/searx/engines/searchcode_code.py
@@ -1,12 +1,14 @@
-## Searchcode (It)
-#
-# @website https://searchcode.com/
-# @provide-api yes (https://searchcode.com/api/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content
+"""
+ Searchcode (It)
+
+ @website https://searchcode.com/
+ @provide-api yes (https://searchcode.com/api/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content
+"""
from urllib import urlencode
from json import loads
diff --git a/searx/engines/searchcode_doc.py b/searx/engines/searchcode_doc.py
@@ -1,12 +1,14 @@
-## Searchcode (It)
-#
-# @website https://searchcode.com/
-# @provide-api yes (https://searchcode.com/api/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content
+"""
+ Searchcode (It)
+
+ @website https://searchcode.com/
+ @provide-api yes (https://searchcode.com/api/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content
+"""
from urllib import urlencode
from json import loads
diff --git a/searx/engines/soundcloud.py b/searx/engines/soundcloud.py
@@ -1,12 +1,14 @@
-## Soundcloud (Music)
-#
-# @website https://soundcloud.com
-# @provide-api yes (https://developers.soundcloud.com/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, publishedDate, embedded
+"""
+ Soundcloud (Music)
+
+ @website https://soundcloud.com
+ @provide-api yes (https://developers.soundcloud.com/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content, publishedDate, embedded
+"""
from json import loads
from urllib import urlencode, quote_plus
diff --git a/searx/engines/spotify.py b/searx/engines/spotify.py
@@ -1,12 +1,14 @@
-## Spotify (Music)
-#
-# @website https://spotify.com
-# @provide-api yes (https://developer.spotify.com/web-api/search-item/)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title, content, embedded
+"""
+ Spotify (Music)
+
+ @website https://spotify.com
+ @provide-api yes (https://developer.spotify.com/web-api/search-item/)
+
+ @using-api yes
+ @results JSON
+ @stable yes
+ @parse url, title, content, embedded
+"""
from json import loads
from urllib import urlencode
diff --git a/searx/engines/stackoverflow.py b/searx/engines/stackoverflow.py
@@ -1,12 +1,14 @@
-## Stackoverflow (It)
-#
-# @website https://stackoverflow.com/
-# @provide-api not clear (https://api.stackexchange.com/docs/advanced-search)
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, content
+"""
+ Stackoverflow (It)
+
+ @website https://stackoverflow.com/
+ @provide-api not clear (https://api.stackexchange.com/docs/advanced-search)
+
+ @using-api no
+ @results HTML
+ @stable no (HTML can change)
+ @parse url, title, content
+"""
from urlparse import urljoin
from cgi import escape
diff --git a/searx/engines/subtitleseeker.py b/searx/engines/subtitleseeker.py
@@ -1,12 +1,14 @@
-## Subtitleseeker (Video)
-#
-# @website http://www.subtitleseeker.com
-# @provide-api no
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, content
+"""
+ Subtitleseeker (Video)
+
+ @website http://www.subtitleseeker.com
+ @provide-api no
+
+ @using-api no
+ @results HTML
+ @stable no (HTML can change)
+ @parse url, title, content
+"""
from cgi import escape
from urllib import quote_plus
diff --git a/searx/engines/twitter.py b/searx/engines/twitter.py
@@ -1,14 +1,16 @@
-## Twitter (Social media)
-#
-# @website https://twitter.com/
-# @provide-api yes (https://dev.twitter.com/docs/using-search)
-#
-# @using-api no
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content
-#
-# @todo publishedDate
+"""
+ Twitter (Social media)
+
+ @website https://twitter.com/
+ @provide-api yes (https://dev.twitter.com/docs/using-search)
+
+ @using-api no
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content
+
+ @todo publishedDate
+"""
from urlparse import urljoin
from urllib import urlencode
diff --git a/searx/engines/www1x.py b/searx/engines/www1x.py
@@ -1,13 +1,14 @@
-## 1x (Images)
-#
-# @website http://1x.com/
-# @provide-api no
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, thumbnail, img_src, content
+"""
+ 1x (Images)
+ @website http://1x.com/
+ @provide-api no
+
+ @using-api no
+ @results HTML
+ @stable no (HTML can change)
+ @parse url, title, thumbnail, img_src, content
+"""
from urllib import urlencode
from urlparse import urljoin
diff --git a/searx/engines/www500px.py b/searx/engines/www500px.py
@@ -1,14 +1,16 @@
-## 500px (Images)
-#
-# @website https://500px.com
-# @provide-api yes (https://developers.500px.com/)
-#
-# @using-api no
-# @results HTML
-# @stable no (HTML can change)
-# @parse url, title, thumbnail, img_src, content
-#
-# @todo rewrite to api
+"""
+ 500px (Images)
+
+ @website https://500px.com
+ @provide-api yes (https://developers.500px.com/)
+
+ @using-api no
+ @results HTML
+ @stable no (HTML can change)
+ @parse url, title, thumbnail, img_src, content
+
+ @todo rewrite to api
+"""
from urllib import urlencode
diff --git a/searx/engines/yacy.py b/searx/engines/yacy.py
@@ -1,4 +1,4 @@
-## Yacy (Web, Images, Videos, Music, Files)
+# Yacy (Web, Images, Videos, Music, Files)
#
# @website http://yacy.net
# @provide-api yes
diff --git a/searx/engines/yahoo.py b/searx/engines/yahoo.py
@@ -1,13 +1,15 @@
-## Yahoo (Web)
-#
-# @website https://search.yahoo.com/web
-# @provide-api yes (https://developer.yahoo.com/boss/search/),
-# $0.80/1000 queries
-#
-# @using-api no (because pricing)
-# @results HTML (using search portal)
-# @stable no (HTML can change)
-# @parse url, title, content, suggestion
+"""
+ Yahoo (Web)
+
+ @website https://search.yahoo.com/web
+ @provide-api yes (https://developer.yahoo.com/boss/search/),
+ $0.80/1000 queries
+
+ @using-api no (because pricing)
+ @results HTML (using search portal)
+ @stable no (HTML can change)
+ @parse url, title, content, suggestion
+"""
from urllib import urlencode
from urlparse import unquote
diff --git a/searx/engines/youtube.py b/searx/engines/youtube.py
@@ -1,4 +1,4 @@
-## Youtube (Videos)
+# Youtube (Videos)
#
# @website https://www.youtube.com/
# @provide-api yes (http://gdata-samples-youtube-search-py.appspot.com/)
@@ -47,7 +47,7 @@ def response(resp):
search_results = loads(resp.text)
# return empty array if there are no results
- if not 'feed' in search_results:
+ if 'feed' not in search_results:
return []
feed = search_results['feed']
diff --git a/versions.cfg b/versions.cfg
@@ -2,96 +2,115 @@
Babel = 1.3
Flask = 0.10.1
Flask-Babel = 0.9
-Jinja2 = 2.7.2
-MarkupSafe = 0.18
-Pygments = 2.0.1
-WebOb = 1.3.1
-WebTest = 2.0.11
-Werkzeug = 0.9.4
+Jinja2 = 2.7.3
+MarkupSafe = 0.23
+Pygments = 2.0.2
+WebOb = 1.4.1
+WebTest = 2.0.18
+Werkzeug = 0.10.4
buildout-versions = 1.7
collective.recipe.omelette = 0.16
coverage = 3.7.1
-decorator = 3.4.0
-docutils = 0.11
-flake8 = 2.1.0
-itsdangerous = 0.23
-mccabe = 0.2.1
+decorator = 3.4.2
+docutils = 0.12
+flake8 = 2.4.0
+itsdangerous = 0.24
+mccabe = 0.3
mock = 1.0.1
-pep8 = 1.4.6
-plone.testing = 4.0.8
-pyflakes = 0.7.3
-pytz = 2013b
-pyyaml = 3.10
-requests = 2.5.3
+pep8 = 1.5.7
+plone.testing = 4.0.13
+pyflakes = 0.8.1
+pytz = 2015.2
+pyyaml = 3.11
+requests = 2.6.2
robotframework-debuglibrary = 0.3
robotframework-httplibrary = 0.4.2
-robotframework-selenium2library = 1.5.0
-robotsuite = 1.4.2
-selenium = 2.39.0
+robotframework-selenium2library = 1.6.0
+robotsuite = 1.6.1
+selenium = 2.45.0
speaklater = 1.3
-unittest2 = 0.5.1
-waitress = 0.8.8
+unittest2 = 1.0.1
+waitress = 0.8.9
zc.recipe.testrunner = 2.0.0
pyopenssl = 0.15.1
ndg-httpsclient = 0.3.3
pyasn1 = 0.1.7
pyasn1-modules = 0.0.5
-certifi = 14.05.14
+certifi = 2015.04.28
+
+#
+cffi = 0.9.2
+cryptography = 0.8.2
# Required by:
-# WebTest==2.0.11
+# WebTest==2.0.18
beautifulsoup4 = 4.3.2
# Required by:
+# cryptography==0.8.2
+enum34 = 1.0.4
+
+# Required by:
# robotframework-httplibrary==0.4.2
-jsonpatch = 1.3
+jsonpatch = 1.9
# Required by:
# robotframework-httplibrary==0.4.2
-jsonpointer = 1.1
+jsonpointer = 1.7
+
+# Required by:
+# traceback2==1.4.0
+linecache2 = 1.0.0
+
+# Required by:
+# robotsuite==1.6.1
+# searx==0.7.0
+lxml = 3.4.4
# Required by:
-# robotsuite==1.4.2
-# searx==0.1
-lxml = 3.2.5
+# cffi==0.9.2
+pycparser = 2.12
+
+# Required by:
+# searx==0.7.0
+python-dateutil = 2.4.2
# Required by:
# robotframework-httplibrary==0.4.2
-robotframework = 2.8.3
+robotframework = 2.8.7
+
+# Required by:
+# searx==0.7.0
+# zope.exceptions==4.0.7
+# zope.interface==4.1.2
+# zope.testrunner==4.4.8
+setuptools = 15.2
# Required by:
-# plone.testing==4.0.8
-# robotsuite==1.4.2
-# searx==0.1
-# zope.exceptions==4.0.6
-# zope.interface==4.0.5
-# zope.testrunner==4.4.1
-setuptools = 2.1
+# robotsuite==1.6.1
+# zope.testrunner==4.4.8
+six = 1.9.0
# Required by:
-# zope.testrunner==4.4.1
-six = 1.6.1
+# unittest2==1.0.1
+traceback2 = 1.4.0
# Required by:
# collective.recipe.omelette==0.16
zc.recipe.egg = 2.0.1
# Required by:
-# zope.testrunner==4.4.1
-zope.exceptions = 4.0.6
+# zope.testrunner==4.4.8
+zope.exceptions = 4.0.7
# Required by:
-# zope.testrunner==4.4.1
-zope.interface = 4.0.5
+# zope.testrunner==4.4.8
+zope.interface = 4.1.2
# Required by:
-# plone.testing==4.0.8
-zope.testing = 4.1.2
+# plone.testing==4.0.13
+zope.testing = 4.1.3
# Required by:
# zc.recipe.testrunner==2.0.0
-zope.testrunner = 4.4.1
-
-# Required by:
-# searx==0.3.0
-python-dateutil = 2.2
+zope.testrunner = 4.4.8