commit: 629a05e149eaaab05a724dd3915ed363c364c796
parent bb628469d31d9ce61b2188aae3f570441eec8803
Author: Thomas Pointhuber <thomas.pointhuber@gmx.at>
Date: Tue, 2 Sep 2014 21:19:20 +0200
fix youtube engine and add comments
* add language support
* decrease search results per page to 5
* add comments
Diffstat:
2 files changed, 36 insertions(+), 10 deletions(-)
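To make the effect of the new paging and language handling concrete, here is a small stand-alone sketch that rebuilds the request URL the same way the reworked request() in the diff below does. build_url is a made-up helper for illustration only and is not part of the patch; it uses the same Python 2 urllib import as the engine.

    from urllib import urlencode

    base_url = 'https://gdata.youtube.com/feeds/api/videos'
    search_url = base_url + '?alt=json&{query}&start-index={index}&max-results=5'

    def build_url(query, pageno, language='all'):
        # 5 results per page: page 1 starts at index 1, page 2 at index 6, ...
        index = (pageno - 1) * 5 + 1
        url = search_url.format(query=urlencode({'q': query}), index=index)
        # send only the part before the underscore, e.g. 'de_AT' -> 'lr=de'
        if language != 'all':
            url += '&lr=' + language.split('_')[0]
        return url

    # build_url('searx', 2, 'de_AT') ->
    # 'https://gdata.youtube.com/feeds/api/videos?alt=json&q=searx&start-index=6&max-results=5&lr=de'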
diff --git a/searx/engines/youtube.py b/searx/engines/youtube.py
@@ -1,42 +1,69 @@
+## Youtube (Videos)
+#
+# @website https://www.youtube.com/
+# @provide-api yes (http://gdata-samples-youtube-search-py.appspot.com/)
+#
+# @using-api yes
+# @results JSON
+# @stable yes
+# @parse url, title, content, publishedDate, thumbnail
+
from json import loads
from urllib import urlencode
from dateutil import parser
+# engine dependent config
categories = ['videos']
-
-search_url = ('https://gdata.youtube.com/feeds/api/videos'
- '?alt=json&{query}&start-index={index}&max-results=25') # noqa
-
paging = True
+language_support = True
+
+# search-url
+base_url = 'https://gdata.youtube.com/feeds/api/videos'
+search_url = base_url + '?alt=json&{query}&start-index={index}&max-results=5' # noqa
+# do search-request
def request(query, params):
- index = (params['pageno'] - 1) * 25 + 1
+ index = (params['pageno'] - 1) * 5 + 1
+
params['url'] = search_url.format(query=urlencode({'q': query}),
index=index)
+
+ # add language tag if specified
+ if params['language'] != 'all':
+ params['url'] += '&lr=' + params['language'].split('_')[0]
+
return params
+# get response from search-request
def response(resp):
results = []
+
search_results = loads(resp.text)
+
+ # return empty array if there are no results
if not 'feed' in search_results:
- return results
+ return []
+
feed = search_results['feed']
+ # parse results
for result in feed['entry']:
url = [x['href'] for x in result['link'] if x['type'] == 'text/html']
+
if not url:
return
+
# remove tracking
url = url[0].replace('feature=youtube_gdata', '')
if url.endswith('&'):
url = url[:-1]
+
title = result['title']['$t']
content = ''
thumbnail = ''
-#"2013-12-31T15:22:51.000Z"
pubdate = result['published']['$t']
publishedDate = parser.parse(pubdate)
@@ -49,6 +76,7 @@ def response(resp):
else:
content = result['content']['$t']
+ # append result
results.append({'url': url,
'title': title,
'content': content,
@@ -56,4 +84,5 @@ def response(resp):
'publishedDate': publishedDate,
'thumbnail': thumbnail})
+ # return results
return results
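On the response side, the parsing steps from the hunks above can be traced with a hand-made GData entry. The values are invented for illustration; only the field layout ('link', 'title', 'published' with their 'href' / '$t' keys) follows what response() actually reads, and the sample timestamp is the one from the removed comment.

    from dateutil import parser

    entry = {
        'link': [{'type': 'text/html',
                  'href': 'https://www.youtube.com/watch?v=XXXX&feature=youtube_gdata'}],
        'title': {'$t': 'Example video'},
        'published': {'$t': '2013-12-31T15:22:51.000Z'},
    }

    url = [x['href'] for x in entry['link'] if x['type'] == 'text/html'][0]
    # strip the tracking parameter and the trailing '&' it leaves behind
    url = url.replace('feature=youtube_gdata', '')
    if url.endswith('&'):
        url = url[:-1]

    result = {'url': url,                       # .../watch?v=XXXX
              'title': entry['title']['$t'],
              'publishedDate': parser.parse(entry['published']['$t'])}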
diff --git a/searx/settings.yml b/searx/settings.yml
@@ -131,13 +131,10 @@ engines:
- name : youtube
engine : youtube
- categories : videos
shortcut : yt
- name : dailymotion
engine : dailymotion
- locale : en_US
- categories : videos
shortcut : dm
- name : vimeo