
searx

My custom branch(es) on searx, a meta-search engine

git clone https://hacktivis.me/git/searx.git
commit: 2d81079384706aab4c39efcc8a2c4267af4397a5
parent d923b2a420b77a3a29375516ed9929beeda0924a
Author: Thomas Pointhuber <thomas.pointhuber@gmx.at>
Date:   Mon,  1 Jun 2015 12:30:07 +0200

[enh] implement image support for swisscows engine

Diffstat:

M searx/engines/swisscows.py            | 38 ++++++++++++++++++++++++++++++--------
M searx/tests/engines/test_swisscows.py | 30 +++++++++++++++++++++++++-----
2 files changed, 55 insertions(+), 13 deletions(-)
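
In short, the change prefixes the Swisscows search path with 'image' for image-category queries and extends the response parser to emit image results. A minimal standalone sketch of that URL construction (hypothetical build_url helper; region handling simplified from the engine code) might look like:

from urllib import urlencode  # Python 2, as used by searx at the time

base_url = 'https://swisscows.ch/'
search_string = '?{query}&page={page}'


def build_url(query, pageno, category, region='browser'):
    # '?query=...&region=...&page=N' for ordinary web searches
    search_path = search_string.format(
        query=urlencode({'query': query, 'region': region}),
        page=pageno)

    # image searches use 'image?{query}&page={page}' instead
    if category == 'images':
        search_path = 'image' + search_path

    return base_url + search_path


# e.g. build_url('cats', 1, 'images')
# -> 'https://swisscows.ch/image?query=cats&region=browser&page=1'
#    (parameter order may vary)

The full diff follows.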

diff --git a/searx/engines/swisscows.py b/searx/engines/swisscows.py
@@ -1,5 +1,5 @@
 """
- Swisscows (Web)
+ Swisscows (Web, Images)
 
  @website     https://swisscows.ch
  @provide-api no
@@ -15,7 +15,7 @@ from urllib import urlencode, unquote
 import re
 
 # engine dependent config
-categories = ['general']
+categories = ['general', 'images']
 paging = True
 language_support = True
 
@@ -24,7 +24,7 @@ base_url = 'https://swisscows.ch/'
 search_string = '?{query}&page={page}'
 
 # regex
-regex_json = re.compile('initialData: {"Request":(.|\n)*}\]},\s*environment')
+regex_json = re.compile('initialData: {"Request":(.|\n)*},\s*environment')
 regex_json_remove_start = re.compile('^initialData:\s*')
 regex_json_remove_end = re.compile(',\s*environment$')
 regex_img_url_remove_start = re.compile('^https?://i\.swisscows\.ch/\?link=')
@@ -45,6 +45,10 @@ def request(query, params):
                          'region': region}),
         page=params['pageno'])
 
+    # image search query is something like 'image?{query}&page={page}'
+    if params['category'] == 'images':
+        search_path = 'image' + search_path
+
     params['url'] = base_url + search_path
 
     return params
@@ -63,12 +67,30 @@ def response(resp):
     json_raw = regex_json_remove_end.sub('', regex_json_remove_start.sub('', json_regex.group()))
     json = loads(json_raw)
 
-    # parse normal results
+    # parse results
     for result in json['Results'].get('items', []):
-        # append result
-        results.append({'url': result['Url'].replace(u'\uE000', '').replace(u'\uE001', ''),
-                        'title': result['Title'].replace(u'\uE000', '').replace(u'\uE001', ''),
-                        'content': result['Description'].replace(u'\uE000', '').replace(u'\uE001', '')})
+        result_title = result['Title'].replace(u'\uE000', '').replace(u'\uE001', '')
+
+        # parse image results
+        if result.get('ContentType', '').startswith('image'):
+            img_url = unquote(regex_img_url_remove_start.sub('', result['Url']))
+
+            # append result
+            results.append({'url': result['SourceUrl'],
+                            'title': result['Title'],
+                            'content': '',
+                            'img_src': img_url,
+                            'template': 'images.html'})
+
+        # parse general results
+        else:
+            result_url = result['Url'].replace(u'\uE000', '').replace(u'\uE001', '')
+            result_content = result['Description'].replace(u'\uE000', '').replace(u'\uE001', '')
+
+            # append result
+            results.append({'url': result_url,
+                            'title': result_title,
+                            'content': result_content})
 
     # parse images
     for result in json.get('Images', []):
diff --git a/searx/tests/engines/test_swisscows.py b/searx/tests/engines/test_swisscows.py
@@ -51,7 +51,23 @@ class TestSwisscowsEngine(SearxTestCase):
                     "Description":"\uE000This should\uE001 be the content.",
                     "Url":"http://this.should.be.the.link/",
                     "DisplayUrl":"www.\uE000this.should.be.the\uE001.link",
-                    "Id":"782ef287-e439-451c-b380-6ebc14ba033d"}
+                    "Id":"782ef287-e439-451c-b380-6ebc14ba033d"},
+                    {"Title":"Datei:This should1.svg",
+                    "Url":"https://i.swisscows.ch/?link=http%3a%2f%2fts2.mm.This/should1.png",
+                    "SourceUrl":"http://de.wikipedia.org/wiki/Datei:This should1.svg",
+                    "DisplayUrl":"de.wikipedia.org/wiki/Datei:This should1.svg",
+                    "Width":950,
+                    "Height":534,
+                    "FileSize":92100,
+                    "ContentType":"image/jpeg",
+                    "Thumbnail":{
+                        "Url":"https://i.swisscows.ch/?link=http%3a%2f%2fts2.mm.This/should1.png",
+                        "ContentType":"image/jpeg",
+                        "Width":300,
+                        "Height":168,
+                        "FileSize":9134},
+                    "Id":"6a97a542-8f65-425f-b7f6-1178c3aba7be"
+                    }
                 ],"TotalCount":55300,
                 "Query":"This should "
             },
@@ -94,11 +110,15 @@ class TestSwisscowsEngine(SearxTestCase):
         response = mock.Mock(content=html)
         results = swisscows.response(response)
         self.assertEqual(type(results), list)
-        self.assertEqual(len(results), 2)
+        self.assertEqual(len(results), 3)
         self.assertEqual(results[0]['title'], 'This should be the title')
         self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
         self.assertEqual(results[0]['content'], 'This should be the content.')
-        self.assertEqual(results[1]['title'], 'Datei:This should.svg')
-        self.assertEqual(results[1]['url'], 'http://de.wikipedia.org/wiki/Datei:This should.svg')
-        self.assertEqual(results[1]['img_src'], 'http://ts2.mm.This/should.png')
+        self.assertEqual(results[1]['title'], 'Datei:This should1.svg')
+        self.assertEqual(results[1]['url'], 'http://de.wikipedia.org/wiki/Datei:This should1.svg')
+        self.assertEqual(results[1]['img_src'], 'http://ts2.mm.This/should1.png')
         self.assertEqual(results[1]['template'], 'images.html')
+        self.assertEqual(results[2]['title'], 'Datei:This should.svg')
+        self.assertEqual(results[2]['url'], 'http://de.wikipedia.org/wiki/Datei:This should.svg')
+        self.assertEqual(results[2]['img_src'], 'http://ts2.mm.This/should.png')
+        self.assertEqual(results[2]['template'], 'images.html')
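
For reference, the new branch in response() recognises image hits by their ContentType and strips the i.swisscows.ch proxy prefix before handing the result to the images.html template. A self-contained sketch of that mapping, using a hypothetical input item and the regex taken from the engine:

import re
from urllib import unquote  # Python 2

# same proxy-prefix regex as in searx/engines/swisscows.py
regex_img_url_remove_start = re.compile('^https?://i\.swisscows\.ch/\?link=')

# hypothetical result item, shaped like the embedded Swisscows JSON
item = {'Title': 'Datei:Example.svg',
        'Url': 'https://i.swisscows.ch/?link=http%3a%2f%2fexample.org%2fimg.png',
        'SourceUrl': 'http://example.org/page',
        'ContentType': 'image/png'}

# image hits are recognised by their ContentType ...
if item.get('ContentType', '').startswith('image'):
    # ... and the i.swisscows.ch proxy prefix is stripped from the image URL
    img_url = unquote(regex_img_url_remove_start.sub('', item['Url']))
    result = {'url': item['SourceUrl'],      # page hosting the image
              'title': item['Title'],
              'content': '',
              'img_src': img_url,            # -> 'http://example.org/img.png'
              'template': 'images.html'}     # rendered with searx's image template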