
searx

My custom branch(es) on searx, a meta-search engine

git clone https://hacktivis.me/git/searx.git
commit: cac1761a54d4d72f9000e40cc04f05da3d78b7da
parent 295b1699ced9b79f3b6e5e4375460ca6ddb64431
Author: dalf <alex@al-f.net>
Date:   Sat, 11 Oct 2014 15:49:50 +0200

[enh] infoboxes : if the result doesn't contain anything except one link, use the normal result template

Diffstat:

M searx/engines/duckduckgo_definitions.py | 27 +++++++++++++++++----------
M searx/engines/wikidata.py               | 58 ++++++++++++++++++++++++++++++++++++++++------------------
2 files changed, 57 insertions(+), 28 deletions(-)

diff --git a/searx/engines/duckduckgo_definitions.py b/searx/engines/duckduckgo_definitions.py
@@ -116,15 +116,22 @@ def response(resp):
     if len(heading)>0:
         # TODO get infobox.meta.value where .label='article_title'
-        results.append({
-               'infobox': heading,
-               'id': infobox_id,
-               'entity': entity,
-               'content': content,
-               'img_src' : image,
-               'attributes': attributes,
-               'urls': urls,
-               'relatedTopics': relatedTopics
-               })
+        if image==None and len(attributes)==0 and len(urls)==1 and len(relatedTopics)==0 and len(content)==0:
+            results.append({
+                   'url': urls[0]['url'],
+                   'title': heading,
+                   'content': content
+                   })
+        else:
+            results.append({
+                   'infobox': heading,
+                   'id': infobox_id,
+                   'entity': entity,
+                   'content': content,
+                   'img_src' : image,
+                   'attributes': attributes,
+                   'urls': urls,
+                   'relatedTopics': relatedTopics
+                   })
 
     return results
diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
@@ -33,17 +33,20 @@ def response(resp):
     return results
 
 def getDetail(jsonresponse, wikidata_id, language):
+    results = []
+    urls = []
+    attributes = []
+
     result = jsonresponse.get('entities', {}).get(wikidata_id, {})
 
     title = result.get('labels', {}).get(language, {}).get('value', None)
     if title == None:
-        title = result.get('labels', {}).get('en', {}).get('value', wikidata_id)
-    results = []
-    urls = []
-    attributes = []
+        title = result.get('labels', {}).get('en', {}).get('value', None)
+        if title == None:
+            return results
 
-    description = result.get('descriptions', {}).get(language, {}).get('value', '')
-    if description == '':
+    description = result.get('descriptions', {}).get(language, {}).get('value', None)
+    if description == None:
         description = result.get('descriptions', {}).get('en', {}).get('value', '')
 
     claims = result.get('claims', {})
@@ -52,11 +55,16 @@ def getDetail(jsonresponse, wikidata_id, language):
         urls.append({ 'title' : 'Official site', 'url': official_website })
         results.append({ 'title': title, 'url' : official_website })
 
+    wikipedia_link_count = 0
     if language != 'en':
-        add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
+        wikipedia_link_count += add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
     wikipedia_en_link = get_wikilink(result, 'enwiki')
-    add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
-
+    wikipedia_link_count += add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
+    if wikipedia_link_count == 0:
+        misc_language = get_wiki_firstlanguage(result, 'wiki')
+        if misc_language != None:
+            add_url(urls, 'Wikipedia (' + misc_language + ')', get_wikilink(result, misc_language + 'wiki'))
+
     if language != 'en':
         add_url(urls, 'Wiki voyage (' + language + ')', get_wikilink(result, language + 'wikivoyage'))
     add_url(urls, 'Wiki voyage (en)', get_wikilink(result, 'enwikivoyage'))
@@ -105,14 +113,20 @@ def getDetail(jsonresponse, wikidata_id, language):
     if date_of_death != None:
         attributes.append({'label' : 'Date of death', 'value' : date_of_death})
 
-
-    results.append({
-            'infobox' : title,
-            'id' : wikipedia_en_link,
-            'content' : description,
-            'attributes' : attributes,
-            'urls' : urls
-            })
+    if len(attributes)==0 and len(urls)==2 and len(description)==0:
+        results.append({
+                'url': urls[0]['url'],
+                'title': title,
+                'content': description
+                })
+    else:
+        results.append({
+                'infobox' : title,
+                'id' : wikipedia_en_link,
+                'content' : description,
+                'attributes' : attributes,
+                'urls' : urls
+                })
 
     return results
 
@@ -120,7 +134,9 @@ def getDetail(jsonresponse, wikidata_id, language):
 def add_url(urls, title, url):
     if url != None:
         urls.append({'title' : title, 'url' : url})
-
+        return 1
+    else:
+        return 0
 
 def get_mainsnak(claims, propertyName):
     propValue = claims.get(propertyName, {})
@@ -213,3 +229,9 @@ def get_wikilink(result, wikiid):
     elif url.startswith('//'):
         url = 'https:' + url
     return url
+
+def get_wiki_firstlanguage(result, wikipatternid):
+    for k in result.get('sitelinks', {}).keys():
+        if k.endswith(wikipatternid) and len(k)==(2+len(wikipatternid)):
+            return k[0:2]
+    return None
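For readers skimming the diff, here is a rough, standalone Python sketch of the two ideas the commit introduces. It is not code from the repository: the build_result helper and the demo values are invented for illustration, while get_wiki_firstlanguage mirrors the helper added in the diff.

# Illustrative sketch only; dict shapes and demo data are assumptions, not searx code.

def build_result(heading, urls, image=None, attributes=None, related_topics=None, content=''):
    """Return a plain result when the infobox would hold nothing but one link,
    otherwise return an infobox result (the check added to duckduckgo_definitions.py)."""
    attributes = attributes or []
    related_topics = related_topics or []
    if image is None and not attributes and len(urls) == 1 and not related_topics and not content:
        # Nothing to show except a single URL: fall back to the normal result template.
        return {'url': urls[0]['url'], 'title': heading, 'content': content}
    return {
        'infobox': heading,
        'content': content,
        'img_src': image,
        'attributes': attributes,
        'urls': urls,
        'relatedTopics': related_topics,
    }

def get_wiki_firstlanguage(result, wikipatternid):
    """Return the two-letter language code of the first sitelink named
    '<lang><wikipatternid>' (e.g. 'frwiki' -> 'fr'), or None if there is none."""
    for k in result.get('sitelinks', {}).keys():
        if k.endswith(wikipatternid) and len(k) == 2 + len(wikipatternid):
            return k[:2]
    return None

if __name__ == '__main__':
    # Single official link only -> rendered with the normal result template.
    print(build_result('ExampleCorp', [{'title': 'Official site', 'url': 'https://example.com'}]))
    # Richer data -> rendered as an infobox.
    print(build_result('ExampleCorp',
                       [{'title': 'Official site', 'url': 'https://example.com'}],
                       image='https://example.com/logo.png',
                       attributes=[{'label': 'Founded', 'value': '1999'}]))
    print(get_wiki_firstlanguage({'sitelinks': {'frwiki': {}, 'enwikivoyage': {}}}, 'wiki'))  # 'fr'

In short: an infobox that would carry nothing but a single URL is demoted to a plain result, and when neither the query language nor English has a Wikipedia sitelink, the first available two-letter sitelink is used instead.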