From 0a71525ab6d4fe4cbc4b33b4653bdb39ae4d55e9 Mon Sep 17 00:00:00 2001
From: Dalf
Date: Sun, 28 Sep 2014 16:53:30 +0200
Subject: [PATCH] [enh] add infoboxes and answers (clean up)

---
 searx/engines/wikidata.py            | 45 ++++++++++++++++------------
 searx/search.py                      |  1 -
 searx/settings.yml                   |  4 +++
 searx/templates/default/infobox.html | 44 +++++++++++++++++++++++++++
 4 files changed, 74 insertions(+), 20 deletions(-)
 create mode 100644 searx/templates/default/infobox.html

diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index a5ee44246..46f2323c8 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -1,13 +1,12 @@
 import json
-from datetime import datetime
 from requests import get
 from urllib import urlencode
+from datetime import datetime
 
 resultCount=2
-urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectionsnippet&{query}'
+urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
 urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'
-# find the right URL for urlMap
-urlMap = 'http://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
+urlMap = 'https://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
 
 def request(query, params):
     params['url'] = urlSearch.format(query=urlencode({'srsearch': query, 'srlimit': resultCount}))
@@ -18,24 +17,27 @@ def request(query, params):
 def response(resp):
     results = []
     search_res = json.loads(resp.text)
-    # TODO parallel http queries
-    before = datetime.now()
+
+    wikidata_ids = set()
     for r in search_res.get('query', {}).get('search', {}):
-        wikidata_id = r.get('title', '')
-        results = results + getDetail(wikidata_id)
-    after = datetime.now()
-    print str(after - before) + " second(s)"
+        wikidata_ids.add(r.get('title', ''))
+
+    language = resp.search_params['language'].split('_')[0]
+    if language == 'all':
+        language = 'en'
+    url = urlDetail.format(query=urlencode({'ids': '|'.join(wikidata_ids), 'languages': language + '|en'}))
+
+    before = datetime.now()
+    htmlresponse = get(url)
+    print datetime.now() - before
+    jsonresponse = json.loads(htmlresponse.content)
+    for wikidata_id in wikidata_ids:
+        results = results + getDetail(jsonresponse, wikidata_id, language)
 
     return results
 
-def getDetail(wikidata_id):
-    language = 'fr'
-
-    url = urlDetail.format(query=urlencode({'ids': wikidata_id, 'languages': language + '|en'}))
-    print url
-    response = get(url)
-    result = json.loads(response.content)
-    result = result.get('entities', {}).get(wikidata_id, {})
+def getDetail(jsonresponse, wikidata_id, language):
+    result = jsonresponse.get('entities', {}).get(wikidata_id, {})
 
     title = result.get('labels', {}).get(language, {}).get('value', None)
     if title == None:
@@ -50,7 +52,6 @@ def getDetail(wikidata_id):
 
     claims = result.get('claims', {})
     official_website = get_string(claims, 'P856', None)
-    print official_website
     if official_website != None:
         urls.append({ 'title' : 'Official site', 'url': official_website })
         results.append({ 'title': title, 'url' : official_website })
@@ -98,10 +99,12 @@ def getDetail(wikidata_id):
 
     return results
 
+
 def add_url(urls, title, url):
     if url != None:
         urls.append({'title' : title, 'url' : url})
 
+
 def get_mainsnak(claims, propertyName):
     propValue = claims.get(propertyName, {})
     if len(propValue) == 0:
@@ -110,6 +113,7 @@ def get_mainsnak(claims, propertyName):
         propValue = propValue[0].get('mainsnak', None)
     return propValue
 
+
 def get_string(claims, propertyName, defaultValue=None):
     propValue = claims.get(propertyName, {})
     if len(propValue) == 0:
@@ -129,6 +133,7 @@ def get_string(claims, propertyName, defaultValue=None):
     else:
         return ', '.join(result)
 
+
 def get_time(claims, propertyName, defaultValue=None):
     propValue = claims.get(propertyName, {})
     if len(propValue) == 0:
@@ -149,6 +154,7 @@ def get_time(claims, propertyName, defaultValue=None):
     else:
         return ', '.join(result)
 
+
 def get_geolink(claims, propertyName, defaultValue=''):
     mainsnak = get_mainsnak(claims, propertyName)
 
@@ -182,6 +188,7 @@ def get_geolink(claims, propertyName, defaultValue=''):
 
     return url
 
+
 def get_wikilink(result, wikiid):
     url = result.get('sitelinks', {}).get(wikiid, {}).get('url', None)
     if url == None:
diff --git a/searx/search.py b/searx/search.py
index 7eb605e11..f9157ef7e 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -76,7 +76,6 @@ def make_callback(engine_name, results, suggestions, answers, infoboxes, callback, params):
             # if it is an infobox, add it to list of infoboxes
             if 'infobox' in result:
                 infoboxes.append(result)
-                print result
                 continue
 
             # append result
diff --git a/searx/settings.yml b/searx/settings.yml
index 77bcd2aa4..02f7caacb 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -44,6 +44,10 @@ engines:
     engine : duckduckgo_definitions
     shortcut : ddd
 
+  - name : wikidata
+    engine : wikidata
+    shortcut : wd
+
   - name : duckduckgo
     engine : duckduckgo
     shortcut : ddg
diff --git a/searx/templates/default/infobox.html b/searx/templates/default/infobox.html
new file mode 100644
index 000000000..f963e898c
--- /dev/null
+++ b/searx/templates/default/infobox.html
@@ -0,0 +1,44 @@
+<div class="infobox">
+	<h2>{{ infobox.infobox }}</h2>
+	{% if infobox.img_src %}<img src="{{ infobox.img_src }}" />{% endif %}
+
+	<p>{{ infobox.entity }}</p>
+	<p>{{ infobox.content }}</p>
+
+	{% if infobox.attributes %}
+	<div class="attributes">
+		<table>
+			{% for attribute in infobox.attributes %}
+			<tr><td>{{ attribute.label }}</td><td>{{ attribute.value }}</td></tr>
+			{% endfor %}
+		</table>
+	</div>
+	{% endif %}
+
+	{% if infobox.urls %}
+	<div class="urls">
+		<ul>
+			{% for url in infobox.urls %}
+			<li><a href="{{ url.url }}">{{ url.title }}</a></li>
+			{% endfor %}
+		</ul>
+	</div>
+	{% endif %}
+
+	{% if infobox.relatedTopics %}
+	<div class="relatedTopics">
+		{% for topic in infobox.relatedTopics %}
+		<div>
+			<h3>{{ topic.name }}</h3>
+			{% for suggestion in topic.suggestions %}
+			<form method="post" action="{{ url_for('index') }}">
+				<input type="hidden" name="q" value="{{ suggestion }}">
+				<input type="submit" value="{{ suggestion }}" />
+			</form>
+			{% endfor %}
+		</div>
+		{% endfor %}
+	</div>
+	{% endif %}
+
+</div>
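
For reference, a minimal sketch (not part of the commit) of the infobox result
shape this patch introduces. The key names are taken from infobox.html and
from the 'infobox' check in search.py; the concrete values and the dict name
are invented for illustration.

    # Hypothetical infobox result, as an engine such as wikidata.py would
    # append to its results list. The presence of the 'infobox' key is what
    # make_callback() in searx/search.py uses to route the result into the
    # infoboxes list; the remaining keys mirror the fields rendered by
    # searx/templates/default/infobox.html.
    example_infobox = {
        'infobox': 'Douglas Adams',                 # {{ infobox.infobox }} heading
        'content': 'English writer and humorist',   # {{ infobox.content }} paragraph
        'img_src': None,                            # optional {{ infobox.img_src }} image URL
        'attributes': [                             # label/value table rows
            {'label': 'Official website', 'value': 'http://douglasadams.com/'},
        ],
        'urls': [                                   # rendered as a link list
            {'title': 'Official site', 'url': 'http://douglasadams.com/'},
            {'title': 'Wikipedia (en)',
             'url': 'https://en.wikipedia.org/wiki/Douglas_Adams'},
        ],
    }

    # Routing as done in searx/search.py:
    infoboxes = []
    if 'infobox' in example_infobox:
        infoboxes.append(example_infobox)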