[pylint] searx/engines/duckduckgo_definitions.py

BTW: normalize indentation

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
This commit is contained in:
Markus Heiser 2021-05-24 14:41:03 +02:00
parent 21541f8a12
commit 343570f7fb

View File

@ -1,6 +1,8 @@
# SPDX-License-Identifier: AGPL-3.0-or-later # SPDX-License-Identifier: AGPL-3.0-or-later
""" # lint: pylint
DuckDuckGo (Instant Answer API) # pylint: disable=missing-function-docstring
"""DuckDuckGo (Instant Answer API)
""" """
import json import json
@ -48,7 +50,7 @@ def is_broken_text(text):
def result_to_text(text, htmlResult): def result_to_text(text, htmlResult):
# TODO : remove result ending with "Meaning" or "Category" # TODO : remove result ending with "Meaning" or "Category" # pylint: disable=fixme
result = None result = None
dom = html.fromstring(htmlResult) dom = html.fromstring(htmlResult)
a = dom.xpath('//a') a = dom.xpath('//a')
@ -63,13 +65,18 @@ def result_to_text(text, htmlResult):
def request(query, params):
    """Build the DuckDuckGo Instant Answer API request.

    Fills ``params['url']`` with the query URL and sets the
    ``Accept-Language`` header from the matched engine language,
    then returns the (mutated) ``params`` dict.
    """
    params['url'] = URL.format(query=urlencode({'q': query}))
    # supported_languages / language_aliases are injected by the searx
    # engine loader, hence the pylint disable.
    matched = match_language(
        params['language'],
        supported_languages,  # pylint: disable=undefined-variable
        language_aliases,
    )
    # keep only the bare language code (strip any region suffix)
    params['headers']['Accept-Language'] = matched.split('-')[0]
    return params
def response(resp): def response(resp):
# pylint: disable=too-many-locals, too-many-branches, too-many-statements
results = [] results = []
search_res = json.loads(resp.text) search_res = json.loads(resp.text)
@ -124,17 +131,23 @@ def response(resp):
firstURL = ddg_result.get('FirstURL') firstURL = ddg_result.get('FirstURL')
text = ddg_result.get('Text') text = ddg_result.get('Text')
if not is_broken_text(text): if not is_broken_text(text):
suggestion = result_to_text(text, suggestion = result_to_text(
ddg_result.get('Result')) text,
ddg_result.get('Result')
)
if suggestion != heading and suggestion is not None: if suggestion != heading and suggestion is not None:
results.append({'suggestion': suggestion}) results.append({'suggestion': suggestion})
elif 'Topics' in ddg_result: elif 'Topics' in ddg_result:
suggestions = [] suggestions = []
relatedTopics.append({'name': ddg_result.get('Name', ''), relatedTopics.append({
'suggestions': suggestions}) 'name': ddg_result.get('Name', ''),
'suggestions': suggestions
})
for topic_result in ddg_result.get('Topics', []): for topic_result in ddg_result.get('Topics', []):
suggestion = result_to_text(topic_result.get('Text'), suggestion = result_to_text(
topic_result.get('Result')) topic_result.get('Text'),
topic_result.get('Result')
)
if suggestion != heading and suggestion is not None: if suggestion != heading and suggestion is not None:
suggestions.append(suggestion) suggestions.append(suggestion)
@ -143,19 +156,25 @@ def response(resp):
if abstractURL != '': if abstractURL != '':
# add as result ? problem always in english # add as result ? problem always in english
infobox_id = abstractURL infobox_id = abstractURL
urls.append({'title': search_res.get('AbstractSource'), urls.append({
'url': abstractURL, 'title': search_res.get('AbstractSource'),
'official': True}) 'url': abstractURL,
results.append({'url': abstractURL, 'official': True
'title': heading}) })
results.append({
'url': abstractURL,
'title': heading
})
# definition # definition
definitionURL = search_res.get('DefinitionURL', '') definitionURL = search_res.get('DefinitionURL', '')
if definitionURL != '': if definitionURL != '':
# add as result ? as answer ? problem always in english # add as result ? as answer ? problem always in english
infobox_id = definitionURL infobox_id = definitionURL
urls.append({'title': search_res.get('DefinitionSource'), urls.append({
'url': definitionURL}) 'title': search_res.get('DefinitionSource'),
'url': definitionURL
})
# to merge with wikidata's infobox # to merge with wikidata's infobox
if infobox_id: if infobox_id:
@ -183,8 +202,10 @@ def response(resp):
# * netflix_id # * netflix_id
external_url = get_external_url(data_type, data_value) external_url = get_external_url(data_type, data_value)
if external_url is not None: if external_url is not None:
urls.append({'title': data_label, urls.append({
'url': external_url}) 'title': data_label,
'url': external_url
})
elif data_type in ['instance', 'wiki_maps_trigger', 'google_play_artist_id']: elif data_type in ['instance', 'wiki_maps_trigger', 'google_play_artist_id']:
# ignore instance: Wikidata value from "Instance Of" (Qxxxx) # ignore instance: Wikidata value from "Instance Of" (Qxxxx)
# ignore wiki_maps_trigger: reference to a javascript # ignore wiki_maps_trigger: reference to a javascript
@ -194,9 +215,11 @@ def response(resp):
# There is already an URL for the website # There is already an URL for the website
pass pass
elif data_type == 'area': elif data_type == 'area':
attributes.append({'label': data_label, attributes.append({
'value': area_to_str(data_value), 'label': data_label,
'entity': 'P2046'}) 'value': area_to_str(data_value),
'entity': 'P2046'
})
osm_zoom = area_to_osm_zoom(data_value.get('amount')) osm_zoom = area_to_osm_zoom(data_value.get('amount'))
elif data_type == 'coordinates': elif data_type == 'coordinates':
if data_value.get('globe') == 'http://www.wikidata.org/entity/Q2': if data_value.get('globe') == 'http://www.wikidata.org/entity/Q2':
@ -205,12 +228,16 @@ def response(resp):
coordinates = info coordinates = info
else: else:
# coordinate NOT on Earth # coordinate NOT on Earth
attributes.append({'label': data_label, attributes.append({
'value': data_value, 'label': data_label,
'entity': 'P625'}) 'value': data_value,
'entity': 'P625'
})
elif data_type == 'string': elif data_type == 'string':
attributes.append({'label': data_label, attributes.append({
'value': data_value}) 'label': data_label,
'value': data_value
})
if coordinates: if coordinates:
data_label = coordinates.get('label') data_label = coordinates.get('label')
@ -218,25 +245,31 @@ def response(resp):
latitude = data_value.get('latitude') latitude = data_value.get('latitude')
longitude = data_value.get('longitude') longitude = data_value.get('longitude')
url = get_earth_coordinates_url(latitude, longitude, osm_zoom) url = get_earth_coordinates_url(latitude, longitude, osm_zoom)
urls.append({'title': 'OpenStreetMap', urls.append({
'url': url, 'title': 'OpenStreetMap',
'entity': 'P625'}) 'url': url,
'entity': 'P625'
})
if len(heading) > 0: if len(heading) > 0:
# TODO get infobox.meta.value where .label='article_title' # TODO get infobox.meta.value where .label='article_title' # pylint: disable=fixme
if image is None and len(attributes) == 0 and len(urls) == 1 and\ if image is None and len(attributes) == 0 and len(urls) == 1 and\
len(relatedTopics) == 0 and len(content) == 0: len(relatedTopics) == 0 and len(content) == 0:
results.append({'url': urls[0]['url'], results.append({
'title': heading, 'url': urls[0]['url'],
'content': content}) 'title': heading,
'content': content
})
else: else:
results.append({'infobox': heading, results.append({
'id': infobox_id, 'infobox': heading,
'content': content, 'id': infobox_id,
'img_src': image, 'content': content,
'attributes': attributes, 'img_src': image,
'urls': urls, 'attributes': attributes,
'relatedTopics': relatedTopics}) 'urls': urls,
'relatedTopics': relatedTopics
})
return results return results