[mod] plugin: call on_result after each engine from the ResultContainer

Currently, searx.search.Search calls on_result once the engine results have been merged
(ResultContainer.order_results).

on_result plugins can rewrite the results: if a plugin modifies the URL(s), results that could then be merged
won't be, since ResultContainer.order_results has already been called.

This commit calls on_result for each result of each engine.
In addition, the on_result function can now return False to remove the result.

Note: the on_result function now runs on the engine thread instead of the Flask thread.
This commit is contained in:
Alexandre Flament 2021-09-06 08:49:13 +02:00
parent fc20c561bf
commit 660c180170
2 changed files with 70 additions and 40 deletions

View File

@ -145,7 +145,7 @@ class ResultContainer:
"""docstring for ResultContainer""" """docstring for ResultContainer"""
__slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\ __slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
'_ordered', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data' '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'
def __init__(self): def __init__(self):
super().__init__() super().__init__()
@ -156,42 +156,47 @@ class ResultContainer:
self.corrections = set() self.corrections = set()
self._number_of_results = [] self._number_of_results = []
self.engine_data = defaultdict(dict) self.engine_data = defaultdict(dict)
self._ordered = False self._closed = False
self.paging = False self.paging = False
self.unresponsive_engines = set() self.unresponsive_engines = set()
self.timings = [] self.timings = []
self.redirect_url = None self.redirect_url = None
self.on_result = lambda _: True
def extend(self, engine_name, results): def extend(self, engine_name, results):
if self._closed:
return
standard_result_count = 0 standard_result_count = 0
error_msgs = set() error_msgs = set()
for result in list(results): for result in list(results):
result['engine'] = engine_name result['engine'] = engine_name
if 'suggestion' in result: if 'suggestion' in result and self.on_result(result):
self.suggestions.add(result['suggestion']) self.suggestions.add(result['suggestion'])
elif 'answer' in result: elif 'answer' in result and self.on_result(result):
self.answers[result['answer']] = result self.answers[result['answer']] = result
elif 'correction' in result: elif 'correction' in result and self.on_result(result):
self.corrections.add(result['correction']) self.corrections.add(result['correction'])
elif 'infobox' in result: elif 'infobox' in result and self.on_result(result):
self._merge_infobox(result) self._merge_infobox(result)
elif 'number_of_results' in result: elif 'number_of_results' in result and self.on_result(result):
self._number_of_results.append(result['number_of_results']) self._number_of_results.append(result['number_of_results'])
elif 'engine_data' in result: elif 'engine_data' in result and self.on_result(result):
self.engine_data[engine_name][result['key']] = result['engine_data'] self.engine_data[engine_name][result['key']] = result['engine_data']
else: elif 'url' in result:
# standard result (url, title, content) # standard result (url, title, content)
if 'url' in result and not isinstance(result['url'], str): if not self._is_valid_url_result(result, error_msgs):
logger.debug('result: invalid URL: %s', str(result)) continue
error_msgs.add('invalid URL') # normalize the result
elif 'title' in result and not isinstance(result['title'], str): self._normalize_url_result(result)
logger.debug('result: invalid title: %s', str(result)) # call on_result call searx.search.SearchWithPlugins._on_result
error_msgs.add('invalid title') # which calls the plugins
elif 'content' in result and not isinstance(result['content'], str): if not self.on_result(result):
logger.debug('result: invalid content: %s', str(result)) continue
error_msgs.add('invalid content') self.__merge_url_result(result, standard_result_count + 1)
else: standard_result_count += 1
self._merge_result(result, standard_result_count + 1) elif self.on_result(result):
self.__merge_result_no_url(result, standard_result_count + 1)
standard_result_count += 1 standard_result_count += 1
if len(error_msgs) > 0: if len(error_msgs) > 0:
@ -219,14 +224,29 @@ class ResultContainer:
if add_infobox: if add_infobox:
self.infoboxes.append(infobox) self.infoboxes.append(infobox)
def _merge_result(self, result, position): def _is_valid_url_result(self, result, error_msgs):
if 'url' in result: if 'url' in result:
self.__merge_url_result(result, position) if not isinstance(result['url'], str):
return logger.debug('result: invalid URL: %s', str(result))
error_msgs.add('invalid URL')
return False
self.__merge_result_no_url(result, position) if 'title' in result and not isinstance(result['title'], str):
logger.debug('result: invalid title: %s', str(result))
error_msgs.add('invalid title')
return False
def __merge_url_result(self, result, position): if 'content' in result:
if not isinstance(result['content'], str):
logger.debug('result: invalid content: %s', str(result))
error_msgs.add('invalid content')
return False
return True
def _normalize_url_result(self, result):
"""Return True if the result is valid
"""
result['parsed_url'] = urlparse(result['url']) result['parsed_url'] = urlparse(result['url'])
# if the result has no scheme, use http as default # if the result has no scheme, use http as default
@ -234,12 +254,13 @@ class ResultContainer:
result['parsed_url'] = result['parsed_url']._replace(scheme="http") result['parsed_url'] = result['parsed_url']._replace(scheme="http")
result['url'] = result['parsed_url'].geturl() result['url'] = result['parsed_url'].geturl()
result['engines'] = set([result['engine']])
# strip multiple spaces and cariage returns from content # strip multiple spaces and cariage returns from content
if result.get('content'):
result['content'] = WHITESPACE_REGEX.sub(' ', result['content']) result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
return True
def __merge_url_result(self, result, position):
result['engines'] = set([result['engine']])
duplicated = self.__find_duplicated_http_result(result) duplicated = self.__find_duplicated_http_result(result)
if duplicated: if duplicated:
self.__merge_duplicated_http_result(duplicated, result, position) self.__merge_duplicated_http_result(duplicated, result, position)
@ -295,7 +316,9 @@ class ResultContainer:
with RLock(): with RLock():
self._merged_results.append(result) self._merged_results.append(result)
def order_results(self): def close(self):
self._closed = True
for result in self._merged_results: for result in self._merged_results:
score = result_score(result) score = result_score(result)
result['score'] = score result['score'] = score
@ -349,12 +372,11 @@ class ResultContainer:
categoryPositions[category] = {'index': len(gresults), 'count': 8} categoryPositions[category] = {'index': len(gresults), 'count': 8}
# update _merged_results # update _merged_results
self._ordered = True
self._merged_results = gresults self._merged_results = gresults
def get_ordered_results(self): def get_ordered_results(self):
if not self._ordered: if not self._closed:
self.order_results() self.close()
return self._merged_results return self._merged_results
def results_length(self): def results_length(self):

View File

@ -1,6 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later # SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint # lint: pylint
# pylint: disable=missing-module-docstring # pylint: disable=missing-module-docstring, too-few-public-methods
import typing import typing
import threading import threading
@ -179,7 +179,18 @@ class SearchWithPlugins(Search):
def __init__(self, search_query, ordered_plugin_list, request): def __init__(self, search_query, ordered_plugin_list, request):
super().__init__(search_query) super().__init__(search_query)
self.ordered_plugin_list = ordered_plugin_list self.ordered_plugin_list = ordered_plugin_list
self.request = request self.result_container.on_result = self._on_result
# pylint: disable=line-too-long
# get the "real" request to use it outside the Flask context.
# see
# * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
# * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
# * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
# pylint: enable=line-too-long
self.request = request._get_current_object()
def _on_result(self, result):
return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
def search(self): def search(self):
if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self): if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
@ -187,9 +198,6 @@ class SearchWithPlugins(Search):
plugins.call(self.ordered_plugin_list, 'post_search', self.request, self) plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)
results = self.result_container.get_ordered_results() self.result_container.close()
for result in results:
plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
return self.result_container return self.result_container