mirror of https://github.com/searxng/searxng.git
Merge pull request #302 from dalf/mod_plugin_on_result
[mod] plugin: call on_result for each result of each engine.
commit b671e0364f
@@ -26,8 +26,8 @@ Example plugin
     # attach callback to the post search hook
     # request: flask request object
     # ctx: the whole local context of the post search hook
-    def post_search(request, ctx):
-        ctx['search'].suggestions.add('example')
+    def post_search(request, search):
+        search.result_container.suggestions.add('example')
         return True

 External plugins
@@ -50,20 +50,52 @@ Plugin entry points

 Entry points (hooks) define when a plugin runs. Right now only three hooks are
 implemented. So feel free to implement a hook if it fits the behaviour of your
-plugin.
+plugin. A plugin doesn't need to implement all the hooks.

-Pre search hook
----------------
-
-Runs BEFORE the search request. Function to implement: ``pre_search``
-
-Post search hook
-----------------
-
-Runs AFTER the search request. Function to implement: ``post_search``
-
-Result hook
------------
-
-Runs when a new result is added to the result list. Function to implement:
-``on_result``
+.. py:function:: pre_search(request, search) -> bool
+
+   Runs BEFORE the search request.
+
+   `search.result_container` can be changed.
+
+   Return a boolean:
+
+   * True to continue the search
+   * False to stop the search
+
+   :param flask.request request:
+   :param searx.search.SearchWithPlugins search:
+   :return: False to stop the search
+   :rtype: bool
+
+
+.. py:function:: post_search(request, search) -> None
+
+   Runs AFTER the search request.
+
+   :param flask.request request: Flask request.
+   :param searx.search.SearchWithPlugins search: Context.
+
+
+.. py:function:: on_result(request, search, result) -> bool
+
+   Runs for each result of each engine.
+
+   `result` can be changed.
+
+   If `result["url"]` is defined, then `result["parsed_url"] = urlparse(result['url'])`
+
+   .. warning::
+      `result["url"]` can be changed, but `result["parsed_url"]` must be updated too.
+
+   Return a boolean:
+
+   * True to keep the result
+   * False to remove the result
+
+   :param flask.request request:
+   :param searx.search.SearchWithPlugins search:
+   :param typing.Dict result: Result, see - :ref:`engine results`
+   :return: True to keep the result
+   :rtype: bool
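Taken together, the three directives above describe the whole plugin surface. Below is a minimal sketch of a module implementing all three hooks; the metadata attributes (name, description, default_on) follow the convention visible in the built-in plugins further down, and the module itself is illustrative rather than part of the commit:

    from urllib.parse import urlparse

    name = 'Hook demo'
    description = 'Demonstrates the three plugin entry points'
    default_on = False


    def pre_search(request, search):
        # returning False would abort the search before any engine is queried
        return True


    def post_search(request, search):
        # search.result_container can be changed here
        search.result_container.suggestions.add('example')


    def on_result(request, search, result):
        # called once per result of each engine; returning False drops the result
        if 'url' in result:
            result['url'] = result['url'].replace('http://', 'https://')
            # when result['url'] is rewritten, result['parsed_url'] must follow
            result['parsed_url'] = urlparse(result['url'])
        return True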
@@ -0,0 +1,38 @@
+.. _searx.search:
+
+======
+Search
+======
+
+.. autoclass:: searx.search.EngineRef
+   :members:
+
+.. autoclass:: searx.search.SearchQuery
+   :members:
+
+.. autoclass:: searx.search.Search
+
+   .. attribute:: search_query
+      :type: searx.search.SearchQuery
+
+   .. attribute:: result_container
+      :type: searx.results.ResultContainer
+
+   .. automethod:: search() -> searx.results.ResultContainer
+
+.. autoclass:: searx.search.SearchWithPlugins
+   :members:
+
+   .. attribute:: search_query
+      :type: searx.search.SearchQuery
+
+   .. attribute:: result_container
+      :type: searx.results.ResultContainer
+
+   .. attribute:: ordered_plugin_list
+      :type: typing.List
+
+   .. attribute:: request
+      :type: flask.request
+
+   .. automethod:: search() -> searx.results.ResultContainer
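The new page documents the public surface used throughout this commit. A hedged sketch of how the classes compose; it assumes engines are initialized from settings.yml via searx.search.initialize() and that SearchQuery's optional arguments (language, paging, and so on) have defaults, neither of which is shown in this diff:

    import searx.search

    searx.search.initialize()  # load engines from settings.yml (assumed no-arg form)

    # EngineRef names an engine and the category to query it under
    query = searx.search.SearchQuery('test', [searx.search.EngineRef('wikipedia', 'general')])

    # Search.search() returns the ResultContainer documented above
    container = searx.search.Search(query).search()
    print(container.get_ordered_results()[:3])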
@@ -20,14 +20,8 @@ def get_ahmia_blacklist():
     return ahmia_blacklist


-def not_blacklisted(result):
+def on_result(request, search, result):
     if not result.get('is_onion') or not result.get('parsed_url'):
         return True
     result_hash = md5(result['parsed_url'].hostname.encode()).hexdigest()
     return result_hash not in get_ahmia_blacklist()
-
-
-def post_search(request, search):
-    filtered_results = list(filter(not_blacklisted, search.result_container._merged_results))
-    search.result_container._merged_results = filtered_results
-    return True
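The whole plugin now fits in the on_result hook, and the extra pass over result_container._merged_results disappears. A quick check of the hook's contract; request and search are unused by this particular hook, so placeholders are enough:

    from urllib.parse import urlparse

    # non-onion results always pass through
    assert on_result(None, None, {'is_onion': False}) is True

    # onion results are kept only while the md5 of their hostname is not blacklisted
    result = {'is_onion': True, 'parsed_url': urlparse('http://example.onion/page')}
    keep = on_result(None, None, result)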
@@ -11,8 +11,6 @@ description = gettext('Avoid paywalls by redirecting to open-access versions of
 default_on = False
 preference_section = 'general'

-doi_resolvers = settings['doi_resolvers']
-

 def extract_doi(url):
     match = regex.search(url.path)
@@ -25,13 +23,12 @@ def extract_doi(url):
     return None


-def get_doi_resolver(args, preference_doi_resolver):
+def get_doi_resolver(preferences):
     doi_resolvers = settings['doi_resolvers']
-    doi_resolver = args.get('doi_resolver', preference_doi_resolver)[0]
-    if doi_resolver not in doi_resolvers:
-        doi_resolver = settings['default_doi_resolver']
-    doi_resolver_url = doi_resolvers[doi_resolver]
-    return doi_resolver_url
+    selected_resolver = preferences.get_value('doi_resolver')[0]
+    if selected_resolver not in doi_resolvers:
+        selected_resolver = settings['default_doi_resolver']
+    return doi_resolvers[selected_resolver]


 def on_result(request, search, result):
@@ -43,6 +40,6 @@ def on_result(request, search, result):
     for suffix in ('/', '.pdf', '.xml', '/full', '/meta', '/abstract'):
         if doi.endswith(suffix):
             doi = doi[:-len(suffix)]
-    result['url'] = get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')) + doi
+    result['url'] = get_doi_resolver(request.preferences) + doi
     result['parsed_url'] = urlparse(result['url'])
     return True
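The last two lines are the pattern the on_result documentation warns about: whenever a plugin rewrites result['url'] it must refresh result['parsed_url'] to match. A small helper sketch (the helper name is hypothetical, not part of the commit):

    from urllib.parse import urlparse

    def set_result_url(result, new_url):
        # keep 'url' and 'parsed_url' in sync, per the on_result contract
        result['url'] = new_url
        result['parsed_url'] = urlparse(new_url)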
@@ -145,7 +145,7 @@ class ResultContainer:
    """docstring for ResultContainer"""

    __slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
-                '_ordered', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data'
+                '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'

    def __init__(self):
        super().__init__()
@@ -156,43 +156,48 @@ class ResultContainer:
         self.corrections = set()
         self._number_of_results = []
         self.engine_data = defaultdict(dict)
-        self._ordered = False
+        self._closed = False
         self.paging = False
         self.unresponsive_engines = set()
         self.timings = []
         self.redirect_url = None
+        self.on_result = lambda _: True

     def extend(self, engine_name, results):
+        if self._closed:
+            return
+
         standard_result_count = 0
         error_msgs = set()
         for result in list(results):
             result['engine'] = engine_name
-            if 'suggestion' in result:
+            if 'suggestion' in result and self.on_result(result):
                 self.suggestions.add(result['suggestion'])
-            elif 'answer' in result:
+            elif 'answer' in result and self.on_result(result):
                 self.answers[result['answer']] = result
-            elif 'correction' in result:
+            elif 'correction' in result and self.on_result(result):
                 self.corrections.add(result['correction'])
-            elif 'infobox' in result:
+            elif 'infobox' in result and self.on_result(result):
                 self._merge_infobox(result)
-            elif 'number_of_results' in result:
+            elif 'number_of_results' in result and self.on_result(result):
                 self._number_of_results.append(result['number_of_results'])
-            elif 'engine_data' in result:
+            elif 'engine_data' in result and self.on_result(result):
                 self.engine_data[engine_name][result['key']] = result['engine_data']
-            else:
+            elif 'url' in result:
                 # standard result (url, title, content)
-                if 'url' in result and not isinstance(result['url'], str):
-                    logger.debug('result: invalid URL: %s', str(result))
-                    error_msgs.add('invalid URL')
-                elif 'title' in result and not isinstance(result['title'], str):
-                    logger.debug('result: invalid title: %s', str(result))
-                    error_msgs.add('invalid title')
-                elif 'content' in result and not isinstance(result['content'], str):
-                    logger.debug('result: invalid content: %s', str(result))
-                    error_msgs.add('invalid content')
-                else:
-                    self._merge_result(result, standard_result_count + 1)
-                    standard_result_count += 1
+                if not self._is_valid_url_result(result, error_msgs):
+                    continue
+                # normalize the result
+                self._normalize_url_result(result)
+                # call on_result call searx.search.SearchWithPlugins._on_result
+                # which calls the plugins
+                if not self.on_result(result):
+                    continue
+                self.__merge_url_result(result, standard_result_count + 1)
+                standard_result_count += 1
+            elif self.on_result(result):
+                self.__merge_result_no_url(result, standard_result_count + 1)
+                standard_result_count += 1

         if len(error_msgs) > 0:
             for msg in error_msgs:
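The rewrite moves filtering into the merge itself: every branch consults self.on_result before anything is stored, and a closed container ignores late engine responses. A stripped-down illustration of that gate, not the real ResultContainer, just the pattern:

    class GatedContainer:
        def __init__(self):
            self.results = []
            self.closed = False
            self.on_result = lambda _: True      # default: accept everything

        def extend(self, results):
            if self.closed:                      # late engine responses are ignored
                return
            for result in results:
                if self.on_result(result):       # a plugin may veto or mutate here
                    self.results.append(result)

        def close(self):
            self.closed = True

    container = GatedContainer()
    container.on_result = lambda r: 'spam' not in r.get('title', '')
    container.extend([{'title': 'ok'}, {'title': 'spam page'}])
    container.close()
    container.extend([{'title': 'too late'}])    # dropped: container is closed
    assert [r['title'] for r in container.results] == ['ok']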
@@ -219,14 +224,29 @@ class ResultContainer:
         if add_infobox:
             self.infoboxes.append(infobox)

-    def _merge_result(self, result, position):
+    def _is_valid_url_result(self, result, error_msgs):
         if 'url' in result:
-            self.__merge_url_result(result, position)
-            return
+            if not isinstance(result['url'], str):
+                logger.debug('result: invalid URL: %s', str(result))
+                error_msgs.add('invalid URL')
+                return False

-        self.__merge_result_no_url(result, position)
+        if 'title' in result and not isinstance(result['title'], str):
+            logger.debug('result: invalid title: %s', str(result))
+            error_msgs.add('invalid title')
+            return False

-    def __merge_url_result(self, result, position):
+        if 'content' in result:
+            if not isinstance(result['content'], str):
+                logger.debug('result: invalid content: %s', str(result))
+                error_msgs.add('invalid content')
+                return False
+
+        return True
+
+    def _normalize_url_result(self, result):
+        """Return True if the result is valid
+        """
         result['parsed_url'] = urlparse(result['url'])

         # if the result has no scheme, use http as default
@@ -234,12 +254,13 @@ class ResultContainer:
             result['parsed_url'] = result['parsed_url']._replace(scheme="http")
             result['url'] = result['parsed_url'].geturl()

-        result['engines'] = set([result['engine']])
-
         # strip multiple spaces and cariage returns from content
-        if result.get('content'):
-            result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
+        result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])

+        return True
+
+    def __merge_url_result(self, result, position):
+        result['engines'] = set([result['engine']])
         duplicated = self.__find_duplicated_http_result(result)
         if duplicated:
             self.__merge_duplicated_http_result(duplicated, result, position)
@@ -295,7 +316,9 @@ class ResultContainer:
         with RLock():
             self._merged_results.append(result)

-    def order_results(self):
+    def close(self):
+        self._closed = True
+
         for result in self._merged_results:
             score = result_score(result)
             result['score'] = score
@@ -349,12 +372,11 @@ class ResultContainer:
             categoryPositions[category] = {'index': len(gresults), 'count': 8}

         # update _merged_results
-        self._ordered = True
         self._merged_results = gresults

     def get_ordered_results(self):
-        if not self._ordered:
-            self.order_results()
+        if not self._closed:
+            self.close()
         return self._merged_results

     def results_length(self):
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # lint: pylint
-# pylint: disable=missing-module-docstring
+# pylint: disable=missing-module-docstring, too-few-public-methods

 import typing
 import threading
@@ -39,7 +39,7 @@ class Search:

     __slots__ = "search_query", "result_container", "start_time", "actual_timeout"

-    def __init__(self, search_query):
+    def __init__(self, search_query: SearchQuery):
         # init vars
         super().__init__()
         self.search_query = search_query
@@ -163,7 +163,7 @@ class Search:
         return True

     # do search-request
-    def search(self):
+    def search(self) -> ResultContainer:
         self.start_time = default_timer()
         if not self.search_external_bang():
             if not self.search_answerers():
@@ -172,24 +172,32 @@ class Search:


 class SearchWithPlugins(Search):
-    """Similar to the Search class but call the plugins."""
+    """Inherit from the Search class, add calls to the plugins."""

     __slots__ = 'ordered_plugin_list', 'request'

-    def __init__(self, search_query, ordered_plugin_list, request):
+    def __init__(self, search_query: SearchQuery, ordered_plugin_list, request: "flask.Request"):
         super().__init__(search_query)
         self.ordered_plugin_list = ordered_plugin_list
-        self.request = request
+        self.result_container.on_result = self._on_result
+        # pylint: disable=line-too-long
+        # get the "real" request to use it outside the Flask context.
+        # see
+        # * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
+        # * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
+        # * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
+        # pylint: enable=line-too-long
+        self.request = request._get_current_object()

-    def search(self):
+    def _on_result(self, result):
+        return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+
+    def search(self) -> ResultContainer:
         if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
             super().search()

         plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

-        results = self.result_container.get_ordered_results()
-
-        for result in results:
-            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+        self.result_container.close()

         return self.result_container
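The _get_current_object() call exists because flask.request is a werkzeug LocalProxy bound to the current thread's request context; plugin hooks can now run in engine worker threads, where the proxy raises a "working outside of request context" error while the unwrapped request object keeps working. A minimal demonstration outside searx:

    import threading
    from flask import Flask, request

    app = Flask(__name__)

    @app.route('/search')
    def search():
        real_request = request._get_current_object()   # unwrap the LocalProxy

        def worker():
            # using `request` here would raise RuntimeError; the real object works
            print(real_request.args.get('q'))

        t = threading.Thread(target=worker)
        t.start()
        t.join()
        return 'ok'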
@@ -4,6 +4,7 @@ import typing


 class EngineRef:
+    """Reference by names to an engine and category"""

     __slots__ = 'name', 'category'

@@ -1040,9 +1040,7 @@ def preferences():
         themes = themes,
         plugins = plugins,
         doi_resolvers = settings['doi_resolvers'],
-        current_doi_resolver = get_doi_resolver(
-            request.args, request.preferences.get_value('doi_resolver')
-        ),
+        current_doi_resolver = get_doi_resolver(request.preferences),
         allowed_plugins = allowed_plugins,
         theme = get_current_theme_name(),
         preferences_url_params = request.preferences.get_as_url_params(),