From 015401be3dcba9988a32f340a80441050665d6dc Mon Sep 17 00:00:00 2001 From: Alexandre Flament Date: Tue, 13 Apr 2021 15:21:53 +0200 Subject: [PATCH] [mod] refactoring: processors searx.search.processor.abstract.EngineProcessor: * manages suspend time * adds the results to the ResultContainer (extend_container method) * handles exceptions (handle_exception method) --- searx/engines/__init__.py | 2 - searx/search/__init__.py | 2 +- searx/search/checker/impl.py | 3 +- searx/search/processors/abstract.py | 82 +++++++++++++++++++ searx/search/processors/offline.py | 33 +------- searx/search/processors/online.py | 118 +++++++--------------------- 6 files changed, 118 insertions(+), 122 deletions(-) diff --git a/searx/engines/__init__.py b/searx/engines/__init__.py index 95eda6dde..730f8b837 100644 --- a/searx/engines/__init__.py +++ b/searx/engines/__init__.py @@ -51,8 +51,6 @@ engine_default_args = {'paging': False, 'shortcut': '-', 'disabled': False, 'enable_http': False, - 'suspend_end_time': 0, - 'continuous_errors': 0, 'time_range_support': False, 'engine_type': 'online', 'display_error_messages': True, diff --git a/searx/search/__init__.py b/searx/search/__init__.py index f777e8595..8855cf58c 100644 --- a/searx/search/__init__.py +++ b/searx/search/__init__.py @@ -111,7 +111,7 @@ class Search: if request_params is None: continue - with threading.RLock(): + with processor.lock: processor.engine.stats['sent_search_count'] += 1 # append request to list diff --git a/searx/search/checker/impl.py b/searx/search/checker/impl.py index e54b3f68d..1893a82b9 100644 --- a/searx/search/checker/impl.py +++ b/searx/search/checker/impl.py @@ -4,7 +4,6 @@ import typing import types import functools import itertools -import threading from time import time from urllib.parse import urlparse @@ -385,7 +384,7 @@ class Checker: engineref_category = search_query.engineref_list[0].category params = self.processor.get_params(search_query, engineref_category) if params is not None: - with threading.RLock(): + with self.processor.lock: self.processor.engine.stats['sent_search_count'] += 1 self.processor.search(search_query.query, params, result_container, time(), 5) return result_container diff --git a/searx/search/processors/abstract.py b/searx/search/processors/abstract.py index 26dab069f..dcd925669 100644 --- a/searx/search/processors/abstract.py +++ b/searx/search/processors/abstract.py @@ -1,7 +1,14 @@ # SPDX-License-Identifier: AGPL-3.0-or-later +import threading from abc import abstractmethod, ABC +from time import time + from searx import logger +from searx.engines import settings +from searx.network import get_time_for_thread +from searx.metrology.error_recorder import record_exception, record_error +from searx.exceptions import SearxEngineAccessDeniedException logger = logger.getChild('searx.search.processor') @@ -9,11 +16,86 @@ logger = logger.getChild('searx.search.processor') class EngineProcessor(ABC): + __slots__ = 'engine', 'engine_name', 'suspend_end_time', 'suspend_reason', 'continuous_errors', 'lock' + def __init__(self, engine, engine_name): self.engine = engine self.engine_name = engine_name + self.suspend_end_time = 0 + self.suspend_reason = None + self.continuous_errors = 0 + self.lock = threading.RLock() + + @property + def is_suspended(self): + return self.suspend_end_time >= time() + + def _suspend(self, suspended_time, suspend_reason): + with self.lock: + # update continuous_errors / suspend_end_time + self.continuous_errors += 1 + if suspended_time is None: + suspended_time = 
min(settings['search']['max_ban_time_on_fail'], + self.continuous_errors * settings['search']['ban_time_on_fail']) + self.suspend_end_time = time() + suspended_time + self.suspend_reason = suspend_reason + logger.debug('Suspend engine for %i seconds', suspended_time) + + def _resume(self): + with self.lock: + # reset the suspend variables + self.continuous_errors = 0 + self.suspend_end_time = 0 + self.suspend_reason = None + + def handle_exception(self, result_container, reason, exception, suspend=False): + # update result_container + result_container.add_unresponsive_engine(self.engine_name, reason or str(exception)) + # metrics + with self.lock: + self.engine.stats['errors'] += 1 + if exception: + record_exception(self.engine_name, exception) + else: + record_error(self.engine_name, reason) + # suspend the engine ? + if suspend: + suspended_time = None + if isinstance(exception, SearxEngineAccessDeniedException): + suspended_time = exception.suspended_time + self._suspend(suspended_time, reason or str(exception)) # pylint: disable=no-member + + def _extend_container_basic(self, result_container, start_time, search_results): + # update result_container + result_container.extend(self.engine_name, search_results) + engine_time = time() - start_time + page_load_time = get_time_for_thread() + result_container.add_timing(self.engine_name, engine_time, page_load_time) + # metrics + with self.lock: + self.engine.stats['engine_time'] += engine_time + self.engine.stats['engine_time_count'] += 1 + # update stats with the total HTTP time + if page_load_time is not None and 'page_load_time' in self.engine.stats: + self.engine.stats['page_load_time'] += page_load_time + self.engine.stats['page_load_count'] += 1 + + def extend_container(self, result_container, start_time, search_results): + if getattr(threading.current_thread(), '_timeout', False): + # the main thread is not waiting anymore + self.handle_exception(result_container, 'Timeout', None) + else: + # check if the engine accepted the request + if search_results is not None: + self._extend_container_basic(result_container, start_time, search_results) + self._resume() def get_params(self, search_query, engine_category): + # skip suspended engines + if self.is_suspended: + logger.debug('Engine currently suspended: %s', self.engine_name) + return None + # if paging is not supported, skip if search_query.pageno > 1 and not self.engine.paging: return None diff --git a/searx/search/processors/offline.py b/searx/search/processors/offline.py index ede8eb5e1..5186b346a 100644 --- a/searx/search/processors/offline.py +++ b/searx/search/processors/offline.py @@ -1,51 +1,26 @@ # SPDX-License-Identifier: AGPL-3.0-or-later -import threading -from time import time from searx import logger -from searx.metrology.error_recorder import record_exception, record_error from searx.search.processors.abstract import EngineProcessor -logger = logger.getChild('search.processor.offline') +logger = logger.getChild('searx.search.processor.offline') class OfflineProcessor(EngineProcessor): engine_type = 'offline' - def _record_stats_on_error(self, result_container, start_time): - engine_time = time() - start_time - result_container.add_timing(self.engine_name, engine_time, engine_time) - - with threading.RLock(): - self.engine.stats['errors'] += 1 - def _search_basic(self, query, params): return self.engine.search(query, params) def search(self, query, params, result_container, start_time, timeout_limit): try: search_results = self._search_basic(query, params) - - if 
search_results: - result_container.extend(self.engine_name, search_results) - - engine_time = time() - start_time - result_container.add_timing(self.engine_name, engine_time, engine_time) - with threading.RLock(): - self.engine.stats['engine_time'] += engine_time - self.engine.stats['engine_time_count'] += 1 - + self.extend_container(result_container, start_time, search_results) except ValueError as e: - record_exception(self.engine_name, e) - self._record_stats_on_error(result_container, start_time) + # do not record the error logger.exception('engine {0} : invalid input : {1}'.format(self.engine_name, e)) except Exception as e: - record_exception(self.engine_name, e) - self._record_stats_on_error(result_container, start_time) - result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash', str(e)) + self.handle_exception(result_container, 'unexpected crash', e) logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e)) - else: - if getattr(threading.current_thread(), '_timeout', False): - record_error(self.engine_name, 'Timeout') diff --git a/searx/search/processors/online.py b/searx/search/processors/online.py index 66719ea9b..5b4aeee82 100644 --- a/searx/search/processors/online.py +++ b/searx/search/processors/online.py @@ -1,23 +1,21 @@ # SPDX-License-Identifier: AGPL-3.0-or-later from time import time -import threading import asyncio import httpx import searx.network -from searx.engines import settings from searx import logger from searx.utils import gen_useragent from searx.exceptions import (SearxEngineAccessDeniedException, SearxEngineCaptchaException, SearxEngineTooManyRequestsException,) -from searx.metrology.error_recorder import record_exception, record_error +from searx.metrology.error_recorder import record_error from searx.search.processors.abstract import EngineProcessor -logger = logger.getChild('search.processor.online') +logger = logger.getChild('searx.search.processor.online') def default_request_params(): @@ -41,11 +39,6 @@ class OnlineProcessor(EngineProcessor): if params is None: return None - # skip suspended engines - if self.engine.suspend_end_time >= time(): - logger.debug('Engine currently suspended: %s', self.engine_name) - return None - # add default params params.update(default_request_params()) @@ -130,89 +123,38 @@ class OnlineProcessor(EngineProcessor): # set the network searx.network.set_context_network_name(self.engine_name) - # suppose everything will be alright - http_exception = False - suspended_time = None - try: # send requests and parse the results search_results = self._search_basic(query, params) - - # check if the engine accepted the request - if search_results is not None: - # yes, so add results - result_container.extend(self.engine_name, search_results) - - # update engine time when there is no exception - engine_time = time() - start_time - page_load_time = searx.network.get_time_for_thread() - result_container.add_timing(self.engine_name, engine_time, page_load_time) - with threading.RLock(): - self.engine.stats['engine_time'] += engine_time - self.engine.stats['engine_time_count'] += 1 - # update stats with the total HTTP time - self.engine.stats['page_load_time'] += page_load_time - self.engine.stats['page_load_count'] += 1 - except Exception as e: - record_exception(self.engine_name, e) - - # Timing - engine_time = time() - start_time - page_load_time = searx.network.get_time_for_thread() - result_container.add_timing(self.engine_name, engine_time, page_load_time) - - # Record the errors - with 
threading.RLock(): - self.engine.stats['errors'] += 1 - - if (issubclass(e.__class__, (httpx.TimeoutException, asyncio.TimeoutError))): - result_container.add_unresponsive_engine(self.engine_name, 'HTTP timeout') - # requests timeout (connect or read) - logger.error("engine {0} : HTTP requests timeout" + self.extend_container(result_container, start_time, search_results) + except (httpx.TimeoutException, asyncio.TimeoutError) as e: + # requests timeout (connect or read) + self.handle_exception(result_container, 'HTTP timeout', e, suspend=True) + logger.error("engine {0} : HTTP requests timeout" + "(search duration : {1} s, timeout: {2} s) : {3}" + .format(self.engine_name, time() - start_time, + timeout_limit, + e.__class__.__name__)) + except (httpx.HTTPError, httpx.StreamError) as e: + # other requests exception + self.handle_exception(result_container, 'HTTP error', e, suspend=True) + logger.exception("engine {0} : requests exception" "(search duration : {1} s, timeout: {2} s) : {3}" - .format(self.engine_name, engine_time, timeout_limit, e.__class__.__name__)) - http_exception = True - elif (issubclass(e.__class__, (httpx.HTTPError, httpx.StreamError))): - result_container.add_unresponsive_engine(self.engine_name, 'HTTP error') - # other requests exception - logger.exception("engine {0} : requests exception" - "(search duration : {1} s, timeout: {2} s) : {3}" - .format(self.engine_name, engine_time, timeout_limit, e)) - http_exception = True - elif (issubclass(e.__class__, SearxEngineCaptchaException)): - result_container.add_unresponsive_engine(self.engine_name, 'CAPTCHA required') - logger.exception('engine {0} : CAPTCHA'.format(self.engine_name)) - suspended_time = e.suspended_time # pylint: disable=no-member - elif (issubclass(e.__class__, SearxEngineTooManyRequestsException)): - result_container.add_unresponsive_engine(self.engine_name, 'too many requests') - logger.exception('engine {0} : Too many requests'.format(self.engine_name)) - suspended_time = e.suspended_time # pylint: disable=no-member - elif (issubclass(e.__class__, SearxEngineAccessDeniedException)): - result_container.add_unresponsive_engine(self.engine_name, 'blocked') - logger.exception('engine {0} : Searx is blocked'.format(self.engine_name)) - suspended_time = e.suspended_time # pylint: disable=no-member - else: - result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash') - # others errors - logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e)) - else: - if getattr(threading.current_thread(), '_timeout', False): - record_error(self.engine_name, 'Timeout') - - # suspend the engine if there is an HTTP error - # or suspended_time is defined - with threading.RLock(): - if http_exception or suspended_time: - # update continuous_errors / suspend_end_time - self.engine.continuous_errors += 1 - if suspended_time is None: - suspended_time = min(settings['search']['max_ban_time_on_fail'], - self.engine.continuous_errors * settings['search']['ban_time_on_fail']) - self.engine.suspend_end_time = time() + suspended_time - else: - # reset the suspend variables - self.engine.continuous_errors = 0 - self.engine.suspend_end_time = 0 + .format(self.engine_name, time() - start_time, + timeout_limit, + e)) + except SearxEngineCaptchaException as e: + self.handle_exception(result_container, 'CAPTCHA required', e, suspend=True) + logger.exception('engine {0} : CAPTCHA'.format(self.engine_name)) + except SearxEngineTooManyRequestsException as e: + self.handle_exception(result_container, 'too 
many requests', e, suspend=True) + logger.exception('engine {0} : Too many requests'.format(self.engine_name)) + except SearxEngineAccessDeniedException as e: + self.handle_exception(result_container, 'blocked', e, suspend=True) + logger.exception('engine {0} : Searx is blocked'.format(self.engine_name)) + except Exception as e: + self.handle_exception(result_container, 'unexpected crash', e) + logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e)) def get_default_tests(self): tests = {}
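
A note on the 'with threading.RLock():' call sites replaced above (in searx/search/__init__.py, searx/search/checker/impl.py and the processors): the old code constructed a brand-new RLock on every use, so no two threads ever acquired the same lock object and the guarded stats updates were effectively unsynchronized. The patch fixes this by creating one lock per processor (self.lock in EngineProcessor.__init__) and acquiring that shared instance everywhere. A minimal standalone sketch of the difference, using a hypothetical stats dict:

    import threading

    stats = {'sent_search_count': 0}

    def broken_increment():
        # old pattern: a fresh lock on each call excludes nothing
        with threading.RLock():
            stats['sent_search_count'] += 1

    shared_lock = threading.RLock()  # the patch keeps one such lock per EngineProcessor

    def fixed_increment():
        # new pattern: all threads contend on the same lock object
        with shared_lock:
            stats['sent_search_count'] += 1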
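The suspension handling that _suspend() centralizes is a linear backoff with a cap: each consecutive error lengthens the ban until max_ban_time_on_fail is reached, a SearxEngineAccessDeniedException can impose its own suspended_time instead, and any successful response resets the counters via _resume(). A standalone sketch of the same arithmetic; the 5 s and 120 s values are typical searx defaults for ban_time_on_fail and max_ban_time_on_fail, assumed here rather than taken from this patch:

    def suspend_duration(continuous_errors, ban_time_on_fail=5, max_ban_time_on_fail=120):
        # mirrors EngineProcessor._suspend() when the exception does not
        # carry its own suspended_time
        return min(max_ban_time_on_fail, continuous_errors * ban_time_on_fail)

    # 1st error -> 5 s, 2nd -> 10 s, ..., capped at 120 s from the 24th error on
    assert [suspend_duration(n) for n in (1, 2, 24, 100)] == [5, 10, 120, 120]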
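After this refactoring, an engine_type processor only has to produce results and delegate the bookkeeping to the base class, as the slimmed-down OfflineProcessor.search() above shows. A minimal sketch of the expected shape of a new subclass (DummyProcessor and the 'dummy' engine_type are illustrative only, not part of searx):

    from searx.search.processors.abstract import EngineProcessor

    class DummyProcessor(EngineProcessor):
        engine_type = 'dummy'  # hypothetical engine type

        def search(self, query, params, result_container, start_time, timeout_limit):
            try:
                search_results = self.engine.search(query, params)
                # extends the ResultContainer, records timings and, unless the
                # main thread already timed out, resets the suspend state
                self.extend_container(result_container, start_time, search_results)
            except Exception as e:
                # marks the engine unresponsive and records the error; passing
                # suspend=True would additionally apply the backoff above
                self.handle_exception(result_container, 'unexpected crash', e)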