[DRAFT] simplify searx.metrics implementation

- Moved the code of the wrapper functions into methods of the classes
  HistogramStorage and CounterStorage (see the rough sketch below this list).

- Renamed global histogram and counter objects to HISTOGRAM_STORAGE and
  COUNTER_STORAGE.

- The imports of names from the metrics module in the application code have been
  reduced to:

      from searx import metrics

  With this convention, the wrapper functions can be replaced by methods on the
  global storage objects:

      metrics.HISTOGRAM_STORAGE
      metrics.COUNTER_STORAGE

- Commented out the context manager searx.metrics.histogram_observe_time, since
  there is currently no usage of it (a sketch of its shape follows below).
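
The code below is not part of this commit; it is only a rough sketch of how the
renamed globals and the new methods fit together (locking, configuration and the
real measure objects of the actual implementation are simplified and assumed):

    import threading
    from collections import defaultdict

    class CounterStorage:
        """Named counters; the former counter_inc() wrapper becomes the inc() method."""

        def __init__(self):
            self.counters = defaultdict(int)
            self.lock = threading.Lock()

        def inc(self, *names):
            # 'names' is a key tuple like ('engine', <engine name>, 'search', 'count', 'sent')
            with self.lock:
                self.counters[names] += 1

    class HistogramStorage:
        """Named histograms; the former histogram_observe() wrapper becomes observe()."""

        def __init__(self):
            self.measures = {}

        def observe(self, value, *names):
            # the real storage keeps histogram objects; a plain list is enough for this sketch
            self.measures.setdefault(names, []).append(value)

    # the renamed module-global storage objects
    COUNTER_STORAGE = CounterStorage()
    HISTOGRAM_STORAGE = HistogramStorage()

The diffs below show the resulting call sites in the application code.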

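For reference, the commented-out histogram_observe_time helper is a context
manager of roughly the following shape (again only a sketch, reusing the
HISTOGRAM_STORAGE global from the sketch above):

    from contextlib import contextmanager
    from timeit import default_timer

    @contextmanager
    def histogram_observe_time(*names):
        # record the time spent inside the with-block into the histogram selected by *names
        start = default_timer()
        try:
            yield
        finally:
            HISTOGRAM_STORAGE.observe(default_timer() - start, *names)
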
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Markus Heiser 2022-09-21 15:17:02 +02:00
parent 0047f4226e
commit 2b69780f0d
8 changed files with 82 additions and 104 deletions

@@ -17,7 +17,7 @@ from searx.plugins import plugins
 from searx.search.models import EngineRef, SearchQuery
 from searx.engines import load_engines
 from searx.network import initialize as initialize_network, check_network_configuration
-from searx.metrics import initialize as initialize_metrics, counter_inc, histogram_observe_time
+from searx import metrics
 from searx.search.processors import PROCESSORS, initialize as initialize_processors
 from searx.search.checker import initialize as initialize_checker
@@ -31,7 +31,7 @@ def initialize(settings_engines=None, enable_checker=False, check_network=False,
     initialize_network(settings_engines, settings['outgoing'])
     if check_network:
         check_network_configuration()
-    initialize_metrics([engine['name'] for engine in settings_engines], enable_metrics)
+    metrics.initialize([engine['name'] for engine in settings_engines], enable_metrics)
     initialize_processors(settings_engines)
     if enable_checker:
         initialize_checker()
@@ -98,7 +98,7 @@ class Search:
             if request_params is None:
                 continue
-            counter_inc('engine', engineref.name, 'search', 'count', 'sent')
+            metrics.COUNTER_STORAGE.inc('engine', engineref.name, 'search', 'count', 'sent')
             # append request to list
             requests.append((engineref.name, self.search_query.query, request_params))

@@ -19,7 +19,7 @@ from searx.utils import gen_useragent
 from searx.results import ResultContainer
 from searx.search.models import SearchQuery, EngineRef
 from searx.search.processors import EngineProcessor
-from searx.metrics import counter_inc
+from searx import metrics
 logger = logger.getChild('searx.search.checker')
@@ -414,7 +414,7 @@ class Checker:
         engineref_category = search_query.engineref_list[0].category
         params = self.processor.get_params(search_query, engineref_category)
         if params is not None:
-            counter_inc('engine', search_query.engineref_list[0].name, 'search', 'count', 'sent')
+            metrics.COUNTER_STORAGE.inc('engine', search_query.engineref_list[0].name, 'search', 'count', 'sent')
             self.processor.search(search_query.query, params, result_container, default_timer(), 5)
         return result_container

@@ -13,7 +13,7 @@ from typing import Dict, Union
 from searx import settings, logger
 from searx.engines import engines
 from searx.network import get_time_for_thread, get_network
-from searx.metrics import histogram_observe, counter_inc, count_exception, count_error
+from searx import metrics
 from searx.exceptions import SearxEngineAccessDeniedException, SearxEngineResponseException
 from searx.utils import get_engine_from_settings
@@ -95,11 +95,11 @@ class EngineProcessor(ABC):
             error_message = exception_or_message
         result_container.add_unresponsive_engine(self.engine_name, error_message)
         # metrics
-        counter_inc('engine', self.engine_name, 'search', 'count', 'error')
+        metrics.COUNTER_STORAGE.inc('engine', self.engine_name, 'search', 'count', 'error')
         if isinstance(exception_or_message, BaseException):
-            count_exception(self.engine_name, exception_or_message)
+            metrics.count_exception(self.engine_name, exception_or_message)
         else:
-            count_error(self.engine_name, exception_or_message)
+            metrics.count_error(self.engine_name, exception_or_message)
         # suspend the engine ?
         if suspend:
             suspended_time = None
@@ -114,10 +114,10 @@ class EngineProcessor(ABC):
         page_load_time = get_time_for_thread()
         result_container.add_timing(self.engine_name, engine_time, page_load_time)
         # metrics
-        counter_inc('engine', self.engine_name, 'search', 'count', 'successful')
-        histogram_observe(engine_time, 'engine', self.engine_name, 'time', 'total')
+        metrics.COUNTER_STORAGE.inc('engine', self.engine_name, 'search', 'count', 'successful')
+        metrics.HISTOGRAM_STORAGE.observe(engine_time, 'engine', self.engine_name, 'time', 'total')
         if page_load_time is not None:
-            histogram_observe(page_load_time, 'engine', self.engine_name, 'time', 'http')
+            metrics.HISTOGRAM_STORAGE.observe(page_load_time, 'engine', self.engine_name, 'time', 'http')
     def extend_container(self, result_container, start_time, search_results):
         if getattr(threading.current_thread(), '_timeout', False):

@@ -16,7 +16,8 @@ from searx.exceptions import (
     SearxEngineCaptchaException,
     SearxEngineTooManyRequestsException,
 )
-from searx.metrics.error_recorder import count_error
+from searx import metrics
 from .abstract import EngineProcessor
@@ -113,7 +114,7 @@ class OnlineProcessor(EngineProcessor):
             status_code = str(response.status_code or '')
             reason = response.reason_phrase or ''
             hostname = response.url.host
-            count_error(
+            metrics.count_error(
                 self.engine_name,
                 '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
                 (status_code, reason, hostname),