searxngRebrandZaclys/searx/search/processors/online.py


# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processores for engine-type: ``online``
"""
# pylint: disable=use-dict-literal
from timeit import default_timer
import asyncio
import ssl
import httpx
import searx.network
from searx.utils import gen_useragent
from searx.exceptions import (
    SearxEngineAccessDeniedException,
    SearxEngineCaptchaException,
    SearxEngineTooManyRequestsException,
)
from searx.metrics.error_recorder import count_error
from .abstract import EngineProcessor


def default_request_params():
    """Default request parameters for ``online`` engines."""
    return {
        # fmt: off
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'auth': None
        # fmt: on
    }


class OnlineProcessor(EngineProcessor):
    """Processor class for ``online`` engines."""

    engine_type = 'online'

    def initialize(self):
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(self.engine.timeout, start_time=default_timer())
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)
        super().initialize()

    def get_params(self, search_query, engine_category):
        """Returns a set of :ref:`request params <engine request online>` or ``None``
        if the request is not supported.
        """
        params = super().get_params(search_query, engine_category)
        if params is None:
            return None

        # add default params
        params.update(default_request_params())

        # add a user agent
        params['headers']['User-Agent'] = gen_useragent()

        # add an Accept-Language header
        if self.engine.send_accept_language_header and search_query.locale:
            ac_lang = search_query.locale.language
            if search_query.locale.territory:
                ac_lang = "%s-%s,%s;q=0.9,*;q=0.5" % (
                    search_query.locale.language,
                    search_query.locale.territory,
                    search_query.locale.language,
                )
            params['headers']['Accept-Language'] = ac_lang
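            # e.g. for the locale fr-CA this yields the header value
            # "fr-CA,fr;q=0.9,*;q=0.5"; without a territory it is just "fr"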

        self.logger.debug('HTTP Accept-Language: %s', params['headers'].get('Accept-Language', ''))
        return params

    def _send_http_request(self, params):
        # create a dictionary which contains all
        # information about the request
        request_args = dict(headers=params['headers'], cookies=params['cookies'], auth=params['auth'])

        # verify
        # if not None, it overrides the verify value defined in the network.
        # use False to accept any server certificate
        # use a path to a file to specify a server certificate
        verify = params.get('verify')
        if verify is not None:
            request_args['verify'] = params['verify']

        # max_redirects
        max_redirects = params.get('max_redirects')
        if max_redirects:
            request_args['max_redirects'] = max_redirects

        # allow_redirects
        if 'allow_redirects' in params:
            request_args['allow_redirects'] = params['allow_redirects']

        # soft_max_redirects
        soft_max_redirects = params.get('soft_max_redirects', max_redirects or 0)
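        # note: unlike max_redirects, exceeding soft_max_redirects does not
        # abort the request; it is only recorded as an error (see below)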

        # raise_for_status
        request_args['raise_for_httperror'] = params.get('raise_for_httperror', True)
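        # with raise_for_httperror enabled, searx.network maps CAPTCHA,
        # "too many requests" and "access denied" responses to the
        # SearxEngine*Exception types handled in search() below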

        # specific type of request (GET or POST)
        if params['method'] == 'GET':
            req = searx.network.get
        else:
            req = searx.network.post
            request_args['data'] = params['data']
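
        # searx.network.get() / searx.network.post() send the request over the
        # engine's network, the one selected by set_context_network_name()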
        # send the request
        response = req(params['url'], **request_args)

        # check the soft limit of the redirect count
        if len(response.history) > soft_max_redirects:
            # unexpected redirect: record an error,
            # but the engine might still return valid results
            status_code = str(response.status_code or '')
            reason = response.reason_phrase or ''
            hostname = response.url.host
            count_error(
                self.engine_name,
                '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
                (status_code, reason, hostname),
                secondary=True,
            )

        return response

    def _search_basic(self, query, params):
        # update the request parameters dependent on the
        # search engine (contained in the engines folder)
        self.engine.request(query, params)
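
        # note: an engine's request() can opt out of this search by setting
        # params['url'] to None or to an empty string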

        # ignore empty URLs
        if params['url'] is None:
            return None

        if not params['url']:
            return None

        # send the request
        response = self._send_http_request(params)

        # parse the response
        response.search_params = params
        return self.engine.response(response)

    def search(self, query, params, result_container, start_time, timeout_limit):
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)
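
        # note: timeout and accumulated HTTP time are thread-local state in
        # searx.network, so they have to be (re)set for this search thread
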
        try:
            # send requests and parse the results
            search_results = self._search_basic(query, params)
            self.extend_container(result_container, start_time, search_results)
        except ssl.SSLError as e:
            # SSL error
            self.handle_exception(result_container, e, suspend=True)
            self.logger.error("SSLError {}, verify={}".format(e, searx.network.get_network(self.engine_name).verify))
        except (httpx.TimeoutException, asyncio.TimeoutError) as e:
            # requests timeout (connect or read)
            self.handle_exception(result_container, e, suspend=True)
            self.logger.error(
                "HTTP requests timeout (search duration: {0} s, timeout: {1} s): {2}".format(
                    default_timer() - start_time, timeout_limit, e.__class__.__name__
                )
            )
        except (httpx.HTTPError, httpx.StreamError) as e:
            # other requests exception
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception(
                "requests exception (search duration: {0} s, timeout: {1} s): {2}".format(
                    default_timer() - start_time, timeout_limit, e
                )
            )
        except SearxEngineCaptchaException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('CAPTCHA')
        except SearxEngineTooManyRequestsException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('Too many requests')
        except SearxEngineAccessDeniedException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('Searx is blocked')
        except Exception as e:  # pylint: disable=broad-except
            self.handle_exception(result_container, e)
            self.logger.exception('exception: {0}'.format(e))

    def get_default_tests(self):
        tests = {}
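
        # note: a tuple value in a 'matrix' entry means the test is run once
        # per value (e.g. one query for 'life' and one for 'computer')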
        tests['simple'] = {
            'matrix': {'query': ('life', 'computer')},
            'result_container': ['not_empty'],
        }

        if getattr(self.engine, 'paging', False):
            tests['paging'] = {
                'matrix': {'query': 'time', 'pageno': (1, 2, 3)},
                'result_container': ['not_empty'],
                'test': ['unique_results'],
            }
            if 'general' in self.engine.categories:
                # avoid documentation about HTML tags (<time> and <input type="time">)
                tests['paging']['matrix']['query'] = 'news'

        if getattr(self.engine, 'time_range', False):
            tests['time_range'] = {
                'matrix': {'query': 'news', 'time_range': (None, 'day')},
                'result_container': ['not_empty'],
                'test': ['unique_results'],
            }

        if getattr(self.engine, 'traits', False):
            tests['lang_fr'] = {
                'matrix': {'query': 'paris', 'lang': 'fr'},
                'result_container': ['not_empty', ('has_language', 'fr')],
            }
            tests['lang_en'] = {
                'matrix': {'query': 'paris', 'lang': 'en'},
                'result_container': ['not_empty', ('has_language', 'en')],
            }

        if getattr(self.engine, 'safesearch', False):
            tests['safesearch'] = {'matrix': {'query': 'porn', 'safesearch': (0, 2)}, 'test': ['unique_results']}

        return tests