2013-10-14 21:09:13 +00:00
|
|
|
|
2013-10-16 22:32:32 +00:00
|
|
|
'''
|
|
|
|
searx is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU Affero General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
searx is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU Affero General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU Affero General Public License
|
|
|
|
along with searx. If not, see < http://www.gnu.org/licenses/ >.
|
|
|
|
|
|
|
|
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
|
|
|
|
'''
|
|
|
|
|
2014-02-05 19:24:31 +00:00
|
|
|
import sys
|
2017-06-06 20:20:20 +00:00
|
|
|
import threading
|
|
|
|
from os.path import realpath, dirname
|
2018-03-01 04:30:48 +00:00
|
|
|
from babel.localedata import locale_identifiers
|
2020-12-09 16:33:18 +00:00
|
|
|
from urllib.parse import urlparse
|
2016-07-04 20:46:43 +00:00
|
|
|
from flask_babel import gettext
|
2014-07-07 11:59:27 +00:00
|
|
|
from operator import itemgetter
|
2014-02-05 19:24:31 +00:00
|
|
|
from searx import settings
|
2015-01-09 03:13:05 +00:00
|
|
|
from searx import logger
|
2020-10-05 11:50:33 +00:00
|
|
|
from searx.data import ENGINES_LANGUAGES
|
[mod] don't dump traceback of SearxEngineResponseException on init
When initing engines a "SearxEngineResponseException" is logged very verbose,
including full traceback information:
ERROR:searx.engines:yggtorrent engine: Fail to initialize
Traceback (most recent call last):
File "share/searx/searx/engines/__init__.py", line 293, in engine_init
init_fn(get_engine_from_settings(engine_name))
File "share/searx/searx/engines/yggtorrent.py", line 42, in init
resp = http_get(url, allow_redirects=False)
File "share/searx/searx/poolrequests.py", line 197, in get
return request('get', url, **kwargs)
File "share/searx/searx/poolrequests.py", line 190, in request
raise_for_httperror(response)
File "share/searx/searx/raise_for_httperror.py", line 60, in raise_for_httperror
raise_for_captcha(resp)
File "share/searx/searx/raise_for_httperror.py", line 43, in raise_for_captcha
raise_for_cloudflare_captcha(resp)
File "share/searx/searx/raise_for_httperror.py", line 30, in raise_for_cloudflare_captcha
raise SearxEngineCaptchaException(message='Cloudflare CAPTCHA', suspended_time=3600 * 24 * 15)
searx.exceptions.SearxEngineCaptchaException: Cloudflare CAPTCHA, suspended_time=1296000
For SearxEngineResponseException this is not needed. Those types of exceptions
can be a normal use case. E.g. for CAPTCHA errors like shown in the example
above. It should be enough to log a warning for such issues:
WARNING:searx.engines:yggtorrent engine: Fail to initialize // Cloudflare CAPTCHA, suspended_time=1296000
closes: #2612
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2021-03-05 16:26:22 +00:00
|
|
|
from searx.exceptions import SearxEngineResponseException
|
[httpx] replace searx.poolrequests by searx.network
settings.yml:
* outgoing.networks:
* can contains network definition
* propertiers: enable_http, verify, http2, max_connections, max_keepalive_connections,
keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries
* retries: 0 by default, number of times searx retries to send the HTTP request (using different IP & proxy each time)
* local_addresses can be "192.168.0.1/24" (it supports IPv6)
* support_ipv4 & support_ipv6: both True by default
see https://github.com/searx/searx/pull/1034
* each engine can define a "network" section:
* either a full network description
* either reference an existing network
* all HTTP requests of engine use the same HTTP configuration (it was not the case before, see proxy configuration in master)
2021-04-05 08:43:33 +00:00
|
|
|
from searx.network import get, initialize as initialize_network, set_context_network_name
|
2021-02-26 06:20:50 +00:00
|
|
|
from searx.utils import load_module, match_language, get_engine_from_settings, gen_useragent
|
2015-01-09 03:13:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
# child logger of the searx root logger, tags all messages with "engines"
logger = logger.getChild('engines')

# directory that holds the individual engine modules (<name>.py files)
engine_dir = dirname(realpath(__file__))

# engine name -> loaded engine module; populated by load_engines()
engines = {}

# category name -> list of engine modules; 'general' always exists
categories = {'general': []}

# all locales babel knows about, normalized from 'xx_YY' to 'xx-YY' form,
# used by load_engine() to map engine language codes to standard ones
babel_langs = [lang_parts[0] + '-' + lang_parts[-1] if len(lang_parts) > 1 else lang_parts[0]
               for lang_parts in (lang_code.split('_') for lang_code in locale_identifiers())]

# shortcut string (e.g. '!ddg') -> engine name; populated by load_engine()
engine_shortcuts = {}
# default values applied to every engine attribute that is defined neither by
# the engine module itself nor by its settings.yml entry
engine_default_args = {'paging': False,
                       'categories': ['general'],
                       'supported_languages': [],
                       'safesearch': False,
                       'timeout': settings['outgoing']['request_timeout'],
                       'shortcut': '-',
                       'disabled': False,
                       'enable_http': False,
                       'suspend_end_time': 0,
                       'continuous_errors': 0,
                       'time_range_support': False,
                       'engine_type': 'online',
                       'display_error_messages': True,
                       'tokens': []}
|
2014-01-31 14:45:18 +00:00
|
|
|
|
2014-01-19 21:59:01 +00:00
|
|
|
|
2014-12-13 18:26:40 +00:00
|
|
|
def load_engine(engine_data):
    """Load and configure a single engine module from its settings entry.

    ``engine_data`` is one item of the ``engines:`` list in settings.yml; it
    must contain at least ``name`` and ``engine`` (the module file name
    without ``.py``).  Returns the configured engine module, or ``None`` when
    the engine cannot be used (import failure, marked ``inactive``, or an
    onion-only engine while tor is not in use).  Exits the process on fatal
    configuration errors (bad name, broken module, missing attribute,
    duplicate shortcut).
    """
    engine_name = engine_data['name']
    if '_' in engine_name:
        logger.error('Engine name contains underscore: "{}"'.format(engine_name))
        sys.exit(1)

    if engine_name.lower() != engine_name:
        # logging.Logger.warn() is a deprecated alias of warning()
        logger.warning('Engine name is not lowercase: "{}", converting to lowercase'.format(engine_name))
        engine_name = engine_name.lower()
        engine_data['name'] = engine_name

    engine_module = engine_data['engine']

    try:
        engine = load_module(engine_module + '.py', engine_dir)
    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError):
        # broken engine file or interpreter shutdown: abort the whole startup
        logger.exception('Fatal exception in engine "{}"'.format(engine_module))
        sys.exit(1)
    except Exception:
        # any other import-time error only disables this one engine
        logger.exception('Cannot load engine "{}"'.format(engine_module))
        return None

    # copy the settings.yml values onto the module; 'categories' needs parsing
    for param_name, param_value in engine_data.items():
        if param_name == 'engine':
            pass
        elif param_name == 'categories':
            if param_value == 'none':
                engine.categories = []
            else:
                engine.categories = list(map(str.strip, param_value.split(',')))
        else:
            setattr(engine, param_name, param_value)

    # fill in defaults for everything neither the module nor settings defined
    for arg_name, arg_value in engine_default_args.items():
        if not hasattr(engine, arg_name):
            setattr(engine, arg_name, arg_value)

    # checking required variables
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if engine_attr == 'inactive' and getattr(engine, engine_attr) is True:
            # engine opted out of being loaded
            return None
        if getattr(engine, engine_attr) is None:
            logger.error('Missing engine config attribute: "{0}.{1}"'
                         .format(engine.name, engine_attr))
            sys.exit(1)

    # assign supported languages from json file
    if engine_data['name'] in ENGINES_LANGUAGES:
        setattr(engine, 'supported_languages', ENGINES_LANGUAGES[engine_data['name']])

    # find custom aliases for non standard language codes
    if hasattr(engine, 'supported_languages'):
        if hasattr(engine, 'language_aliases'):
            language_aliases = getattr(engine, 'language_aliases')
        else:
            language_aliases = {}

        for engine_lang in getattr(engine, 'supported_languages'):
            iso_lang = match_language(engine_lang, babel_langs, fallback=None)
            # record an alias only when the engine code maps to a standard
            # code that is not already directly supported by the engine
            if iso_lang and iso_lang != engine_lang and not engine_lang.startswith(iso_lang) and \
               iso_lang not in getattr(engine, 'supported_languages'):
                language_aliases[iso_lang] = engine_lang

        setattr(engine, 'language_aliases', language_aliases)

    # language_support
    setattr(engine, 'language_support', len(getattr(engine, 'supported_languages', [])) > 0)

    # assign language fetching method if auxiliary method exists
    if hasattr(engine, '_fetch_supported_languages'):
        headers = {
            'User-Agent': gen_useragent(),
            'Accept-Language': 'ja-JP,ja;q=0.8,en-US;q=0.5,en;q=0.3',  # bing needs a non-English language
        }
        setattr(engine, 'fetch_supported_languages',
                lambda: engine._fetch_supported_languages(get(engine.supported_languages_url, headers=headers)))

    # per-engine runtime statistics, aggregated by get_engines_stats()
    engine.stats = {
        'sent_search_count': 0,  # sent search
        'search_count': 0,  # successful search
        'result_count': 0,
        'engine_time': 0,
        'engine_time_count': 0,
        'score_count': 0,
        'errors': 0
    }

    engine_type = getattr(engine, 'engine_type', 'online')

    # offline engines perform no HTTP requests, so no page-load counters
    if engine_type != 'offline':
        engine.stats['page_load_time'] = 0
        engine.stats['page_load_count'] = 0

    # tor related settings
    if settings['outgoing'].get('using_tor_proxy'):
        # use onion url if using tor.
        if hasattr(engine, 'onion_url'):
            engine.search_url = engine.onion_url + getattr(engine, 'search_path', '')
    elif 'onions' in engine.categories:
        # exclude onion engines if not using tor.
        return None

    engine.timeout += settings['outgoing'].get('extra_proxy_timeout', 0)

    # register the engine in each of its categories
    for category_name in engine.categories:
        categories.setdefault(category_name, []).append(engine)

    if engine.shortcut in engine_shortcuts:
        logger.error('Engine config error: ambigious shortcut: {0}'.format(engine.shortcut))
        sys.exit(1)

    engine_shortcuts[engine.shortcut] = engine.name

    return engine
|
2014-01-31 14:45:18 +00:00
|
|
|
|
2014-01-19 21:59:01 +00:00
|
|
|
|
2016-11-05 12:45:20 +00:00
|
|
|
def to_percentage(stats, maxvalue):
    """Add a ``'percentage'`` key to every stat dict in *stats*.

    Each entry's ``'avg'`` value is expressed as an integer percentage of
    *maxvalue*; when *maxvalue* is zero (or otherwise falsy) the percentage
    is 0.  The dicts are mutated in place and the same list is returned.
    """
    for entry in stats:
        percentage = int(entry['avg'] / maxvalue * 100) if maxvalue else 0
        entry['percentage'] = percentage
    return stats
|
|
|
|
|
|
|
|
|
2020-02-01 10:01:17 +00:00
|
|
|
def get_engines_stats(preferences):
    """Aggregate the per-engine runtime counters into the /stats tables.

    Skips engines whose token is not validated by *preferences* and engines
    that never answered a search.  Returns a list of ``(label, rows)`` tuples
    where every row dict has ``'name'``, ``'avg'`` and a ``'percentage'``
    relative to the column maximum (see ``to_percentage``).
    """
    # TODO refactor
    pageloads = []
    engine_times = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    # running maxima of each column, used to scale the percentage bars
    max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0  # noqa
    for engine in engines.values():
        # only show engines the user is allowed to access
        if not preferences.validate_token(engine):
            continue

        # no data yet for this engine
        if engine.stats['search_count'] == 0:
            continue

        # average number of results per successful search
        results_num = \
            engine.stats['result_count'] / float(engine.stats['search_count'])

        if engine.stats['engine_time_count'] != 0:
            this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count'])  # noqa
        else:
            this_engine_time = 0

        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])  # noqa
            score_per_result = score / results_num
        else:
            score = score_per_result = 0.0

        # offline engines have no page-load counters (see load_engine)
        if engine.engine_type != 'offline':
            load_times = 0
            if engine.stats['page_load_count'] != 0:
                load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count'])  # noqa
            max_pageload = max(load_times, max_pageload)
            pageloads.append({'avg': load_times, 'name': engine.name})

        max_engine_times = max(this_engine_time, max_engine_times)
        max_results = max(results_num, max_results)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])

        engine_times.append({'avg': this_engine_time, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({
            'avg': score_per_result,
            'name': engine.name
        })

    # scale every column to 0..100 relative to its own maximum
    pageloads = to_percentage(pageloads, max_pageload)
    engine_times = to_percentage(engine_times, max_engine_times)
    results = to_percentage(results, max_results)
    scores = to_percentage(scores, max_score)
    scores_per_result = to_percentage(scores_per_result, max_score_per_result)
    errors = to_percentage(errors, max_errors)

    # times sorted ascending (faster first), quality metrics descending
    return [
        (
            gettext('Engine time (sec)'),
            sorted(engine_times, key=itemgetter('avg'))
        ),
        (
            gettext('Page loads (sec)'),
            sorted(pageloads, key=itemgetter('avg'))
        ),
        (
            gettext('Number of results'),
            sorted(results, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores'),
            sorted(scores, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Scores per result'),
            sorted(scores_per_result, key=itemgetter('avg'), reverse=True)
        ),
        (
            gettext('Errors'),
            sorted(errors, key=itemgetter('avg'), reverse=True)
        ),
    ]
|
2014-12-13 18:26:40 +00:00
|
|
|
|
|
|
|
|
2017-07-21 12:27:25 +00:00
|
|
|
def load_engines(engine_list):
    """(Re)populate the module-level engine registries from *engine_list*.

    Clears ``engines`` and ``engine_shortcuts`` first, then loads every entry
    via ``load_engine``; entries that fail to load (``None``) are skipped.
    Returns the module-level ``engines`` dict.
    """
    global engines, engine_shortcuts
    # drop anything left over from a previous load
    engines.clear()
    engine_shortcuts.clear()
    for engine_data in engine_list:
        loaded = load_engine(engine_data)
        if loaded is None:
            continue
        engines[loaded.name] = loaded
    return engines
|
|
|
|
|
|
|
|
|
|
|
|
def initialize_engines(engine_list):
    """Load the engines from *engine_list* and start their initialization.

    After the engines and the outgoing network are set up, every engine that
    defines a truthy ``init`` function gets it executed in a background
    thread, so a slow or unreachable engine does not delay startup.
    """
    load_engines(engine_list)
    initialize_network(engine_list, settings['outgoing'])

    def engine_init(engine_name, init_fn):
        # runs in a background thread: never let an exception propagate
        try:
            set_context_network_name(engine_name)
            init_fn(get_engine_from_settings(engine_name))
        except SearxEngineResponseException as exc:
            # expected failures (e.g. CAPTCHA): short warning, no traceback.
            # Logger.warn() is a deprecated alias of warning().
            logger.warning('%s engine: Fail to initialize // %s', engine_name, exc)
        except Exception:
            logger.exception('%s engine: Fail to initialize', engine_name)
        else:
            logger.debug('%s engine: Initialized', engine_name)

    for engine_name, engine in engines.items():
        if hasattr(engine, 'init'):
            init_fn = getattr(engine, 'init')
            if init_fn:
                logger.debug('%s engine: Starting background initialization', engine_name)
                threading.Thread(target=engine_init, args=(engine_name, init_fn)).start()
|