searxngRebrandZaclys/searx/engines/__init__.py

292 lines
10 KiB
Python
Raw Normal View History

2013-10-14 21:09:13 +00:00
2013-10-16 22:32:32 +00:00
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
2014-02-05 19:24:31 +00:00
import sys
import threading
from os.path import realpath, dirname
from babel.localedata import locale_identifiers
from flask_babel import gettext
2014-07-07 11:59:27 +00:00
from operator import itemgetter
2014-02-05 19:24:31 +00:00
from searx import settings
2015-01-09 03:13:05 +00:00
from searx import logger
from searx.data import ENGINES_LANGUAGES
from searx.poolrequests import get, get_proxy_cycles
from searx.utils import load_module, match_language, get_engine_from_settings
2015-01-09 03:13:05 +00:00
logger = logger.getChild('engines')

# directory holding the individual engine modules (xxx.py files)
engine_dir = dirname(realpath(__file__))

# engine name -> loaded engine module, filled by load_engines()
engines = {}

# category name -> list of engine modules; 'general' always exists
categories = {'general': []}

# babel locale identifiers normalized to "lang-REGION" (or bare "lang")
babel_langs = [
    parts[0] + '-' + parts[-1] if len(parts) > 1 else parts[0]
    for parts in (code.split('_') for code in locale_identifiers())
]

# shortcut string -> engine name, filled by load_engine()
engine_shortcuts = {}

# defaults applied to every engine attribute that neither the engine
# module itself nor its settings.yml entry defines
engine_default_args = {
    'paging': False,
    'categories': ['general'],
    'language_support': True,
    'supported_languages': [],
    'safesearch': False,
    'timeout': settings['outgoing']['request_timeout'],
    'shortcut': '-',
    'disabled': False,
    'suspend_end_time': 0,
    'continuous_errors': 0,
    'time_range_support': False,
    'offline': False,
    'display_error_messages': True,
    'tokens': [],
}
2014-12-13 18:26:40 +00:00
def load_engine(engine_data):
    """Import and configure one engine from its ``engines:`` settings entry.

    Returns the configured engine module, or ``None`` when the engine is
    unusable (module import failed, ``inactive: True``, or an onion-only
    engine while tor proxying is not enabled).  Exits the whole process on
    fatal misconfiguration (underscore in the name, a required attribute
    left as ``None``, or an ambiguous shortcut).

    Note: normalizing the name to lowercase also rewrites
    ``engine_data['name']`` in place.
    """
    engine_name = engine_data['name']
    if '_' in engine_name:
        logger.error('Engine name contains underscore: "{}"'.format(engine_name))
        sys.exit(1)

    if engine_name.lower() != engine_name:
        # fix: logger.warn is deprecated, use logger.warning
        logger.warning('Engine name is not lowercase: "{}", converting to lowercase'.format(engine_name))
        engine_name = engine_name.lower()
        engine_data['name'] = engine_name

    engine_module = engine_data['engine']

    try:
        engine = load_module(engine_module + '.py', engine_dir)
    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError):
        logger.exception('Fatal exception in engine "{}"'.format(engine_module))
        sys.exit(1)
    except Exception:
        # fix: was a bare "except:"; Exception keeps the best-effort skip
        # without re-catching exiting exceptions (BaseException subclasses)
        logger.exception('Cannot load engine "{}"'.format(engine_module))
        return None

    # overlay the settings.yml values onto the module's own attributes
    for param_name, param_value in engine_data.items():
        if param_name == 'engine':
            pass
        elif param_name == 'categories':
            if param_value == 'none':
                engine.categories = []
            else:
                engine.categories = list(map(str.strip, param_value.split(',')))
        elif param_name == 'proxies':
            engine.proxies = get_proxy_cycles(param_value)
        else:
            setattr(engine, param_name, param_value)

    # fill in defaults for everything still unset
    for arg_name, arg_value in engine_default_args.items():
        if not hasattr(engine, arg_name):
            setattr(engine, arg_name, arg_value)

    # checking required variables
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if engine_attr == 'inactive' and getattr(engine, engine_attr) is True:
            return None
        if getattr(engine, engine_attr) is None:
            logger.error('Missing engine config attribute: "{0}.{1}"'
                         .format(engine.name, engine_attr))
            sys.exit(1)

    # assign supported languages from json file
    if engine_data['name'] in ENGINES_LANGUAGES:
        setattr(engine, 'supported_languages', ENGINES_LANGUAGES[engine_data['name']])

    # find custom aliases for non standard language codes
    if hasattr(engine, 'supported_languages'):
        if hasattr(engine, 'language_aliases'):
            language_aliases = getattr(engine, 'language_aliases')
        else:
            language_aliases = {}

        for engine_lang in getattr(engine, 'supported_languages'):
            iso_lang = match_language(engine_lang, babel_langs, fallback=None)
            if iso_lang and iso_lang != engine_lang and not engine_lang.startswith(iso_lang) and \
               iso_lang not in getattr(engine, 'supported_languages'):
                language_aliases[iso_lang] = engine_lang

        setattr(engine, 'language_aliases', language_aliases)

    # assign language fetching method if auxiliary method exists
    if hasattr(engine, '_fetch_supported_languages'):
        setattr(engine, 'fetch_supported_languages',
                lambda: engine._fetch_supported_languages(get(engine.supported_languages_url)))

    engine.stats = {
        'sent_search_count': 0,  # sent search
        'search_count': 0,  # succesful search
        'result_count': 0,
        'engine_time': 0,
        'engine_time_count': 0,
        'score_count': 0,
        'errors': 0
    }
    if not engine.offline:
        engine.stats['page_load_time'] = 0
        engine.stats['page_load_count'] = 0

    # tor related settings
    if settings['outgoing'].get('using_tor_proxy'):
        # use onion url if using tor.
        if hasattr(engine, 'onion_url'):
            engine.search_url = engine.onion_url + getattr(engine, 'search_path', '')
    elif 'onions' in engine.categories:
        # exclude onion engines if not using tor.
        return None

    engine.timeout += settings['outgoing'].get('extra_proxy_timeout', 0)

    for category_name in engine.categories:
        categories.setdefault(category_name, []).append(engine)

    if engine.shortcut in engine_shortcuts:
        logger.error('Engine config error: ambigious shortcut: {0}'.format(engine.shortcut))
        sys.exit(1)

    engine_shortcuts[engine.shortcut] = engine.name

    return engine
def to_percentage(stats, maxvalue):
    """Annotate every stat dict in *stats* (in place) with an integer
    ``percentage`` of its ``avg`` relative to *maxvalue*.

    A falsy *maxvalue* (e.g. 0) yields 0 for every entry instead of
    dividing by zero.  Returns the same *stats* list for chaining.
    """
    for entry in stats:
        entry['percentage'] = int(entry['avg'] / maxvalue * 100) if maxvalue else 0
    return stats
def get_engines_stats(preferences):
    """Build the statistics tables shown on the stats page.

    Only engines whose token check passes and that completed at least one
    successful search are included.  Returns a list of
    ``(translated label, rows)`` pairs; each row carries ``name``, ``avg``
    and a ``percentage`` relative to the best engine of that table.
    """
    # TODO refactor
    pageloads = []
    engine_times = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0  # noqa

    for eng in engines.values():
        if not preferences.validate_token(eng):
            continue
        if eng.stats['search_count'] == 0:
            continue

        searches = float(eng.stats['search_count'])
        avg_results = eng.stats['result_count'] / searches

        if eng.stats['engine_time_count'] != 0:
            avg_time = eng.stats['engine_time'] / float(eng.stats['engine_time_count'])
        else:
            avg_time = 0

        if avg_results:
            score = eng.stats['score_count'] / searches
            score_per_result = score / avg_results
        else:
            score = score_per_result = 0.0

        # offline engines carry no page load counters at all
        if not eng.offline:
            avg_load = 0
            if eng.stats['page_load_count'] != 0:
                avg_load = eng.stats['page_load_time'] / float(eng.stats['page_load_count'])
            max_pageload = max(avg_load, max_pageload)
            pageloads.append({'avg': avg_load, 'name': eng.name})

        max_engine_times = max(avg_time, max_engine_times)
        max_results = max(avg_results, max_results)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, eng.stats['errors'])

        engine_times.append({'avg': avg_time, 'name': eng.name})
        results.append({'avg': avg_results, 'name': eng.name})
        scores.append({'avg': score, 'name': eng.name})
        errors.append({'avg': eng.stats['errors'], 'name': eng.name})
        scores_per_result.append({'avg': score_per_result, 'name': eng.name})

    pageloads = to_percentage(pageloads, max_pageload)
    engine_times = to_percentage(engine_times, max_engine_times)
    results = to_percentage(results, max_results)
    scores = to_percentage(scores, max_score)
    scores_per_result = to_percentage(scores_per_result, max_score_per_result)
    errors = to_percentage(errors, max_errors)

    return [
        (gettext('Engine time (sec)'),
         sorted(engine_times, key=itemgetter('avg'))),
        (gettext('Page loads (sec)'),
         sorted(pageloads, key=itemgetter('avg'))),
        (gettext('Number of results'),
         sorted(results, key=itemgetter('avg'), reverse=True)),
        (gettext('Scores'),
         sorted(scores, key=itemgetter('avg'), reverse=True)),
        (gettext('Scores per result'),
         sorted(scores_per_result, key=itemgetter('avg'), reverse=True)),
        (gettext('Errors'),
         sorted(errors, key=itemgetter('avg'), reverse=True)),
    ]
2014-12-13 18:26:40 +00:00
def load_engines(engine_list):
    """Rebuild the module-level engine registry from *engine_list*.

    Clears ``engines`` and ``engine_shortcuts`` first, then loads every
    entry; entries that ``load_engine`` rejects (returns ``None``) are
    skipped.  Returns the ``engines`` dict.
    """
    global engines, engine_shortcuts
    engines.clear()
    engine_shortcuts.clear()
    for entry in engine_list:
        loaded = load_engine(entry)
        if loaded is not None:
            engines[loaded.name] = loaded
    return engines
def initialize_engines(engine_list):
    """Load all engines from *engine_list*, then run each engine's
    optional ``init`` hook on its own background thread."""
    load_engines(engine_list)

    def _background_init(name, init_fn):
        # the hook receives the engine's raw settings entry
        init_fn(get_engine_from_settings(name))
        logger.debug('%s engine: Initialized', name)

    for name, engine in engines.items():
        init_fn = getattr(engine, 'init', None)
        if init_fn:
            logger.debug('%s engine: Starting background initialization', name)
            threading.Thread(target=_background_init, args=(name, init_fn)).start()