'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see <http://www.gnu.org/licenses/>.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
import gc
import sys
import threading
from time import time
from uuid import uuid4
from _thread import start_new_thread

from flask_babel import gettext
import requests.exceptions

import searx.poolrequests as requests_lib
from searx.engines import (
    categories, engines, settings
)
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.utils import gen_useragent
from searx.query import RawTextQuery, SearchQuery, VALID_LANGUAGE_CODE
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins
from searx.exceptions import SearxParameterException


logger = logger.getChild('search')

max_request_timeout = settings.get('outgoing', {}).get('max_request_timeout')
if max_request_timeout is None:
    logger.info('max_request_timeout={0}'.format(max_request_timeout))
else:
    if isinstance(max_request_timeout, float):
        logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
    else:
        logger.critical('outgoing.max_request_timeout, if defined, has to be a float')
        sys.exit(1)
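
# Illustrative (hypothetical) settings.yml shape read by the block above;
# the key may be omitted, or set to a float number of seconds:
#
#   outgoing:
#       max_request_timeout: 10.0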


def send_http_request(engine, request_params):
    # create a dictionary which contains all
    # information about the request
    request_args = dict(
        headers=request_params['headers'],
        cookies=request_params['cookies'],
        verify=request_params['verify']
    )

    # setting engine based proxies
    if hasattr(engine, 'proxies'):
        request_args['proxies'] = engine.proxies

    # specific type of request (GET or POST)
    if request_params['method'] == 'GET':
        req = requests_lib.get
    else:
        req = requests_lib.post
        request_args['data'] = request_params['data']

    # send the request
    return req(request_params['url'], **request_args)


def search_one_http_request(engine, query, request_params):
    # update request parameters dependent on
    # search-engine (contained in engines folder)
    engine.request(query, request_params)

    # ignore empty urls
    if not request_params['url']:
        return None

    # send request
    response = send_http_request(engine, request_params)

    # parse the response
    response.search_params = request_params
    return engine.response(response)

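
# Illustrative sketch of the engine protocol consumed above (a hypothetical
# engine module, not part of this file): request() fills in the url built
# from the default params, response() turns the HTTP response into result
# dicts.
#
#   def request(query, params):
#       params['url'] = 'https://example.com/search?q=' + query
#       return params
#
#   def response(resp):
#       # parse_results() is hypothetical
#       return [{'url': u, 'title': t, 'content': c}
#               for u, t, c in parse_results(resp.text)]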


def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    # set timeout for all HTTP requests
    requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
    # reset the HTTP total time
    requests_lib.reset_time_for_thread()

    engine = engines[engine_name]

    # suppose everything will be alright
    requests_exception = False

    try:
        # send requests and parse the results
        search_results = search_one_http_request(engine, query, request_params)

        # check if the engine accepted the request
        if search_results is not None:
            # yes, so add results
            result_container.extend(engine_name, search_results)

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = requests_lib.get_time_for_thread()
            result_container.add_timing(engine_name, engine_time, page_load_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1
                # update stats with the total HTTP time
                engine.stats['page_load_time'] += page_load_time
                engine.stats['page_load_count'] += 1

    except Exception as e:
        # timing
        engine_time = time() - start_time
        page_load_time = requests_lib.get_time_for_thread()
        result_container.add_timing(engine_name, engine_time, page_load_time)

        # record the errors
        with threading.RLock():
            engine.stats['errors'] += 1

        if isinstance(e, requests.exceptions.Timeout):
            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout "
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif isinstance(e, requests.exceptions.RequestException):
            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception "
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # other errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))

    # suspend the engine or not, depending on whether there were HTTP errors
    with threading.RLock():
        if requests_exception:
            # update continuous_errors / suspend_end_time
            engine.continuous_errors += 1
            engine.suspend_end_time = time() + min(settings['search']['max_ban_time_on_fail'],
                                                   engine.continuous_errors * settings['search']['ban_time_on_fail'])
        else:
            # no HTTP error (perhaps an engine error)
            # anyway, reset the suspend variables
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
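
# The suspension back-off above grows linearly with consecutive failures and
# is capped. Assuming the stock settings.yml values ban_time_on_fail=5 and
# max_ban_time_on_fail=120 (values assumed here, not defined in this file),
# three failures in a row suspend an engine for min(120, 3 * 5) = 15 seconds.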


def record_offline_engine_stats_on_error(engine, result_container, start_time):
    engine_time = time() - start_time
    result_container.add_timing(engine.name, engine_time, engine_time)

    with threading.RLock():
        engine.stats['errors'] += 1


def search_one_offline_request(engine, query, request_params):
    return engine.search(query, request_params)


def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    engine = engines[engine_name]

    try:
        search_results = search_one_offline_request(engine, query, request_params)

        if search_results:
            result_container.extend(engine_name, search_results)

            engine_time = time() - start_time
            result_container.add_timing(engine_name, engine_time, engine_time)
            with threading.RLock():
                engine.stats['engine_time'] += engine_time
                engine.stats['engine_time_count'] += 1

    except ValueError as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))


def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
    if engines[engine_name].offline:
        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)


def search_multiple_requests(requests, result_container, start_time, timeout_limit):
    search_id = str(uuid4())

    for engine_name, query, request_params in requests:
        th = threading.Thread(
            target=search_one_request_safe,
            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - start_time))
            th.join(remaining_time)
            if th.is_alive():
                result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                logger.warning('engine timeout: {0}'.format(th._engine_name))
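
# All engine threads above share one wall-clock deadline: each join() gets
# only the time left (timeout_limit minus what has already elapsed), so the
# loop as a whole never waits longer than timeout_limit. For example, with
# timeout_limit=3.0, if the first join consumes 2.5 s, the next thread is
# given at most 0.5 s before being reported as unresponsive.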


# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
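
# A fresh copy is built per engine request; the search code then overwrites
# fields in place, e.g. (hypothetical values):
#
#   params = default_request_params()
#   params['headers']['User-Agent'] = gen_useragent()
#   params['pageno'] = 2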


# remove duplicate queries.
# FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
def deduplicate_query_engines(query_engines):
    uniq_query_engines = {q["category"] + '|' + q["name"]: q for q in query_engines}
    return uniq_query_engines.values()
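
# For example (hypothetical input), two selections of the same engine in the
# same category collapse into one entry per (category, name) pair:
#
#   deduplicate_query_engines([{'category': 'general', 'name': 'wikipedia'},
#                              {'category': 'general', 'name': 'wikipedia'}])
#   # -> dict_values([{'category': 'general', 'name': 'wikipedia'}])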


def get_search_query_from_webapp(preferences, form):
    # no text for the query ?
    if not form.get('q'):
        raise SearxParameterException('q', '')

    # set blocked engines
    disabled_engines = preferences.engines.get_disabled()

    # parse query, if tags are set, which change
    # the search engine or search language
    raw_text_query = RawTextQuery(form['q'], disabled_engines)

    # set query
    query = raw_text_query.getQuery()

    # get and check page number
    pageno_param = form.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        raise SearxParameterException('pageno', pageno_param)
    query_pageno = int(pageno_param)

    # get language
    # set specific language if set on request, query or preferences
    # TODO: support searches in multiple languages
    if raw_text_query.languages:
        query_lang = raw_text_query.languages[-1]
    elif 'language' in form:
        query_lang = form.get('language')
    else:
        query_lang = preferences.get_value('language')

    # check language
    if not VALID_LANGUAGE_CODE.match(query_lang):
        raise SearxParameterException('language', query_lang)

    # get safesearch
    if 'safesearch' in form:
        query_safesearch = form.get('safesearch')
        # first check: safesearch has to be a digit
        if not query_safesearch.isdigit():
            raise SearxParameterException('safesearch', query_safesearch)
        query_safesearch = int(query_safesearch)
    else:
        query_safesearch = preferences.get_value('safesearch')

    # second check: safesearch has to be in the range [0, 2]
    if query_safesearch < 0 or query_safesearch > 2:
        raise SearxParameterException('safesearch', query_safesearch)

    # get time_range
    query_time_range = form.get('time_range')

    # check time_range
    if query_time_range not in ('None', None, '', 'day', 'week', 'month', 'year'):
        raise SearxParameterException('time_range', query_time_range)

    # query_engines
    query_engines = raw_text_query.engines

    # timeout_limit
    query_timeout = raw_text_query.timeout_limit
    if query_timeout is None and 'timeout_limit' in form:
        raw_time_limit = form.get('timeout_limit')
        if raw_time_limit in ['None', '']:
            raw_time_limit = None
        else:
            try:
                query_timeout = float(raw_time_limit)
            except ValueError:
                raise SearxParameterException('timeout_limit', raw_time_limit)

    # query_categories
    query_categories = []

    # if engines are calculated from the query,
    # set categories by using that information
    if query_engines and raw_text_query.specific:
        additional_categories = set()
        for engine in query_engines:
            if 'from_bang' in engine and engine['from_bang']:
                additional_categories.add('none')
            else:
                additional_categories.add(engine['category'])
        query_categories = list(additional_categories)

    # otherwise, use the defined categories to
    # calculate which engines should be used
    else:
        # set categories/engines
        load_default_categories = True
        for pd_name, pd in form.items():
            if pd_name == 'categories':
                query_categories.extend(categ for categ in map(str.strip, pd.split(',')) if categ in categories)
            elif pd_name == 'engines':
                pd_engines = [{'category': engines[engine].categories[0],
                               'name': engine}
                              for engine in map(str.strip, pd.split(',')) if engine in engines]
                if pd_engines:
                    query_engines.extend(pd_engines)
                    load_default_categories = False
            elif pd_name.startswith('category_'):
                category = pd_name[9:]

                # if category is not found in list, skip
                if category not in categories:
                    continue

                if pd != 'off':
                    # add category to list
                    query_categories.append(category)
                elif category in query_categories:
                    # remove category from list if property is set to 'off'
                    query_categories.remove(category)

        if not load_default_categories:
            if not query_categories:
                query_categories = list(set(engine['category']
                                            for engine in query_engines))
        else:
            # if no category is specified for this search,
            # use the user-defined default configuration
            # (which is stored in a cookie)
            if not query_categories:
                cookie_categories = preferences.get_value('categories')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        query_categories.append(ccateg)

            # if still no category is specified, use 'general'
            # as the default category
            if not query_categories:
                query_categories = ['general']

            # use all engines for this search which are
            # declared under the specific categories
            for categ in query_categories:
                query_engines.extend({'category': categ,
                                      'name': engine.name}
                                     for engine in categories[categ]
                                     if (engine.name, categ) not in disabled_engines)

    query_engines = deduplicate_query_engines(query_engines)
    external_bang = raw_text_query.external_bang

    return (SearchQuery(query, query_engines, query_categories,
                        query_lang, query_safesearch, query_pageno,
                        query_time_range, query_timeout, preferences,
                        external_bang=external_bang),
            raw_text_query)
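
# Illustrative form contents accepted above (hypothetical values; in practice
# this mapping comes from the Flask request in webapp.py):
#
#   {'q': 'map projections', 'pageno': '1', 'language': 'en-US',
#    'safesearch': '1', 'time_range': 'month', 'timeout_limit': '3.0',
#    'category_general': 'on'}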


class Search:
    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super().__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()
        self.start_time = None
        self.actual_timeout = None

    def search_external_bang(self):
        """
        Check if there is an external bang.
        If yes, update self.result_container and return True.
        """
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the
            # rest of the search does not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """
        Check if an answerer returns a result.
        If yes, update self.result_container and return True.
        """
        answerers_results = ask(self.search_query)

        if answerers_results:
            for results in answerers_results:
                self.result_container.extend('answer', results)
            return True
        return False

    def _is_accepted(self, engine_name, engine):
        if not self.search_query.preferences.validate_token(engine):
            return False

        # skip suspended engines
        if engine.suspend_end_time >= time():
            logger.debug('Engine currently suspended: %s', engine_name)
            return False

        # if paging is not supported, skip
        if self.search_query.pageno > 1 and not engine.paging:
            return False

        # if time_range is not supported, skip
        if self.search_query.time_range and not engine.time_range_support:
            return False

        return True

    def _get_params(self, selected_engine, user_agent):
        if selected_engine['name'] not in engines:
            return None, None

        engine = engines[selected_engine['name']]

        if not self._is_accepted(selected_engine['name'], engine):
            return None, None

        # set default request parameters
        request_params = {}
        if not engine.offline:
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = self.search_query.lang

            request_params['safesearch'] = self.search_query.safesearch
            request_params['time_range'] = self.search_query.time_range

        request_params['category'] = selected_engine['category']
        request_params['pageno'] = self.search_query.pageno

        return request_params, engine.timeout

    # do search-request
    def _get_requests(self):
        # init vars
        requests = []

        # set default useragent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        # max of all selected engine timeouts
        default_timeout = 0

        # start search-request for all selected engines
        for selected_engine in self.search_query.engines:
            # set default request parameters
            request_params, engine_timeout = self._get_params(selected_engine, user_agent)
            if request_params is None:
                continue

            # append request to list
            requests.append((selected_engine['name'], self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, engine_timeout)

        # adjust timeout
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: from user query, except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: default, except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: from user query, except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug("actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})"
                     .format(actual_timeout, default_timeout, query_timeout, max_request_timeout))

        return requests, actual_timeout
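
    # Worked example (hypothetical numbers): with selected engines whose
    # timeouts are 2.0 s and 4.0 s, default_timeout is 4.0 s. A user-supplied
    # timeout_limit of 8.0 s alone yields min(4.0, 8.0) = 4.0 s, while the
    # same request with max_request_timeout=10.0 yields min(8.0, 10.0) = 8.0 s.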

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout.
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search requests
        if requests:
            search_multiple_requests(requests, self.result_container, self.start_time, self.actual_timeout)
            start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self):
        self.start_time = time()

        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()

        return self.result_container
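
# Minimal usage sketch (assumes `preferences` and a Flask `form` mapping,
# both supplied by webapp.py in practice):
#
#   search_query, raw_text_query = get_search_query_from_webapp(preferences, form)
#   result_container = Search(search_query).search()
#   results = result_container.get_ordered_results()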


class SearchWithPlugins(Search):
    """Similar to the Search class but calls the plugins."""

    def __init__(self, search_query, ordered_plugin_list, request):
        super().__init__(search_query)
        self.ordered_plugin_list = ordered_plugin_list
        self.request = request

    def search(self):
        if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
            super().search()

        plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)

        return self.result_container