'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

from os.path import realpath, dirname, splitext, join
from imp import load_source
import grequests
from itertools import izip_longest, chain
from operator import itemgetter
from urlparse import urlparse
from searx import settings
import ConfigParser
import sys
from datetime import datetime

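# This module loads the engine definitions listed in engines.cfg, groups
# them into categories, fires the selected engines' requests in parallel
# via grequests, then merges, deduplicates and scores the collected
# results.
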
engine_dir = dirname(realpath(__file__))
searx_dir = join(engine_dir, '../../')

engines_config = ConfigParser.SafeConfigParser()
engines_config.read(join(searx_dir, 'engines.cfg'))
number_of_searches = 0

engines = {}

categories = {'general': []}


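# Each engine is an ordinary Python module in this directory, imported by
# file name. Dropping any stale entry from sys.modules first ensures the
# module is re-read from disk, so one engine file can back several
# differently configured engine entries in engines.cfg.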
def load_module(filename):
    modname = splitext(filename)[0]
    if modname in sys.modules:
        del sys.modules[modname]
    filepath = join(engine_dir, filename)
    module = load_source(modname, filepath)
    module.name = modname
    return module


if not engines_config.sections():
    print '[E] Error: no engines found. Edit your engines.cfg'
    sys.exit(2)


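# A minimal engines.cfg section looks roughly like this (the section and
# engine names below are illustrative, not taken from a real config):
#
#   [duckduckgo]
#   engine = duckduckgo
#   categories = general
#
# 'engine' selects the module file to load (without '.py'); every other
# option is copied onto the loaded module via setattr() below, with
# 'categories' treated specially as a comma-separated list.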
2013-11-10 20:16:25 +00:00
|
|
|
for engine_config_name in engines_config.sections():
|
|
|
|
engine_data = engines_config.options(engine_config_name)
|
|
|
|
engine = load_module(engines_config.get(engine_config_name, 'engine')+'.py')
|
|
|
|
engine.name = engine_config_name
|
2013-10-23 21:54:46 +00:00
|
|
|
for param_name in engine_data:
|
|
|
|
if param_name == 'engine':
|
|
|
|
continue
|
|
|
|
if param_name == 'categories':
|
2013-11-10 20:16:25 +00:00
|
|
|
if engines_config.get(engine_config_name, param_name) == 'none':
|
2013-11-04 20:46:23 +00:00
|
|
|
engine.categories = []
|
|
|
|
else:
|
2013-11-10 20:16:25 +00:00
|
|
|
engine.categories = map(str.strip, engines_config.get(engine_config_name, param_name).split(','))
|
2013-10-23 21:54:46 +00:00
|
|
|
continue
|
2013-11-10 20:16:25 +00:00
|
|
|
setattr(engine, param_name, engines_config.get(engine_config_name, param_name))
|
2013-10-25 21:41:14 +00:00
|
|
|
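    # A public attribute still set to None means a required option was
    # missing from engines.cfg, so fail fast and name the culprit.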
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if getattr(engine, engine_attr) is None:
            print '[E] Engine config error: Missing attribute "{0}.{1}"'.format(engine.name, engine_attr)
            sys.exit(1)
    engines[engine.name] = engine
    engine.stats = {'result_count': 0, 'search_count': 0, 'page_load_time': 0, 'score_count': 0, 'errors': 0}
    if hasattr(engine, 'categories'):
        for category_name in engine.categories:
            categories.setdefault(category_name, []).append(engine)
    else:
        categories['general'].append(engine)


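# Template for the per-request parameters handed to each engine's request()
# hook; the engine fills in 'url' (and 'data' for POST requests) and may
# add extra headers or cookies.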
def default_request_params():
    return {'method': 'GET', 'headers': {}, 'data': {}, 'url': '', 'cookies': {}}


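# grequests delivers responses through a 'response' hook; make_callback
# builds that hook for one engine, closing over the shared results dict so
# concurrently running engines can each store their parsed results under
# their own name.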
def make_callback(engine_name, results, callback, params):
    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        cb_res = []
        response.search_params = params
        engines[engine_name].stats['page_load_time'] += (datetime.now() - params['started']).total_seconds()
        try:
            search_results = callback(response)
        except Exception, e:
            engines[engine_name].stats['errors'] += 1
            results[engine_name] = cb_res
            print '[E] Error with engine "{0}":\n\t{1}'.format(engine_name, str(e))
            return
        for result in search_results:
            result['engine'] = engine_name
            cb_res.append(result)
        results[engine_name] = cb_res
    return process_callback


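# Naive query highlighting: every whitespace-separated query term is
# wrapped in <b> tags by literal, case-sensitive string replacement;
# content that already looks like HTML is returned untouched.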
def highlight_content(content, query):
    # ignoring html contents
    # TODO better html content detection
    if content.find('<') != -1:
        return content
    for chunk in query.split():
        content = content.replace(chunk, '<b>{0}</b>'.format(chunk))

    return content


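# Run one search: build a request for every selected engine, fire them all
# in parallel, then merge the per-engine result lists into a single scored,
# deduplicated list, best score first.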
def search(query, request, selected_engines):
    global engines, categories, number_of_searches
    requests = []
    results = {}
    number_of_searches += 1
    user_agent = request.headers.get('User-Agent', '')

    for selected_engine in selected_engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['started'] = datetime.now()
        request_params = engine.request(query, request_params)

        callback = make_callback(selected_engine['name'], results, engine.response, request_params)

        request_args = dict(headers = request_params['headers']
                            ,hooks = dict(response=callback)
                            ,cookies = request_params['cookies']
                            ,timeout = settings.request_timeout
                            )

        if request_params['method'] == 'GET':
            req = grequests.get
        else:
            req = grequests.post
            request_args['data'] = request_params['data']

        # ignoring empty urls
        if not request_params['url']:
            continue

        requests.append(req(request_params['url'], **request_args))
    grequests.map(requests)
    for engine_name,engine_results in results.items():
        engines[engine_name].stats['search_count'] += 1
        engines[engine_name].stats['result_count'] += len(engine_results)
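    # Interleave the per-engine result lists round-robin style: izip_longest
    # pads the shorter lists with None and filter(None, ...) drops the
    # padding, so e.g. A1,A2,A3 and B1,B2 become A1,B1,A2,B2,A3.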
    flat_res = filter(None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(selected_engines)
    results = []
    # deduplication + scoring
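    # Scores are position based: the result at index i gets
    # int((flat_len - i)/engines_len)*weight + 1, so e.g. with 2 engines and
    # 10 interleaved results the first scores 6.0 and the last 1.0 at the
    # default weight of 1.0. Results whose netloc, path (modulo a trailing
    # slash), query string and template all match are treated as duplicates
    # and their scores are summed.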
    for i,res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        res['engines'] = [res['engine']]
        weight = 1.0
        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)
        elif res['engine'] in settings.weights:
            weight = float(settings.weights[res['engine']])
        score = int((flat_len - i)/engines_len)*weight+1
        duplicated = False
        for new_res in results:
            p1 = res['parsed_url'].path[:-1] if res['parsed_url'].path.endswith('/') else res['parsed_url'].path
            p2 = new_res['parsed_url'].path[:-1] if new_res['parsed_url'].path.endswith('/') else new_res['parsed_url'].path
            if res['parsed_url'].netloc == new_res['parsed_url'].netloc and\
               p1 == p2 and\
               res['parsed_url'].query == new_res['parsed_url'].query and\
               res.get('template') == new_res.get('template'):
                duplicated = new_res
                break
        if duplicated:
            if len(res.get('content', '')) > len(duplicated.get('content', '')):
                duplicated['content'] = res['content']
            duplicated['score'] += score
            duplicated['engines'].append(res['engine'])
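            # of the two duplicate urls, keep the https variant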
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']
        else:
            res['score'] = score
            results.append(res)

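    # post-processing: highlight the query terms in each snippet and credit
    # the result's score to every engine that returned it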
    for result in results:
        if 'content' in result:
            result['content'] = highlight_content(result['content'], query)
        for res_engine in result['engines']:
            engines[res_engine].stats['score_count'] += result['score']

    return sorted(results, key=itemgetter('score'), reverse=True)


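# Aggregate per-engine statistics: per-search averages for page load time,
# result count and score, plus total error counts, each also expressed as
# a percentage of the maximum observed value.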
def get_engines_stats():
    pageloads = []
    results = []
    scores = []
    errors = []

    max_pageload = max_results = max_score = max_errors = 0
    for engine in engines.values():
        if engine.stats['search_count'] == 0:
            continue
        results_num = engine.stats['result_count']/float(engine.stats['search_count'])
        load_times = engine.stats['page_load_time']/float(engine.stats['search_count'])
        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])
        else:
            score = 0
        max_results = max(results_num, max_results)
        max_pageload = max(load_times, max_pageload)
        max_score = max(score, max_score)
        max_errors = max(max_errors, engine.stats['errors'])
        pageloads.append({'avg': load_times, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})

    for engine in pageloads:
        engine['percentage'] = int(engine['avg']/max_pageload*100)

    for engine in results:
        engine['percentage'] = int(engine['avg']/max_results*100)

    for engine in scores:
        engine['percentage'] = int(engine['avg']/max_score*100)

    for engine in errors:
        if max_errors:
            engine['percentage'] = int(engine['avg']/max_errors*100)
        else:
            engine['percentage'] = 0

    return [('Page loads (sec)', sorted(pageloads, key=itemgetter('avg')))
            ,('Number of results', sorted(results, key=itemgetter('avg'), reverse=True))
            ,('Scores', sorted(scores, key=itemgetter('avg'), reverse=True))
            ,('Errors', sorted(errors, key=itemgetter('avg'), reverse=True))
            ]