Mirror of https://github.com/searxng/searxng, synced 2024-01-01 19:24:07 +01:00
Merge branch 'master' of https://github.com/asciimoo/searx
commit 9e72ebe064
13 changed files with 154 additions and 150 deletions
@@ -0,0 +1,22 @@
+from os import environ
+from os.path import realpath, dirname, join
+try:
+    from yaml import load
+except:
+    from sys import exit, stderr
+    stderr.write('[E] install pyyaml\n')
+    exit(2)
+
+
+searx_dir = realpath(dirname(realpath(__file__))+'/../')
+engine_dir = dirname(realpath(__file__))
+
+if 'SEARX_SETTINGS_PATH' in environ:
+    settings_path = environ['SEARX_SETTINGS_PATH']
+else:
+    settings_path = join(searx_dir, 'settings.yml')
+
+
+with open(settings_path) as settings_yaml:
+    settings = load(settings_yaml)
+
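The new module above is what the rest of this commit imports as `from searx import settings`: the YAML file is parsed once, at import time, into a plain dict. A minimal usage sketch, assuming a settings.yml whose `server` keys match the accesses made by the later hunks (the path below is only an example):

from os import environ

# The override has to happen before the first import of searx,
# because the YAML is loaded at module import time.
environ['SEARX_SETTINGS_PATH'] = '/etc/searx/settings.yml'  # example path

from searx import settings

print settings['server']['port']             # e.g. 8888
print settings['server']['request_timeout']  # e.g. 5.0 (seconds)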
@@ -23,16 +23,12 @@ from itertools import izip_longest, chain
 from operator import itemgetter
 from urlparse import urlparse
 from searx import settings
-from searx.utils import get_useragent
-import ConfigParser
+from searx.utils import gen_useragent
 import sys
 from datetime import datetime
 
 engine_dir = dirname(realpath(__file__))
-searx_dir = join(engine_dir, '../../')
 
-engines_config = ConfigParser.SafeConfigParser()
-engines_config.read(join(searx_dir, 'engines.cfg'))
 number_of_searches = 0
 
 engines = {}
@@ -48,24 +44,23 @@ def load_module(filename):
     module.name = modname
     return module
 
-if not engines_config.sections():
-    print '[E] Error no engines found. Edit your engines.cfg'
+if not 'engines' in settings or not settings['engines']:
+    print '[E] Error no engines found. Edit your settings.yml'
     exit(2)
 
-for engine_config_name in engines_config.sections():
-    engine_data = engines_config.options(engine_config_name)
-    engine = load_module(engines_config.get(engine_config_name, 'engine')+'.py')
-    engine.name = engine_config_name
+for engine_data in settings['engines']:
+    engine_name = engine_data['engine']
+    engine = load_module(engine_name+'.py')
     for param_name in engine_data:
         if param_name == 'engine':
             continue
         if param_name == 'categories':
-            if engines_config.get(engine_config_name, param_name) == 'none':
+            if engine_data['categories'] == 'none':
                 engine.categories = []
             else:
-                engine.categories = map(str.strip, engines_config.get(engine_config_name, param_name).split(','))
+                engine.categories = map(str.strip, engine_data['categories'].split(','))
             continue
-        setattr(engine, param_name, engines_config.get(engine_config_name, param_name))
+        setattr(engine, param_name, engine_data[param_name])
     for engine_attr in dir(engine):
         if engine_attr.startswith('_'):
             continue
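For orientation: in the new loop above, every item of settings['engines'] is a mapping whose 'engine' key names the module file to load, whose optional 'categories' key is a comma-separated string, and whose remaining keys are copied onto the loaded module with setattr. A sketch of one parsed entry, with purely illustrative values (none of them are taken from this commit):

# Hypothetical element of settings['engines'] after YAML parsing.
engine_data = {
    'name': 'example engine',        # becomes engine.name via setattr
    'engine': 'duckduckgo',          # duckduckgo.py would be loaded by load_module
    'categories': 'general, web',    # split on ',' and stripped by the loop above
    'number_of_results': 5,          # any extra key is set with setattr(engine, ...)
}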
@@ -118,8 +113,6 @@ def score_results(results):
         weight = 1.0
         if hasattr(engines[res['engine']], 'weight'):
             weight = float(engines[res['engine']].weight)
-        elif res['engine'] in settings.weights:
-            weight = float(settings.weights[res['engine']])
         score = int((flat_len - i)/engines_len)*weight+1
         duplicated = False
         for new_res in results:
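A quick numeric illustration of the ranking line kept in this hunk, using made-up values (flat_len, engines_len, i and weight are the names from the surrounding code; the numbers are not from the commit):

# Python 2, like the rest of the code: '/' is integer division here.
flat_len, engines_len, weight = 30, 3, 1.0
for i in (0, 15, 29):
    score = int((flat_len - i)/engines_len)*weight+1
    print i, score   # 0 -> 11.0, 15 -> 6.0, 29 -> 1.0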
@@ -153,7 +146,7 @@ def search(query, request, selected_engines):
     suggestions = set()
     number_of_searches += 1
     #user_agent = request.headers.get('User-Agent', '')
-    user_agent = get_useragent()
+    user_agent = gen_useragent()
 
     for selected_engine in selected_engines:
         if selected_engine['name'] not in engines:
@@ -172,7 +165,7 @@ def search(query, request, selected_engines):
         request_args = dict(headers = request_params['headers']
                            ,hooks = dict(response=callback)
                            ,cookies = request_params['cookies']
-                           ,timeout = settings.request_timeout
+                           ,timeout = settings['server']['request_timeout']
                            )
 
         if request_params['method'] == 'GET':
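The request_args dict above maps one-to-one onto keyword arguments of the requests library (headers, cookies, timeout, and a 'response' hook). A minimal synchronous sketch, assuming a plain requests.get call and an example URL; the real request dispatch is outside this excerpt:

import requests

def callback(response, **kwargs):
    # stand-in for the per-engine response handler wired in through 'hooks'
    print response.status_code

request_params = {'headers': {}, 'cookies': {}, 'method': 'GET'}
request_args = dict(headers = request_params['headers']
                   ,hooks = dict(response=callback)
                   ,cookies = request_params['cookies']
                   ,timeout = 5.0  # settings['server']['request_timeout'] in the real code
                   )

if request_params['method'] == 'GET':
    requests.get('https://example.com/search?q=test', **request_args)  # example URL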
@@ -1,16 +0,0 @@
-
-port = 8888
-
-secret_key = "ultrasecretkey" # change this!
-
-debug = True
-
-request_timeout = 5.0 # seconds
-
-weights = {} # 'search_engine_name': float(weight) | default is 1.0
-
-blacklist = [] # search engine blacklist
-
-categories = {} # custom search engine categories
-
-base_url = None # "https://your.domain.tld/" or None (to use request parameters)
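The deleted settings module above is superseded by settings.yml. The exact file isn't part of this excerpt, but the keys read elsewhere in the diff (server.port, server.secret_key, server.debug, server.request_timeout, server.base_url, plus an engines list) suggest a layout roughly like the sketch below; treat the concrete values as assumptions:

import yaml

example_yaml = """
server:
    port: 8888
    secret_key: "ultrasecretkey"   # change this!
    debug: True
    request_timeout: 5.0           # seconds
    base_url: False                # or "https://your.domain.tld/"
engines:
    - name: example engine         # hypothetical entry, see the engine sketch earlier
      engine: duckduckgo
      categories: general
"""

# The commit uses yaml.load(); safe_load is the stricter variant and is enough here.
settings = yaml.safe_load(example_yaml)
print settings['server']['port']        # 8888
print settings['engines'][0]['engine']  # duckduckgo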
@@ -10,7 +10,6 @@
 <ul>
     <li>Maybe Searx won’t offer you as personalised results as Google, but it doesn't make a profile about you</li>
     <li>Searx doesn't care about what you search, never shares anything with a third party, and it can't be used to compromise you</li>
     <li>Searx doesn't make money on ads and it isn't customised based on your interests. You get the pure search results</li>
-    <li>Searx is a free software, the code is 100% open and you can help to make it better. See more on <a href="https://gmail.com/asciimoo/searx">github</a></li>
 </ul>
 <p>If you do care about privacy, want to be a conscious user, moreover believe
@@ -5,7 +5,7 @@ import codecs
 import cStringIO
 import re
 
-def get_useragent():
+def gen_useragent():
     # TODO
     return "Mozilla/5.0 (X11; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0"
 
@@ -22,13 +22,7 @@ import sys
 if __name__ == "__main__":
     sys.path.append(os.path.realpath(os.path.dirname(os.path.realpath(__file__))+'/../'))
 
-# first argument is for specifying settings module, used mostly by robot tests
-from sys import argv
-if len(argv) == 2:
-    from importlib import import_module
-    settings = import_module('searx.' + argv[1])
-else:
-    from searx import settings
+from searx import settings
 
 from flask import Flask, request, render_template, url_for, Response, make_response, redirect
 from searx.engines import search, categories, engines, get_engines_stats
@@ -41,7 +35,7 @@ from searx.utils import highlight_content, html_to_text
 
 
 app = Flask(__name__)
-app.secret_key = settings.secret_key
+app.secret_key = settings['server']['secret_key']
 
 
 opensearch_xml = '''<?xml version="1.0" encoding="utf-8"?>
@@ -58,8 +52,8 @@ opensearch_xml = '''<?xml version="1.0" encoding="utf-8"?>
 
 
 def get_base_url():
-    if settings.base_url:
-        hostname = settings.base_url
+    if settings['server']['base_url']:
+        hostname = settings['server']['base_url']
     else:
         scheme = 'http'
         if request.is_secure:
@@ -252,9 +246,9 @@ def run():
     from gevent import monkey
     monkey.patch_all()
 
-    app.run(debug = settings.debug
-           ,use_debugger = settings.debug
-           ,port = settings.port
+    app.run(debug = settings['server']['debug']
+           ,use_debugger = settings['server']['debug']
+           ,port = settings['server']['port']
            )
 
 