Replace Flask by Starlette (2/n)

Alexandre Flament 2021-08-13 19:02:01 +02:00
parent 78561ce7bb
commit 1a3c73cf6f
30 changed files with 994 additions and 2235 deletions


@@ -146,4 +146,5 @@ unset MORTY_KEY
# Start uwsgi
printf 'Listen on %s\n' "${BIND_ADDRESS}"
exec su-exec searx:searx uwsgi --master --http-socket "${BIND_ADDRESS}" "${UWSGI_SETTINGS_PATH}"
export SEARX_BIND_ADDRESS="${BIND_ADDRESS}"
exec su-exec searx:searx python3 -m searx.webapp
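
The container entry point now execs the ASGI app directly instead of uwsgi, passing the bind address through the environment. A minimal sketch of how a `python3 -m searx.webapp` entry point could pick that up (hypothetical glue; the actual searx/webapp.py diff is suppressed below as too large):

import os

# Hypothetical __main__ section: parse SEARX_BIND_ADDRESS ("host:port")
# and hand it to the launcher from searx/run.py shown further down.
bind_address = os.environ.get('SEARX_BIND_ADDRESS', '127.0.0.1:8888')
host, _, port = bind_address.rpartition(':')
print('would listen on host=%s port=%s' % (host, port))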


@@ -1,12 +1,11 @@
certifi==2021.5.30
babel==2.9.1
flask-babel==2.0.0
flask==2.0.1
jinja2==3.0.1
lxml==4.6.3
pygments==2.9.0
python-dateutil==2.8.2
pyyaml==5.4.1
aiohttp[speedup]==3.7.4.post0
httpx[http2]==0.17.1
Brotli==1.0.9
uvloop==0.16.0
@@ -16,6 +15,6 @@ setproctitle==1.2.2
starlette==0.16.0
starlette-i18n==1.0.0
starlette-context==0.3.3
python-multipart==0.0.5
uvicorn[standard]==0.14.0
gunicorn==20.1.0
python-multipart==0.0.5

File diff suppressed because it is too large


@@ -2,7 +2,7 @@ import hashlib
import random
import string
import uuid
from flask_babel import gettext
from searx.i18n import gettext
# required answerer attribute
# specifies which search query keywords trigger this answerer


@@ -1,7 +1,7 @@
from functools import reduce
from operator import mul
from flask_babel import gettext
from searx.i18n import gettext
keywords = ('min',


@@ -10,7 +10,7 @@ from json import loads
from urllib.parse import urlencode
from functools import partial
from flask_babel import gettext
from searx.i18n import gettext
from searx.data import OSM_KEYS_TAGS, CURRENCIES
from searx.utils import searx_useragent


@@ -4,7 +4,7 @@
"""
from json import loads
from flask_babel import gettext
from searx.i18n import gettext
# about
about = {


@@ -3,10 +3,10 @@
PubMed (Scholar publications)
"""
from flask_babel import gettext
from lxml import etree
from datetime import datetime
from urllib.parse import urlencode
from searx.i18n import gettext
from searx.network import get
# about


@@ -29,8 +29,8 @@ from datetime import (
)
from json import loads
from urllib.parse import urlencode
from flask_babel import gettext
from searx.i18n import gettext
from searx.utils import match_language
from searx.exceptions import SearxEngineAPIException
from searx.network import raise_for_httperror


@@ -1,77 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring,missing-function-docstring
from urllib.parse import urlparse
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.serving import WSGIRequestHandler
from searx import settings
class ReverseProxyPathFix:
'''Wrap the application in this middleware and configure the
front-end server to add these headers, to let you quietly bind
this to a URL other than / and to an HTTP scheme that is
different than what is used locally.
http://flask.pocoo.org/snippets/35/
In nginx:
location /myprefix {
proxy_pass http://127.0.0.1:8000;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Script-Name /myprefix;
}
:param wsgi_app: the WSGI application
'''
# pylint: disable=too-few-public-methods
def __init__(self, wsgi_app):
self.wsgi_app = wsgi_app
self.script_name = None
self.scheme = None
self.server = None
if settings['server']['base_url']:
# If base_url is specified, then these values are given
# preference over Flask's generics.
base_url = urlparse(settings['server']['base_url'])
self.script_name = base_url.path
if self.script_name.endswith('/'):
# remove trailing slash to avoid infinite redirect on the index
# see https://github.com/searx/searx/issues/2729
self.script_name = self.script_name[:-1]
self.scheme = base_url.scheme
self.server = base_url.netloc
def __call__(self, environ, start_response):
script_name = self.script_name or environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = self.scheme or environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
server = self.server or environ.get('HTTP_X_FORWARDED_HOST', '')
if server:
environ['HTTP_HOST'] = server
return self.wsgi_app(environ, start_response)
def patch_application(app):
# serve pages with HTTP/1.1
WSGIRequestHandler.protocol_version = "HTTP/{}".format(settings['server']['http_protocol_version'])
# patch app to handle non root url-s behind proxy & wsgi
app.wsgi_app = ReverseProxyPathFix(ProxyFix(app.wsgi_app))

searx/i18n.py Normal file (+50 lines)

@@ -0,0 +1,50 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring,missing-function-docstring
import babel
import babel.numbers
import babel.dates
import babel.support
from starlette_i18n import (
i18n,
load_gettext_translations,
)
from starlette_i18n import gettext_lazy as gettext
__all__ = (
'gettext',
'format_decimal',
'format_date',
'initialize_i18n'
)
def format_decimal(number, format=None): # pylint: disable=redefined-builtin
locale = i18n.get_locale()
return babel.numbers.format_decimal(number, format=format, locale=locale)
def format_date(date=None, format='medium', rebase=False): # pylint: disable=redefined-builtin
if rebase:
raise ValueError('rebase=True not implemented')
locale = i18n.get_locale()
if format in ('full', 'long', 'medium', 'short'):
format = locale.date_formats[format]
pattern = babel.dates.parse_pattern(format)
return pattern.apply(date, locale)
def monkeypatch():
old_i18n_Locale_parse = i18n.Locale.parse
def i18n_Locale_parse(identifier, sep='_', resolve_likely_subtags=True):
if identifier == 'oc':
identifier = 'fr'
return old_i18n_Locale_parse(identifier, sep, resolve_likely_subtags)
setattr(i18n.Locale, 'parse', i18n_Locale_parse)
def initialize_i18n(translations_path):
monkeypatch()
load_gettext_translations(directory=translations_path, domain="messages")
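
A short usage sketch for the new module (the translations path is an assumption; per-request locale activation is handled by starlette_i18n middleware, not shown here):

from searx.i18n import initialize_i18n, gettext

# load the Babel catalogs once at startup
initialize_i18n('searx/translations')

# gettext here is starlette_i18n's gettext_lazy: it returns a LazyProxy,
# so the string is only resolved once a locale is active for the request,
# which makes it safe to call at import time.
name = gettext('Preferences')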


@@ -3,7 +3,6 @@
# pylint: disable=missing-module-docstring, missing-function-docstring, global-statement
import asyncio
import logging
import threading
import httpcore
import httpx
@@ -12,7 +11,7 @@ from python_socks import (
parse_proxy_url,
ProxyConnectionError,
ProxyTimeoutError,
ProxyError
ProxyError,
)
from searx import logger
@@ -26,33 +25,38 @@ else:
uvloop.install()
logger = logger.getChild('searx.http.client')
logger = logger.getChild("searx.http.client")
LOOP = None
LOOP_LOCK = threading.Lock()
SSLCONTEXTS = {}
TRANSPORT_KWARGS = {
'backend': 'asyncio',
'trust_env': False,
"backend": "asyncio",
"trust_env": False,
}
# pylint: disable=protected-access
async def close_connections_for_url(
connection_pool: httpcore.AsyncConnectionPool,
url: httpcore._utils.URL ):
connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL
):
origin = httpcore._utils.url_to_origin(url)
logger.debug('Drop connections for %r', origin)
logger.debug("Drop connections for %r", origin)
connections_to_close = connection_pool._connections_for_origin(origin)
for connection in connections_to_close:
await connection_pool._remove_from_pool(connection)
try:
await connection.aclose()
except httpcore.NetworkError as e:
logger.warning('Error closing an existing connection', exc_info=e)
logger.warning("Error closing an existing connection", exc_info=e)
# pylint: enable=protected-access
def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
def get_sslcontexts(
proxy_url=None, cert=None, verify=True, trust_env=True, http2=False
):
global SSLCONTEXTS
key = (proxy_url, cert, verify, trust_env, http2)
if key not in SSLCONTEXTS:
@@ -98,7 +102,7 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
except httpcore.RemoteProtocolError as e:
# in case of httpcore.RemoteProtocolError: Server disconnected
await close_connections_for_url(self, url)
logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e)
logger.warning("httpcore.RemoteProtocolError: retry", exc_info=e)
# retry
except (httpcore.NetworkError, httpcore.ProtocolError) as e:
# httpcore.WriteError on HTTP/2 connection leaves a new opened stream
@@ -124,35 +128,50 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
# raised by _keepalive_sweep()
# from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198 # pylint: disable=line-too-long
await close_connections_for_url(self._pool, url)
logger.warning('httpcore.CloseError: retry', exc_info=e)
logger.warning("httpcore.CloseError: retry", exc_info=e)
# retry
except httpcore.RemoteProtocolError as e:
# in case of httpcore.RemoteProtocolError: Server disconnected
await close_connections_for_url(self._pool, url)
logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e)
logger.warning("httpcore.RemoteProtocolError: retry", exc_info=e)
# retry
except (httpcore.ProtocolError, httpcore.NetworkError) as e:
await close_connections_for_url(self._pool, url)
raise e
def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
def get_transport_for_socks_proxy(
verify, http2, local_address, proxy_url, limit, retries
):
global TRANSPORT_KWARGS
# support socks5h (requests compatibility):
# https://requests.readthedocs.io/en/master/user/advanced/#socks
# socks5:// hostname is resolved on client side
# socks5h:// hostname is resolved on proxy side
rdns = False
socks5h = 'socks5h://'
socks5h = "socks5h://"
if proxy_url.startswith(socks5h):
proxy_url = 'socks5://' + proxy_url[len(socks5h):]
proxy_url = "socks5://" + proxy_url[len(socks5h) :]
rdns = True
proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
verify = get_sslcontexts(proxy_url, None, True, False, http2) if verify is True else verify
(
proxy_type,
proxy_host,
proxy_port,
proxy_username,
proxy_password,
) = parse_proxy_url(proxy_url)
verify = (
get_sslcontexts(proxy_url, None, True, False, http2)
if verify is True
else verify
)
return AsyncProxyTransportFixed(
proxy_type=proxy_type, proxy_host=proxy_host, proxy_port=proxy_port,
username=proxy_username, password=proxy_password,
proxy_type=proxy_type,
proxy_host=proxy_host,
proxy_port=proxy_port,
username=proxy_username,
password=proxy_password,
rdns=rdns,
loop=get_loop(),
verify=verify,
@@ -168,7 +187,9 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit
def get_transport(verify, http2, local_address, proxy_url, limit, retries):
global TRANSPORT_KWARGS
verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
verify = (
get_sslcontexts(None, None, True, False, http2) if verify is True else verify
)
return AsyncHTTPTransportFixed(
# pylint: disable=protected-access
verify=verify,
@@ -184,7 +205,7 @@ def get_transport(verify, http2, local_address, proxy_url, limit, retries):
def iter_proxies(proxies):
# https://www.python-httpx.org/compatibility/#proxy-keys
if isinstance(proxies, str):
yield 'all://', proxies
yield "all://", proxies
elif isinstance(proxies, dict):
for pattern, proxy_url in proxies.items():
yield pattern, proxy_url
@@ -192,22 +213,31 @@ def iter_proxies(proxies):
def new_client(
# pylint: disable=too-many-arguments
enable_http, verify, enable_http2,
max_connections, max_keepalive_connections, keepalive_expiry,
proxies, local_address, retries, max_redirects ):
enable_http,
verify,
enable_http2,
max_connections,
max_keepalive_connections,
keepalive_expiry,
proxies,
local_address,
retries,
max_redirects,
):
limit = httpx.Limits(
max_connections=max_connections,
max_keepalive_connections=max_keepalive_connections,
keepalive_expiry=keepalive_expiry
keepalive_expiry=keepalive_expiry,
)
# See https://www.python-httpx.org/advanced/#routing
mounts = {}
for pattern, proxy_url in iter_proxies(proxies):
if not enable_http and (pattern == 'http' or pattern.startswith('http://')):
if not enable_http and (pattern == "http" or pattern.startswith("http://")):
continue
if (proxy_url.startswith('socks4://')
or proxy_url.startswith('socks5://')
or proxy_url.startswith('socks5h://')
if (
proxy_url.startswith("socks4://")
or proxy_url.startswith("socks5://")
or proxy_url.startswith("socks5h://")
):
mounts[pattern] = get_transport_for_socks_proxy(
verify, enable_http2, local_address, proxy_url, limit, retries
@@ -218,10 +248,39 @@ def new_client(
)
if not enable_http:
mounts['http://'] = AsyncHTTPTransportNoHttp()
mounts["http://"] = AsyncHTTPTransportNoHttp()
transport = get_transport(verify, enable_http2, local_address, None, limit, retries)
return httpx.AsyncClient(transport=transport, mounts=mounts, max_redirects=max_redirects)
return httpx.AsyncClient(
transport=transport, mounts=mounts, max_redirects=max_redirects
)
def create_loop():
# pylint: disable=consider-using-with
global LOOP_LOCK
LOOP_LOCK.acquire()
if LOOP:
return
def loop_thread():
global LOOP
try:
LOOP = asyncio.new_event_loop()
except: # pylint: disable=bare-except
logger.exception('Error on asyncio.new_event_loop()')
finally:
LOOP_LOCK.release()
if LOOP:
LOOP.run_forever()
thread = threading.Thread(
target=loop_thread,
name="asyncio_loop",
daemon=True,
)
thread.start()
LOOP_LOCK.acquire()
def get_loop():
@@ -229,20 +288,10 @@ def get_loop():
if LOOP:
return LOOP
loop_ready = threading.Lock()
loop_ready.acquire()
def loop_thread():
global LOOP
LOOP = asyncio.new_event_loop()
loop_ready.release()
LOOP.run_forever()
thread = threading.Thread(
target=loop_thread,
name='asyncio_loop',
daemon=True,
)
thread.start()
loop_ready.acquire()
create_loop()
return LOOP
def set_loop(loop):
global LOOP
LOOP = loop
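
create_loop starts a private asyncio loop in a daemon thread; synchronous callers then schedule coroutines onto it with asyncio.run_coroutine_threadsafe (as Network.close_all does below). A self-contained sketch of the same pattern, with generic names:

import asyncio
import threading

LOOP = None
ready = threading.Event()

def loop_thread():
    global LOOP
    LOOP = asyncio.new_event_loop()
    ready.set()          # signal that the loop exists before running it
    LOOP.run_forever()

threading.Thread(target=loop_thread, name='asyncio_loop', daemon=True).start()
ready.wait()

async def job():
    await asyncio.sleep(0.1)
    return 42

# synchronous code hands coroutines to the background loop:
future = asyncio.run_coroutine_threadsafe(job(), LOOP)
print(future.result(timeout=1))  # -> 42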


@@ -3,7 +3,6 @@
# pylint: disable=global-statement
# pylint: disable=missing-module-docstring, missing-class-docstring, missing-function-docstring
import atexit
import asyncio
import ipaddress
from itertools import cycle
@@ -11,38 +10,44 @@ from itertools import cycle
import httpx
from .client import new_client, get_loop
from searx import logger
DEFAULT_NAME = '__DEFAULT__'
DEFAULT_NAME = "__DEFAULT__"
NETWORKS = {}
# requests compatibility when reading proxy settings from settings.yml
PROXY_PATTERN_MAPPING = {
'http': 'http://',
'https': 'https://',
'socks4': 'socks4://',
'socks5': 'socks5://',
'socks5h': 'socks5h://',
'http:': 'http://',
'https:': 'https://',
'socks4:': 'socks4://',
'socks5:': 'socks5://',
'socks5h:': 'socks5h://',
"http": "http://",
"https": "https://",
"socks4": "socks4://",
"socks5": "socks5://",
"socks5h": "socks5h://",
"http:": "http://",
"https:": "https://",
"socks4:": "socks4://",
"socks5:": "socks5://",
"socks5h:": "socks5h://",
}
ADDRESS_MAPPING = {
'ipv4': '0.0.0.0',
'ipv6': '::'
}
ADDRESS_MAPPING = {"ipv4": "0.0.0.0", "ipv6": "::"}
class Network:
__slots__ = (
'enable_http', 'verify', 'enable_http2',
'max_connections', 'max_keepalive_connections', 'keepalive_expiry',
'local_addresses', 'proxies', 'max_redirects', 'retries', 'retry_on_http_error',
'_local_addresses_cycle', '_proxies_cycle', '_clients'
"enable_http",
"verify",
"enable_http2",
"max_connections",
"max_keepalive_connections",
"keepalive_expiry",
"local_addresses",
"proxies",
"max_redirects",
"retries",
"retry_on_http_error",
"_local_addresses_cycle",
"_proxies_cycle",
"_clients",
)
def __init__(
@@ -58,7 +63,8 @@ class Network:
local_addresses=None,
retries=0,
retry_on_http_error=None,
max_redirects=30 ):
max_redirects=30,
):
self.enable_http = enable_http
self.verify = verify
@@ -78,13 +84,13 @@
def check_parameters(self):
for address in self.iter_ipaddresses():
if '/' in address:
if "/" in address:
ipaddress.ip_network(address, False)
else:
ipaddress.ip_address(address)
if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
raise ValueError('proxies type has to be str, dict or None')
raise ValueError("proxies type has to be str, dict or None")
def iter_ipaddresses(self):
local_addresses = self.local_addresses
@@ -99,7 +105,7 @@
while True:
count = 0
for address in self.iter_ipaddresses():
if '/' in address:
if "/" in address:
for a in ipaddress.ip_network(address, False).hosts():
yield str(a)
count += 1
@@ -115,7 +121,7 @@
return
# https://www.python-httpx.org/compatibility/#proxy-keys
if isinstance(self.proxies, str):
yield 'all://', [self.proxies]
yield "all://", [self.proxies]
else:
for pattern, proxy_url in self.proxies.items():
pattern = PROXY_PATTERN_MAPPING.get(pattern, pattern)
@@ -129,7 +135,10 @@
proxy_settings[pattern] = cycle(proxy_urls)
while True:
# pylint: disable=stop-iteration-return
yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())
yield tuple(
(pattern, next(proxy_url_cycle))
for pattern, proxy_url_cycle in proxy_settings.items()
)
def get_client(self, verify=None, max_redirects=None):
verify = self.verify if verify is None else verify
@@ -148,32 +157,43 @@
dict(proxies),
local_address,
0,
max_redirects
max_redirects,
)
return self._clients[key]
async def aclose(self):
async def close_client(client):
async def close_client(client: httpx.AsyncClient):
try:
await client.aclose()
except httpx.HTTPError:
pass
await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)
await asyncio.gather(
*[close_client(client) for client in self._clients.values()],
return_exceptions=False
)
@staticmethod
def get_kwargs_clients(kwargs):
kwargs_clients = {}
if 'verify' in kwargs:
kwargs_clients['verify'] = kwargs.pop('verify')
if 'max_redirects' in kwargs:
kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
if "verify" in kwargs:
kwargs_clients["verify"] = kwargs.pop("verify")
if "max_redirects" in kwargs:
kwargs_clients["max_redirects"] = kwargs.pop("max_redirects")
return kwargs_clients
def is_valid_response(self, response):
# pylint: disable=too-many-boolean-expressions
if ((self.retry_on_http_error is True and 400 <= response.status_code <= 599)
or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
if (
(self.retry_on_http_error is True and 400 <= response.status_code <= 599)
or (
isinstance(self.retry_on_http_error, list)
and response.status_code in self.retry_on_http_error
)
or (
isinstance(self.retry_on_http_error, int)
and response.status_code == self.retry_on_http_error
)
):
return False
return True
@@ -209,39 +229,52 @@ class Network:
@classmethod
async def aclose_all(cls):
global NETWORKS
await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)
await asyncio.gather(
*[network.aclose() for network in NETWORKS.values()],
return_exceptions=False
)
@classmethod
def close_all(cls):
future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), get_loop())
future.result()
def get_network(name=None):
global NETWORKS
return NETWORKS.get(name or DEFAULT_NAME)
if name:
return NETWORKS.get(name)
if DEFAULT_NAME not in NETWORKS:
NETWORKS[DEFAULT_NAME] = Network({})
return NETWORKS[DEFAULT_NAME]
def initialize(settings_engines=None, settings_outgoing=None):
# pylint: disable=import-outside-toplevel
from searx.engines import engines
from searx import settings
# pylint: enable=import-outside-toplevel
global NETWORKS
settings_engines = settings_engines or settings['engines']
settings_outgoing = settings_outgoing or settings['outgoing']
settings_engines = settings_engines or settings["engines"]
settings_outgoing = settings_outgoing or settings["outgoing"]
# default parameters for AsyncHTTPTransport
# see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121 # pylint: disable=line-too-long
default_params = {
'enable_http': False,
'verify': True,
'enable_http2': settings_outgoing['enable_http2'],
'max_connections': settings_outgoing['pool_connections'],
'max_keepalive_connections': settings_outgoing['pool_maxsize'],
'keepalive_expiry': settings_outgoing['keepalive_expiry'],
'local_addresses': settings_outgoing['source_ips'],
'proxies': settings_outgoing['proxies'],
'max_redirects': settings_outgoing['max_redirects'],
'retries': settings_outgoing['retries'],
'retry_on_http_error': None,
"enable_http": False,
"verify": True,
"enable_http2": settings_outgoing["enable_http2"],
"max_connections": settings_outgoing["pool_connections"],
"max_keepalive_connections": settings_outgoing["pool_maxsize"],
"keepalive_expiry": settings_outgoing["keepalive_expiry"],
"local_addresses": settings_outgoing["source_ips"],
"proxies": settings_outgoing["proxies"],
"max_redirects": settings_outgoing["max_redirects"],
"retries": settings_outgoing["retries"],
"retry_on_http_error": None,
}
def new_network(params):
@@ -254,22 +287,22 @@ def initialize(settings_engines=None, settings_outgoing=None):
def iter_networks():
nonlocal settings_engines
for engine_spec in settings_engines:
engine_name = engine_spec['name']
engine_name = engine_spec["name"]
engine = engines.get(engine_name)
if engine is None:
continue
network = getattr(engine, 'network', None)
network = getattr(engine, "network", None)
yield engine_name, engine, network
if NETWORKS:
done()
Network.close_all()
NETWORKS.clear()
NETWORKS[DEFAULT_NAME] = new_network({})
NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'})
NETWORKS['ipv6'] = new_network({'local_addresses': '::'})
NETWORKS["ipv4"] = new_network({"local_addresses": "0.0.0.0"})
NETWORKS["ipv6"] = new_network({"local_addresses": "::"})
# define networks from outgoing.networks
for network_name, network in settings_outgoing['networks'].items():
for network_name, network in settings_outgoing["networks"].items():
NETWORKS[network_name] = new_network(network)
# define networks from engines.[i].network (except references)
@@ -289,29 +322,3 @@ def initialize(settings_engines=None, settings_outgoing=None):
for engine_name, engine, network in iter_networks():
if isinstance(network, str):
NETWORKS[engine_name] = NETWORKS[network]
@atexit.register
def done():
"""Close all HTTP client
Avoid a warning at exit
see https://github.com/encode/httpx/blob/1a6e254f72d9fd5694a1c10a28927e193ab4f76b/httpx/_client.py#L1785
Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
So Network.aclose is called here using atexit.register
"""
global NETWORKS
try:
loop = get_loop()
if loop:
future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
# wait 3 seconds to close the HTTP clients
future.result(3)
except:
logger.exception('Exception while closing clients')
finally:
NETWORKS.clear()
NETWORKS[DEFAULT_NAME] = Network()
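
For reference, retry_on_http_error accepts three shapes; the check in is_valid_response above boils down to this (a standalone restatement, not the project's code):

def should_retry(retry_on_http_error, status_code):
    # True -> retry on any 4xx/5xx status
    if retry_on_http_error is True:
        return 400 <= status_code <= 599
    # list -> retry on any listed status
    if isinstance(retry_on_http_error, list):
        return status_code in retry_on_http_error
    # int -> retry on that exact status
    if isinstance(retry_on_http_error, int):
        return status_code == retry_on_http_error
    return False

assert should_retry(True, 503)
assert should_retry([429, 503], 429)
assert not should_retry(404, 500)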


@@ -21,6 +21,8 @@ from os import listdir, makedirs, remove, stat, utime
from os.path import abspath, basename, dirname, exists, join
from shutil import copyfile
import babel.support
from searx import logger, settings
@@ -63,9 +65,19 @@ class PluginStore():
plugins = load_external_plugins(plugins)
for plugin in plugins:
for plugin_attr, plugin_attr_type in required_attrs:
if not hasattr(plugin, plugin_attr) or not isinstance(getattr(plugin, plugin_attr), plugin_attr_type):
if not hasattr(plugin, plugin_attr):
logger.critical('missing attribute "{0}", cannot load plugin: {1}'.format(plugin_attr, plugin))
exit(3)
attr = getattr(plugin, plugin_attr)
if isinstance(attr, babel.support.LazyProxy):
attr = attr.value
if not isinstance(attr, plugin_attr_type):
type_attr = str(type(attr))
logger.critical(
'attribute "{0}" is of type {2}, must be {3}, cannot load plugin: {1}'
.format(plugin_attr, plugin, type_attr, plugin_attr_type)
)
exit(3)
for plugin_attr, plugin_attr_type in optional_attrs:
if not hasattr(plugin, plugin_attr) or not isinstance(getattr(plugin, plugin_attr), plugin_attr_type):
setattr(plugin, plugin_attr, plugin_attr_type())
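
Plugin names and descriptions are now gettext lazy proxies rather than plain strings, which is why the type check unwraps babel.support.LazyProxy before comparing. A tiny illustration:

import babel.support

lazy = babel.support.LazyProxy(lambda: 'Infinite scroll')
assert not isinstance(lazy, str)    # the proxy itself fails an isinstance(str) check
assert isinstance(lazy.value, str)  # its resolved value is the real string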


@@ -16,7 +16,7 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2018, 2020 by Vaclav Zouzalik
'''
from flask_babel import gettext
from searx.i18n import gettext
import hashlib
import re


@@ -1,4 +1,4 @@
from flask_babel import gettext
from searx.i18n import gettext
name = gettext('Infinite scroll')
description = gettext('Automatically load next page when scrolling to bottom of current page')


@@ -1,8 +1,8 @@
from urllib.parse import urlparse, parse_qsl
from flask_babel import gettext
import re
from searx import settings
from searx import settings
from searx.i18n import gettext
regex = re.compile(r'10\.\d{4,9}/[^\s]+')


@@ -14,7 +14,7 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
'''
from flask_babel import gettext
from searx.i18n import gettext
name = gettext('Search on category select')
description = gettext('Perform search immediately if a category selected. '
'Disable to select multiple categories. (JavaScript required)')


@@ -14,7 +14,7 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
'''
from flask_babel import gettext
from searx.i18n import gettext
import re
name = gettext('Self Informations')
description = gettext('Displays your IP if the query is "ip" and your user agent if the query contains "user agent".')


@@ -14,11 +14,11 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
'''
from flask_babel import gettext
import re
from urllib.parse import urlunparse, parse_qsl, urlencode
from searx.i18n import gettext
regexes = {re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'(_hsenc|_hsmi|hsCtaTracking|__hssc|__hstc|__hsfp)[^&]*'),


@@ -1,4 +1,4 @@
from flask_babel import gettext
from searx.i18n import gettext
name = gettext('Vim-like hotkeys')
description = gettext('Navigate search results with Vim-like hotkeys '


@@ -437,7 +437,7 @@ class Preferences:
return urlsafe_b64encode(compress(urlencode(settings_kv).encode())).decode()
def parse_encoded_data(self, input_data):
"""parse (base64) preferences from request (``flask.request.form['preferences']``)"""
"""parse (base64) preferences from request (``context.form['preferences']``)"""
bin_data = decompress(urlsafe_b64decode(input_data))
dict_data = {}
for x, y in parse_qs(bin_data.decode('ascii')).items():
@@ -445,7 +445,7 @@
self.parse_dict(dict_data)
def parse_dict(self, input_data):
"""parse preferences from request (``flask.request.form``)"""
"""parse preferences from request (``context.form``)"""
for user_setting_name, user_setting in input_data.items():
if user_setting_name in self.key_value_settings:
if self.key_value_settings[user_setting_name].locked:
@@ -468,7 +468,7 @@
self.unknown_params[user_setting_name] = user_setting
def parse_form(self, input_data):
"""Parse formular (``<input>``) data from a ``flask.request.form``"""
"""Parse formular (``<input>``) data from a ``context.form``"""
disabled_engines = []
enabled_categories = []
disabled_plugins = []


@@ -11,6 +11,7 @@ class CustomUvicornWorker(uvicorn.workers.UvicornWorker):
class StandaloneApplication(gunicorn.app.base.BaseApplication):
# pylint: disable=abstract-method
def __init__(self, app, options=None):
self.options = options or {}
@@ -18,8 +19,11 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication):
super().__init__()
def load_config(self):
config = {key: value for key, value in self.options.items()
if key in self.cfg.settings and value is not None}
config = {
key: value
for key, value in self.options.items()
if key in self.cfg.settings and value is not None
}
for key, value in config.items():
self.cfg.set(key.lower(), value)
@@ -28,44 +32,46 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication):
def number_of_workers():
return 1 # (multiprocessing.cpu_count() * 2) + 1
return multiprocessing.cpu_count() + 1
def run_production(app):
config_kwargs = {
"loop": "uvloop",
"http": "httptools",
"proxy_headers": True,
}
base_url = settings['server']['base_url'] or None
base_url = settings["server"]["base_url"] or None
if base_url:
# ? config_kwargs['proxy_headers'] = True
config_kwargs['root_path'] = settings['server']['base_url']
config_kwargs["root_path"] = settings["server"]["base_url"]
CustomUvicornWorker.CONFIG_KWARGS.update(config_kwargs)
options = {
'proc_name': 'searxng',
'bind': '%s:%s' % (settings['server']['bind_address'], settings['server']['port']),
'workers': number_of_workers(),
'worker_class': 'searx.run.CustomUvicornWorker',
'loglevel': 'debug',
'capture_output': True,
"proc_name": "searxng",
"bind": "%s:%s"
% (settings["server"]["bind_address"], settings["server"]["port"]),
"workers": number_of_workers(),
"worker_class": "searx.run.CustomUvicornWorker",
"loglevel": "debug",
"capture_output": True,
}
StandaloneApplication(app, options).run()
def run_debug():
kwargs = {
'reload': True,
'loop': 'auto',
'http': 'auto',
'ws': 'none',
'host': settings['server']['bind_address'],
'port': settings['server']['port'],
"reload": True,
"loop": "auto",
"http": "auto",
"ws": "none",
"host": settings["server"]["bind_address"],
"port": settings["server"]["port"],
"proxy_headers": True,
}
base_url = settings['server']['base_url']
base_url = settings["server"]["base_url"]
if base_url:
kwargs['proxy_headers'] = True
kwargs['root_path'] = settings['server']['base_url']
kwargs["root_path"] = settings["server"]["base_url"]
uvicorn.run('searx.__main__:app', **kwargs)
uvicorn.run("searx.webapp:app", **kwargs)


@@ -3,9 +3,9 @@
# pylint: disable=missing-module-docstring, missing-function-docstring
import typing
import threading
from timeit import default_timer
import asyncio
from uuid import uuid4
from timeit import default_timer
from searx import settings
from searx.answerers import ask
@@ -19,6 +19,7 @@ from searx.network import initialize as initialize_network
from searx.metrics import initialize as initialize_metrics, counter_inc, histogram_observe_time
from searx.search.processors import PROCESSORS, initialize as initialize_processors
from searx.search.checker import initialize as initialize_checker
from searx.search.threadnopoolexecutor import ThreadNoPoolExecutor
logger = logger.getChild('search')
@@ -126,30 +127,33 @@ class Search:
return requests, actual_timeout
def search_multiple_requests(self, requests):
async def search_multiple_requests(self, requests):
# pylint: disable=protected-access
search_id = uuid4().__str__()
futures = []
loop = asyncio.get_running_loop()
executor = ThreadNoPoolExecutor(thread_name_prefix=str(uuid4()))
for engine_name, query, request_params in requests:
th = threading.Thread( # pylint: disable=invalid-name
target=PROCESSORS[engine_name].search,
args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
name=search_id,
future = loop.run_in_executor(
executor,
PROCESSORS[engine_name].search,
query,
request_params,
self.result_container,
self.start_time,
self.actual_timeout,
)
th._timeout = False
th._engine_name = engine_name
th.start()
future._engine_name = engine_name
futures.append(future)
for th in threading.enumerate(): # pylint: disable=invalid-name
if th.name == search_id:
remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time))
th.join(remaining_time)
if th.is_alive():
th._timeout = True
self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
logger.warning('engine timeout: {0}'.format(th._engine_name))
_, pending = await asyncio.wait(futures, return_when=asyncio.ALL_COMPLETED, timeout=remaining_time)
for future in pending:
# th._timeout = True
self.result_container.add_unresponsive_engine(future._engine_name, 'timeout')
logger.warning('engine timeout: {0}'.format(future._engine_name))
def search_standard(self):
async def search_standard(self):
"""
Update self.result_container, self.actual_timeout
"""
@@ -157,17 +161,17 @@
# send all search-request
if requests:
self.search_multiple_requests(requests)
await self.search_multiple_requests(requests)
# return results, suggestions, answers and infoboxes
return True
# do search-request
def search(self):
async def search(self):
self.start_time = default_timer()
if not self.search_external_bang():
if not self.search_answerers():
self.search_standard()
await self.search_standard()
return self.result_container
@@ -181,9 +185,9 @@ class SearchWithPlugins(Search):
self.ordered_plugin_list = ordered_plugin_list
self.request = request
def search(self):
async def search(self):
if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
super().search()
await super().search()
plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)
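
Engine requests are now submitted with loop.run_in_executor and collected by asyncio.wait; whatever is still pending when the time budget expires is reported as an engine timeout. A generic, runnable sketch of that pattern:

import asyncio
import time
from concurrent.futures import ThreadPoolExecutor

def engine_request(delay):  # stands in for a blocking engine search
    time.sleep(delay)
    return delay

async def main():
    loop = asyncio.get_running_loop()
    executor = ThreadPoolExecutor()
    futures = [loop.run_in_executor(executor, engine_request, d) for d in (0.1, 0.6)]
    # wait up to the remaining time budget; pending futures are the timeout case
    done, pending = await asyncio.wait(futures, return_when=asyncio.ALL_COMPLETED, timeout=0.3)
    print('%d finished, %d timed out' % (len(done), len(pending)))
    executor.shutdown(wait=True)  # here the slow task is simply allowed to finish

asyncio.run(main())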


@@ -0,0 +1,43 @@
import threading
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures._base import Future
class ThreadNoPoolExecutor(ThreadPoolExecutor):
def __init__(self, max_workers=None, thread_name_prefix='',
initializer=None, initargs=()):
# pylint: disable=super-init-not-called
if max_workers:
raise NotImplementedError('max_workers not supported')
if initializer:
raise NotImplementedError('initializer not supported')
if initargs:
raise NotImplementedError('initargs not supported')
self.thread_name_prefix = thread_name_prefix
def submit(self, fn, *args, **kwargs): # pylint: disable=arguments-differ
f = Future()
def worker():
if not f.set_running_or_notify_cancel():
return
try:
result = fn(*args, **kwargs)
except BaseException as exc:
f.set_exception(exc)
else:
f.set_result(result)
t = threading.Thread(
target=worker,
name=self.thread_name_prefix + '_engine',
daemon=True
)
t.start()
return f
# submit.__text_signature__ = ThreadPoolExecutor.submit.__text_signature__
# submit.__doc__ = ThreadPoolExecutor.submit.__doc__
def shutdown(self, wait=True):
pass
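
Usage matches the concurrent.futures interface; every submit spawns a fresh daemon thread, so a stuck engine can never exhaust a shared pool:

from searx.search.threadnopoolexecutor import ThreadNoPoolExecutor

executor = ThreadNoPoolExecutor(thread_name_prefix='searx')
future = executor.submit(sum, [1, 2, 3])  # runs on a new thread named 'searx_engine'
print(future.result())                    # -> 6
executor.shutdown()                       # a no-op here; the threads are daemonic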

searx/templates.py Normal file (+192 lines)

@@ -0,0 +1,192 @@
import os
from typing import Optional
from urllib.parse import parse_qs, urlencode, urlsplit
import jinja2
import babel.support
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter # pylint: disable=no-name-in-module
from starlette.requests import Request
from starlette.templating import Jinja2Templates
from starlette_context import context
from starlette.routing import NoMatchFound
from starlette_i18n import i18n
from searx import logger, settings
from searx.webutils import (
get_static_files,
get_result_templates,
get_themes,
)
# about static
logger.debug('static directory is %s', settings['ui']['static_path'])
static_files = get_static_files(settings['ui']['static_path'])
# about templates
logger.debug('templates directory is %s', settings['ui']['templates_path'])
default_theme = settings['ui']['default_theme']
templates_path = settings['ui']['templates_path']
themes = get_themes(templates_path)
result_templates = get_result_templates(templates_path)
global_favicons = []
for indice, theme in enumerate(themes):
global_favicons.append([])
theme_img_path = os.path.join(settings['ui']['static_path'], 'themes', theme, 'img', 'icons')
for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
global_favicons[indice].extend(filenames)
def get_current_theme_name(request: Request, override: Optional[str] = None) -> str:
"""Returns theme name.
Checks in this order:
1. override
2. cookies
3. settings"""
if override and (override in themes or override == '__common__'):
return override
theme_name = request.query_params.get('theme', context.preferences.get_value('theme')) # pylint: disable=no-member
if theme_name not in themes:
theme_name = default_theme
return theme_name
def get_result_template(theme_name: str, template_name: str) -> str:
themed_path = theme_name + '/result_templates/' + template_name
if themed_path in result_templates:
return themed_path
return 'result_templates/' + template_name
# code-highlighter
def code_highlighter(codelines, language=None):
if not language:
language = 'text'
try:
# find lexer by programing language
lexer = get_lexer_by_name(language, stripall=True)
except Exception as e: # pylint: disable=broad-except
logger.exception(e, exc_info=True)
# if lexer is not found, using default one
lexer = get_lexer_by_name('text', stripall=True)
html_code = ''
tmp_code = ''
last_line = None
# parse lines
for line, code in codelines:
if not last_line:
line_code_start = line
# new codeblock is detected
if last_line is not None and\
last_line + 1 != line:
# highlight last codepart
formatter = HtmlFormatter(
linenos='inline', linenostart=line_code_start, cssclass="code-highlight"
)
html_code = html_code + highlight(tmp_code, lexer, formatter)
# reset conditions for next codepart
tmp_code = ''
line_code_start = line
# add codepart
tmp_code += code + '\n'
# update line
last_line = line
# highlight last codepart
formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start, cssclass="code-highlight")
html_code = html_code + highlight(tmp_code, lexer, formatter)
return html_code
class I18NTemplates(Jinja2Templates):
"""Custom Jinja2Templates with i18n support
"""
@staticmethod
def url_for_theme(endpoint: str, override_theme=None, **values):
request = context.request # pylint: disable=no-member
# starlette migration
if '_external' in values:
del values['_external']
if 'filename' in values:
values['path'] = values['filename']
del values['filename']
#
if endpoint == 'static' and values.get('path'):
theme_name = get_current_theme_name(request, override=override_theme)
filename_with_theme = "themes/{}/{}".format(theme_name, values['path'])
if filename_with_theme in static_files:
values['path'] = filename_with_theme
return request.url_for(endpoint, **values)
try:
url_for_args = {}
for k in ('path', 'filename'):
if k in values:
v = values.pop(k)
url_for_args[k] = v
url = request.url_for(endpoint, **url_for_args)
_url = urlsplit(url)
_query = parse_qs(_url.query)
_query.update(values)
querystr = urlencode(_query, doseq=True)
return _url._replace(query=querystr).geturl()
# if anchor is not None:
# rv += f"#{url_quote(anchor)}"
except NoMatchFound as e:
error_message = "url_for, endpoint='%s' not found (values=%s)" % (endpoint, str(values))
logger.error(error_message)
context.errors.append(error_message) # pylint: disable=no-member
raise e
@staticmethod
def ugettext(message):
translations = i18n.get_locale().translations
if isinstance(message, babel.support.LazyProxy):
message = message.value
return translations.ugettext(message)
@staticmethod
def ungettext(*args):
translations = i18n.get_locale().translations
return translations.ungettext(*args)
def _create_env(self, directory: str) -> "jinja2.Environment":
loader = jinja2.FileSystemLoader(directory)
env = jinja2.Environment(
loader=loader,
autoescape=True,
trim_blocks=True,
lstrip_blocks=True,
auto_reload=False,
extensions=[
'jinja2.ext.loopcontrols',
'jinja2.ext.i18n'
],
)
env.filters["code_highlighter"] = code_highlighter
env.globals["url_for"] = I18NTemplates.url_for_theme
env.install_gettext_callables( # pylint: disable=no-member
I18NTemplates.ugettext,
I18NTemplates.ungettext,
newstyle=True
)
return env
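
A sketch of plugging the class into a Starlette endpoint (the template name and route handler are illustrative, not from the diff):

from searx import settings
from searx.templates import I18NTemplates

templates = I18NTemplates(directory=settings['ui']['templates_path'])

async def index(request):
    # TemplateResponse comes from the starlette.templating.Jinja2Templates base class
    return templates.TemplateResponse('oscar/index.html', {'request': request})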


@@ -47,13 +47,7 @@ class SearxRobotLayer():
webapp = join(abspath(dirname(realpath(__file__))), 'webapp.py')
exe = 'python'
# The Flask app is started by Flask.run(...), don't enable Flask's debug
# mode, the debugger from Flask will cause wired process model, where
# the server never dies. Further read:
#
# - debug mode: https://flask.palletsprojects.com/quickstart/#debug-mode
# - Flask.run(..): https://flask.palletsprojects.com/api/#flask.Flask.run
# Disable debug mode
os.environ['SEARX_DEBUG'] = '0'
# set robot settings path

File diff suppressed because it is too large


@@ -4,7 +4,6 @@ import csv
import hashlib
import hmac
import re
import inspect
from io import StringIO
from codecs import getincrementalencoder
@@ -123,18 +122,3 @@ def highlight_content(content, query):
content, flags=re.I | re.U)
return content
def is_flask_run_cmdline():
"""Check if the application was started using "flask run" command line
Inspect the callstack.
See https://github.com/pallets/flask/blob/master/src/flask/__main__.py
Returns:
bool: True if the application was started using "flask run".
"""
frames = inspect.stack()
if len(frames) < 2:
return False
return frames[-2].filename.endswith('flask/cli.py')


@@ -1,14 +1,28 @@
# -*- coding: utf-8 -*-
import json
import unittest
from urllib.parse import ParseResult
from mock import Mock
from searx.testing import SearxTestCase
# from searx.testing import SearxTestCase
from searx.search import Search
import searx.search.processors
from starlette.testclient import TestClient
class ViewsTestCase(SearxTestCase):
class ViewsTestCase(unittest.TestCase):
def setattr4test(self, obj, attr, value):
"""
setattr(obj, attr, value)
but reset to the previous value in the cleanup.
"""
previous_value = getattr(obj, attr)
def cleanup_patch():
setattr(obj, attr, previous_value)
self.addCleanup(cleanup_patch)
setattr(obj, attr, value)
def setUp(self):
# skip init function (no external HTTP request)
@@ -16,10 +30,8 @@ class ViewsTestCase(SearxTestCase):
pass
self.setattr4test(searx.search.processors, 'initialize_processor', dummy)
from searx import webapp  # pylint: disable=import-outside-toplevel
webapp.app.config['TESTING'] = True  # to get better error messages
self.app = webapp.app.test_client()
from searx import webapp, templates  # pylint: disable=import-outside-toplevel
self.client = TestClient(webapp.app)
# set some defaults
test_results = [
@@ -69,51 +81,51 @@ class ViewsTestCase(SearxTestCase):
self.setattr4test(Search, 'search', search_mock)
def get_current_theme_name_mock(override=None):
def get_current_theme_name_mock(request, override=None):
if override:
return override
return 'oscar'
self.setattr4test(webapp, 'get_current_theme_name', get_current_theme_name_mock)
self.setattr4test(templates, 'get_current_theme_name', get_current_theme_name_mock)
self.maxDiff = None # to see full diffs
def test_index_empty(self):
result = self.app.post('/')
result = self.client.post('/')
self.assertEqual(result.status_code, 200)
self.assertIn(b'<div class="text-hide center-block" id="main-logo">'
+ b'<img class="center-block img-responsive" src="/static/themes/oscar/img/logo_searx_a.png"'
+ b' alt="searx logo" />searx</div>', result.data)
def test_index_html_post(self):
result = self.app.post('/', data={'q': 'test'})
result = self.client.post('/', data={'q': 'test'})
self.assertEqual(result.status_code, 308)
self.assertEqual(result.location, 'http://localhost/search')
def test_index_html_get(self):
result = self.app.post('/?q=test')
result = self.client.post('/?q=test')
self.assertEqual(result.status_code, 308)
self.assertEqual(result.location, 'http://localhost/search?q=test')
def test_search_empty_html(self):
result = self.app.post('/search', data={'q': ''})
result = self.client.post('/search', data={'q': ''})
self.assertEqual(result.status_code, 200)
self.assertIn(b'<span class="instance pull-left"><a href="/">searxng</a></span>', result.data)
def test_search_empty_json(self):
result = self.app.post('/search', data={'q': '', 'format': 'json'})
result = self.client.post('/search', data={'q': '', 'format': 'json'})
self.assertEqual(result.status_code, 400)
def test_search_empty_csv(self):
result = self.app.post('/search', data={'q': '', 'format': 'csv'})
result = self.client.post('/search', data={'q': '', 'format': 'csv'})
self.assertEqual(result.status_code, 400)
def test_search_empty_rss(self):
result = self.app.post('/search', data={'q': '', 'format': 'rss'})
result = self.client.post('/search', data={'q': '', 'format': 'rss'})
self.assertEqual(result.status_code, 400)
def test_search_html(self):
result = self.app.post('/search', data={'q': 'test'})
result = self.client.post('/search', data={'q': 'test'})
self.assertIn(
b'<h4 class="result_header" id="result-2"><img width="32" height="32" class="favicon"'
@@ -127,12 +139,12 @@ class ViewsTestCase(SearxTestCase):
)
def test_index_json(self):
result = self.app.post('/', data={'q': 'test', 'format': 'json'})
result = self.client.post('/', data={'q': 'test', 'format': 'json'})
self.assertEqual(result.status_code, 308)
def test_search_json(self):
result = self.app.post('/search', data={'q': 'test', 'format': 'json'})
result_dict = json.loads(result.data.decode())
result = self.client.post('/search', data={'q': 'test', 'format': 'json'})
result_dict = result.json()
self.assertEqual('test', result_dict['query'])
self.assertEqual(len(result_dict['results']), 2)
@@ -140,11 +152,11 @@ class ViewsTestCase(SearxTestCase):
self.assertEqual(result_dict['results'][0]['url'], 'http://first.test.xyz')
def test_index_csv(self):
result = self.app.post('/', data={'q': 'test', 'format': 'csv'})
result = self.client.post('/', data={'q': 'test', 'format': 'csv'})
self.assertEqual(result.status_code, 308)
def test_search_csv(self):
result = self.app.post('/search', data={'q': 'test', 'format': 'csv'})
result = self.client.post('/search', data={'q': 'test', 'format': 'csv'})
self.assertEqual(
b'title,url,content,host,engine,score,type\r\n'
@@ -154,11 +166,11 @@ class ViewsTestCase(SearxTestCase):
)
def test_index_rss(self):
result = self.app.post('/', data={'q': 'test', 'format': 'rss'})
result = self.client.post('/', data={'q': 'test', 'format': 'rss'})
self.assertEqual(result.status_code, 308)
def test_search_rss(self):
result = self.app.post('/search', data={'q': 'test', 'format': 'rss'})
result = self.client.post('/search', data={'q': 'test', 'format': 'rss'})
self.assertIn(
b'<description>Search results for "test" - searx</description>',
@@ -186,12 +198,12 @@ class ViewsTestCase(SearxTestCase):
)
def test_about(self):
result = self.app.get('/about')
result = self.client.get('/about')
self.assertEqual(result.status_code, 200)
self.assertIn(b'<h1>About <a href="/">searxng</a></h1>', result.data)
def test_preferences(self):
result = self.app.get('/preferences')
result = self.client.get('/preferences')
self.assertEqual(result.status_code, 200)
self.assertIn(
b'<form method="post" action="/preferences" id="search_form">',
@@ -207,7 +219,7 @@
)
def test_browser_locale(self):
result = self.app.get('/preferences', headers={'Accept-Language': 'zh-tw;q=0.8'})
result = self.client.get('/preferences', headers={'Accept-Language': 'zh-tw;q=0.8'})
self.assertEqual(result.status_code, 200)
self.assertIn(
b'<option value="zh_TW" selected="selected">',
@@ -221,26 +233,26 @@
)
def test_stats(self):
result = self.app.get('/stats')
result = self.client.get('/stats')
self.assertEqual(result.status_code, 200)
self.assertIn(b'<h1>Engine stats</h1>', result.data)
def test_robots_txt(self):
result = self.app.get('/robots.txt')
result = self.client.get('/robots.txt')
self.assertEqual(result.status_code, 200)
self.assertIn(b'Allow: /', result.data)
def test_opensearch_xml(self):
result = self.app.get('/opensearch.xml')
result = self.client.get('/opensearch.xml')
self.assertEqual(result.status_code, 200)
self.assertIn(b'<Description>a privacy-respecting, hackable metasearch engine</Description>', result.data)
def test_favicon(self):
result = self.app.get('/favicon.ico')
result = self.client.get('/favicon.ico')
self.assertEqual(result.status_code, 200)
def test_config(self):
result = self.app.get('/config')
result = self.client.get('/config')
self.assertEqual(result.status_code, 200)
json_result = result.json()
self.assertTrue(json_result)