Merge pull request #325 from searxng/dependabot/pip/master/pylint-2.11.1

Bump pylint from 2.10.2 to 2.11.1

Commit 0d9959e649
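Most of the hunks below delete bare `global` statements rather than changing any behaviour: in Python a `global` declaration is only required when a function rebinds a module-level name, not when it merely reads the name or mutates the object it points to. The removed declarations all fall into the read-only or mutate-in-place category, and pylint 2.11 presumably started flagging them (e.g. as `global-variable-not-assigned`), since the inline `# pylint: disable=global-statement` comments only silenced the `global-statement` check. A minimal sketch of the distinction, using hypothetical names that are not part of this repository:

```python
"""Sketch: when a `global` declaration is actually needed (illustration only)."""

SETTINGS = {'timeout': 3.0}   # module-level mutable object
COUNTER = 0                   # module-level immutable value


def read_timeout():
    # Reading a module-level name needs no `global`; name lookup falls
    # through to the module namespace automatically.
    return SETTINGS['timeout']


def tune_timeout(value):
    # Item assignment mutates the dict in place; the name SETTINGS is
    # never rebound, so `global` is unnecessary here too.
    SETTINGS['timeout'] = value


def bump_counter():
    # Rebinding the name itself is the one case that requires `global`;
    # without it, COUNTER would become a new local variable.
    global COUNTER  # pylint: disable=global-statement
    COUNTER += 1


if __name__ == '__main__':
    tune_timeout(5.0)
    bump_counter()
    print(read_timeout(), COUNTER)  # -> 5.0 1
```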
@@ -62,6 +62,7 @@ confidence=
 disable=bad-whitespace,
     duplicate-code,
     missing-function-docstring,
+    consider-using-f-string,

 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
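The only line added to `.pylintrc` is `consider-using-f-string`, a check introduced in pylint 2.11 that suggests replacing `%` formatting and `str.format()` calls with f-strings. Disabling it presumably keeps existing call sites, such as the `'*{}*'.format(query)` pattern further down in this diff, from failing the stricter lint run. For reference, the equivalent spellings the check distinguishes, shown on a hypothetical value:

```python
query = 'example'                    # hypothetical value, illustration only

percent_style = '*%s*' % query       # %-formatting: flagged by consider-using-f-string
format_call = '*{}*'.format(query)   # str.format(): also flagged
f_string = f'*{query}*'              # the spelling the check recommends

assert percent_style == format_call == f_string == '*example*'
```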
@@ -2,7 +2,7 @@ mock==4.0.3
 nose2[coverage_plugin]==0.10.0
 cov-core==1.15.0
 pycodestyle==2.7.0
-pylint==2.10.2
+pylint==2.11.1
 splinter==0.15.0
 selenium==3.141.0
 twine==3.4.2
@@ -35,7 +35,7 @@ def init(engine_settings):
     if 'command' not in engine_settings:
         raise ValueError('engine command : missing configuration key: command')

-    global command, working_dir, result_template, delimiter, parse_regex, timeout, environment_variables
+    global command, working_dir, delimiter, parse_regex, environment_variables

     command = engine_settings['command']

@@ -56,7 +56,6 @@ def search(query, request_params):
     results.

     """
-    global _my_offline_engine  # pylint: disable=global-statement
     ret_val = []

     result_list = json.loads(_my_offline_engine)
@@ -31,8 +31,6 @@ def init(_engine_settings):
     )

 def search(query, _params):
-    global _redis_client  # pylint: disable=global-statement
-
     if not exact_match_only:
         return search_keys(query)

@@ -55,8 +53,6 @@ def search(query, _params):
     return []

 def search_keys(query):
-    global _redis_client  # pylint: disable=global-statement
-
     ret = []
     for key in _redis_client.scan_iter(
         match='*{}*'.format(query)
@@ -35,7 +35,6 @@ def sqlite_cursor():
     * https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
     * https://www.sqlite.org/uri.html
     """
-    global database  # pylint: disable=global-statement
     uri = 'file:' + database + '?mode=ro'
     with contextlib.closing(sqlite3.connect(uri, uri=True)) as connect:
         connect.row_factory = sqlite3.Row
@@ -44,7 +43,6 @@ def sqlite_cursor():


 def search(query, params):
-    global query_str, result_template  # pylint: disable=global-statement
     results = []

     query_params = {
@@ -37,7 +37,6 @@ cookies = dict()


 def init(engine_settings=None):
-    global cookies
     # initial cookies
     resp = http_get(url, allow_redirects=False)
     if resp.ok:
@@ -45,7 +45,6 @@ def resolve_bang_definition(bang_definition, query):


 def get_bang_definition_and_autocomplete(bang, external_bangs_db=None):
-    global EXTERNAL_BANGS
     if external_bangs_db is None:
         external_bangs_db = EXTERNAL_BANGS

@@ -78,7 +77,6 @@ def get_bang_url(search_query, external_bangs_db=None):
     :param search_query: This is a search_query object which contains preferences and the submitted queries.
     :return: None if the bang was invalid, else a string of the redirect url.
     """
-    global EXTERNAL_BANGS
     if external_bangs_db is None:
         external_bangs_db = EXTERNAL_BANGS

@@ -57,7 +57,7 @@ def initialize_locales(directory):
     """Initialize global names :py:obj:`LOCALE_NAMES`, :py:obj:`UI_LOCALE_CODES` and
     :py:obj:`RTL_LOCALES`.
     """
-    global LOCALE_NAMES, UI_LOCALE_CODES, RTL_LOCALES  # pylint: disable=global-statement
+    global UI_LOCALE_CODES  # pylint: disable=global-statement
     for dirname in sorted(os.listdir(directory)):
         # Based on https://flask-babel.tkte.ch/_modules/flask_babel.html#Babel.list_translations
         if not os.path.isdir( os.path.join(directory, dirname, 'LC_MESSAGES') ):
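This is the only hunk that narrows a `global` statement instead of deleting it: of the three names, `UI_LOCALE_CODES` still needs the declaration, presumably because it is the only one that `initialize_locales()` rebinds, while `LOCALE_NAMES` and `RTL_LOCALES` appear to be filled in place (item assignment and `set.add()`), which needs no `global`. A rough, simplified sketch of that shape, not the actual function body:

```python
# Hypothetical, simplified shape of the pattern; not the real initialize_locales().
LOCALE_NAMES = {}       # filled in place  -> no `global` needed
RTL_LOCALES = set()     # filled in place  -> no `global` needed
UI_LOCALE_CODES = []    # rebound below    -> `global` required


def initialize_locales_sketch(locale_descriptions):
    global UI_LOCALE_CODES  # pylint: disable=global-statement
    for code, (name, is_rtl) in locale_descriptions.items():
        LOCALE_NAMES[code] = name      # dict mutation, not a rebinding
        if is_rtl:
            RTL_LOCALES.add(code)      # set mutation, not a rebinding
    UI_LOCALE_CODES = [code.replace('_', '-') for code in LOCALE_NAMES]
```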
@@ -43,24 +43,20 @@ THREADLOCAL = threading.local()
 """Thread-local data is data for thread specific values."""

 def reset_time_for_thread():
-    global THREADLOCAL
     THREADLOCAL.total_time = 0


 def get_time_for_thread():
     """returns thread's total time or None"""
-    global THREADLOCAL
     return THREADLOCAL.__dict__.get('total_time')


 def set_timeout_for_thread(timeout, start_time=None):
-    global THREADLOCAL
     THREADLOCAL.timeout = timeout
     THREADLOCAL.start_time = start_time


 def set_context_network_name(network_name):
-    global THREADLOCAL
     THREADLOCAL.network = get_network(network_name)

@@ -69,13 +65,11 @@ def get_context_network():

     If unset, return value from :py:obj:`get_network`.
     """
-    global THREADLOCAL
     return THREADLOCAL.__dict__.get('network') or get_network()


 def request(method, url, **kwargs):
     """same as requests/requests/api.py request(...)"""
-    global THREADLOCAL
     time_before_request = default_timer()

     # timeout (httpx)
@@ -53,7 +53,6 @@ async def close_connections_for_url(


 def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
-    global SSLCONTEXTS
     key = (proxy_url, cert, verify, trust_env, http2)
     if key not in SSLCONTEXTS:
         SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
@@ -137,7 +136,6 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):


 def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
-    global TRANSPORT_KWARGS
     # support socks5h (requests compatibility):
     # https://requests.readthedocs.io/en/master/user/advanced/#socks
     # socks5:// hostname is resolved on client side
@@ -167,7 +165,6 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit


 def get_transport(verify, http2, local_address, proxy_url, limit, retries):
-    global TRANSPORT_KWARGS
     verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
     return AsyncHTTPTransportFixed(
         # pylint: disable=protected-access
@@ -235,7 +232,6 @@ def new_client(


 def get_loop():
-    global LOOP
     return LOOP

@@ -225,12 +225,10 @@ class Network:

     @classmethod
     async def aclose_all(cls):
-        global NETWORKS
         await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)


 def get_network(name=None):
-    global NETWORKS
     return NETWORKS.get(name or DEFAULT_NAME)

@@ -240,8 +238,6 @@ def initialize(settings_engines=None, settings_outgoing=None):
     from searx import settings
     # pylint: enable=import-outside-toplevel)

-    global NETWORKS
-
     settings_engines = settings_engines or settings['engines']
     settings_outgoing = settings_outgoing or settings['outgoing']

@@ -328,7 +324,6 @@ def done():
     Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
     So Network.aclose is called here using atexit.register
     """
-    global NETWORKS
     try:
         loop = get_loop()
         if loop:
@@ -18,7 +18,6 @@ def normalize_name(name):
     return unicodedata.normalize('NFKD', name).lower()

 def name_to_iso4217(name):
-    global CURRENCIES  # pylint: disable=global-statement
     name = normalize_name(name)
     currency = CURRENCIES['names'].get(name, [name])
     if isinstance(currency, str):
@@ -26,7 +25,6 @@ def name_to_iso4217(name):
     return currency[0]

 def iso4217_to_name(iso4217, language):
-    global CURRENCIES  # pylint: disable=global-statement
     return CURRENCIES['iso4217'].get(iso4217, {}).get(language, iso4217)

 class OnlineCurrencyProcessor(OnlineProcessor):
@@ -72,31 +72,26 @@ class TestResolveBangDefinition(SearxTestCase):
 class TestGetBangDefinitionAndAutocomplete(SearxTestCase):

     def test_found(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('exam', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, TEST_DB['trie']['exam']['*'])
         self.assertEqual(new_autocomplete, ['example'])

     def test_found_optimized(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('example', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, TEST_DB['trie']['exam']['ple'])
         self.assertEqual(new_autocomplete, [])

     def test_partial(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('examp', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, None)
         self.assertEqual(new_autocomplete, ['example'])

     def test_partial2(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('sea', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, TEST_DB['trie']['sea']['*'])
         self.assertEqual(new_autocomplete, ['search', 'searching', 'seascapes', 'season'])

     def test_error(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('error', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, None)
         self.assertEqual(new_autocomplete, [])
@@ -114,7 +109,6 @@ class TestExternalBangJson(SearxTestCase):
         self.assertEqual(result, None)

     def test_get_bang_url(self):
-        global TEST_DB
         url = get_bang_url(SearchQuery('test', engineref_list=[], external_bang='example'), external_bangs_db=TEST_DB)
         self.assertEqual(url, 'https://example.com/test')
