[pylint] fix global-variable-not-assigned issues
If there is no write access, there is no need for global: remove the global
statement when there is no assignment to the name.

    global-variable-not-assigned:
      Using global for names but no assignment is done.  Used when a variable
      is defined through the "global" statement but no assignment to this
      variable is done.

In Pylint 2.11 the global-variable-not-assigned checker now catches global
variables that are never reassigned in a local scope, and also catches
(reassigned) functions [1][2].

[1] https://pylint.pycqa.org/en/latest/whatsnew/2.11.html
[2] https://github.com/PyCQA/pylint/issues/1375

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
commit 443bf35e09
parent fe6470cbe6
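
For context (not part of the commit), a minimal sketch of the rule being
applied. The names init, search and command merely echo the command-engine
hunk below; the bodies are illustrative assumptions, not SearXNG code.

    # Hypothetical module, only to illustrate the pylint rule.
    command = []  # module-level configuration, rebound once at init time


    def init(engine_settings):
        # The name ``command`` is rebound here, so ``global`` is required.
        global command  # pylint: disable=global-statement
        command = engine_settings['command']


    def search(query):
        # ``command`` is only read here: a ``global command`` statement in
        # this scope is what pylint 2.11+ reports as
        # global-variable-not-assigned, and it can simply be dropped -- which
        # is what this commit does throughout the code base.
        return [part.replace('{{QUERY}}', query) for part in command]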
@@ -35,7 +35,7 @@ def init(engine_settings):
     if 'command' not in engine_settings:
         raise ValueError('engine command : missing configuration key: command')

-    global command, working_dir, result_template, delimiter, parse_regex, timeout, environment_variables
+    global command, working_dir, delimiter, parse_regex, environment_variables

     command = engine_settings['command']

@@ -56,7 +56,6 @@ def search(query, request_params):
     results.

     """
-    global _my_offline_engine  # pylint: disable=global-statement
     ret_val = []

     result_list = json.loads(_my_offline_engine)
@@ -31,8 +31,6 @@ def init(_engine_settings):
     )

 def search(query, _params):
-    global _redis_client  # pylint: disable=global-statement
-
     if not exact_match_only:
         return search_keys(query)

@@ -55,8 +53,6 @@ def search(query, _params):
     return []

 def search_keys(query):
-    global _redis_client  # pylint: disable=global-statement
-
     ret = []
     for key in _redis_client.scan_iter(
         match='*{}*'.format(query)
@@ -35,7 +35,6 @@ def sqlite_cursor():
     * https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
     * https://www.sqlite.org/uri.html
     """
-    global database  # pylint: disable=global-statement
     uri = 'file:' + database + '?mode=ro'
     with contextlib.closing(sqlite3.connect(uri, uri=True)) as connect:
         connect.row_factory = sqlite3.Row
@@ -44,7 +43,6 @@ def sqlite_cursor():


 def search(query, params):
-    global query_str, result_template  # pylint: disable=global-statement
     results = []

     query_params = {
@@ -37,7 +37,6 @@ cookies = dict()


 def init(engine_settings=None):
-    global cookies
     # initial cookies
     resp = http_get(url, allow_redirects=False)
     if resp.ok:
@@ -45,7 +45,6 @@ def resolve_bang_definition(bang_definition, query):


 def get_bang_definition_and_autocomplete(bang, external_bangs_db=None):
-    global EXTERNAL_BANGS
     if external_bangs_db is None:
         external_bangs_db = EXTERNAL_BANGS

@@ -78,7 +77,6 @@ def get_bang_url(search_query, external_bangs_db=None):
     :param search_query: This is a search_query object which contains preferences and the submitted queries.
     :return: None if the bang was invalid, else a string of the redirect url.
     """
-    global EXTERNAL_BANGS
     if external_bangs_db is None:
         external_bangs_db = EXTERNAL_BANGS

@@ -57,7 +57,7 @@ def initialize_locales(directory):
     """Initialize global names :py:obj:`LOCALE_NAMES`, :py:obj:`UI_LOCALE_CODES` and
     :py:obj:`RTL_LOCALES`.
     """
-    global LOCALE_NAMES, UI_LOCALE_CODES, RTL_LOCALES  # pylint: disable=global-statement
+    global UI_LOCALE_CODES  # pylint: disable=global-statement
     for dirname in sorted(os.listdir(directory)):
         # Based on https://flask-babel.tkte.ch/_modules/flask_babel.html#Babel.list_translations
         if not os.path.isdir( os.path.join(directory, dirname, 'LC_MESSAGES') ):
@@ -43,24 +43,20 @@ THREADLOCAL = threading.local()
 """Thread-local data is data for thread specific values."""

 def reset_time_for_thread():
-    global THREADLOCAL
     THREADLOCAL.total_time = 0


 def get_time_for_thread():
     """returns thread's total time or None"""
-    global THREADLOCAL
     return THREADLOCAL.__dict__.get('total_time')


 def set_timeout_for_thread(timeout, start_time=None):
-    global THREADLOCAL
     THREADLOCAL.timeout = timeout
     THREADLOCAL.start_time = start_time


 def set_context_network_name(network_name):
-    global THREADLOCAL
     THREADLOCAL.network = get_network(network_name)


@@ -69,13 +65,11 @@ def get_context_network():

     If unset, return value from :py:obj:`get_network`.
     """
-    global THREADLOCAL
     return THREADLOCAL.__dict__.get('network') or get_network()


 def request(method, url, **kwargs):
     """same as requests/requests/api.py request(...)"""
-    global THREADLOCAL
     time_before_request = default_timer()

     # timeout (httpx)
@@ -53,7 +53,6 @@ async def close_connections_for_url(


 def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
-    global SSLCONTEXTS
     key = (proxy_url, cert, verify, trust_env, http2)
     if key not in SSLCONTEXTS:
         SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
@@ -137,7 +136,6 @@ class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):


 def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
-    global TRANSPORT_KWARGS
     # support socks5h (requests compatibility):
     # https://requests.readthedocs.io/en/master/user/advanced/#socks
     # socks5:// hostname is resolved on client side
@@ -167,7 +165,6 @@ def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit


 def get_transport(verify, http2, local_address, proxy_url, limit, retries):
-    global TRANSPORT_KWARGS
     verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
     return AsyncHTTPTransportFixed(
         # pylint: disable=protected-access
@@ -235,7 +232,6 @@ def new_client(


 def get_loop():
-    global LOOP
     return LOOP


@@ -225,12 +225,10 @@ class Network:

     @classmethod
     async def aclose_all(cls):
-        global NETWORKS
         await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)


 def get_network(name=None):
-    global NETWORKS
     return NETWORKS.get(name or DEFAULT_NAME)


@@ -240,8 +238,6 @@ def initialize(settings_engines=None, settings_outgoing=None):
     from searx import settings
     # pylint: enable=import-outside-toplevel)

-    global NETWORKS
-
     settings_engines = settings_engines or settings['engines']
     settings_outgoing = settings_outgoing or settings['outgoing']

@@ -328,7 +324,6 @@ def done():
     Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
     So Network.aclose is called here using atexit.register
     """
-    global NETWORKS
     try:
         loop = get_loop()
         if loop:
@@ -18,7 +18,6 @@ def normalize_name(name):
     return unicodedata.normalize('NFKD', name).lower()

 def name_to_iso4217(name):
-    global CURRENCIES  # pylint: disable=global-statement
     name = normalize_name(name)
     currency = CURRENCIES['names'].get(name, [name])
     if isinstance(currency, str):
@@ -26,7 +25,6 @@ def name_to_iso4217(name):
     return currency[0]

 def iso4217_to_name(iso4217, language):
-    global CURRENCIES  # pylint: disable=global-statement
     return CURRENCIES['iso4217'].get(iso4217, {}).get(language, iso4217)

 class OnlineCurrencyProcessor(OnlineProcessor):
@@ -72,31 +72,26 @@ class TestResolveBangDefinition(SearxTestCase):
 class TestGetBangDefinitionAndAutocomplete(SearxTestCase):

     def test_found(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('exam', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, TEST_DB['trie']['exam']['*'])
         self.assertEqual(new_autocomplete, ['example'])

     def test_found_optimized(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('example', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, TEST_DB['trie']['exam']['ple'])
         self.assertEqual(new_autocomplete, [])

     def test_partial(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('examp', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, None)
         self.assertEqual(new_autocomplete, ['example'])

     def test_partial2(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('sea', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, TEST_DB['trie']['sea']['*'])
         self.assertEqual(new_autocomplete, ['search', 'searching', 'seascapes', 'season'])

     def test_error(self):
-        global TEST_DB
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('error', external_bangs_db=TEST_DB)
         self.assertEqual(bang_definition, None)
         self.assertEqual(new_autocomplete, [])
@@ -114,7 +109,6 @@ class TestExternalBangJson(SearxTestCase):
         self.assertEqual(result, None)

     def test_get_bang_url(self):
-        global TEST_DB
         url = get_bang_url(SearchQuery('test', engineref_list=[], external_bang='example'), external_bangs_db=TEST_DB)
         self.assertEqual(url, 'https://example.com/test')

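
Side note on the initialize_locales hunk above, where one global survives:
in Python, in-place mutation of a module-level container is not an assignment
to the name, only rebinding is, which is presumably why global LOCALE_NAMES
and RTL_LOCALES can go while global UI_LOCALE_CODES stays. A hedged sketch
(the function body is an illustrative assumption, not the real locales code):

    LOCALE_NAMES = {}     # module-level dict, only mutated in place
    UI_LOCALE_CODES = []  # module-level list, rebound to a new object


    def initialize_locales(locales):
        global UI_LOCALE_CODES  # pylint: disable=global-statement

        # .update() mutates the existing dict; the name LOCALE_NAMES is never
        # rebound, so pylint 2.11+ would flag a ``global LOCALE_NAMES`` here.
        LOCALE_NAMES.update(locales)

        # The comprehension rebinds UI_LOCALE_CODES to a brand-new list, so
        # this name genuinely needs the ``global`` declaration.
        UI_LOCALE_CODES = [code.replace('_', '-') for code in locales]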