diff --git a/searx/engines/acgsou.py b/searx/engines/acgsou.py
index 553b49b69..a436df283 100644
--- a/searx/engines/acgsou.py
+++ b/searx/engines/acgsou.py
@@ -41,7 +41,6 @@ def response(resp):
         # defaults
         filesize = 0
         magnet_link = "magnet:?xt=urn:btih:{}&tr=http://tracker.acgsou.com:2710/announce"
-        torrent_link = ""

         try:
             category = extract_text(result.xpath(xpath_category)[0])
diff --git a/searx/engines/arxiv.py b/searx/engines/arxiv.py
index e54de4a58..6e231c382 100644
--- a/searx/engines/arxiv.py
+++ b/searx/engines/arxiv.py
@@ -61,7 +61,7 @@ def response(resp):
             content = content_string.format(doi_content="", abstract_content=abstract)

         if len(content) > 300:
-            content = content[0:300] + "..."
+            content = content[0:300] + "..."  # TODO: center snippet on query term

         publishedDate = datetime.strptime(entry.xpath('.//published')[0].text, '%Y-%m-%dT%H:%M:%SZ')
diff --git a/searx/engines/base.py b/searx/engines/base.py
index 0114f9798..3648d7ed0 100755
--- a/searx/engines/base.py
+++ b/searx/engines/base.py
@@ -80,10 +80,7 @@ def response(resp):
         date = datetime.now()  # needed in case no dcdate is available for an item

         for item in entry:
-            if item.attrib["name"] == "dchdate":
-                harvestDate = item.text
-
-            elif item.attrib["name"] == "dcdate":
+            if item.attrib["name"] == "dcdate":
                 date = item.text

             elif item.attrib["name"] == "dctitle":
diff --git a/searx/engines/bing_images.py b/searx/engines/bing_images.py
index 3085e1deb..2bcf82b84 100644
--- a/searx/engines/bing_images.py
+++ b/searx/engines/bing_images.py
@@ -18,7 +18,7 @@ from json import loads
 from searx.utils import match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 # engine dependent config
 categories = ['images']
diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py
index aeef7d19a..5489c7549 100644
--- a/searx/engines/bing_news.py
+++ b/searx/engines/bing_news.py
@@ -17,7 +17,7 @@ from urllib.parse import urlencode, urlparse, parse_qsl
 from lxml import etree
 from searx.utils import list_get, match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 # engine dependent config
 categories = ['news']
diff --git a/searx/engines/bing_videos.py b/searx/engines/bing_videos.py
index 4393ea590..143c71a3e 100644
--- a/searx/engines/bing_videos.py
+++ b/searx/engines/bing_videos.py
@@ -16,7 +16,7 @@ from urllib.parse import urlencode

 from searx.utils import match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 categories = ['videos']
 paging = True
diff --git a/searx/engines/currency_convert.py b/searx/engines/currency_convert.py
index 0e91d31e9..f41c135b9 100644
--- a/searx/engines/currency_convert.py
+++ b/searx/engines/currency_convert.py
@@ -41,8 +41,6 @@ def request(query, params):
     from_currency = name_to_iso4217(from_currency.strip())
     to_currency = name_to_iso4217(to_currency.strip())

-    q = (from_currency + to_currency).upper()
-
     params['url'] = url.format(from_currency, to_currency)
     params['amount'] = amount
     params['from'] = from_currency
diff --git a/searx/engines/duckduckgo.py b/searx/engines/duckduckgo.py
index 21cd926aa..c1c984623 100644
--- a/searx/engines/duckduckgo.py
+++ b/searx/engines/duckduckgo.py
@@ -49,11 +49,11 @@ correction_xpath = '//div[@id="did_you_mean"]//a'


 # match query's language to a region code that duckduckgo will accept
-def get_region_code(lang, lang_list=[]):
+def get_region_code(lang, lang_list=None):
     if lang == 'all':
         return None

-    lang_code = match_language(lang, lang_list, language_aliases, 'wt-WT')
+    lang_code = match_language(lang, lang_list or [], language_aliases, 'wt-WT')
     lang_parts = lang_code.split('-')

     # country code goes first
diff --git a/searx/engines/duckduckgo_definitions.py b/searx/engines/duckduckgo_definitions.py
index 5758022d2..5a7649173 100644
--- a/searx/engines/duckduckgo_definitions.py
+++ b/searx/engines/duckduckgo_definitions.py
@@ -16,7 +16,7 @@ from lxml import html
 from searx import logger
 from searx.data import WIKIDATA_UNITS
 from searx.engines.duckduckgo import language_aliases
-from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import
 from searx.utils import extract_text, html_to_text, match_language, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
diff --git a/searx/engines/duckduckgo_images.py b/searx/engines/duckduckgo_images.py
index 943b74880..438a8d54c 100644
--- a/searx/engines/duckduckgo_images.py
+++ b/searx/engines/duckduckgo_images.py
@@ -16,7 +16,7 @@ from json import loads
 from urllib.parse import urlencode

 from searx.engines.duckduckgo import get_region_code
-from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import
 from searx.poolrequests import get

 # engine dependent config
diff --git a/searx/engines/filecrop.py b/searx/engines/filecrop.py
index eef5be6e8..0331e7b19 100644
--- a/searx/engines/filecrop.py
+++ b/searx/engines/filecrop.py
@@ -8,7 +8,7 @@ search_url = url + '/search.php?{query}&size_i=0&size_f=100000000&engine_r=1&eng
 paging = True


-class FilecropResultParser(HTMLParser):
+class FilecropResultParser(HTMLParser):  # pylint: disable=W0223  # (see https://bugs.python.org/issue31844)

     def __init__(self):
         HTMLParser.__init__(self)
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
index 9669d81c3..a3daf6070 100644
--- a/searx/engines/google_images.py
+++ b/searx/engines/google_images.py
@@ -29,10 +29,7 @@ from lxml import html
 from flask_babel import gettext
 from searx import logger
 from searx.utils import extract_text, eval_xpath
-from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA
-
-# pylint: disable=unused-import
-# pylint: enable=unused-import
+from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 from searx.engines.google import (
     get_lang_country,
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index 6cfc0c8e7..f1b7cfa79 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -13,7 +13,7 @@ from urllib.parse import urlencode
 from lxml import html
 from searx.utils import match_language
-from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 # search-url
 categories = ['news']
diff --git a/searx/engines/peertube.py b/searx/engines/peertube.py
index 58ff38c02..e43b2a6b7 100644
--- a/searx/engines/peertube.py
+++ b/searx/engines/peertube.py
@@ -90,6 +90,5 @@ def response(resp):


 def _fetch_supported_languages(resp):
-    ret_val = {}
     peertube_languages = list(loads(resp.text).keys())
     return peertube_languages
diff --git a/searx/engines/pubmed.py b/searx/engines/pubmed.py
index 7eb2e92f9..07c45709e 100644
--- a/searx/engines/pubmed.py
+++ b/searx/engines/pubmed.py
@@ -81,7 +81,7 @@ def response(resp):
                 pass

         if len(content) > 300:
-            content = content[0:300] + "..."
+            content = content[0:300] + "..."  # TODO: center snippet on query term

         res_dict = {'url': url,
diff --git a/searx/engines/yahoo_news.py b/searx/engines/yahoo_news.py
index 11941cfe1..793d1104a 100644
--- a/searx/engines/yahoo_news.py
+++ b/searx/engines/yahoo_news.py
@@ -14,7 +14,7 @@ from datetime import datetime, timedelta
 from urllib.parse import urlencode
 from lxml import html
 from searx.engines.yahoo import parse_url, language_aliases
-from searx.engines.yahoo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.yahoo import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import
 from dateutil import parser
 from searx.utils import extract_text, extract_url, match_language
diff --git a/searx/plugins/__init__.py b/searx/plugins/__init__.py
index fe7ae0578..661b4f6aa 100644
--- a/searx/plugins/__init__.py
+++ b/searx/plugins/__init__.py
@@ -158,8 +158,8 @@ def prepare_package_resources(pkg, name):

 def sha_sum(filename):
     with open(filename, "rb") as f:
-        bytes = f.read()
-        return sha256(bytes).hexdigest()
+        file_content_bytes = f.read()
+        return sha256(file_content_bytes).hexdigest()


 plugins = PluginStore()
diff --git a/searx/query.py b/searx/query.py
index 7effda15a..422cd57b5 100644
--- a/searx/query.py
+++ b/searx/query.py
@@ -92,15 +92,15 @@ class RawTextQuery:
                     or lang == english_name
                     or lang.replace('-', ' ') == country)\
                    and lang not in self.languages:
-                        searx_query_part = True
-                        lang_parts = lang_id.split('-')
-                        if len(lang_parts) == 2:
-                            self.languages.append(lang_parts[0] + '-' + lang_parts[1].upper())
-                        else:
-                            self.languages.append(lang_id)
-                        # to ensure best match (first match is not necessarily the best one)
-                        if lang == lang_id:
-                            break
+                    searx_query_part = True
+                    lang_parts = lang_id.split('-')
+                    if len(lang_parts) == 2:
+                        self.languages.append(lang_parts[0] + '-' + lang_parts[1].upper())
+                    else:
+                        self.languages.append(lang_id)
+                    # to ensure best match (first match is not necessarily the best one)
+                    if lang == lang_id:
+                        break

             # user may set a valid, yet not selectable language
             if VALID_LANGUAGE_CODE.match(lang):
diff --git a/searx/results.py b/searx/results.py
index e0bbe3358..46f44e1ad 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -298,7 +298,7 @@ class ResultContainer:
         gresults = []
         categoryPositions = {}

-        for i, res in enumerate(results):
+        for res in results:
             # FIXME : handle more than one category per engine
             res['category'] = engines[res['engine']].categories[0]
diff --git a/searx/search.py b/searx/search.py
index b8ada3901..a3b80249e 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -43,9 +43,8 @@ else:
         logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
     else:
         logger.critical('outgoing.max_request_timeout if defined has to be float')
-        from sys import exit
-
-        exit(1)
+        import sys
+        sys.exit(1)


 class EngineRef:
diff --git a/searx/utils.py b/searx/utils.py
index 4eed87f21..738f2c4d5 100644
--- a/searx/utils.py
+++ b/searx/utils.py
@@ -52,7 +52,7 @@ class HTMLTextExtractorException(Exception):
     pass


-class HTMLTextExtractor(HTMLParser):
+class HTMLTextExtractor(HTMLParser):  # pylint: disable=W0223  # (see https://bugs.python.org/issue31844)

     def __init__(self):
         HTMLParser.__init__(self)
@@ -74,18 +74,18 @@ class HTMLTextExtractor(HTMLParser):
     def is_valid_tag(self):
         return not self.tags or self.tags[-1] not in blocked_tags

-    def handle_data(self, d):
+    def handle_data(self, data):
         if not self.is_valid_tag():
             return
-        self.result.append(d)
+        self.result.append(data)

-    def handle_charref(self, number):
+    def handle_charref(self, name):
         if not self.is_valid_tag():
             return
-        if number[0] in ('x', 'X'):
-            codepoint = int(number[1:], 16)
+        if name[0] in ('x', 'X'):
+            codepoint = int(name[1:], 16)
         else:
-            codepoint = int(number)
+            codepoint = int(name)
         self.result.append(chr(codepoint))

     def handle_entityref(self, name):
@@ -380,7 +380,7 @@ def _get_lang_to_lc_dict(lang_list):
     return value


-def _match_language(lang_code, lang_list=[], custom_aliases={}):
+def _match_language(lang_code, lang_list=[], custom_aliases={}):  # pylint: disable=W0102
     """auxiliary function to match lang_code in lang_list"""
     # replace language code with a custom alias if necessary
     if lang_code in custom_aliases:
@@ -403,7 +403,7 @@ def _match_language(lang_code, lang_list=[], custom_aliases={}):
     return _get_lang_to_lc_dict(lang_list).get(lang_code, None)


-def match_language(locale_code, lang_list=[], custom_aliases={}, fallback='en-US'):
+def match_language(locale_code, lang_list=[], custom_aliases={}, fallback='en-US'):  # pylint: disable=W0102
     """get the language code from lang_list that best matches locale_code"""
     # try to get language from given locale_code
     language = _match_language(locale_code, lang_list, custom_aliases)
diff --git a/searx/webapp.py b/searx/webapp.py
index 6186fe6ce..e73322a77 100755
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -466,7 +466,7 @@ def pre_request():
     else:
         try:
             preferences.parse_dict(request.form)
-        except Exception as e:
+        except Exception:
             logger.exception('invalid settings')
             request.errors.append(gettext('Invalid settings'))
@@ -819,7 +819,6 @@ def preferences():

     # render preferences
     image_proxy = request.preferences.get_value('image_proxy')
-    lang = request.preferences.get_value('language')
     disabled_engines = request.preferences.engines.get_disabled()
     allowed_plugins = request.preferences.plugins.get_enabled()
diff --git a/tests/unit/test_plugins.py b/tests/unit/test_plugins.py
index 969630c40..9ef4cd692 100644
--- a/tests/unit/test_plugins.py
+++ b/tests/unit/test_plugins.py
@@ -31,10 +31,10 @@ class PluginStoreTest(SearxTestCase):
         request = Mock()
         store.call([], 'asdf', request, Mock())

-        self.assertFalse(testplugin.asdf.called)
+        self.assertFalse(testplugin.asdf.called)  # pylint: disable=E1101

         store.call([testplugin], 'asdf', request, Mock())
-        self.assertTrue(testplugin.asdf.called)
+        self.assertTrue(testplugin.asdf.called)  # pylint: disable=E1101


 class SelfIPTest(SearxTestCase):
diff --git a/tests/unit/test_preferences.py b/tests/unit/test_preferences.py
index bee436027..510d4985a 100644
--- a/tests/unit/test_preferences.py
+++ b/tests/unit/test_preferences.py
@@ -5,8 +5,8 @@ from searx.testing import SearxTestCase


 class PluginStub:
-    def __init__(self, id, default_on):
-        self.id = id
+    def __init__(self, plugin_id, default_on):
+        self.id = plugin_id
         self.default_on = default_on

@@ -15,11 +15,11 @@ class TestSettings(SearxTestCase):
     def test_map_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = MapSetting(3, wrong_argument={'0': 0})
+            MapSetting(3, wrong_argument={'0': 0})

     def test_map_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = MapSetting(3, map={'dog': 1, 'bat': 2})
+            MapSetting(3, map={'dog': 1, 'bat': 2})

     def test_map_setting_invalid_choice(self):
         setting = MapSetting(2, map={'dog': 1, 'bat': 2})
@@ -36,18 +36,14 @@ class TestSettings(SearxTestCase):
         setting.parse('bat')
         self.assertEqual(setting.get_value(), 2)

-    def test_enum_setting_invalid_initialization(self):
-        with self.assertRaises(MissingArgumentException):
-            setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
-
     # enum settings
     def test_enum_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
+            EnumStringSetting('cat', wrong_argument=[0, 1, 2])

     def test_enum_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = EnumStringSetting(3, choices=[0, 1, 2])
+            EnumStringSetting(3, choices=[0, 1, 2])

     def test_enum_setting_invalid_choice(self):
         setting = EnumStringSetting(0, choices=[0, 1, 2])
@@ -67,11 +63,11 @@ class TestSettings(SearxTestCase):
     # multiple choice settings
     def test_multiple_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])
+            MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])

     def test_multiple_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
+            MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])

     def test_multiple_setting_invalid_choice(self):
         setting = MultipleChoiceSetting(['1', '2'], choices=['0', '1', '2'])
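
Several hunks above deal with pylint's dangerous-default-value warning (W0102): get_region_code switches to a None sentinel and passes lang_list or [] onward, while _match_language and match_language keep their mutable defaults and silence the check instead. The sketch below is not searx code; it is a minimal, self-contained illustration with made-up function names of why a mutable default argument is shared across calls and how the None-sentinel idiom avoids that.

# Standalone sketch (hypothetical names, not part of searx): the pitfall
# behind pylint W0102 and the usual None-sentinel fix.

def append_bad(item, bucket=[]):
    # The default list is created once at function definition time,
    # so every call without an explicit bucket mutates the same object.
    bucket.append(item)
    return bucket


def append_good(item, bucket=None):
    # A fresh list is created on each call unless the caller supplies one.
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket


if __name__ == '__main__':
    print(append_bad('a'))   # ['a']
    print(append_bad('b'))   # ['a', 'b']  <- state leaks between calls
    print(append_good('a'))  # ['a']
    print(append_good('b'))  # ['b']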