[mod] make python code pylint 2.16.1 compliant
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
parent 7320b0c796
commit 4c06837a50
@@ -404,4 +404,4 @@ known-third-party=enchant

 # Exceptions that will emit a warning when being caught. Defaults to
 # "Exception"
-overgeneral-exceptions=Exception
+overgeneral-exceptions=builtins.Exception
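Background on the pylintrc hunk above: pylint 2.16 expects fully qualified class names in overgeneral-exceptions, hence Exception becomes builtins.Exception. The option lists the base classes that trigger a warning when caught directly (the comment in the file itself: "Exceptions that will emit a warning when being caught"). A minimal, hypothetical sketch of the pattern that check targets; the function names and values are illustrative, not code from this commit:

# Hypothetical example, not part of the commit: with builtins.Exception listed
# in overgeneral-exceptions, pylint warns when the base class is caught.
import logging


def parse_port(value: str) -> int:
    try:
        return int(value)
    except Exception as exc:  # flagged as an overgeneral catch (broad-exception-caught)
        logging.warning("could not parse %r: %s", value, exc)
        return 0


def parse_port_strict(value: str) -> int:
    try:
        return int(value)
    except ValueError:  # catching the concrete error keeps pylint quiet
        return 0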
@@ -3,6 +3,7 @@
 """This module implements functions needed for the autocompleter.

 """
+# pylint: disable=use-dict-literal

 from json import loads
 from urllib.parse import urlencode
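The use-dict-literal disables added in this and the following hunks silence pylint's R1735 check, which prefers a dict display over a call to dict() with keyword arguments; the commit keeps the existing dict(...) calls and opts out per module instead of rewriting them. A small illustrative sketch, not taken from the commit (the query value is made up):

# Illustrative only: what R1735 (use-dict-literal) flags, and the literal form it prefers.
from urllib.parse import urlencode

query = "searx"

params_call = urlencode(dict(q=query, format="json"))        # flagged by use-dict-literal (R1735)
params_literal = urlencode({"q": query, "format": "json"})   # preferred dict display

assert params_call == params_literal  # both encode to the same query string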
@@ -2,6 +2,7 @@
 # lint: pylint
 """Semantic Scholar (Science)
 """
+# pylint: disable=use-dict-literal

 from urllib.parse import urlencode
 from searx.utils import html_to_text
@@ -3,6 +3,7 @@
 """Docker Hub (IT)

 """
+# pylint: disable=use-dict-literal

 from json import loads
 from urllib.parse import urlencode
@@ -3,7 +3,7 @@
 """
 Gigablast (Web)
 """
-# pylint: disable=invalid-name
+# pylint: disable=invalid-name, use-dict-literal

 import re
 from time import time
@@ -4,6 +4,7 @@
 """Processores for engine-type: ``online``

 """
+# pylint: disable=use-dict-literal

 from timeit import default_timer
 import asyncio
@@ -5,6 +5,8 @@
 """WebbApp

 """
+# pylint: disable=use-dict-literal
+
 import hashlib
 import hmac
 import json
@@ -9,6 +9,7 @@ Output file: :origin:`searx/data/ahmia_blacklist.txt` (:origin:`CI Update data
 .. _Ahmia's blacklist: https://ahmia.fi/blacklist/

 """
+# pylint: disable=use-dict-literal

 from os.path import join

@@ -21,6 +22,7 @@ URL = 'https://ahmia.fi/blacklist/banned/'
 def fetch_ahmia_blacklist():
     resp = requests.get(URL, timeout=3.0)
     if resp.status_code != 200:
+        # pylint: disable=broad-exception-raised
         raise Exception("Error fetching Ahmia blacklist, HTTP code " + resp.status_code)
     return resp.text.split()
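For the two raise Exception(...) sites (here and in the firefox hunk below), pylint 2.16's new broad-exception-raised check (W0719) fires because the bare Exception base class is raised; the commit silences it inline rather than introduce a narrower exception type. A hypothetical sketch of that alternative, not part of the commit; the choice of RuntimeError and the f-string are assumptions (the f-string also avoids concatenating the integer resp.status_code to a string, which would raise a TypeError):

# Hypothetical alternative, not from the commit: raising a more specific
# exception satisfies broad-exception-raised without an inline disable.
import requests

URL = 'https://ahmia.fi/blacklist/banned/'


def fetch_ahmia_blacklist():
    resp = requests.get(URL, timeout=3.0)
    if resp.status_code != 200:
        raise RuntimeError(f"Error fetching Ahmia blacklist, HTTP code {resp.status_code}")
    return resp.text.split()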
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later

 """Fetch firefox useragent signatures

 Output file: :origin:`searx/data/useragents.json` (:origin:`CI Update data ...
 <.github/workflows/data-update.yml>`).

 """
+# pylint: disable=use-dict-literal

 import json
 import re
@@ -40,6 +40,7 @@ useragents = {
 def fetch_firefox_versions():
     resp = requests.get(URL, timeout=2.0)
     if resp.status_code != 200:
+        # pylint: disable=broad-exception-raised
         raise Exception("Error fetching firefox versions, HTTP code " + resp.status_code)
     dom = html.fromstring(resp.text)
     versions = []