forked from zaclys/searxng
		
Merge pull request #2173 from searxng/dependabot/pip/master/pylint-2.16.1

Bump pylint from 2.15.10 to 2.16.1
		
commit 3ebbdc179c

10 changed files with 13 additions and 4 deletions

@@ -404,4 +404,4 @@ known-third-party=enchant
 
 # Exceptions that will emit a warning when being caught. Defaults to
 # "Exception"
-overgeneral-exceptions=Exception
+overgeneral-exceptions=builtins.Exception
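
Pylint 2.16 warns when the overgeneral-exceptions option uses bare class names, which appears to be why the value above moves to the fully qualified builtins.Exception; the option lists exception classes that pylint considers too general to catch. A minimal, hypothetical sketch of the kind of code this setting governs (the helper and file handling below are invented for illustration, not taken from SearXNG):

    def read_config(path):  # hypothetical helper, not part of the SearXNG code base
        try:
            with open(path, encoding="utf-8") as fh:
                return fh.read()
        except Exception:  # pylint reports catching a class listed in overgeneral-exceptions
            return ""

Catching a narrower class such as OSError would not be reported.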

@@ -2,7 +2,7 @@ mock==5.0.1
 nose2[coverage_plugin]==0.12.0
 cov-core==1.15.0
 black==22.12.0
-pylint==2.15.10
+pylint==2.16.1
 splinter==0.19.0
 selenium==4.8.0
 twine==4.0.2

@@ -3,6 +3,7 @@
 """This module implements functions needed for the autocompleter.
 
 """
+# pylint: disable=use-dict-literal
 
 from json import loads
 from urllib.parse import urlencode
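
The "# pylint: disable=use-dict-literal" line added above, and repeated in the hunks that follow, silences pylint's use-dict-literal check; pylint 2.16 appears to extend that check to dict() calls with keyword arguments, which these modules presumably use when building request parameters. A rough sketch of the pattern, with made-up values rather than code from the diff:

    params = dict(q="weather", format="json")    # reported by use-dict-literal
    params = {"q": "weather", "format": "json"}  # equivalent dict literal, not reported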

@@ -2,6 +2,7 @@
 # lint: pylint
 """Semantic Scholar (Science)
 """
+# pylint: disable=use-dict-literal
 
 from urllib.parse import urlencode
 from searx.utils import html_to_text

@@ -3,6 +3,7 @@
 """Docker Hub (IT)
 
 """
+# pylint: disable=use-dict-literal
 
 from json import loads
 from urllib.parse import urlencode

@@ -3,7 +3,7 @@
 """
  Gigablast (Web)
 """
-# pylint: disable=invalid-name
+# pylint: disable=invalid-name, use-dict-literal
 
 import re
 from time import time

@@ -4,6 +4,7 @@
 """Processores for engine-type: ``online``
 
 """
+# pylint: disable=use-dict-literal
 
 from timeit import default_timer
 import asyncio

@@ -5,6 +5,8 @@
 """WebbApp
 
 """
+# pylint: disable=use-dict-literal
+
 import hashlib
 import hmac
 import json

@@ -9,6 +9,7 @@ Output file: :origin:`searx/data/ahmia_blacklist.txt` (:origin:`CI Update data
 .. _Ahmia's blacklist: https://ahmia.fi/blacklist/
 
 """
+# pylint: disable=use-dict-literal
 
 from os.path import join
 

@@ -21,6 +22,7 @@ URL = 'https://ahmia.fi/blacklist/banned/'
 def fetch_ahmia_blacklist():
     resp = requests.get(URL, timeout=3.0)
     if resp.status_code != 200:
+        # pylint: disable=broad-exception-raised
         raise Exception("Error fetching Ahmia blacklist, HTTP code " + resp.status_code)
     return resp.text.split()
 
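
The inline "# pylint: disable=broad-exception-raised" comments address a check introduced in pylint 2.16 that flags raising the generic Exception class directly. A sketch of the alternative the check points toward; the FetchError class and ensure_ok helper are invented for illustration and are not part of SearXNG:

    class FetchError(RuntimeError):
        """Illustrative exception type, not part of SearXNG."""

    def ensure_ok(status_code: int) -> None:
        # Raising a specific subclass satisfies broad-exception-raised
        # without an inline disable comment.
        if status_code != 200:
            raise FetchError(f"unexpected HTTP status {status_code}")

Incidentally, the raise Exception("... HTTP code " + resp.status_code) lines shown in the diff would fail with a TypeError if they were ever reached, since status_code is an int; str(resp.status_code) or an f-string would avoid that.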

@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # lint: pylint
 # SPDX-License-Identifier: AGPL-3.0-or-later
-
 """Fetch firefox useragent signatures
 
 Output file: :origin:`searx/data/useragents.json` (:origin:`CI Update data ...
 <.github/workflows/data-update.yml>`).
 
 """
+# pylint: disable=use-dict-literal
 
 import json
 import re

@@ -40,6 +40,7 @@ useragents = {
 def fetch_firefox_versions():
     resp = requests.get(URL, timeout=2.0)
     if resp.status_code != 200:
+        # pylint: disable=broad-exception-raised
         raise Exception("Error fetching firefox versions, HTTP code " + resp.status_code)
     dom = html.fromstring(resp.text)
     versions = []