Mirror of https://github.com/searxng/searxng, synced 2024-01-01 19:24:07 +01:00.
Improves PEP8 compatibility.

parent b22dd51bd7
commit 22fd0746d9

12 changed files with 28 additions and 34 deletions
@@ -17,16 +17,16 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
 '''
 
 from os.path import realpath, dirname, splitext, join
+import sys
 from imp import load_source
-import grequests
 from itertools import izip_longest, chain
 from operator import itemgetter
 from urlparse import urlparse
+from datetime import datetime
+import grequests
+from flask.ext.babel import gettext
 from searx import settings
 from searx.utils import gen_useragent
-import sys
-from datetime import datetime
-from flask.ext.babel import gettext
 
 engine_dir = dirname(realpath(__file__))
 
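The hunk above reorders module-level imports so that standard-library modules come first, then third-party packages, then searx's own modules, which is the grouping PEP 8 recommends. A minimal sketch of that convention, using only modules that already appear in the diff (illustrative, not part of the commit; the third-party and local groups assume the project's Python 2 dependencies such as grequests and Flask-Babel are installed):

    # Sketch of the PEP 8 import grouping applied throughout this commit;
    # the grouping, not these particular names, is the point.

    # 1. Standard library
    import sys
    from datetime import datetime
    from os.path import dirname, realpath

    # 2. Third-party packages
    import grequests
    from flask.ext.babel import gettext

    # 3. Local application imports
    from searx import settings
    from searx.utils import gen_useragent
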
@@ -1,6 +1,6 @@
-from lxml import html
 from urllib import urlencode
 from cgi import escape
+from lxml import html
 
 base_url = 'http://www.bing.com/'
 search_string = 'search?{query}&first={offset}'
 
@@ -1,6 +1,6 @@
 from urllib import urlencode
-from lxml import html
 from json import loads
+from lxml import html
 
 categories = ['videos']
 locale = 'en_US'
 
@@ -1,6 +1,6 @@
 from urllib import urlencode
-from lxml import html
 from urlparse import urljoin
+from lxml import html
 
 categories = ['images']
 
@@ -1,7 +1,7 @@
-from lxml import html
 from urlparse import urljoin
 from cgi import escape
 from urllib import quote
+from lxml import html
 
 categories = ['videos', 'music']
 
@@ -1,7 +1,7 @@
-from lxml import html
 from urlparse import urljoin
 from cgi import escape
 from urllib import urlencode
+from lxml import html
 
 categories = ['it']
 
@@ -1,7 +1,7 @@
 from urllib import urlencode
 from HTMLParser import HTMLParser
-from xpath import extract_text
 from lxml import html
+from xpath import extract_text
 
 base_url = 'http://vimeo.com'
 search_url = base_url + '/search?{query}'
 
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 from urllib import urlencode
-from searx.engines.xpath import extract_text, extract_url
 from lxml import html
+from searx.engines.xpath import extract_text, extract_url
 
 categories = ['general']
 search_url = 'http://search.yahoo.com/search?{query}&b={offset}'
 
@@ -3,7 +3,8 @@ from urllib import urlencode
 
 categories = ['videos']
 
-search_url = 'https://gdata.youtube.com/feeds/api/videos?alt=json&{query}&start-index={index}&max-results=25'  # noqa
+search_url = ('https://gdata.youtube.com/feeds/api/videos'
+              '?alt=json&{query}&start-index={index}&max-results=25')  # noqa
 
 paging = True
 
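The hunk above shortens an over-long URL template by splitting it into adjacent string literals inside parentheses; Python concatenates adjacent literals at compile time, so the wrapped form is identical to the original single line. A small illustration, not part of the commit (the query and index values are made up for the demo):

    # Adjacent string literals inside parentheses are joined at compile
    # time, so wrapping a long URL this way changes nothing at runtime.
    search_url = ('https://gdata.youtube.com/feeds/api/videos'
                  '?alt=json&{query}&start-index={index}&max-results=25')

    # The template formats exactly like the one-line version would.
    print(search_url.format(query='q=test', index=1))
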
@@ -1,11 +1,10 @@
 # -*- coding: utf-8 -*-
 
-from plone.testing import layered
-from robotsuite import RobotTestSuite
-from searx.testing import SEARXROBOTLAYER
-
 import os
 import unittest2 as unittest
+from plone.testing import layered
+from robotsuite import RobotTestSuite
+from searx.testing import SEARXROBOTLAYER
 
 
 def test_suite():
 
@@ -1,12 +1,10 @@
 # -*- coding: utf-8 -*-
 
+import json
+from urlparse import ParseResult
 from mock import patch
 from searx import webapp
 from searx.testing import SearxTestCase
-from urlparse import ParseResult
-
-
-import json
 
 
 class ViewsTestCase(SearxTestCase):
 
@@ -21,21 +21,17 @@ import json
 import cStringIO
 import os
 
-from flask import Flask, request, render_template
-from flask import url_for, Response, make_response, redirect
-from flask import send_from_directory
-
-from searx import settings, searx_dir
-from searx.engines import search
-from searx.engines import categories
-from searx.engines import engines
-from searx.engines import get_engines_stats
-from searx.engines import engine_shortcuts
-from searx.utils import UnicodeWriter
-from searx.utils import highlight_content, html_to_text
-from searx.languages import language_codes
-
+from flask import (
+    Flask, request, render_template, url_for, Response, make_response,
+    redirect, send_from_directory
+)
 from flask.ext.babel import Babel
+from searx import settings, searx_dir
+from searx.engines import (
+    search, categories, engines, get_engines_stats, engine_shortcuts
+)
+from searx.utils import UnicodeWriter, highlight_content, html_to_text
+from searx.languages import language_codes
 
 
 app = Flask(
 
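The final hunk collapses repeated `from flask import ...` and `from searx.engines import ...` lines into single imports wrapped in parentheses; PEP 8 prefers parenthesized continuation over backslashes when an import list does not fit in 79 characters. A standalone sketch of the same style using only the standard library (illustrative, not part of the commit; the path built below is just an example):

    # Parenthesized continuation keeps a long import list within the
    # 79-character limit without backslash continuation.
    from os.path import (
        dirname, join, realpath, splitext
    )

    # Use each wrapped name once; realpath('.') is the current directory.
    path = join(dirname(realpath('.')), 'searx', 'webapp.py')
    print(splitext(path))
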
Gabor Nagy