mirror of https://github.com/searxng/searxng
synced 2024-01-01 19:24:07 +01:00

Removed the keywords from the settings in qwant engine

Author: Cqoicebordel
Parent: f05087b93a
Commit: e0774c849c

3 changed files with 26 additions and 16 deletions
searx/engines/qwant.py
@@ -19,7 +19,10 @@ categories = None
 paging = True
 language_support = True
 
-search_url_keyword = None
+category_to_keyword = {'general': 'web',
+                       'images': 'images',
+                       'news': 'news',
+                       'social media': 'social'}
 
 # search-url
 url = 'https://api.qwant.com/api/search/{keyword}?count=10&offset={offset}&f=&{query}'
@@ -29,9 +32,15 @@ url = 'https://api.qwant.com/api/search/{keyword}?count=10&offset={offset}&f=&{q
 def request(query, params):
     offset = (params['pageno'] - 1) * 10
 
-    params['url'] = url.format(keyword=search_url_keyword,
-                               query=urlencode({'q': query}),
-                               offset=offset)
+    if categories[0] and categories[0] in category_to_keyword:
+
+        params['url'] = url.format(keyword=category_to_keyword[categories[0]],
+                                   query=urlencode({'q': query}),
+                                   offset=offset)
+    else:
+        params['url'] = url.format(keyword='web',
+                                   query=urlencode({'q': query}),
+                                   offset=offset)
 
     # add language tag if specified
     if params['language'] != 'all':
@@ -61,12 +70,12 @@ def response(resp):
         res_url = result['url']
         content = result['desc']
 
-        if search_url_keyword == 'web':
+        if category_to_keyword.get(categories[0], '') == 'web':
             results.append({'title': title,
                             'content': content,
                             'url': res_url})
 
-        elif search_url_keyword == 'images':
+        elif category_to_keyword.get(categories[0], '') == 'images':
             thumbnail_src = result['thumbnail']
             img_src = result['media']
             results.append({'template': 'images.html',
@@ -76,7 +85,8 @@ def response(resp):
                             'thumbnail_src': thumbnail_src,
                             'img_src': img_src})
 
-        elif search_url_keyword == 'news' or search_url_keyword == 'social':
+        elif (category_to_keyword.get(categories[0], '') == 'news' or
+              category_to_keyword.get(categories[0], '') == 'social'):
             published_date = datetime.fromtimestamp(result['date'], None)
 
             results.append({'url': res_url,
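For context, a minimal sketch (not part of the commit) of how the new category_to_keyword mapping drives URL construction in request(). The mapping and URL template are copied from the diff above; the use of urllib.parse is an assumption for a standalone Python 3 snippet, whereas the engine of this era imported urlencode from urllib under Python 2.

# Illustrative sketch only: shows what request() now builds for a given
# engine category, including the fallback to the 'web' keyword.
from urllib.parse import urlencode  # assumption: Python 3

category_to_keyword = {'general': 'web',
                       'images': 'images',
                       'news': 'news',
                       'social media': 'social'}
url = 'https://api.qwant.com/api/search/{keyword}?count=10&offset={offset}&f=&{query}'

def build_url(categories, query, pageno=1):
    # Unknown or empty categories fall back to the 'web' keyword,
    # mirroring the else branch added to request().
    if categories[0] and categories[0] in category_to_keyword:
        keyword = category_to_keyword[categories[0]]
    else:
        keyword = 'web'
    offset = (pageno - 1) * 10
    return url.format(keyword=keyword, query=urlencode({'q': query}), offset=offset)

print(build_url(['news'], 'test query'))
# https://api.qwant.com/api/search/news?count=10&offset=0&f=&q=test+query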
searx/settings.yml
@@ -171,25 +171,21 @@ engines:
   - name : qwant
     engine : qwant
     shortcut : qw
-    search_url_keyword : web
     categories : general
 
   - name : qwant images
     engine : qwant
     shortcut : qwi
-    search_url_keyword : images
     categories : images
 
   - name : qwant news
     engine : qwant
     shortcut : qwn
-    search_url_keyword : news
     categories : news
 
   - name : qwant social
     engine : qwant
     shortcut : qws
-    search_url_keyword : social
     categories : social media
 
   - name : kickass
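The search_url_keyword lines can be dropped because the engine now derives the API keyword from its categories setting. Below is a simplified sketch of why that works; it is an assumption about how searx turns an engine entry into module attributes, not code from this commit, and it assumes the searx package is importable.

# Hedged sketch: searx copies each key of an engine's settings entry onto the
# imported engine module, so `categories : news` ends up as qwant.categories.
import importlib

def load_engine(engine_data):
    module = importlib.import_module('searx.engines.' + engine_data['engine'])
    for key, value in engine_data.items():
        if key == 'categories':
            # comma-separated list; 'social media' stays a single category name
            value = [c.strip() for c in value.split(',')]
        setattr(module, key, value)
    return module

qwant = load_engine({'name': 'qwant news', 'engine': 'qwant',
                     'shortcut': 'qwn', 'categories': 'news'})
# qwant.categories is now ['news'], which request() and response() consult.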
searx/tests/engines/test_qwant.py
@@ -11,15 +11,19 @@ class TestQwantEngine(SearxTestCase):
         dicto = defaultdict(dict)
         dicto['pageno'] = 0
         dicto['language'] = 'fr_FR'
+        qwant.categories = ['']
         params = qwant.request(query, dicto)
         self.assertIn('url', params)
         self.assertIn(query, params['url'])
+        self.assertIn('web', params['url'])
         self.assertIn('qwant.com', params['url'])
         self.assertIn('fr_fr', params['url'])
 
         dicto['language'] = 'all'
+        qwant.categories = ['news']
         params = qwant.request(query, dicto)
         self.assertFalse('fr' in params['url'])
+        self.assertIn('news', params['url'])
 
     def test_response(self):
         self.assertRaises(AttributeError, qwant.response, None)
@@ -68,7 +72,7 @@ class TestQwantEngine(SearxTestCase):
         }
         """
         response = mock.Mock(text=json)
-        qwant.search_url_keyword = 'web'
+        qwant.categories = ['general']
         results = qwant.response(response)
         self.assertEqual(type(results), list)
         self.assertEqual(len(results), 1)
@@ -113,7 +117,7 @@ class TestQwantEngine(SearxTestCase):
         }
         """
         response = mock.Mock(text=json)
-        qwant.search_url_keyword = 'images'
+        qwant.categories = ['images']
         results = qwant.response(response)
         self.assertEqual(type(results), list)
         self.assertEqual(len(results), 1)
@@ -158,7 +162,7 @@ class TestQwantEngine(SearxTestCase):
         }
         """
         response = mock.Mock(text=json)
-        qwant.search_url_keyword = 'news'
+        qwant.categories = ['news']
         results = qwant.response(response)
         self.assertEqual(type(results), list)
         self.assertEqual(len(results), 1)
@@ -202,7 +206,7 @@ class TestQwantEngine(SearxTestCase):
         }
         """
         response = mock.Mock(text=json)
-        qwant.search_url_keyword = 'social'
+        qwant.categories = ['social media']
         results = qwant.response(response)
         self.assertEqual(type(results), list)
         self.assertEqual(len(results), 1)
@@ -246,7 +250,7 @@ class TestQwantEngine(SearxTestCase):
         }
         """
         response = mock.Mock(text=json)
-        qwant.search_url_keyword = ''
+        qwant.categories = ['']
         results = qwant.response(response)
         self.assertEqual(type(results), list)
         self.assertEqual(len(results), 0)
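The updated tests set qwant.categories directly on the engine module before calling request() and response(), instead of the removed qwant.search_url_keyword. A quick manual check in the same spirit, assuming the searx package of this era is importable:

# Exercise the engine the way the tests above do.
from searx.engines import qwant

qwant.categories = ['news']
params = qwant.request('test query', {'pageno': 1, 'language': 'all'})
assert 'search/news' in params['url']

qwant.categories = ['']            # unknown category falls back to 'web'
params = qwant.request('test query', {'pageno': 1, 'language': 'all'})
assert 'search/web' in params['url']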