mirror of https://github.com/searxng/searxng
synced 2024-01-01 19:24:07 +01:00

Fix conflicts

commit 599ff39ddf

4 changed files with 90 additions and 1 deletions
requirements-dev.txt

@@ -8,6 +8,7 @@ transifex-client==0.14.2
selenium==3.141.0
twine==3.4.1
Pallets-Sphinx-Themes==1.2.3
docutils==0.16
Sphinx==3.5.3
sphinx-issues==1.2.0
sphinx-jinja==1.1.1
searx/engines/wordnik.py (new file, 77 lines)
@@ -0,0 +1,77 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Wordnik (general)

"""

from lxml.html import fromstring
from searx import logger
from searx.utils import extract_text
from searx.raise_for_httperror import raise_for_httperror

logger = logger.getChild('Wordnik engine')

# about
about = {
    "website": 'https://www.wordnik.com',
    "wikidata_id": 'Q8034401',
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

categories = ['general']
paging = False

URL = 'https://www.wordnik.com'
SEARCH_URL = URL + '/words/{query}'


def request(query, params):
    params['url'] = SEARCH_URL.format(query=query)
    logger.debug(f"query_url --> {params['url']}")
    return params


def response(resp):
    results = []

    raise_for_httperror(resp)
    dom = fromstring(resp.text)
    word = extract_text(dom.xpath('//*[@id="headword"]/text()'))

    definitions = []
    for src in dom.xpath('//*[@id="define"]//h3[@class="source"]'):
        src_text = extract_text(src).strip()
        if src_text.startswith('from '):
            src_text = src_text[5:]

        src_defs = []
        for def_item in src.xpath('following-sibling::ul[1]/li'):
            def_abbr = extract_text(def_item.xpath('.//abbr')).strip()
            def_text = extract_text(def_item).strip()
            if def_abbr:
                def_text = def_text[len(def_abbr):].strip()
            src_defs.append((def_abbr, def_text))

        definitions.append((src_text, src_defs))

    if not definitions:
        return results

    infobox = ''
    for src_text, src_defs in definitions:
        infobox += f"<small>{src_text}</small>"
        infobox += "<ul>"
        for def_abbr, def_text in src_defs:
            if def_abbr:
                def_abbr += ": "
            infobox += f"<li><i>{def_abbr}</i> {def_text}</li>"
        infobox += "</ul>"

    results.append({
        'infobox': word,
        'content': infobox,
    })

    return results
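Not part of the commit: the engine follows the usual searx contract, where request() fills params['url'] for the outgoing HTTP call and response() turns the fetched page into result dicts. The standalone sketch below re-runs the same XPath logic against an invented HTML snippet (the markup is an assumption for illustration, not Wordnik's actual page structure) so the selectors above are easier to follow; it needs only lxml.

from lxml.html import fromstring

# Invented markup mimicking what the XPath expressions above expect: a
# #headword element plus #define > h3.source headers, each followed by a
# <ul> of definitions whose <li> items start with an <abbr> part of speech.
SAMPLE_HTML = """
<div>
  <h1 id="headword">example</h1>
  <div id="define">
    <h3 class="source">from Some Dictionary</h3>
    <ul>
      <li><abbr>n.</abbr> Something that illustrates a rule.</li>
      <li><abbr>n.</abbr> A punishment meant to deter others.</li>
    </ul>
  </div>
</div>
"""

dom = fromstring(SAMPLE_HTML)
word = dom.xpath('//*[@id="headword"]/text()')[0]

for src in dom.xpath('//*[@id="define"]//h3[@class="source"]'):
    source = src.text_content().strip()
    if source.startswith('from '):
        source = source[5:]                      # "Some Dictionary"
    for item in src.xpath('following-sibling::ul[1]/li'):
        abbr = item.xpath('.//abbr')[0].text_content().strip()
        text = item.text_content().strip()[len(abbr):].strip()
        print(word, source, abbr, text)
# example Some Dictionary n. Something that illustrates a rule.
# example Some Dictionary n. A punishment meant to deter others.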
searx/settings.yml

@@ -1271,6 +1271,14 @@ engines:
    categories: videos
    disabled : True

  - name: wordnik
    engine: wordnik
    shortcut: def
    base_url: https://www.wordnik.com/
    categories: general
    timeout: 5.0
    disabled: True

  - name: słownik języka polskiego
    engine: sjp
    shortcut: sjp
searx/webapp.py

@@ -1133,11 +1133,14 @@ class ReverseProxyPathFix:

            base_url = urlparse(settings['server']['base_url'])
            self.script_name = base_url.path
            if self.script_name.endswith('/'):
                # remove trailing slash to avoid infinite redirect on the index
                # see https://github.com/searx/searx/issues/2729
                self.script_name = self.script_name[:-1]
            self.scheme = base_url.scheme
            self.server = base_url.netloc

    def __call__(self, environ, start_response):

        script_name = self.script_name or environ.get('HTTP_X_SCRIPT_NAME', '')
        if script_name:
            environ['SCRIPT_NAME'] = script_name
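Not part of the commit: a small standalone sketch (the helper and the "/searx" mount path are invented for illustration) of the SCRIPT_NAME/PATH_INFO split this kind of reverse-proxy middleware performs, assuming the rest of __call__ strips SCRIPT_NAME off PATH_INFO as such middlewares usually do. It shows why the added lines drop the trailing slash: kept, the index collapses to an empty PATH_INFO and subpages lose their leading slash, the situation behind the infinite redirect referenced in the code comment (searx issue 2729).

# Hypothetical helper (not searx code) reproducing the path split behind a
# reverse proxy.
def split_path(mount_path, request_path, strip_trailing_slash=True):
    """Split a proxied request path into (SCRIPT_NAME, PATH_INFO)."""
    script_name = mount_path
    if strip_trailing_slash and script_name.endswith('/'):
        # the normalisation the added lines perform
        script_name = script_name[:-1]
    if request_path.startswith(script_name):
        path_info = request_path[len(script_name):]
    else:
        path_info = request_path
    return script_name, path_info

# With the fix, PATH_INFO always keeps its leading "/":
assert split_path('/searx/', '/searx/') == ('/searx', '/')
assert split_path('/searx/', '/searx/about') == ('/searx', '/about')

# Without it, the index yields an empty PATH_INFO and subpages lose the
# leading slash, paths the app cannot route as written:
assert split_path('/searx/', '/searx/', strip_trailing_slash=False) == ('/searx/', '')
assert split_path('/searx/', '/searx/about', strip_trailing_slash=False) == ('/searx/', 'about')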
Plague Doctor