Mirror of https://github.com/searxng/searxng (synced 2024-01-01 19:24:07 +01:00)
	[mod] searx.search: change function declaration: online then offline then utility functions.
commit e8df5d3f7f
parent 50e717c452
author Alexandre Flament

1 changed file with 42 additions and 42 deletions
@@ -97,48 +97,6 @@ def search_one_http_request(engine, query, request_params):
     return engine.response(response)


-def search_one_offline_request(engine, query, request_params):
-    return engine.search(query, request_params)
-
-
-def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
-    if engines[engine_name].offline:
-        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
-    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
-
-
-def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
-    engine = engines[engine_name]
-
-    try:
-        search_results = search_one_offline_request(engine, query, request_params)
-
-        if search_results:
-            result_container.extend(engine_name, search_results)
-
-            engine_time = time() - start_time
-            result_container.add_timing(engine_name, engine_time, engine_time)
-            with threading.RLock():
-                engine.stats['engine_time'] += engine_time
-                engine.stats['engine_time_count'] += 1
-
-    except ValueError as e:
-        record_offline_engine_stats_on_error(engine, result_container, start_time)
-        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
-    except Exception as e:
-        record_offline_engine_stats_on_error(engine, result_container, start_time)
-        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
-        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
-
-
-def record_offline_engine_stats_on_error(engine, result_container, start_time):
-    engine_time = time() - start_time
-    result_container.add_timing(engine.name, engine_time, engine_time)
-
-    with threading.RLock():
-        engine.stats['errors'] += 1
-
-
 def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
     # set timeout for all HTTP requests
     requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
@@ -214,6 +172,48 @@ def search_one_http_request_safe(engine_name, query, request_params, result_cont
             engine.suspend_end_time = 0


+def record_offline_engine_stats_on_error(engine, result_container, start_time):
+    engine_time = time() - start_time
+    result_container.add_timing(engine.name, engine_time, engine_time)
+
+    with threading.RLock():
+        engine.stats['errors'] += 1
+
+
+def search_one_offline_request(engine, query, request_params):
+    return engine.search(query, request_params)
+
+
+def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
+    engine = engines[engine_name]
+
+    try:
+        search_results = search_one_offline_request(engine, query, request_params)
+
+        if search_results:
+            result_container.extend(engine_name, search_results)
+
+            engine_time = time() - start_time
+            result_container.add_timing(engine_name, engine_time, engine_time)
+            with threading.RLock():
+                engine.stats['engine_time'] += engine_time
+                engine.stats['engine_time_count'] += 1
+
+    except ValueError as e:
+        record_offline_engine_stats_on_error(engine, result_container, start_time)
+        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
+    except Exception as e:
+        record_offline_engine_stats_on_error(engine, result_container, start_time)
+        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
+        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
+
+
+def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
+    if engines[engine_name].offline:
+        return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)  # noqa
+    return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
+
+
 def search_multiple_requests(requests, result_container, start_time, timeout_limit):
     search_id = uuid4().__str__()
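Note on the relocated offline path: search_one_request_safe dispatches to the offline branch whenever engines[engine_name].offline is set, and search_one_offline_request then just calls the engine module's own search(query, request_params). The sketch below is illustrative only and not part of this commit; the module layout, result fields and placeholder URL are assumptions, the only contract relied on being that an offline engine exposes search(query, params), returns a list of result dicts, and may raise ValueError on bad input (handled by the ValueError branch above).

# Hypothetical offline engine module -- a minimal sketch, not part of this commit.
# search_one_offline_request() would call search() below, and
# search_one_offline_request_safe() would feed the returned list into result_container.

def search(query, params):
    # params is the request_params dict built earlier in the search flow;
    # this sketch only reads the page number from it (assumption).
    pageno = params.get('pageno', 1)
    if not query:
        # invalid input surfaces as the ValueError branch in search_one_offline_request_safe()
        raise ValueError('empty query')
    return [{
        'url': 'file:///dev/null',  # placeholder, not a real engine target
        'title': 'offline result for {0!r} (page {1})'.format(query, pageno),
        'content': 'produced without any HTTP request',
    }]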