Mirror of https://github.com/searxng/searxng, synced 2024-01-01 19:24:07 +01:00
[prep] indent Wikipedia functions
This commit indents the functions of the wikipedia engine so that the following refactoring commit has a clean diff.
parent 5dd28ff04b
commit 1e97bfab70
1 changed file with 52 additions and 52 deletions
@@ -25,74 +25,74 @@ supported_languages_url = 'https://meta.wikimedia.org/wiki/List_of_Wikipedias'

language_variants = {"zh": ("zh-cn", "zh-hk", "zh-mo", "zh-my", "zh-sg", "zh-tw")}


# set language in base_url
def url_lang(lang):
    lang_pre = lang.split('-')[0]
    if lang_pre == 'all' or lang_pre not in supported_languages and lang_pre not in language_aliases:
        return 'en'
    return match_language(lang, supported_languages, language_aliases).split('-')[0]
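For orientation, a minimal self-contained sketch of the fallback behaviour of url_lang, with stand-in values for supported_languages and language_aliases and a simplified stand-in for the imported match_language helper (the real ones are defined elsewhere in the engine and are only assumed here):

supported_languages = {'en': {}, 'de': {}, 'zh': {}}   # hypothetical stand-in data
language_aliases = {}

def match_language(lang, supported, aliases):
    # simplified stand-in for the match_language helper imported at the top of the file
    base = lang.split('-')[0]
    return base if base in supported else 'en'

def url_lang(lang):
    lang_pre = lang.split('-')[0]
    if lang_pre == 'all' or lang_pre not in supported_languages and lang_pre not in language_aliases:
        return 'en'
    return match_language(lang, supported_languages, language_aliases).split('-')[0]

print(url_lang('all'))    # 'en'  -- "all languages" falls back to the English wiki
print(url_lang('de-AT'))  # 'de'
print(url_lang('zh-TW'))  # 'zh'  -- the variant itself is handled later via Accept-Language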
# do search-request
def request(query, params):
    if query.islower():
        query = query.title()

    language = url_lang(params['language'])
    params['url'] = search_url.format(title=quote(query), language=language)

    if params['language'].lower() in language_variants.get(language, []):
        params['headers']['Accept-Language'] = params['language'].lower()

    params['headers']['User-Agent'] = searx_useragent()
    params['raise_for_httperror'] = False
    params['soft_max_redirects'] = 2

    return params
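As a rough illustration of what request() builds, assuming search_url is the REST page-summary template defined above this hunk (not shown here) and that url_lang('zh-TW') resolves to 'zh':

from urllib.parse import quote

# assumed template -- the real search_url is defined earlier in wikipedia.py
search_url = 'https://{language}.wikipedia.org/api/rest_v1/page/summary/{title}'

params = {'language': 'zh-TW', 'headers': {}}
query = 'garlic bread'.title()                   # request() title-cases an all-lowercase query
params['url'] = search_url.format(title=quote(query), language='zh')
params['headers']['Accept-Language'] = 'zh-tw'   # 'zh-tw' is listed in language_variants['zh']
print(params['url'])
# https://zh.wikipedia.org/api/rest_v1/page/summary/Garlic%20Bread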
# get response from search-request
def response(resp):
    if resp.status_code == 404:
        return []

    if resp.status_code == 400:
        try:
            api_result = loads(resp.text)
        except:
            pass
        else:
            if (
                api_result['type'] == 'https://mediawiki.org/wiki/HyperSwitch/errors/bad_request'
                and api_result['detail'] == 'title-invalid-characters'
            ):
                return []

    raise_for_httperror(resp)

    results = []
    api_result = loads(resp.text)

    # skip disambiguation pages
    if api_result.get('type') != 'standard':
        return []

    title = api_result['title']
    wikipedia_link = api_result['content_urls']['desktop']['page']

    results.append({'url': wikipedia_link, 'title': title})

    results.append(
        {
            'infobox': title,
            'id': wikipedia_link,
            'content': api_result.get('extract', ''),
            'img_src': api_result.get('thumbnail', {}).get('source'),
            'urls': [{'title': 'Wikipedia', 'url': wikipedia_link}],
        }
    )

    return results


# get supported languages from their site
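Finally, a sketch of the payload shape that response() consumes: a trimmed example of the JSON the page-summary endpoint returns, with invented values but exactly the fields the function reads above.

from json import loads

raw = '''
{
  "type": "standard",
  "title": "Garlic bread",
  "extract": "Garlic bread consists of bread topped with garlic and olive oil or butter.",
  "thumbnail": {"source": "https://upload.wikimedia.org/wikipedia/commons/Garlic_bread.jpg"},
  "content_urls": {"desktop": {"page": "https://en.wikipedia.org/wiki/Garlic_bread"}}
}
'''
api_result = loads(raw)
assert api_result.get('type') == 'standard'                     # not a disambiguation page, so it is kept
title = api_result['title']
wikipedia_link = api_result['content_urls']['desktop']['page']
img_src = api_result.get('thumbnail', {}).get('source')
# response() then appends {'url': wikipedia_link, 'title': title} as a link result
# and a second entry carrying the infobox (title, extract, thumbnail, Wikipedia url).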