# searx/engines/mediawiki.py
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
General mediawiki-engine (Web)
"""
from string import Formatter
from urllib.parse import urlencode, quote
from searx.utils import html_to_text
# about
about = {
"website": None,
"wikidata_id": None,
"official_api_documentation": 'http://www.mediawiki.org/wiki/API:Search',
"use_official_api": True,
"require_api_key": False,
"results": 'JSON',
}
# engine dependent config
categories = ['general']
paging = True
number_of_results = 1
2017-05-18 20:19:44 +00:00
search_type = 'nearmatch' # possible values: title, text, nearmatch
# search-url
base_url = 'https://{language}.wikipedia.org/'
search_postfix = (
'w/api.php?action=query'
'&list=search'
'&{query}'
'&format=json'
'&sroffset={offset}'
'&srlimit={limit}'
'&srwhat={searchtype}'
)
2013-10-23 21:53:27 +00:00
2014-01-20 01:31:20 +00:00
# do search-request
def request(query, params):
offset = (params['pageno'] - 1) * number_of_results
2014-12-16 16:10:20 +00:00
string_args = dict(
query=urlencode({'srsearch': query}), offset=offset, limit=number_of_results, searchtype=search_type
)
2014-12-16 16:10:20 +00:00
2014-09-04 21:53:13 +00:00
format_strings = list(Formatter().parse(base_url))
2013-10-23 21:53:27 +00:00
if params['language'] == 'all':
language = 'en'
else:
language = params['language'].split('-')[0]
# format_string [('https://', 'language', '', None), ('.wikipedia.org/', None, None, None)]
if any(x[1] == 'language' for x in format_strings):
string_args['language'] = language
# write search-language back to params, required in response
params['language'] = language
2014-01-20 01:31:20 +00:00
search_url = base_url + search_postfix
params['url'] = search_url.format(**string_args)
2013-10-23 21:53:27 +00:00
return params
# get response from search-request
2013-10-23 21:53:27 +00:00
def response(resp):
results = []
2023-08-03 17:07:22 +00:00
search_results = resp.json()
# return empty array if there are no results
if not search_results.get('query', {}).get('search'):
return []
# parse results
for result in search_results['query']['search']:
if result.get('snippet', '').startswith('#REDIRECT'):
continue
url = (
base_url.format(language=resp.search_params['language'])
+ 'wiki/'
+ quote(result['title'].replace(' ', '_').encode())
)
# append result
2023-08-03 17:07:22 +00:00
results.append({'url': url, 'title': result['title'], 'content': html_to_text(result.get('snippet', ''))})
# return results
return results