searxng/searx/autocomplete.py
Markus Heiser 5fca26f0b3 [mod] engines_languages.json: add new type engine_properties
This patch adds the boilerplate code needed to fetch properties from engines.
In the past we only fetched *languages*, but some engines need *regions* to
parameterize the engine request.

To fit into our *fetch languages* procedures, the boilerplate is implemented in
`searxng_extra/update/update_languages.py`, and the *engine_properties* are
stored alongside the languages in `searx/data/engines_languages.json`.
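
At runtime the stored values come back through `searx.data.ENGINES_LANGUAGES`;
for an engine that uses the new type, the entry is keyed by the engine name and
carries the structure shown further below.  A minimal sketch of the lookup,
with a placeholder engine name::

    from searx.data import ENGINES_LANGUAGES

    props = ENGINES_LANGUAGES['example_engine']  # placeholder engine name
    if isinstance(props, dict) and props.get('type') == 'engine_properties':
        regions = props['regions']
        languages = props['languages']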

This implementation is backward compatible with the `_fetch_supported_languages()`
infrastructure we already have.  If the day comes when all
`_fetch_supported_languages()` implementations have been moved to
`_fetch_engine_properties()` implementations, we can rename the files and scripts.

The new type `engine_properties` is a dictionary with the keys `languages` and
`regions`.  The values are dictionaries that map SearXNG's language and region
codes to the option values the engine uses::

    engine_properties = {
        'type' : 'engine_properties',  # <-- !!!
        'regions': {
            # 'ca-ES' : <engine's region name>
        },
        'languages': {
            # 'ca' : <engine's language name>
        },
    }

Similar to `supported_languages`, the properties are available in the engine
under the name `supported_properties`.
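
A minimal sketch of how an engine's `request()` might consume these values; the
URL, the parameter names and the `'all'` fallback are examples only, assuming
the selected locale arrives in `params['language']`::

    from urllib.parse import urlencode

    # set by SearXNG's engine loader; the empty dicts are just placeholders
    supported_properties = {'regions': {}, 'languages': {}}

    def request(query, params):
        # map SearXNG's region to the engine's own region name
        region = supported_properties['regions'].get(params['language'], 'all')
        params['url'] = 'https://example.org/search?' + urlencode({'q': query, 'region': region})
        return params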

Initially we start with languages & regions, but in a wider sense the type is
named *engine properties*.  Engines can store whatever options they need, and
maybe in the future there will be a need to fetch additional or completely
different properties.

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2022-04-19 14:35:02 +02:00


# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""This module implements functions needed for the autocompleter.
"""
from json import loads
from urllib.parse import urlencode
from lxml import etree
from httpx import HTTPError
from searx import settings
from searx.data import ENGINES_LANGUAGES
from searx.network import get as http_get
from searx.exceptions import SearxEngineResponseException

# a _fetch_supported_properties() for XPath engines isn't available right now
# _brave = ENGINES_LANGUAGES['brave'].keys()


def get(*args, **kwargs):
    if 'timeout' not in kwargs:
        kwargs['timeout'] = settings['outgoing']['request_timeout']
    kwargs['raise_for_httperror'] = True
    return http_get(*args, **kwargs)


def brave(query, _lang):
    # brave search autocompleter
    url = 'https://search.brave.com/api/suggest?'
    url += urlencode({'q': query})
    country = 'all'
    # if lang in _brave:
    #     country = lang
    kwargs = {'cookies': {'country': country}}
    resp = get(url, **kwargs)
    results = []
    if resp.ok:
        data = resp.json()
        for item in data[1]:
            results.append(item)
    return results


def dbpedia(query, _lang):
    # dbpedia autocompleter
    autocomplete_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'
    response = get(autocomplete_url + urlencode(dict(QueryString=query)))
    results = []
    if response.ok:
        dom = etree.fromstring(response.content)
        results = dom.xpath('//Result/Label//text()')
    return results


def duckduckgo(query, _lang):
    # duckduckgo autocompleter
    url = 'https://ac.duckduckgo.com/ac/?{0}&type=list'
    resp = loads(get(url.format(urlencode(dict(q=query)))).text)
    if len(resp) > 1:
        return resp[1]
    return []


def google(query, lang):
    # google autocompleter
    autocomplete_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'
    response = get(autocomplete_url + urlencode(dict(hl=lang, q=query)))
    results = []
    if response.ok:
        dom = etree.fromstring(response.text)
        results = dom.xpath('//suggestion/@data')
    return results


def seznam(query, _lang):
    # seznam search autocompleter
    url = 'https://suggest.seznam.cz/fulltext/cs?{query}'
    resp = get(url.format(query=urlencode(
        {'phrase': query, 'cursorPosition': len(query), 'format': 'json-2', 'highlight': '1', 'count': '6'}
    )))
    if not resp.ok:
        return []
    data = resp.json()
    return [
        ''.join([part.get('text', '') for part in item.get('text', [])])
        for item in data.get('result', [])
        if item.get('itemType', None) == 'ItemType.TEXT'
    ]


def startpage(query, lang):
    # startpage autocompleter
    lui = ENGINES_LANGUAGES['startpage'].get(lang, 'english')
    url = 'https://startpage.com/suggestions?{query}'
    resp = get(url.format(query=urlencode({'q': query, 'segment': 'startpage.udog', 'lui': lui})))
    data = resp.json()
    return [e['text'] for e in data.get('suggestions', []) if 'text' in e]


def swisscows(query, _lang):
    # swisscows autocompleter
    url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'
    resp = loads(get(url.format(query=urlencode({'query': query}))).text)
    return resp


def qwant(query, lang):
    # qwant autocompleter (additional parameter : lang=en_en&count=xxx )
    url = 'https://api.qwant.com/api/suggest?{query}'
    resp = get(url.format(query=urlencode({'q': query, 'lang': lang})))
    results = []
    if resp.ok:
        data = loads(resp.text)
        if data['status'] == 'success':
            for item in data['data']['items']:
                results.append(item['value'])
    return results


def wikipedia(query, lang):
    # wikipedia autocompleter
    url = 'https://' + lang + '.wikipedia.org/w/api.php?action=opensearch&{0}&limit=10&namespace=0&format=json'
    resp = loads(get(url.format(urlencode(dict(search=query)))).text)
    if len(resp) > 1:
        return resp[1]
    return []


backends = {
    'dbpedia': dbpedia,
    'duckduckgo': duckduckgo,
    'google': google,
    'seznam': seznam,
    'startpage': startpage,
    'swisscows': swisscows,
    'qwant': qwant,
    'wikipedia': wikipedia,
    'brave': brave,
}


def search_autocomplete(backend_name, query, lang):
    backend = backends.get(backend_name)
    if backend is None:
        return []
    try:
        return backend(query, lang)
    except (HTTPError, SearxEngineResponseException):
        return []
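

For reference, the dispatcher at the end of the module is used roughly like
this (the backend name, query and language are arbitrary example values)::

    from searx.autocomplete import backends, search_autocomplete

    # returns a list of suggestion strings, or [] for unknown backends and on
    # HTTP / engine response errors
    print(sorted(backends.keys()))
    print(search_autocomplete('duckduckgo', 'searxng', 'en'))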