[mod] engines_languages.json: add new type EngineProperties

This patch adds the boilerplate code, needed to fetch properties from engines.
In the past we only fetched *languages* but some engines need *regions* to
parameterize the engine request.

To fit into our *fetch language* procedures the boilerplate is implemented in
the `searxng_extra/update/update_languages.py` and the *engine_properties* are
stored along in the `searx/data/engines_languages.json`.

This implementation is backward compatible with the `_fetch_supported_languages()`
infrastructure we have.  If there comes the day when we have all
`_fetch_supported_languages()` implementations moved to `_fetch_engine_properties()`
implementations, we can rename the files and scripts.

The new type `EngineProperties` is a dictionary with keys `languages` and
`regions`.  The values are dictionaries to map from SearXNG's language & region
to option values the engine does use::

    engine_properties = {
        'type' : 'engine_properties',  # <-- !!!
        'regions': {
            # 'ca-ES' : <engine's region name>
        },
        'languages': {
            # 'ca' : <engine's language name>
        },
    }

Similar to the `supported_languages`, in the engine the properties are available
under the name `supported_properties`.

Initially we start with languages & regions, but in a wider sense the type is
named *engine properties*.  Engines can store whatever options they need, and
maybe in the future there will be a need to fetch additional or completely
different properties.

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
This commit is contained in:
Markus Heiser 2022-04-08 13:24:17 +02:00
parent 13ef9cc125
commit 3b10d63e2f
9 changed files with 171 additions and 70 deletions

View file

@ -1,14 +1,28 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""This script generates languages.py from intersecting each engine's supported
languages.
"""This script generates :origin:`searx/languages.py` from intersecting each
engine's supported properties. The script checks all engines for a function::
def _fetch_engine_properties(resp, engine_properties):
...
and a variable named ``supported_properties_url``. The HTTP get response of
``supported_properties_url`` is passed to the ``_fetch_engine_properties``
function, including an instance of :py:obj:`searx.engines.EngineProperties`.
Output files: :origin:`searx/data/engines_languages.json` and
:origin:`searx/languages.py` (:origin:`CI Update data ...
<.github/workflows/data-update.yml>`).
.. hint::
This implementation is backward compatible and supports the (deprecated)
``_fetch_supported_languages`` interface.
In the long term the deprecated implementations in the engines will be
replaced by ``_fetch_engine_properties``.
"""
# pylint: disable=invalid-name
@ -21,32 +35,67 @@ from babel.languages import get_global
from babel.core import parse_locale
from searx import settings, searx_dir
from searx.engines import load_engines, engines
from searx.network import set_timeout_for_thread
from searx import network
from searx.engines import load_engines, engines, EngineProperties
from searx.utils import gen_useragent
# Output files.
engines_languages_file = Path(searx_dir) / 'data' / 'engines_languages.json'
languages_file = Path(searx_dir) / 'languages.py'
# Fetches supported languages for each engine and writes json file with those.
def fetch_supported_languages():
set_timeout_for_thread(10.0)
"""Fetches supported languages for each engine and writes json file with those."""
network.set_timeout_for_thread(10.0)
engines_languages = {}
names = list(engines)
names.sort()
# The headers have been moved here from commit 9b6ffed06: Some engines (at
# least bing and startpage) return a different result list of supported
# languages depending on the IP location where the HTTP request comes from.
# The IP based results (from bing) can be avoided by setting a
# 'Accept-Language' in the HTTP request.
headers = {
'User-Agent': gen_useragent(),
'Accept-Language': "en-US,en;q=0.5", # bing needs to set the English language
}
for engine_name in names:
if hasattr(engines[engine_name], 'fetch_supported_languages'):
engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
print("fetched %s languages from engine %s" % (len(engines_languages[engine_name]), engine_name))
if type(engines_languages[engine_name]) == list: # pylint: disable=unidiomatic-typecheck
engines_languages[engine_name] = sorted(engines_languages[engine_name])
engine = engines[engine_name]
fetch_languages = getattr(engine, '_fetch_supported_languages', None)
fetch_properties = getattr(engine, '_fetch_engine_properties', None)
print("fetched languages from %s engines" % len(engines_languages))
if fetch_properties is not None:
resp = network.get(engine.supported_properties_url, headers=headers)
engine_properties = EngineProperties()
fetch_properties(resp, engine_properties)
print("%s: %s languages" % (engine_name, len(engine_properties.languages)))
print("%s: %s regions" % (engine_name, len(engine_properties.regions)))
engine_properties = engine_properties.asdict()
elif fetch_languages is not None:
# print("%s: using deprecated _fetch_supported_languages()" % engine_name)
resp = network.get(engine.supported_languages_url, headers=headers)
engine_properties = fetch_languages(resp)
if isinstance(engine_properties, list):
engine_properties.sort()
print(
"%s: fetched language %s containing %s items"
% (engine_name, engine_properties.__class__.__name__, len(engine_properties))
)
else:
continue
engines_languages[engine_name] = engine_properties
print("fetched properties from %s engines" % len(engines_languages))
print("write json file: %s" % (engines_languages_file))
# write json file
with open(engines_languages_file, 'w', encoding='utf-8') as f:
json.dump(engines_languages, f, indent=2, sort_keys=True)
@ -124,17 +173,38 @@ def get_territory_name(lang_code):
return country_name
# Join all language lists.
def join_language_lists(engines_languages):
"""Join all languages of the engines into one list. The returned language list
contains language codes (``zh``) and region codes (``zh-TW``). The codes can
be parsed by babel::
babel.Locale.parse(language_list[n])
"""
# pylint: disable=too-many-branches
language_list = {}
for engine_name in engines_languages:
for lang_code in engines_languages[engine_name]:
engine = engines[engine_name]
engine_properties = engines_languages[engine_name]
if isinstance(engine_properties, dict) and engine_properties.get('type') == 'engine_properties':
# items of type 'engine_properties' do have regions & languages, the
# list of engine_codes should contain both.
engine_codes = engine_properties.get('regions', {})
engine_codes.update(engine_properties.get('languages', {}))
engine_codes = engine_codes.keys()
else:
engine_codes = engine_properties
engine_properties = {}
if isinstance(engine_codes, dict):
engine_codes = engine_codes.keys()
for lang_code in engine_codes:
# apply custom fixes if necessary
if lang_code in getattr(engines[engine_name], 'language_aliases', {}).values():
lang_code = next(
lc for lc, alias in engines[engine_name].language_aliases.items() if lang_code == alias
)
if lang_code in getattr(engine, 'language_aliases', {}).values():
lang_code = next(lc for lc, alias in engine.language_aliases.items() if lang_code == alias)
locale = get_locale(lang_code)
@ -198,6 +268,7 @@ def filter_language_list(all_languages):
engine_name
for engine_name in engines.keys()
if 'general' in engines[engine_name].categories
and hasattr(engines[engine_name], 'supported_languages')
and engines[engine_name].supported_languages
and not engines[engine_name].disabled
]