forked from zaclys/searxng

Merge pull request #1814 from return42/fix-typos

[fix] typos / reported by @kianmeng in searx PR-3366

commit a3148e5115

52 changed files with 96 additions and 96 deletions
@@ -7,7 +7,7 @@
 ;;
 ;;    If you get ``*** EPC Error ***`` (even after a jedi:install-server) in
 ;;    your emacs session, mostly you have jedi-mode enabled but the python
-;;    enviroment is missed.  The python environment has to be next to the
+;;    environment is missed.  The python environment has to be next to the
 ;;    ``<repo>/.dir-locals.el`` in::
 ;;
 ;;       ./local/py3
@@ -64,10 +64,10 @@
                (setq-local python-environment-directory
                            (expand-file-name "./local" prj-root))

-               ;; to get in use of NVM enviroment, install https://github.com/rejeep/nvm.el
+               ;; to get in use of NVM environment, install https://github.com/rejeep/nvm.el
                (setq-local nvm-dir (expand-file-name "./.nvm" prj-root))

-               ;; use 'py3' enviroment as default
+               ;; use 'py3' environment as default
                (setq-local python-environment-default-root-name
                            "py3")

@@ -323,7 +323,7 @@ Special thanks to `NLNet <https://nlnet.nl>`__ for sponsoring multiple features
  - Removed engines: faroo

 Special thanks to `NLNet <https://nlnet.nl>`__ for sponsoring multiple features of this release.
-Special thanks to https://www.accessibility.nl/english for making accessibilty audit.
+Special thanks to https://www.accessibility.nl/english for making accessibility audit.

 News
 ~~~~
@@ -106,7 +106,7 @@ update_conf() {
             # There is a new version
             if [ "$FORCE_CONF_UPDATE" -ne 0 ]; then
                 # Replace the current configuration
-                printf '⚠️  Automaticaly update %s to the new version\n' "${CONF}"
+                printf '⚠️  Automatically update %s to the new version\n' "${CONF}"
                 if [ ! -f "${OLD_CONF}" ]; then
                     printf 'The previous configuration is saved to %s\n' "${OLD_CONF}"
                     mv "${CONF}" "${OLD_CONF}"
@@ -10,7 +10,7 @@ threads = 4
 # The right granted on the created socket
 chmod-socket = 666

-# Plugin to use and interpretor config
+# Plugin to use and interpreter config
 single-interpreter = true
 master = true
 plugin = python3
@@ -25,7 +25,7 @@ place the templates at::

   searx/templates/{theme_name}/result_templates/{template_name}

-Futhermore, if you do not wish to expose these engines on a public instance, you
+Furthermore, if you do not wish to expose these engines on a public instance, you
 can still add them and limit the access by setting ``tokens`` as described in
 section :ref:`private engines`.

@@ -39,7 +39,7 @@ Example
 Scenario:

 #. Recoll indexes a local filesystem mounted in ``/export/documents/reference``,
-#. the Recoll search inteface can be reached at https://recoll.example.org/ and
+#. the Recoll search interface can be reached at https://recoll.example.org/ and
 #. the contents of this filesystem can be reached though https://download.example.org/reference

 .. code:: yaml
@@ -21,7 +21,7 @@ above are added to ``settings.yml`` just commented out, as you have to
 Please note that if you are not using HTTPS to access these engines, you have to enable
 HTTP requests by setting ``enable_http`` to ``True``.

-Futhermore, if you do not want to expose these engines on a public instance, you
+Furthermore, if you do not want to expose these engines on a public instance, you
 can still add them and limit the access by setting ``tokens`` as described in
 section :ref:`private engines`.

@@ -341,7 +341,7 @@ Communication with search engines.
    outgoing:
      request_timeout: 2.0       # default timeout in seconds, can be override by engine
      max_request_timeout: 10.0  # the maximum timeout in seconds
-     useragent_suffix: ""       # informations like an email address to the administrator
+     useragent_suffix: ""       # information like an email address to the administrator
      pool_connections: 100      # Maximum number of allowable connections, or null
                                 # for no limits. The default is 100.
      pool_maxsize: 10           # Number of allowable keep-alive connections, or null
@@ -145,7 +145,7 @@ engine, you must install the package ``mysql-connector-python``.

 The authentication plugin is configurable by setting ``auth_plugin`` in the
 attributes.  By default it is set to ``caching_sha2_password``.  This is an
-example configuration for quering a MySQL server:
+example configuration for querying a MySQL server:

 .. code:: yaml

@@ -47,7 +47,7 @@ Engine File
    argument                type        information
    ======================= =========== ========================================================
    categories              list        pages, in which the engine is working
-   paging                  boolean     support multible pages
+   paging                  boolean     support multiple pages
    time_range_support      boolean     support search time range
    engine_type             str         - ``online`` :ref:`[ref] <demo online engine>` by
                                          default, other possibles values are:
@@ -202,7 +202,7 @@ To debug services from filtron and morty analogous use:

 Another point we have to notice is that the service (:ref:`SearXNG <searxng.sh>`
 runs under dedicated system user account with the same name (compare
-:ref:`create searxng user`).  To get a shell from theses accounts, simply call:
+:ref:`create searxng user`).  To get a shell from these accounts, simply call:

 .. tabs::

@@ -300,7 +300,7 @@ of the container:

 Now we can develop as usual in the working tree of our desktop system.  Every
 time the software was changed, you have to restart the SearXNG service (in the
-conatiner):
+container):

 .. tabs::

@@ -359,7 +359,7 @@ We build up a fully functional SearXNG suite in a archlinux container:
    $ sudo -H ./utils/lxc.sh install suite searxng-archlinux

 To access HTTP from the desktop we installed nginx for the services inside the
-conatiner:
+container:

 .. tabs::

@@ -15,7 +15,7 @@ generated and deployed at :docs:`github.io <.>`.  For build prerequisites read
 :ref:`docs build`.

 The source files of Searx's documentation are located at :origin:`docs`.  Sphinx
-assumes source files to be encoded in UTF-8 by defaul.  Run :ref:`make docs.live
+assumes source files to be encoded in UTF-8 by default.  Run :ref:`make docs.live
 <make docs.live>` to build HTML while editing.

 .. sidebar:: Further reading
@@ -227,13 +227,13 @@ To refer anchors use the `ref role`_ markup:

 .. code:: reST

-   Visit chapter :ref:`reST anchor`.  Or set hyperlink text manualy :ref:`foo
+   Visit chapter :ref:`reST anchor`.  Or set hyperlink text manually :ref:`foo
    bar <reST anchor>`.

 .. admonition:: ``:ref:`` role
    :class: rst-example

-   Visist chapter :ref:`reST anchor`.  Or set hyperlink text manualy :ref:`foo
+   Visist chapter :ref:`reST anchor`.  Or set hyperlink text manually :ref:`foo
    bar <reST anchor>`.

 .. _reST ordinary ref:
@@ -494,8 +494,8 @@ Figures & Images
    is flexible.  To get best results in the generated output format, install
    ImageMagick_ and Graphviz_.

-Searx's sphinx setup includes: :ref:`linuxdoc:kfigure`.  Scaleable here means;
-scaleable in sense of the build process.  Normally in absence of a converter
+Searx's sphinx setup includes: :ref:`linuxdoc:kfigure`.  Scalable here means;
+scalable in sense of the build process.  Normally in absence of a converter
 tool, the build process will break.  From the authors POV it’s annoying to care
 about the build process when handling with images, especially since he has no
 access to the build process.  With :ref:`linuxdoc:kfigure` the build process
@@ -503,7 +503,7 @@ continues and scales output quality in dependence of installed image processors.

 If you want to add an image, you should use the ``kernel-figure`` (inheritance
 of :dudir:`figure`) and ``kernel-image`` (inheritance of :dudir:`image`)
-directives.  E.g. to insert a figure with a scaleable image format use SVG
+directives.  E.g. to insert a figure with a scalable image format use SVG
 (:ref:`svg image example`):

 .. code:: reST
@@ -1185,7 +1185,7 @@ and *targets* (e.g. a ref to :ref:`row 2 of table's body <row body 2>`).
         - cell 4.4

       * - row 5
-        - cell 5.1 with automatic span to rigth end
+        - cell 5.1 with automatic span to right end

       * - row 6
         - cell 6.1
@@ -1237,7 +1237,7 @@ and *targets* (e.g. a ref to :ref:`row 2 of table's body <row body 2>`).
         - cell 4.4

       * - row 5
-        - cell 5.1 with automatic span to rigth end
+        - cell 5.1 with automatic span to right end

       * - row 6
         - cell 6.1
@@ -93,14 +93,14 @@ Parameters

   :default:
      ``Hash_plugin``, ``Search_on_category_select``,
-     ``Self_Informations``, ``Tracker_URL_remover``,
+     ``Self_Information``, ``Tracker_URL_remover``,
      ``Ahmia_blacklist``

   :values:
      .. enabled by default

      ``Hash_plugin``, ``Search_on_category_select``,
-     ``Self_Informations``, ``Tracker_URL_remover``,
+     ``Self_Information``, ``Tracker_URL_remover``,
      ``Ahmia_blacklist``,

      .. disabled by default
@@ -275,12 +275,12 @@ def is_engine_active(engine: Engine):

 def register_engine(engine: Engine):
     if engine.name in engines:
-        logger.error('Engine config error: ambigious name: {0}'.format(engine.name))
+        logger.error('Engine config error: ambiguous name: {0}'.format(engine.name))
         sys.exit(1)
     engines[engine.name] = engine

     if engine.shortcut in engine_shortcuts:
-        logger.error('Engine config error: ambigious shortcut: {0}'.format(engine.shortcut))
+        logger.error('Engine config error: ambiguous shortcut: {0}'.format(engine.shortcut))
         sys.exit(1)
     engine_shortcuts[engine.shortcut] = engine.name

@@ -79,7 +79,7 @@ def response(resp):
     # * book / performing art / film / television  / media franchise / concert tour / playwright
     # * prepared food
     # * website / software / os / programming language / file format / software engineer
-    # * compagny
+    # * company

     content = ''
     heading = search_res.get('Heading', '')
@@ -40,7 +40,7 @@ def response(resp):

     search_res = loads(resp.text)

-    # check if items are recieved
+    # check if items are received
     if 'items' not in search_res:
         return []

@@ -326,14 +326,14 @@ def response(resp):

         # google *sections*
         if extract_text(eval_xpath(result, g_section_with_header)):
-            logger.debug("ingoring <g-section-with-header>")
+            logger.debug("ignoring <g-section-with-header>")
             continue

         try:
             title_tag = eval_xpath_getindex(result, title_xpath, 0, default=None)
             if title_tag is None:
                 # this not one of the common google results *section*
-                logger.debug('ingoring item from the result_xpath list: missing title')
+                logger.debug('ignoring item from the result_xpath list: missing title')
                 continue
             title = extract_text(title_tag)
             url = eval_xpath_getindex(result, href_xpath, 0, None)
@@ -341,7 +341,7 @@ def response(resp):
                 continue
             content = extract_text(eval_xpath_getindex(result, content_xpath, 0, default=None), allow_none=True)
             if content is None:
-                logger.debug('ingoring item from the result_xpath list: missing content of title "%s"', title)
+                logger.debug('ignoring item from the result_xpath list: missing content of title "%s"', title)
                 continue

             logger.debug('add link to results: %s', title)
@@ -141,7 +141,7 @@ def response(resp):
                 padding = (4 - (len(jslog) % 4)) * "="
                 jslog = b64decode(jslog + padding)
             except binascii.Error:
-                # URL cant be read, skip this result
+                # URL can't be read, skip this result
                 continue

             # now we have : b'[null, ... null,"https://www.cnn.com/.../index.html"]'
@@ -150,7 +150,7 @@ def response(resp):

         # ignore google *sections*
         if extract_text(eval_xpath(result, g_section_with_header)):
-            logger.debug("ingoring <g-section-with-header>")
+            logger.debug("ignoring <g-section-with-header>")
             continue

         # ingnore articles without an image id / e.g. news articles
@@ -70,7 +70,7 @@ def response(resp):
         elif properties.get('osm_type') == 'R':
             osm_type = 'relation'
         else:
-            # continue if invalide osm-type
+            # continue if invalid osm-type
             continue

         url = result_base_url.format(osm_type=osm_type, osm_id=properties.get('osm_id'))
@@ -209,7 +209,7 @@ def _fetch_supported_languages(resp):
     # native name, the English name of the writing script used by the language,
     # or occasionally something else entirely.

-    # this cases are so special they need to be hardcoded, a couple of them are mispellings
+    # this cases are so special they need to be hardcoded, a couple of them are misspellings
     language_names = {
         'english_uk': 'en-GB',
         'fantizhengwen': ['zh-TW', 'zh-HK'],
@@ -50,7 +50,7 @@ WIKIDATA_PROPERTIES = {
 # SERVICE wikibase:label: https://en.wikibooks.org/wiki/SPARQL/SERVICE_-_Label#Manual_Label_SERVICE
 # https://en.wikibooks.org/wiki/SPARQL/WIKIDATA_Precision,_Units_and_Coordinates
 # https://www.mediawiki.org/wiki/Wikibase/Indexing/RDF_Dump_Format#Data_model
-# optmization:
+# optimization:
 # * https://www.wikidata.org/wiki/Wikidata:SPARQL_query_service/query_optimization
 # * https://github.com/blazegraph/database/wiki/QueryHints
 QUERY_TEMPLATE = """
@@ -386,7 +386,7 @@ def get_attributes(language):
     add_amount('P2046')  # area
     add_amount('P281')  # postal code
     add_label('P38')  # currency
-    add_amount('P2048')  # heigth (building)
+    add_amount('P2048')  # height (building)

     # Media
     for p in [
@@ -50,7 +50,7 @@ def request(query, params):
 # replace private user area characters to make text legible
 def replace_pua_chars(text):
     pua_chars = {
-        '\uf522': '\u2192',  # rigth arrow
+        '\uf522': '\u2192',  # right arrow
         '\uf7b1': '\u2115',  # set of natural numbers
         '\uf7b4': '\u211a',  # set of rational numbers
         '\uf7b5': '\u211d',  # set of real numbers
@@ -53,7 +53,7 @@ Replacements are:

       0: none, 1: moderate, 2:strict

-  If not supported, the URL paramter is an empty string.
+  If not supported, the URL parameter is an empty string.

 """

@@ -114,7 +114,7 @@ time_range_support = False

 time_range_url = '&hours={time_range_val}'
 '''Time range URL parameter in the in :py:obj:`search_url`.  If no time range is
-requested by the user, the URL paramter is an empty string.  The
+requested by the user, the URL parameter is an empty string.  The
 ``{time_range_val}`` replacement is taken from the :py:obj:`time_range_map`.

 .. code:: yaml
@@ -30,7 +30,7 @@ def get_external_url(url_id, item_id, alternative="default"):
     """Return an external URL or None if url_id is not found.

     url_id can take value from data/external_urls.json
-    The "imdb_id" value is automaticaly converted according to the item_id value.
+    The "imdb_id" value is automatically converted according to the item_id value.

     If item_id is None, the raw URL with the $1 is returned.
     """
@@ -17,7 +17,7 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
 from flask_babel import gettext
 import re

-name = gettext('Self Informations')
+name = gettext('Self Information')
 description = gettext('Displays your IP if the query is "ip" and your user agent if the query contains "user agent".')
 default_on = True
 preference_section = 'query'
@@ -52,7 +52,7 @@ class Setting:
         return self.value

     def save(self, name: str, resp: flask.Response):
-        """Save cookie ``name`` in the HTTP reponse obect
+        """Save cookie ``name`` in the HTTP response object

         If needed, its overwritten in the inheritance."""
         resp.set_cookie(name, self.value, max_age=COOKIE_MAX_AGE)
@@ -113,7 +113,7 @@ class MultipleChoiceSetting(Setting):
                 self.value.append(choice)

     def save(self, name: str, resp: flask.Response):
-        """Save cookie ``name`` in the HTTP reponse obect"""
+        """Save cookie ``name`` in the HTTP response object"""
         resp.set_cookie(name, ','.join(self.value), max_age=COOKIE_MAX_AGE)


@@ -146,7 +146,7 @@ class SetSetting(Setting):
         self.values = set(elements)

     def save(self, name: str, resp: flask.Response):
-        """Save cookie ``name`` in the HTTP reponse obect"""
+        """Save cookie ``name`` in the HTTP response object"""
         resp.set_cookie(name, ','.join(self.values), max_age=COOKIE_MAX_AGE)


@@ -193,7 +193,7 @@ class MapSetting(Setting):
         self.key = data  # pylint: disable=attribute-defined-outside-init

     def save(self, name: str, resp: flask.Response):
-        """Save cookie ``name`` in the HTTP reponse obect"""
+        """Save cookie ``name`` in the HTTP response object"""
         if hasattr(self, 'key'):
             resp.set_cookie(name, self.key, max_age=COOKIE_MAX_AGE)

@@ -239,7 +239,7 @@ class BooleanChoices:
         return (k for k, v in self.choices.items() if not v)

     def save(self, resp: flask.Response):
-        """Save cookie in the HTTP reponse obect"""
+        """Save cookie in the HTTP response object"""
         disabled_changed = (k for k in self.disabled if self.default_choices[k])
         enabled_changed = (k for k in self.enabled if not self.default_choices[k])
         resp.set_cookie('disabled_{0}'.format(self.name), ','.join(disabled_changed), max_age=COOKIE_MAX_AGE)
@@ -496,7 +496,7 @@ class Preferences:
         return ret_val

     def save(self, resp: flask.Response):
-        """Save cookie in the HTTP reponse obect"""
+        """Save cookie in the HTTP response object"""
         for user_setting_name, user_setting in self.key_value_settings.items():
             # pylint: disable=unnecessary-dict-index-lookup
             if self.key_value_settings[user_setting_name].locked:
@@ -198,10 +198,10 @@ class BangParser(QueryPartParser):
             self.raw_text_query.enginerefs.append(EngineRef(value, 'none'))
             return True

-        # check if prefix is equal with categorie name
+        # check if prefix is equal with category name
         if value in categories:
             # using all engines for that search, which
-            # are declared under that categorie name
+            # are declared under that category name
             self.raw_text_query.enginerefs.extend(
                 EngineRef(engine.name, value)
                 for engine in categories[value]
@@ -219,7 +219,7 @@ class BangParser(QueryPartParser):
                     self._add_autocomplete(first_char + suggestion)
             return

-        # check if query starts with categorie name
+        # check if query starts with category name
         for category in categories:
             if category.startswith(value):
                 self._add_autocomplete(first_char + category.replace(' ', '_'))
@@ -311,7 +311,7 @@ class RawTextQuery:

     def getFullQuery(self):
         """
-        get full querry including whitespaces
+        get full query including whitespaces
         """
         return '{0} {1}'.format(' '.join(self.query_parts), self.getQuery()).strip()

@@ -134,9 +134,9 @@ def result_score(result):
         if hasattr(engines[result_engine], 'weight'):
             weight *= float(engines[result_engine].weight)

-    occurences = len(result['positions'])
+    occurrences = len(result['positions'])

-    return sum((occurences * weight) / position for position in result['positions'])
+    return sum((occurrences * weight) / position for position in result['positions'])


 class Timing(NamedTuple):
@@ -286,7 +286,7 @@ class ResultContainer:
         if 'template' not in result:
             result['template'] = 'default.html'

-        # strip multiple spaces and cariage returns from content
+        # strip multiple spaces and carriage returns from content
         if result.get('content'):
             result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])

@@ -315,7 +315,7 @@ class ResultContainer:
                     return merged_result
                 else:
                     # it's an image
-                    # it's a duplicate if the parsed_url, template and img_src are differents
+                    # it's a duplicate if the parsed_url, template and img_src are different
                     if result.get('img_src', '') == merged_result.get('img_src', ''):
                         return merged_result
         return None
@@ -70,7 +70,7 @@ def run(engine_name_list, verbose):
             stderr.write(f'{BOLD_SEQ}Engine {name:30}{RESET_SEQ}Checking\n')
         checker = searx.search.checker.Checker(processor)
         checker.run()
-        if checker.test_results.succesfull:
+        if checker.test_results.successful:
             stdout.write(f'{BOLD_SEQ}Engine {name:30}{RESET_SEQ}{GREEN}OK{RESET_SEQ}\n')
             if verbose:
                 stdout.write(f'    {"found languages":15}: {" ".join(sorted(list(checker.test_results.languages)))}\n')
@@ -107,7 +107,7 @@ def run():
             logger.debug('Checking %s engine', name)
             checker = Checker(processor)
             checker.run()
-            if checker.test_results.succesfull:
+            if checker.test_results.successful:
                 result['engines'][name] = {'success': True}
             else:
                 result['engines'][name] = {'success': False, 'errors': checker.test_results.errors}
@@ -174,7 +174,7 @@ class TestResults:
         self.languages.add(language)

     @property
-    def succesfull(self):
+    def successful(self):
         return len(self.errors) == 0

     def __iter__(self):
@@ -317,7 +317,7 @@ class ResultContainerTests:
             self._record_error('No result')

     def one_title_contains(self, title: str):
-        """Check one of the title contains `title` (case insensitive comparaison)"""
+        """Check one of the title contains `title` (case insensitive comparison)"""
         title = title.lower()
         for result in self.result_container.get_ordered_results():
             if title in result['title'].lower():
@@ -75,7 +75,7 @@ class OnlineProcessor(EngineProcessor):

     def _send_http_request(self, params):
         # create dictionary which contain all
-        # informations about the request
+        # information about the request
         request_args = dict(
             headers=params['headers'], cookies=params['cookies'], verify=params['verify'], auth=params['auth']
         )
@@ -52,7 +52,7 @@ search:

 server:
   # If you change port, bind_address or base_url don't forget to rebuild
-  # instance's enviroment (make buildenv)
+  # instance's environment (make buildenv)
   port: 8888
   bind_address: "127.0.0.1"
   base_url: false  # Possible values: false or "https://example.org/location".
@@ -135,7 +135,7 @@ outgoing:
   request_timeout: 3.0
   # the maximum timeout in seconds
   # max_request_timeout: 10.0
-  # suffix of searx_useragent, could contain informations like an email address
+  # suffix of searx_useragent, could contain information like an email address
   # to the administrator
   useragent_suffix: ""
   # The maximum number of concurrent connections that may be established.
@@ -183,7 +183,7 @@ outgoing:
 #   # these plugins are enabled if nothing is configured ..
 #   - 'Hash plugin'
 #   - 'Search on category select'
-#   - 'Self Informations'
+#   - 'Self Information'
 #   - 'Tracker URL remover'
 #   - 'Ahmia blacklist'  # activation depends on outgoing.using_tor_proxy
 #   # these plugins are disabled if nothing is configured ..
@@ -121,10 +121,10 @@ def is_use_default_settings(user_settings):
     raise ValueError('Invalid value for use_default_settings')


-def load_settings(load_user_setttings=True):
+def load_settings(load_user_settings=True):
     default_settings_path = get_default_settings_path()
     user_settings_path = get_user_settings_path()
-    if user_settings_path is None or not load_user_setttings:
+    if user_settings_path is None or not load_user_settings:
         # no user settings
         return (load_yaml(default_settings_path), 'load the default settings from {}'.format(default_settings_path))

@@ -136,7 +136,7 @@ def load_settings(load_user_setttings=True):
         update_settings(default_settings, user_settings)
         return (
             default_settings,
-            'merge the default settings ( {} ) and the user setttings ( {} )'.format(
+            'merge the default settings ( {} ) and the user settings ( {} )'.format(
                 default_settings_path, user_settings_path
             ),
         )
@@ -261,7 +261,7 @@
           <option value="GET" {% if method == 'GET' %}selected="selected"{% endif %}>GET</option>
         </select>
       </p>
-      <div class="description">{{ _('Change how forms are submited, <a href="http://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Request_methods" rel="external">learn more about request methods</a>') }}</div>
+      <div class="description">{{ _('Change how forms are submitted, <a href="http://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Request_methods" rel="external">learn more about request methods</a>') }}</div>
     </fieldset>
     {% endif %}
     {% if 'image_proxy' not in locked_preferences %}
@@ -273,7 +273,7 @@ def extract_url(xpath_results, base_url) -> str:
     raise ValueError('URL not found')


-def dict_subset(dictionnary: MutableMapping, properties: Set[str]) -> Dict:
+def dict_subset(dictionary: MutableMapping, properties: Set[str]) -> Dict:
     """Extract a subset of a dict

     Examples:
@@ -282,7 +282,7 @@ def dict_subset(dictionnary: MutableMapping, properties: Set[str]) -> Dict:
         >>> >> dict_subset({'A': 'a', 'B': 'b', 'C': 'c'}, ['A', 'D'])
         {'A': 'a'}
     """
-    return {k: dictionnary[k] for k in properties if k in dictionnary}
+    return {k: dictionary[k] for k in properties if k in dictionary}


 def get_torrent_size(filesize: str, filesize_multiplier: str) -> Optional[int]:
@@ -54,7 +54,7 @@ def parse_lang(preferences: Preferences, form: Dict[str, str], raw_text_query: R
         return preferences.get_value('language')
     # get language
     # set specific language if set on request, query or preferences
-    # TODO support search with multible languages
+    # TODO support search with multiple languages
     if len(raw_text_query.languages):
         query_lang = raw_text_query.languages[-1]
     elif 'language' in form:
@@ -223,7 +223,7 @@ def get_search_query_from_webapp(
     disabled_engines = preferences.engines.get_disabled()

     # parse query, if tags are set, which change
-    # the serch engine or search-language
+    # the search engine or search-language
     raw_text_query = RawTextQuery(form['q'], disabled_engines)

     # set query
@@ -238,7 +238,7 @@ def get_search_query_from_webapp(

     if not is_locked('categories') and raw_text_query.specific:
         # if engines are calculated from query,
-        # set categories by using that informations
+        # set categories by using that information
         query_engineref_list = raw_text_query.enginerefs
     else:
         # otherwise, using defined categories to
@@ -244,7 +244,7 @@ def code_highlighter(codelines, language=None):
         language = 'text'

     try:
-        # find lexer by programing language
+        # find lexer by programming language
         lexer = get_lexer_by_name(language, stripall=True)

     except Exception as e:  # pylint: disable=broad-except
@@ -42,7 +42,7 @@ class UnicodeWriter:
         # Fetch UTF-8 output from the queue ...
         data = self.queue.getvalue()
         data = data.strip('\x00')
-        # ... and reencode it into the target encoding
+        # ... and re-encode it into the target encoding
         data = self.encoder.encode(data)
         # write to the target stream
         self.stream.write(data.decode())
@@ -26,7 +26,7 @@ set_loggers(wikidata, 'wikidata')
 locales_initialize()

 # ORDER BY (with all the query fields) is important to keep a deterministic result order
-# so multiple invokation of this script doesn't change currencies.json
+# so multiple invocation of this script doesn't change currencies.json
 SARQL_REQUEST = """
 SELECT DISTINCT ?iso4217 ?unit ?unicode ?label ?alias WHERE {
   ?item wdt:P498 ?iso4217; rdfs:label ?label.
@@ -42,7 +42,7 @@ ORDER BY ?iso4217 ?unit ?unicode ?label ?alias
 """

 # ORDER BY (with all the query fields) is important to keep a deterministic result order
-# so multiple invokation of this script doesn't change currencies.json
+# so multiple invocation of this script doesn't change currencies.json
 SPARQL_WIKIPEDIA_NAMES_REQUEST = """
 SELECT DISTINCT ?iso4217 ?article_name WHERE {
   ?item wdt:P498 ?iso4217 .
@@ -29,7 +29,7 @@ engines_languages_file = Path(searx_dir) / 'data' / 'engines_languages.json'
 languages_file = Path(searx_dir) / 'languages.py'


-# Fetchs supported languages for each engine and writes json file with those.
+# Fetches supported languages for each engine and writes json file with those.
 def fetch_supported_languages():
     set_timeout_for_thread(10.0)

@@ -31,7 +31,7 @@ class TestLoad(SearxTestCase):

 class TestDefaultSettings(SearxTestCase):
     def test_load(self):
-        settings, msg = settings_loader.load_settings(load_user_setttings=False)
+        settings, msg = settings_loader.load_settings(load_user_settings=False)
         self.assertTrue(msg.startswith('load the default settings from'))
         self.assertFalse(settings['general']['debug'])
         self.assertTrue(isinstance(settings['general']['instance_name'], str))
@@ -38,7 +38,7 @@ brand_env = 'utils' + sep + 'brand.env'

 # Some defaults in the settings.yml are taken from the environment,
 # e.g. SEARXNG_BIND_ADDRESS (:py:obj:`searx.settings_defaults.SHEMA`).  When the
-# 'brand.env' file is created these enviroment variables should be unset first::
+# 'brand.env' file is created these envirnoment variables should be unset first::

 _unset = object()
 for name, option in name_val:

utils/lib.sh (10 changes)
@@ -1011,8 +1011,8 @@ nginx_install_app() {

 nginx_include_apps_enabled() {

-    # Add the *NGINX_APPS_ENABLED* infrastruture to a nginx server block.  Such
-    # infrastruture is already known from fedora and centos, including apps (location
+    # Add the *NGINX_APPS_ENABLED* infrastructure to a nginx server block.  Such
+    # infrastructure is already known from fedora and centos, including apps (location
     # directives) from the /etc/nginx/default.d folder into the *default* nginx
     # server.

@@ -1487,7 +1487,7 @@ _apt_pkg_info_is_updated=0

 pkg_install() {

-    # usage: TITEL='install foobar' pkg_install foopkg barpkg
+    # usage: TITLE='install foobar' pkg_install foopkg barpkg

     rst_title "${TITLE:-installation of packages}" section
     echo -e "\npackage(s)::\n"
@@ -1523,7 +1523,7 @@ pkg_install() {

 pkg_remove() {

-    # usage: TITEL='remove foobar' pkg_remove foopkg barpkg
+    # usage: TITLE='remove foobar' pkg_remove foopkg barpkg

     rst_title "${TITLE:-remove packages}" section
     echo -e "\npackage(s)::\n"
@@ -1662,7 +1662,7 @@ lxc_init_container_env() {
     # usage: lxc_init_container_env <name>

     # Create a /.lxcenv file in the root folder.  Call this once after the
-    # container is inital started and before installing any boilerplate stuff.
+    # container is initial started and before installing any boilerplate stuff.

     info_msg "create /.lxcenv in container $1"
     cat <<EOF | lxc exec "${1}" -- bash | prefix_stdout "[${_BBlue}${1}${_creset}] "
@@ -20,7 +20,7 @@ NVM_LOCAL_FOLDER=.nvm
 [[ -z "${NVM_GIT_URL}" ]] &&  NVM_GIT_URL="https://github.com/nvm-sh/nvm.git"
 [[ -z "${NVM_MIN_NODE_VER}" ]] && NVM_MIN_NODE_VER="16.13.0"

-# initalize nvm environment
+# initialize nvm environment
 # -------------------------

 nvm.env() {
@@ -108,7 +108,7 @@ show
   :suite:        show services of all (or <name>) containers from the LXC suite
   :images:       show information of local images
 cmd
-  use single qoutes to evaluate in container's bash, e.g.: 'echo \$(hostname)'
+  use single quotes to evaluate in container's bash, e.g.: 'echo \$(hostname)'
   --             run command '...' in all containers of the LXC suite
   :<name>:       run command '...' in container <name>
 install
@@ -179,7 +179,7 @@ main() {
                         lxc_delete_container "$2"
                     fi
                     ;;
-                *) usage "uknown or missing container <name> $2"; exit 42;;
+                *) usage "unknown or missing container <name> $2"; exit 42;;
             esac
             ;;
         start|stop)
@@ -191,7 +191,7 @@ main() {
                     info_msg "lxc $1 $2"
                     lxc "$1" "$2" | prefix_stdout "[${_BBlue}${i}${_creset}] "
                     ;;
-                *) usage "uknown or missing container <name> $2"; exit 42;;
+                *) usage "unknown or missing container <name> $2"; exit 42;;
             esac
             ;;
         show)
@@ -911,8 +911,8 @@ searxng.doc.rst() {

     eval "echo \"$(< "${REPO_ROOT}/docs/build-templates/searxng.rst")\""

-    # I use ubuntu-20.04 here to demonstrate that versions are also suported,
-    # normaly debian-* and ubuntu-* are most the same.
+    # I use ubuntu-20.04 here to demonstrate that versions are also supported,
+    # normally debian-* and ubuntu-* are most the same.

     for DIST_NAME in ubuntu-20.04 arch fedora; do
         (
@@ -28,7 +28,7 @@ disable-logging = true
 # The right granted on the created socket
 chmod-socket = 666

-# Plugin to use and interpretor config
+# Plugin to use and interpreter config
 single-interpreter = true

 # enable master process
@@ -28,7 +28,7 @@ disable-logging = true
 # The right granted on the created socket
 chmod-socket = 666

-# Plugin to use and interpretor config
+# Plugin to use and interpreter config
 single-interpreter = true

 # enable master process
@@ -31,7 +31,7 @@ disable-logging = true
 # The right granted on the created socket
 chmod-socket = 666

-# Plugin to use and interpretor config
+# Plugin to use and interpreter config
 single-interpreter = true

 # enable master process
@@ -31,7 +31,7 @@ disable-logging = true
 # The right granted on the created socket
 chmod-socket = 666

-# Plugin to use and interpretor config
+# Plugin to use and interpreter config
 single-interpreter = true

 # enable master process