Merge pull request #8 from return42/manage-script

Replace Makefile boilerplate by shell scripts
Alexandre Flament 2021-04-24 07:14:35 +02:00 committed by GitHub
commit a7b9eca98a
21 changed files with 1066 additions and 1205 deletions


@ -40,8 +40,7 @@ jobs:
env:
FETCH_SCRIPT: ./searx_extra/update/${{ matrix.fetch }}
run: |
source local/py3/bin/activate
$FETCH_SCRIPT
V=1 manage pyenv.cmd python "$FETCH_SCRIPT"
- name: Create Pull Request
id: cpr
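# Note (illustrative, not part of the workflow): ./manage pyenv.cmd builds the
# ./local virtualenv on demand and runs the given command inside it, replacing
# the former "source local/py3/bin/activate" step, e.g.:
#   V=1 ./manage pyenv.cmd python ./searx_extra/update/update_languages.py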


@ -59,7 +59,7 @@ jobs:
- name: Install node dependencies
run: make V=1 node.env
- name: Build themes
run: make V=1 themes
run: make V=1 themes.all
documentation:
name: Documentation
@ -77,14 +77,15 @@ jobs:
python-version: '3.9'
architecture: 'x64'
- name: Build documentation
run: SEARX_DEBUG=1 make V=1 ci-gh-pages
run: |
make V=1 docs.clean docs.html
- name: Deploy
if: github.ref == 'refs/heads/master'
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
GITHUB_TOKEN: ${{ github.token }}
BRANCH: gh-pages
FOLDER: build/gh-pages
FOLDER: dist/docs
CLEAN: true # Automatically remove deleted files from the deploy branch
SINGLE_COMMIT: True
COMMIT_MESSAGE: build from commit ${{ github.sha }}
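# Note (illustrative): the same HTML can be produced locally with
# "make V=1 docs.clean docs.html"; the output lands in dist/docs, the FOLDER
# deployed above.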

Makefile

@ -1,268 +1,107 @@
# -*- coding: utf-8; mode: makefile-gmake -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
.DEFAULT_GOAL=help
export MTOOLS=./manage
include utils/makefile.include
PYOBJECTS = searx
DOC = docs
PY_SETUP_EXTRAS ?= \[test\]
PYLINT_SEARX_DISABLE_OPTION := I,C,R,W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,E1136
PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES := supported_languages,language_aliases
include utils/makefile.python
include utils/makefile.sphinx
all: clean install
PHONY += help-min help-all help
PHONY += help
help: help-min
@echo ''
@echo 'to get more help: make help-all'
help:
@./manage --help
@echo '----'
@echo 'run - run developer instance'
@echo 'install - developer install of searx into virtualenv'
@echo 'uninstall - uninstall developer installation'
@echo 'clean - clean up working tree'
@echo 'search.checker - check search engines'
@echo 'test - run shell & CI tests'
@echo 'test.shell - test shell scripts'
@echo 'ci.test - run CI tests'
help-min:
@echo ' test - run developer tests'
@echo ' docs - build documentation'
@echo ' docs-live - autobuild HTML documentation while editing'
@echo ' run - run developer instance'
@echo ' install - developer install (./local)'
@echo ' uninstall - uninstall (./local)'
@echo ' gh-pages - build docs & deploy on gh-pages branch'
@echo ' clean - drop builds and environments'
@echo ' project - re-build generic files of the searx project'
@echo ' buildenv - re-build environment files (aka brand)'
@echo ' themes - re-build the source of the themes'
@echo ' docker - build Docker image'
@echo ' node.env - download & install npm dependencies locally'
@echo ''
@$(MAKE) -e -s make-help
help-all: help-min
@echo ''
@$(MAKE) -e -s python-help
@echo ''
@$(MAKE) -e -s docs-help
PHONY += install
install: buildenv pyenvinstall
PHONY += uninstall
uninstall: pyenvuninstall
PHONY += clean
clean: pyclean docs-clean node.clean test.clean
$(call cmd,common_clean)
PHONY += run
run: buildenv pyenvinstall
run: install
$(Q) ( \
sleep 2 ; \
xdg-open http://127.0.0.1:8888/ ; \
) &
SEARX_DEBUG=1 $(PY_ENV)/bin/python ./searx/webapp.py
SEARX_DEBUG=1 ./manage pyenv.cmd python ./searx/webapp.py
# docs
# ----
PHONY += install uninstall
install uninstall:
$(Q)./manage pyenv.$@
sphinx-doc-prebuilds:: buildenv pyenvinstall prebuild-includes
PHONY += clean
clean: py.clean docs.clean node.clean test.clean
$(Q)./manage build_msg CLEAN "common files"
$(Q)find . -name '*.orig' -exec rm -f {} +
$(Q)find . -name '*.rej' -exec rm -f {} +
$(Q)find . -name '*~' -exec rm -f {} +
$(Q)find . -name '*.bak' -exec rm -f {} +
PHONY += docs
docs: sphinx-doc-prebuilds
$(call cmd,sphinx,html,docs,docs)
PHONY += search.checker search.checker.%
search.checker: install
$(Q)./manage pyenv.cmd searx-checker -v
PHONY += docs-live
docs-live: sphinx-doc-prebuilds
$(call cmd,sphinx_autobuild,html,docs,docs)
search.checker.%: install
$(Q)./manage pyenv.cmd searx-checker -v "$(subst _, ,$(patsubst search.checker.%,%,$@))"
PHONY += prebuild-includes
prebuild-includes:
$(Q)mkdir -p $(DOCS_BUILD)/includes
$(Q)./utils/searx.sh doc | cat > $(DOCS_BUILD)/includes/searx.rst
$(Q)./utils/filtron.sh doc | cat > $(DOCS_BUILD)/includes/filtron.rst
$(Q)./utils/morty.sh doc | cat > $(DOCS_BUILD)/includes/morty.rst
PHONY += test ci.test test.shell
ci.test: test.pep8 test.pylint test.unit test.robot
test: test.pep8 test.pylint test.unit test.robot test.shell
test.shell:
$(Q)shellcheck -x -s bash \
utils/brand.env \
./manage \
utils/lib.sh \
utils/filtron.sh \
utils/searx.sh \
utils/morty.sh \
utils/lxc.sh \
utils/lxc-searx.env \
.config.sh
$(Q)./manage build_msg TEST "$@ OK"
$(GH_PAGES)::
@echo "doc available at --> $(DOCS_URL)"
# wrap ./manage script
# update project files
# --------------------
MANAGE += buildenv
MANAGE += babel.compile
MANAGE += data.all data.languages data.useragents
MANAGE += docs.html docs.live docs.gh-pages docs.prebuild docs.clean
MANAGE += docker.build docker.push
MANAGE += gecko.driver
MANAGE += node.env node.clean
MANAGE += py.build py.clean
MANAGE += pyenv pyenv.install pyenv.uninstall
MANAGE += pypi.upload pypi.upload.test
MANAGE += test.pylint test.pep8 test.unit test.coverage test.robot test.clean
MANAGE += themes.all themes.oscar themes.simple themes.bootstrap
PHONY += project engines.languages useragents.update buildenv
PHONY += $(MANAGE)
project: buildenv useragents.update engines.languages
$(MANAGE):
$(Q)$(MTOOLS) $@
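# Example (illustrative): every name collected in MANAGE becomes a thin wrapper
# target, so "make docs.html" and "./manage docs.html" are equivalent calls.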
engines.languages: pyenvinstall
$(Q)echo "fetch languages .."
$(Q)$(PY_ENV_ACT); python ./searx_extra/update/update_languages.py
$(Q)echo "updated searx/data/engines_languages.json"
$(Q)echo "updated searx/languages.py"
# deprecated
useragents.update: pyenvinstall
$(Q)echo "fetch useragents .."
$(Q)$(PY_ENV_ACT); python ./searx_extra/update/update_firefox_version.py
$(Q)echo "updated searx/data/useragents.json with the most recent versions of Firefox."
PHONY += docs docs-clean docs-live docker themes
buildenv: pyenv
$(Q)$(PY_ENV_ACT); SEARX_DEBUG=1 python utils/build_env.py
docs: docs.html
$(Q)./manage build_msg WARN $@ is deprecated use docs.html
# node / npm
# ----------
docs-clean: docs.clean
$(Q)./manage build_msg WARN $@ is deprecated use docs.clean
node.env: buildenv
$(Q)./manage.sh npm_packages
docs-live: docs.live
$(Q)./manage build_msg WARN $@ is deprecated use docs.live
node.clean:
$(Q)echo "CLEAN locally installed npm dependencies"
$(Q)rm -rf \
./node_modules \
./package-lock.json \
./searx/static/themes/oscar/package-lock.json \
./searx/static/themes/oscar/node_modules \
./searx/static/themes/simple/package-lock.json \
./searx/static/themes/simple/node_modules
docker: docker.build
$(Q)./manage build_msg WARN $@ is deprecated use docker.build
# build themes
# ------------
PHONY += themes themes.oscar themes.simple
themes: buildenv themes.oscar themes.simple
quiet_cmd_lessc = LESSC $3
cmd_lessc = PATH="$$(npm bin):$$PATH" \
lessc --clean-css="--s1 --advanced --compatibility=ie9" "searx/static/$2" "searx/static/$3"
quiet_cmd_grunt = GRUNT $2
cmd_grunt = PATH="$$(npm bin):$$PATH" \
grunt --gruntfile "$2"
themes.oscar: node.env
$(Q)echo '[!] build oscar theme'
$(call cmd,grunt,searx/static/themes/oscar/gruntfile.js)
themes.simple: node.env
$(Q)echo '[!] build simple theme'
$(call cmd,grunt,searx/static/themes/simple/gruntfile.js)
# docker
# ------
PHONY += docker
docker: buildenv
$(Q)./manage.sh docker_build
docker.push: buildenv
$(Q)./manage.sh docker_build push
# gecko
# -----
PHONY += gecko.driver
gecko.driver:
$(PY_ENV_ACT); ./manage.sh install_geckodriver
# search.checker
# --------------
search.checker: pyenvinstall
$(Q)$(PY_ENV_ACT); searx-checker -v
ENGINE_TARGETS=$(patsubst searx/engines/%.py,search.checker.%,$(wildcard searx/engines/[!_]*.py))
$(ENGINE_TARGETS): pyenvinstall
$(Q)$(PY_ENV_ACT); searx-checker -v "$(subst _, ,$(patsubst search.checker.%,%,$@))"
# test
# ----
PHONY += test test.sh test.pylint test.pep8 test.unit test.coverage test.robot
test: buildenv test.pylint test.pep8 test.unit gecko.driver test.robot
PYLINT_FILES=\
searx/preferences.py \
searx/testing.py \
searx/engines/gigablast.py \
searx/engines/deviantart.py \
searx/engines/digg.py \
searx/engines/google.py \
searx/engines/google_news.py \
searx/engines/google_videos.py \
searx/engines/google_images.py \
searx/engines/mediathekviewweb.py \
searx/engines/meilisearch.py \
searx/engines/solidtorrents.py \
searx/engines/solr.py \
searx/engines/springer.py \
searx/engines/google_scholar.py \
searx/engines/yahoo_news.py \
searx/engines/apkmirror.py \
searx/engines/artic.py \
searx_extra/update/update_external_bangs.py
test.pylint: pyenvinstall
$(call cmd,pylint,$(PYLINT_FILES))
$(call cmd,pylint,\
--disable=$(PYLINT_SEARX_DISABLE_OPTION) \
--additional-builtins=$(PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES) \
searx/engines \
)
$(call cmd,pylint,\
--disable=$(PYLINT_SEARX_DISABLE_OPTION) \
--ignore=searx/engines \
searx tests \
)
# ignored rules:
# E402 module level import not at top of file
# W503 line break before binary operator
# ubu1604: uses shellcheck v0.3.7 (from 04/2015), no longer supported!
test.sh:
shellcheck -x -s bash utils/brand.env
shellcheck -x utils/lib.sh
shellcheck -x utils/filtron.sh
shellcheck -x utils/searx.sh
shellcheck -x utils/morty.sh
shellcheck -x utils/lxc.sh
shellcheck -x utils/lxc-searx.env
shellcheck -x .config.sh
test.pep8: pyenvinstall
@echo "TEST pycodestyle (formerly pep8)"
$(Q)$(PY_ENV_ACT); pycodestyle --exclude='searx/static, searx/languages.py, $(foreach f,$(PYLINT_FILES),$(f),)' \
--max-line-length=120 --ignore "E117,E252,E402,E722,E741,W503,W504,W605" searx tests
test.unit: pyenvinstall
@echo "TEST tests/unit"
$(Q)$(PY_ENV_ACT); python -m nose2 -s tests/unit
test.coverage: pyenvinstall
@echo "TEST unit test coverage"
$(Q)$(PY_ENV_ACT); \
python -m nose2 -C --log-capture --with-coverage --coverage searx -s tests/unit \
&& coverage report \
&& coverage html \
test.robot: pyenvinstall gecko.driver
@echo "TEST robot"
$(Q)$(PY_ENV_ACT); PYTHONPATH=. python searx/testing.py robot
test.clean:
@echo "CLEAN intermediate test stuff"
$(Q)rm -rf geckodriver.log .coverage coverage/
# travis
# ------
PHONY += ci.test
ci.test:
$(PY_ENV_BIN)/python -c "import yaml" || make clean
$(MAKE) test
travis.codecov:
$(Q)$(PY_ENV_BIN)/python -m pip install codecov
.PHONY: $(PHONY)
themes: themes.all
$(Q)./manage build_msg WARN $@ is deprecated use themes.all


@ -49,9 +49,9 @@ Build docs
- dvisvgm_
Most of the sphinx requirements are installed from :origin:`setup.py` and the
docs can be built from scratch with ``make docs``. For better math and image
processing additional packages are needed. XeTeX_ is needed not only for PDF
creation, it's also needed for :ref:`math` when HTML output is built.
docs can be built from scratch with ``make docs.html``. For better math and
image processing additional packages are needed. XeTeX_ is needed not only for
PDF creation, it's also needed for :ref:`math` when HTML output is built.
To be able to do :ref:`sphinx:math-support` without CDNs, the math is rendered
as images (``sphinx.ext.imgmath`` extension).
@ -64,7 +64,7 @@ to ``imgmath``:
:start-after: # sphinx.ext.imgmath setup
:end-before: # sphinx.ext.imgmath setup END
If your docs build (``make docs``) shows warnings like this::
If your docs build (``make docs.html``) shows warnings like this::
WARNING: dot(1) not found, for better output quality install \
graphviz from https://www.graphviz.org


@ -51,7 +51,7 @@ It's also possible to build searx from the embedded Dockerfile.
git clone https://github.com/searx/searx.git
cd searx
make docker
make docker.build
Public instance


@ -31,7 +31,7 @@ might fail in some aspects we should not overlook.
The environment in which we run all our development processes matters!
The :ref:`makefile` and the :ref:`make pyenv` encapsulate a lot for us, but they
The :ref:`makefile` and the :ref:`make install` encapsulate a lot for us, but they
do not have access to all prerequisites. For example, there may be
dependencies on packages that are installed on the developer's desktop, but
usually are not preinstalled on a server or client system. Other examples
@ -356,7 +356,7 @@ daily usage:
.. code:: sh
$ sudo -H ./utils/lxc.sh cmd searx-archlinux \
make docs
make docs.html
.. _blog-lxcdev-202006 abstract:
@ -407,7 +407,7 @@ To get remarks from the suite of the archlinux container we can use:
...
[searx-archlinux] INFO: (eth0) filtron: http://10.174.184.156:4004/ http://10.174.184.156/searx
[searx-archlinux] INFO: (eth0) morty: http://10.174.184.156:3000/
[searx-archlinux] INFO: (eth0) docs-live: http://10.174.184.156:8080/
[searx-archlinux] INFO: (eth0) docs.live: http://10.174.184.156:8080/
[searx-archlinux] INFO: (eth0) IPv6: http://[fd42:573b:e0b3:e97e:216:3eff:fea5:9b65]
...


@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
import sys, os
from sphinx_build_tools import load_sphinx_config
from pallets_sphinx_themes import ProjectLink
from searx import brand
@ -128,9 +128,3 @@ html_show_sourcelink = False
latex_documents = [
(master_doc, "searx-{}.tex".format(VERSION_STRING), html_title, author, "manual")
]
# ------------------------------------------------------------------------------
# Since loadConfig overwrites settings from the global namespace, it has to be
# the last statement in the conf.py file
# ------------------------------------------------------------------------------
load_sphinx_config(globals())


@ -132,11 +132,11 @@ Here is an example which makes a complete rebuild:
.. code:: sh
$ make docs-clean docs
$ make docs.clean docs.html
...
The HTML pages are in dist/docs.
.. _make docs-live:
.. _make docs.live:
live build
----------
@ -144,19 +144,19 @@ live build
.. _sphinx-autobuild:
https://github.com/executablebooks/sphinx-autobuild/blob/master/README.md
.. sidebar:: docs-clean
.. sidebar:: docs.clean
It is recommended to ensure a complete rebuild before deploying (use
``docs-clean``).
``docs.clean``).
Live build is like WYSIWYG. If you want to edit the documentation, it's
recommended to use it. The Makefile target ``docs-live`` builds the docs, opens
recommended to use it. The Makefile target ``docs.live`` builds the docs, opens
the URL in your favorite browser and rebuilds every time a reST file has been
changed.
.. code:: sh
$ make docs-live
$ make docs.live
...
The HTML pages are in dist/docs.
... Serving on http://0.0.0.0:8000
@ -169,7 +169,7 @@ argument. E.g to find and use a free port, use:
.. code:: sh
$ SPHINXOPTS="--port 0" make docs-live
$ SPHINXOPTS="--port 0" make docs.live
...
... Serving on http://0.0.0.0:50593
...
@ -180,21 +180,10 @@ argument. E.g to find and use a free port, use:
deploy on github.io
-------------------
To deploy documentation at :docs:`github.io <.>` use Makefile target
:ref:`make gh-pages`, which builds the documentation, clones searx into a sub
folder ``gh-pages``, cleans it, copies the doc build into it and runs all the
needed git add, commit and push:
To deploy documentation at :docs:`github.io <.>` use Makefile target :ref:`make
docs.gh-pages`, which builds the documentation and runs all the needed git add,
commit and push:
.. code:: sh
$ make docs-clean gh-pages
...
SPHINX docs --> file://<...>/dist/docs
The HTML pages are in dist/docs.
...
Cloning into 'gh-pages' ...
...
cd gh-pages; git checkout gh-pages >/dev/null
Switched to a new branch 'gh-pages'
...
doc available at --> https://searx.github.io/searx
$ make docs.clean docs.gh-pages


@ -1,33 +1,33 @@
.. _makefile:
================
Makefile Targets
================
========
Makefile
========
.. _gnu-make: https://www.gnu.org/software/make/manual/make.html#Introduction
.. sidebar:: build environment
Before looking deeper at the targets, first read about :ref:`make pyenv`.
Before looking deeper at the targets, first read about :ref:`make
install`.
To install system requirements follow :ref:`buildhosts`.
With the aim to simplify development cycles, a ``Makefile`` based boilerplate
was added starting with :pull:`1756`. If you are not familiar with
Makefiles, we recommend reading the gnu-make_ introduction.
All relevant build tasks are implemented in :origin:`manage.sh` and for CI or
IDE integration a small ``Makefile`` wrapper is available. If you are not
familiar with Makefiles, we recommend reading the gnu-make_ introduction.
The usage is simple, just type ``make {target-name}`` to *build* a target.
Calling the ``help`` target gives a first overview (``make help``):
.. program-output:: bash -c "cd ..; make --no-print-directory help"
.. contents:: Contents
:depth: 2
:local:
:backlinks: entry
.. _make pyenv:
.. _make install:
Python environment
==================
@ -36,31 +36,42 @@ Python environment
``source ./local/py3/bin/activate``
With the Makefile we no longer need to build up the virtualenv manually (as
described in the :ref:`devquickstart` guide). Jump into your git working tree
and release a ``make pyenv``:
.. code:: sh
We no longer need to build up the virtualenv manually. Jump into your git
working tree and release a ``make install`` to get a virtualenv with a
*developer install* of searx (:origin:`setup.py`). ::
$ cd ~/searx-clone
$ make pyenv
PYENV usage: source ./local/py3/bin/activate
$ make install
PYENV [virtualenv] installing ./requirements*.txt into local/py3
...
PYENV OK
PYENV [install] pip install -e 'searx[test]'
...
Successfully installed argparse-1.4.0 searx
BUILDENV INFO:searx:load the default settings from ./searx/settings.yml
BUILDENV INFO:searx:Initialisation done
BUILDENV build utils/brand.env
With target ``pyenv`` a development environment (aka virtualenv) is built up in
``./local/py3/``. To make a *developer install* of searx (:origin:`setup.py`)
into this environment, use make target ``install``:
.. code:: sh
If you release ``make install`` multiple times, the installation is only rebuilt
if the sha256 sum check of the *requirement files* fails. In other words: the
check fails if you edit the requirements listed in
:origin:`requirements-dev.txt` or :origin:`requirements.txt`. ::
$ make install
PYENV usage: source ./local/py3/bin/activate
PYENV using virtualenv from ./local/py3
PYENV install .
You never have to think about intermediate targets like ``pyenv`` or
``install``; the ``Makefile`` chains them as prerequisites. Just run your main
target.
PYENV OK
PYENV [virtualenv] requirements.sha256 failed
[virtualenv] - 6cea6eb6def9e14a18bf32f8a3e... ./requirements-dev.txt
[virtualenv] - 471efef6c73558e391c3adb35f4... ./requirements.txt
...
PYENV [virtualenv] installing ./requirements*.txt into local/py3
...
PYENV OK
PYENV [install] pip install -e 'searx[test]'
...
Successfully installed argparse-1.4.0 searx
BUILDENV INFO:searx:load the default settings from ./searx/settings.yml
BUILDENV INFO:searx:Initialisation done
BUILDENV build utils/brand.env
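Single commands can be run inside this environment without activating it by
hand, using the ``./manage pyenv.cmd`` wrapper (a sketch)::
$ ./manage pyenv.cmd searx-checker -v
$ ./manage pyenv.cmd python ./searx/webapp.py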
.. sidebar:: drop environment
@ -68,10 +79,7 @@ target.
<make clean>` first.
If you think something goes wrong with your ./local environment or you change
the :origin:`setup.py` file (or the requirements listed in
:origin:`requirements-dev.txt` and :origin:`requirements.txt`), you have to call
:ref:`make clean`.
the :origin:`setup.py` file, you have to call :ref:`make clean`.
.. _make run:
@ -81,77 +89,44 @@ the :origin:`setup.py` file (or the requirements listed in
To get a running developer instance, simply call ``make run``. This enables the
*debug* option in :origin:`searx/settings.yml`, starts a ``./searx/webapp.py``
instance, disables the *debug* option again and opens the URL in your favorite WEB
browser (:man:`xdg-open`):
.. code:: sh
browser (:man:`xdg-open`)::
$ make run
PYENV usage: source ./local/py3/bin/activate
PYENV install .
./local/py3/bin/python ./searx/webapp.py
PYENV OK
SEARX_DEBUG=1 ./manage.sh pyenv.cmd python ./searx/webapp.py
...
INFO:werkzeug: * Running on http://127.0.0.1:8888/ (Press CTRL+C to quit)
...
.. _make clean:
``make clean``
==============
Drop all intermediate files, all builds, but keep sources untouched. Includes
target ``pyclean`` which drops the ./local environment. Before calling ``make
clean`` stop all processes using :ref:`make pyenv`.
.. code:: sh
Drop all intermediate files, all builds, but keep sources untouched. Before
calling ``make clean`` stop all processes using :ref:`make install`. ::
$ make clean
CLEAN pyclean
CLEAN clean
CLEAN pyenv
PYENV [virtualenv] drop ./local/py3
CLEAN docs -- ./build/docs ./dist/docs
CLEAN locally installed npm dependencies
CLEAN test stuff
CLEAN common files
.. _make docs:
``make docs docs-live docs-clean``
==================================
``make docs docs.autobuild docs.clean``
=======================================
We describe the usage of the ``doc*`` targets in the :ref:`How to contribute /
We describe the usage of the ``doc.*`` targets in the :ref:`How to contribute /
Documentation <contrib docs>` section. If you want to edit the documentation
read our :ref:`make docs-live` section. If you are working in your own brand,
read our :ref:`make docs.live` section. If you are working in your own brand,
adjust your :ref:`settings global`.
.. _make books:
.. _make docs.gh-pages:
``make books/{name}.html books/{name}.pdf``
===========================================
.. _intersphinx: https://www.sphinx-doc.org/en/stable/ext/intersphinx.html
.. _XeTeX: https://tug.org/xetex/
.. sidebar:: info
To build PDF a XeTeX_ is needed, see :ref:`buildhosts`.
The ``books/{name}.*`` targets are building *books*. A *book* is a
sub-directory containing a ``conf.py`` file. One example is the user handbook
which can be deployed separately (:origin:`docs/user/conf.py`). Such a ``conf.py``
inherits from :origin:`docs/conf.py` and overwrites values to fit the *book's*
needs.
With the help of Intersphinx_ (:ref:`reST smart ref`) the links to searx's
documentation outside of the book will be bound by the object inventory of
``DOCS_URL``. Take into account that URLs will be picked from the inventory at
the documentation's build time.
Use ``make docs-help`` to see which books are available:
.. program-output:: bash -c "cd ..; make --no-print-directory docs-help"
:ellipsis: 0,-6
.. _make gh-pages:
``make gh-pages``
=================
``make docs.gh-pages``
======================
To deploy on github.io first adjust your :ref:`settings global`. For any
further read :ref:`deploy on github.io`.
@ -161,37 +136,66 @@ further read :ref:`deploy on github.io`.
``make test``
=============
Runs a series of tests: ``test.pep8``, ``test.unit``, ``test.robot`` and does
additional :ref:`pylint checks <make pylint>`. You can run tests selectively,
e.g.:
.. code:: sh
Runs a series of tests: :ref:`make test.pylint`, ``test.pep8``, ``test.unit``
and ``test.robot``. You can run tests selectively, e.g.::
$ make test.pep8 test.unit test.sh
. ./local/py3/bin/activate; ./manage.sh pep8_check
[!] Running pep8 check
. ./local/py3/bin/activate; ./manage.sh unit_tests
[!] Running unit tests
TEST test.pep8 OK
...
TEST test.unit OK
...
TEST test.sh OK
.. _make pylint:
.. _make test.sh:
``make pylint``
===============
``make test.sh``
================
:ref:`sh lint` / if you have changed some bash scripts, run this test before
committing.
.. _make test.pylint:
``make test.pylint``
====================
.. _Pylint: https://www.pylint.org/
Before committing it's recommended to do some (more) linting. Pylint_ is known
as one of the best source-code, bug and quality checkers for the Python
programming language. Pylint_ is not yet a quality gate within our searx
project (like :ref:`test.pep8 <make test>` is), but Pylint_ can help to improve
code quality anyway. The pylint profile we use in the searx project is found in
the project's root folder :origin:`.pylintrc`.
Pylint_ is known as one of the best source-code, bug and quality checkers for
the Python programming language. The pylint profile we use in the searx project
is found in the project's root folder :origin:`.pylintrc`.
Code quality is an ongoing process. Don't try to fix all messages from Pylint;
run Pylint and check if your changed lines are bringing up new messages. If so,
fix them. By this, code quality gets incrementally better, and if the day comes
when the linting is balanced out, we might decide to add Pylint as a quality
gate.
.. _make search.checker:
``search.checker.{engine name}``
================================
To check all engines::
make search.checker
To check an engine with whitespace in the name like *google news*, replace the
space with an underscore::
make search.checker.google_news
To see HTTP requests and more use SEARX_DEBUG::
make SEARX_DEBUG=1 search.checker.google_news
.. _3xx: https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#3xx_redirection
To filter out HTTP redirects (3xx_)::
make SEARX_DEBUG=1 search.checker.google_news | grep -A1 "HTTP/1.1\" 3[0-9][0-9]"
...
Engine google news Checking
https://news.google.com:443 "GET /search?q=life&hl=en&lr=lang_en&ie=utf8&oe=utf8&ceid=US%3Aen&gl=US HTTP/1.1" 302 0
https://news.google.com:443 "GET /search?q=life&hl=en-US&lr=lang_en&ie=utf8&oe=utf8&ceid=US:en&gl=US HTTP/1.1" 200 None
--
https://news.google.com:443 "GET /search?q=computer&hl=en&lr=lang_en&ie=utf8&oe=utf8&ceid=US%3Aen&gl=US HTTP/1.1" 302 0
https://news.google.com:443 "GET /search?q=computer&hl=en-US&lr=lang_en&ie=utf8&oe=utf8&ceid=US:en&gl=US HTTP/1.1" 200 None
--
``make pybuild``
@ -200,9 +204,7 @@ gate.
.. _PyPi: https://pypi.org/
.. _twine: https://twine.readthedocs.io/en/latest/
Build Python packages in ``./dist/py``.
.. code:: sh
Build Python packages in ``./dist/py``::
$ make pybuild
...
@ -210,9 +212,11 @@ Build Python packages in ``./dist/py``.
running sdist
running egg_info
...
$ ls ./dist/py/
searx-0.15.0-py3-none-any.whl searx-0.15.0.tar.gz
running bdist_wheel
To upload packages to PyPi_, there is also an ``upload-pypi`` target. It needs
twine_ to be installed. Since you are not the owner of :pypi:`searx` you will
never need the latter.
$ ls ./dist
searx-0.18.0-py3-none-any.whl searx-0.18.0.tar.gz
To upload packages to PyPi_, there is also a ``pypi.upload`` target (to test use
``pypi.upload.test``). Since you are not the owner of :pypi:`searx` you will
never need to upload.
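Both upload targets clean and build the packages before calling twine_; a
sketch of a test run against the test instance of PyPi::
$ make pypi.upload.test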


@ -15,8 +15,8 @@ generated and deployed at :docs:`github.io <.>`. For build prerequisites read
:ref:`docs build`.
The source files of Searx's documentation are located at :origin:`docs`. Sphinx
assumes source files to be encoded in UTF-8 by default. Run :ref:`make docs-live
<make docs-live>` to build HTML while editing.
assumes source files to be encoded in UTF-8 by default. Run :ref:`make docs.live
<make docs.live>` to build HTML while editing.
.. sidebar:: Further reading
@ -1276,13 +1276,12 @@ Templating
.. sidebar:: Build environment
All *generic-doc* tasks are running in the :ref:`build environment <make
pyenv>`.
All *generic-doc* tasks are running in the :ref:`make install`.
Templating is suitable for documentation which is created generically at build
time. The sphinx-jinja_ extension evaluates jinja_ templates in the :ref:`build
environment <make pyenv>` (with searx modules installed). We use this e.g. to
build chapter: :ref:`engines generic`. Below the jinja directive from the
time. The sphinx-jinja_ extension evaluates jinja_ templates in the :ref:`make
install` (with searx modules installed). We use this e.g. to build chapter:
:ref:`engines generic`. Below the jinja directive from the
:origin:`docs/admin/engines.rst` is shown:
.. literalinclude:: ../admin/engines.rst


@ -1,21 +0,0 @@
# -*- coding: utf-8; mode: python -*-
"""Configuration for the Searx user handbook
"""
project = 'Searx User-HB'
version = release = VERSION_STRING
intersphinx_mapping['searx'] = (brand.DOCS_URL, None)
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index' # startdocname
, 'searx-user-hb.tex' # targetname
, '' # take title from .rst
, author # author
, 'howto' # documentclass
, False # toctree_only
),
]


@ -119,15 +119,15 @@ of coffee).::
To build (live) documentation inside an archlinux_ container::
sudo -H ./utils/lxc.sh cmd searx-archlinux make docs-clean docs-live
sudo -H ./utils/lxc.sh cmd searx-archlinux make docs.clean docs.live
...
[I 200331 15:00:42 server:296] Serving on http://0.0.0.0:8080
To get IP of the container and the port number *live docs* is listening::
$ sudo ./utils/lxc.sh show suite | grep docs-live
$ sudo ./utils/lxc.sh show suite | grep docs.live
...
[searx-archlinux] INFO: (eth0) docs-live: http://n.n.n.12:8080/
[searx-archlinux] INFO: (eth0) docs.live: http://n.n.n.12:8080/
.. _lxc.sh help:

manage (executable file)

@ -0,0 +1,506 @@
#!/usr/bin/env bash
# -*- coding: utf-8; mode: sh indent-tabs-mode: nil -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
# shellcheck disable=SC2031
# shellcheck source=utils/lib.sh
source "$(dirname "${BASH_SOURCE[0]}")/utils/lib.sh"
# shellcheck source=utils/brand.env
source "${REPO_ROOT}/utils/brand.env"
source_dot_config
# config
PYOBJECTS="searx"
PY_SETUP_EXTRAS='[test]'
NPM_PACKAGES="less@2.7 less-plugin-clean-css grunt-cli"
GECKODRIVER_VERSION="v0.28.0"
# SPHINXOPTS=
# These py files are linted by test.pylint(), all other files are linted by
# test.pep8()
PYLINT_FILES=(
searx/preferences.py
searx/testing.py
searx/engines/gigablast.py
searx/engines/deviantart.py
searx/engines/digg.py
searx/engines/google.py
searx/engines/google_news.py
searx/engines/google_videos.py
searx/engines/google_images.py
searx/engines/mediathekviewweb.py
searx/engines/meilisearch.py
searx/engines/solidtorrents.py
searx/engines/solr.py
searx/engines/springer.py
searx/engines/google_scholar.py
searx/engines/yahoo_news.py
searx/engines/apkmirror.py
searx/engines/artic.py
searx_extra/update/update_external_bangs.py
)
PYLINT_SEARX_DISABLE_OPTION="\
I,C,R,\
W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,\
E1136"
PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES="supported_languages,language_aliases"
PYLINT_OPTIONS="-m pylint -j 0 --rcfile .pylintrc"
help() {
cat <<EOF
buildenv
rebuild ./utils/brand.env
babel.compile
pybabel compile ./searx/translations
data.*
all : update searx/languages.py and ./data/*
languages : update searx/data/engines_languages.json & searx/languages.py
useragents: update searx/data/useragents.json with the most recent versions of Firefox.
docs.*
html : build HTML documentation
live : autobuild HTML documentation while editing
gh-pages : deploy on gh-pages branch
prebuild : build reST include files (./${DOCS_BUILD}/includes)
clean : clean documentation build
docker
build : build docker image
push : build and push docker image
gecko.driver
download & install geckodriver if not already installed (required for
robot_tests)
node.*
env : download & install npm dependencies locally
clean : drop npm installations
py.*
build : Build python packages at ./${PYDIST}
clean : delete virtualenv and intermediate py files
pyenv.* :
install : developer install of searx into virtualenv
uninstall : uninstall developer installation
cmd ... : run command ... in virtualenv
OK : test if virtualenv is OK
pypi.upload:
Upload python packages to PyPi (to test use pypi.upload.test)
test.* :
pylint : lint PYLINT_FILES, searx/engines, searx & tests
pep8 : pycodestyle (pep8) for all files except PYLINT_FILES
unit : run unit tests
coverage : run unit tests with coverage
robot : run robot test
clean : clean intermediate test stuff
themes.* :
all : build all themes
oscar : build oscar theme
simple : build simple theme
EOF
}
if [ "$VERBOSE" = "1" ]; then
SPHINX_VERBOSE="-v"
PYLINT_VERBOSE="-v"
fi
# needed by sphinx-docs
export DOCS_BUILD
buildenv() {
SEARX_DEBUG=1 pyenv.cmd python utils/build_env.py 2>&1 \
| prefix_stdout "${_Blue}BUILDENV${_creset} "
return "${PIPESTATUS[0]}"
}
babel.compile() {
build_msg BABEL compile
pyenv.cmd pybabel compile -d "${REPO_ROOT}/searx/translations"
dump_return $?
}
data.all() {
data.languages
data.useragents
build_msg DATA "update searx/data/ahmia_blacklist.txt"
pyenv.cmd python searx_extra/update/update_ahmia_blacklist.py
build_msg DATA "update searx/data/wikidata_units.json"
pyenv.cmd python searx_extra/update/update_wikidata_units.py
build_msg DATA "update searx/data/currencies.json"
pyenv.cmd python searx_extra/update/update_currencies.py
}
data.languages() {
( set -e
build_msg ENGINES "fetch languages .."
pyenv.cmd python searx_extra/update/update_languages.py
build_msg ENGINES "update searx/languages.py"
build_msg DATA "update searx/data/engines_languages.json"
)
dump_return $?
}
data.useragents() {
build_msg DATA "update searx/data/useragents.json"
pyenv.cmd python searx_extra/update/update_firefox_version.py
dump_return $?
}
docs.prebuild() {
build_msg DOCS "build ${DOCS_BUILD}/includes"
(
set -e
[ "$VERBOSE" = "1" ] && set -x
mkdir -p "${DOCS_BUILD}/includes"
./utils/searx.sh doc | cat > "${DOCS_BUILD}/includes/searx.rst"
./utils/filtron.sh doc | cat > "${DOCS_BUILD}/includes/filtron.rst"
./utils/morty.sh doc | cat > "${DOCS_BUILD}/includes/morty.rst"
)
dump_return $?
}
docker.push() {
docker.build push
}
# shellcheck disable=SC2119
docker.build() {
pyenv.install
build_msg DOCKER build
# run installation in a subprocess and activate pyenv
# See https://www.shellcheck.net/wiki/SC1001 and others ..
# shellcheck disable=SC2031,SC2230,SC2002,SC2236,SC2143,SC1001
( set -e
# shellcheck source=/dev/null
source "${PY_ENV_BIN}/activate"
# Check if it is a git repository
if [ ! -d .git ]; then
die 1 "This is not a Git repository"
fi
if [ ! -x "$(which git)" ]; then
die 1 "git is not installed"
fi
if ! git remote get-url origin 2> /dev/null; then
die 1 "there is no remote origin"
fi
# This is a git repository
# "git describe" to get the Docker version (for example : v0.15.0-89-g0585788e)
# awk to remove the "v" and the "g"
SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); if ($3) { $3=substr($3, 2); } print}')
# add the suffix "-dirty" if the repository has uncommited change
# /!\ HACK for searx/searx: ignore utils/brand.env
git update-index -q --refresh
if [ ! -z "$(git diff-index --name-only HEAD -- | grep -v 'utils/brand.env')" ]; then
SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty"
fi
# Get the last git commit id, will be added to the Searx version (see Dockerfile)
VERSION_GITCOMMIT=$(echo "$SEARX_GIT_VERSION" | cut -d- -f2-4)
build_msg DOCKER "Last commit : $VERSION_GITCOMMIT"
# Check consistency between the git tag and the searx/version.py file
# /!\ HACK : parse Python file with bash /!\
# otherwise it is not possible to build the docker image without all Python
# dependencies (version.py loads __init__.py)
# SEARX_PYTHON_VERSION=$(python3 -c "import six; import searx.version; six.print_(searx.version.VERSION_STRING)")
SEARX_PYTHON_VERSION=$(cat searx/version.py | grep "\(VERSION_MAJOR\|VERSION_MINOR\|VERSION_BUILD\) =" | cut -d\= -f2 | sed -e 's/^[[:space:]]*//' | paste -sd "." -)
if [ "$(echo "$SEARX_GIT_VERSION" | cut -d- -f1)" != "$SEARX_PYTHON_VERSION" ]; then
err_msg "git tag: $SEARX_GIT_VERSION"
err_msg "searx/version.py: $SEARX_PYTHON_VERSION"
die 1 "Inconsistency between the last git tag and the searx/version.py file"
fi
# define the docker image name
GITHUB_USER=$(echo "${GIT_URL}" | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
SEARX_IMAGE_NAME="${SEARX_IMAGE_NAME:-${GITHUB_USER:-searxng}/searxng}"
# build Docker image
build_msg DOCKER "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
sudo docker build \
--build-arg GIT_URL="${GIT_URL}" \
--build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \
--build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \
--build-arg LABEL_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
--build-arg LABEL_VCS_REF="$(git rev-parse HEAD)" \
--build-arg LABEL_VCS_URL="${GIT_URL}" \
--build-arg TIMESTAMP_SETTINGS="$(git log -1 --format="%cd" --date=unix -- searx/settings.yml)" \
--build-arg TIMESTAMP_UWSGI="$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini)" \
-t "${SEARX_IMAGE_NAME}:latest" -t "${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}" .
if [ "$1" = "push" ]; then
sudo docker push "${SEARX_IMAGE_NAME}:latest"
sudo docker push "${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
fi
)
dump_return $?
}
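# Example (illustrative): for the "git describe" output v0.15.0-89-g0585788e
# mentioned above, the awk filter yields SEARX_GIT_VERSION=0.15.0-89-0585788e,
# so the image is tagged ${SEARX_IMAGE_NAME}:latest and
# ${SEARX_IMAGE_NAME}:0.15.0-89-0585788e.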
# shellcheck disable=SC2119
gecko.driver() {
pyenv.install
build_msg INSTALL "gecko.driver"
# run installation in a subprocess and activate pyenv
( set -e
# shellcheck source=/dev/null
source "${PY_ENV_BIN}/activate"
# TODO : check the current geckodriver version
geckodriver -V > /dev/null 2>&1 || NOTFOUND=1
set +e
if [ -z "$NOTFOUND" ]; then
build_msg INSTALL "geckodriver already installed"
return
fi
PLATFORM="$(python3 -c 'import platform; print(platform.system().lower(), platform.architecture()[0])')"
case "$PLATFORM" in
"linux 32bit" | "linux2 32bit") ARCH="linux32";;
"linux 64bit" | "linux2 64bit") ARCH="linux64";;
"windows 32 bit") ARCH="win32";;
"windows 64 bit") ARCH="win64";;
"mac 64bit") ARCH="macos";;
esac
GECKODRIVER_URL="https://github.com/mozilla/geckodriver/releases/download/$GECKODRIVER_VERSION/geckodriver-$GECKODRIVER_VERSION-$ARCH.tar.gz";
build_msg GECKO "Installing ${PY_ENV_BIN}/geckodriver from $GECKODRIVER_URL"
FILE="$(mktemp)"
wget -qO "$FILE" -- "$GECKODRIVER_URL" && tar xz -C "${PY_ENV_BIN}" -f "$FILE" geckodriver
rm -- "$FILE"
chmod 755 -- "${PY_ENV_BIN}/geckodriver"
)
dump_return $?
}
node.env() {
local err=0
pushd "${REPO_ROOT}" &> /dev/null
# shellcheck disable=SC2230
which npm &> /dev/null || die 1 'node.env - npm is not found!'
( set -e
# shellcheck disable=SC2030
PATH="$(npm bin):$PATH"
export PATH
build_msg INSTALL "npm install $NPM_PACKAGES"
# shellcheck disable=SC2086
npm install $NPM_PACKAGES
cd "${REPO_ROOT}/searx/static/themes/oscar"
build_msg INSTALL "($(pwd)) npm install"
npm install
cd "${REPO_ROOT}/searx/static/themes/simple"
build_msg INSTALL "($(pwd)) npm install"
npm install
)
err=$?
popd &> /dev/null
dump_return "$err"
}
node.clean() {
build_msg CLEAN "locally installed npm dependencies"
rm -rf \
./node_modules \
./package-lock.json \
./searx/static/themes/oscar/package-lock.json \
./searx/static/themes/oscar/node_modules \
./searx/static/themes/simple/package-lock.json \
./searx/static/themes/simple/node_modules
dump_return $?
}
py.build() {
build_msg BUILD "python package ${PYDIST}"
pyenv.cmd python setup.py \
sdist -d "${PYDIST}" \
bdist_wheel --bdist-dir "${PYBUILD}" -d "${PYDIST}"
}
py.clean() {
build_msg CLEAN pyenv
( set -e
pyenv.drop
[ "$VERBOSE" = "1" ] && set -x
rm -rf "${PYDIST}" "${PYBUILD}" "${PY_ENV}" ./.tox ./*.egg-info
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name __pycache__ -exec rm -rf {} +
)
}
pyenv.check() {
cat <<EOF
import yaml
print('import yaml --> OK')
EOF
}
pyenv.install() {
if ! pyenv.OK; then
py.clean > /dev/null
fi
if pyenv.install.OK > /dev/null; then
return 0
fi
pyenv
pyenv.OK || die 42 "error while build pyenv (${PY_ENV_BIN})"
( set -e
build_msg PYENV "[install] pip install -e 'searx${PY_SETUP_EXTRAS}'"
"${PY_ENV_BIN}/python" -m pip install -e ".${PY_SETUP_EXTRAS}"
buildenv
) || die 42 "error while pip install (${PY_ENV_BIN})"
}
pyenv.uninstall() {
build_msg PYENV "[pyenv.uninstall] uninstall packages: ${PYOBJECTS}"
pyenv.cmd python setup.py develop --uninstall 2>&1 \
| prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
}
pypi.upload() {
py.clean
py.build
# https://github.com/pypa/twine
pyenv.cmd twine upload "${PYDIST}"/*
}
pypi.upload.test() {
py.clean
py.build
pyenv.cmd twine upload -r testpypi "${PYDIST}"/*
}
test.pylint() {
# shellcheck disable=SC2086
( set -e
build_msg TEST "[pylint] \$PYLINT_FILES"
pyenv.cmd python ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
"${PYLINT_FILES[@]}"
build_msg TEST "[pylint] searx/engines"
pyenv.cmd python ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
--disable="${PYLINT_SEARX_DISABLE_OPTION}" \
--additional-builtins="${PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES}" \
searx/engines
build_msg TEST "[pylint] searx tests"
pyenv.cmd python ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
--disable="${PYLINT_SEARX_DISABLE_OPTION}" \
--ignore=searx/engines \
searx tests
)
dump_return $?
}
test.pep8() {
build_msg TEST 'pycodestyle (formerly pep8)'
local _exclude=""
printf -v _exclude '%s, ' "${PYLINT_FILES[@]}"
pyenv.cmd pycodestyle \
--exclude="searx/static, searx/languages.py, $_exclude " \
--max-line-length=120 \
--ignore "E117,E252,E402,E722,E741,W503,W504,W605" \
searx tests
dump_return $?
}
test.unit() {
build_msg TEST 'tests/unit'
pyenv.cmd python -m nose2 -s tests/unit
dump_return $?
}
test.coverage() {
build_msg TEST 'unit test coverage'
( set -e
pyenv.cmd python -m nose2 -C --log-capture --with-coverage --coverage searx -s tests/unit
pyenv.cmd coverage report
pyenv.cmd coverage html
)
dump_return $?
}
test.robot() {
build_msg TEST 'robot'
gecko.driver
PYTHONPATH=. pyenv.cmd python searx/testing.py robot
dump_return $?
}
test.clean() {
build_msg CLEAN "test stuff"
rm -rf geckodriver.log .coverage coverage/
dump_return $?
}
themes.all() {
( set -e
node.env
themes.oscar
themes.simple
)
dump_return $?
}
themes.oscar() {
local gruntfile=searx/static/themes/oscar/gruntfile.js
build_msg GRUNT "${gruntfile}"
PATH="$(npm bin):$PATH" grunt --gruntfile "${gruntfile}"
dump_return $?
}
themes.simple() {
local gruntfile=searx/static/themes/simple/gruntfile.js
build_msg GRUNT "${gruntfile}"
PATH="$(npm bin):$PATH" grunt --gruntfile "${gruntfile}"
dump_return $?
}
# shellcheck disable=SC2119
main() {
local _type
local cmd="$1"; shift
if [ "$cmd" == "" ]; then
help
err_msg "missing command"
return 42
fi
case "$cmd" in
--getenv) var="$1"; echo "${!var}";;
--help) help;;
--*)
help
err_msg "unknown option $cmd"
return 42
;;
*)
_type="$(type -t "$cmd")"
if [ "$_type" != 'function' ]; then
err_msg "unknown command $cmd / use --help"
return 42
else
"$cmd" "$@"
fi
;;
esac
}
main "$@"
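# Usage examples (illustrative, not part of the script):
#
#   ./manage --help                              # list available commands
#   ./manage docs.html                           # build the HTML documentation
#   ./manage pyenv.cmd python ./searx/webapp.py  # run a command in the virtualenv
#   make docs.html                               # equivalent, via the Makefile wrapper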

manage.sh

@ -1,205 +0,0 @@
#!/bin/sh
export LANG=C
BASE_DIR="$(dirname -- "`readlink -f -- "$0"`")"
cd -- "$BASE_DIR"
set -e
# subshell
PYTHONPATH="$BASE_DIR"
SEARX_DIR="$BASE_DIR/searx"
ACTION="$1"
. "${BASE_DIR}/utils/brand.env"
#
# Python
#
update_packages() {
pip install --upgrade pip
pip install --upgrade setuptools
pip install -Ur "$BASE_DIR/requirements.txt"
}
update_dev_packages() {
update_packages
pip install -Ur "$BASE_DIR/requirements-dev.txt"
}
install_geckodriver() {
echo '[!] Checking geckodriver'
# TODO : check the current geckodriver version
set -e
geckodriver -V > /dev/null 2>&1 || NOTFOUND=1
set +e
if [ -z "$NOTFOUND" ]; then
return
fi
GECKODRIVER_VERSION="v0.28.0"
PLATFORM="`python3 -c "import platform; print(platform.system().lower(), platform.architecture()[0])"`"
case "$PLATFORM" in
"linux 32bit" | "linux2 32bit") ARCH="linux32";;
"linux 64bit" | "linux2 64bit") ARCH="linux64";;
"windows 32 bit") ARCH="win32";;
"windows 64 bit") ARCH="win64";;
"mac 64bit") ARCH="macos";;
esac
GECKODRIVER_URL="https://github.com/mozilla/geckodriver/releases/download/$GECKODRIVER_VERSION/geckodriver-$GECKODRIVER_VERSION-$ARCH.tar.gz";
if [ -z "$1" ]; then
if [ -z "$VIRTUAL_ENV" ]; then
printf "geckodriver can't be installed because VIRTUAL_ENV is not set, you should download it from\n %s" "$GECKODRIVER_URL"
exit
else
GECKODRIVER_DIR="$VIRTUAL_ENV/bin"
fi
else
GECKODRIVER_DIR="$1"
mkdir -p -- "$GECKODRIVER_DIR"
fi
printf "Installing %s/geckodriver from\n %s" "$GECKODRIVER_DIR" "$GECKODRIVER_URL"
FILE="`mktemp`"
wget -qO "$FILE" -- "$GECKODRIVER_URL" && tar xz -C "$GECKODRIVER_DIR" -f "$FILE" geckodriver
rm -- "$FILE"
chmod 777 -- "$GECKODRIVER_DIR/geckodriver"
}
locales() {
pybabel compile -d "$SEARX_DIR/translations"
}
#
# Web
#
npm_path_setup() {
which npm || (printf 'Error: npm is not found\n'; exit 1)
export PATH="$(npm bin)":$PATH
}
npm_packages() {
npm_path_setup
echo '[!] install NPM packages'
cd -- "$BASE_DIR"
npm install less@2.7 less-plugin-clean-css grunt-cli
echo '[!] install NPM packages for oscar theme'
cd -- "$BASE_DIR/searx/static/themes/oscar"
npm install
echo '[!] install NPM packages for simple theme'
cd -- "$BASE_DIR/searx/static/themes/simple"
npm install
}
docker_build() {
# Check if it is a git repository
if [ ! -d .git ]; then
echo "This is not Git repository"
exit 1
fi
if [ ! -x "$(which git)" ]; then
echo "git is not installed"
exit 1
fi
if [ ! git remote get-url origin 2> /dev/null ]; then
echo "there is no remote origin"
exit 1
fi
# This is a git repository
# "git describe" to get the Docker version (for example : v0.15.0-89-g0585788e)
# awk to remove the "v" and the "g"
SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); if ($3) { $3=substr($3, 2); } print}')
# add the suffix "-dirty" if the repository has uncommited change
# /!\ HACK for searx/searx: ignore utils/brand.env
git update-index -q --refresh
if [ ! -z "$(git diff-index --name-only HEAD -- | grep -v 'utils/brand.env')" ]; then
SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty"
fi
# Get the last git commit id, will be added to the Searx version (see Dockerfile)
VERSION_GITCOMMIT=$(echo $SEARX_GIT_VERSION | cut -d- -f2-4)
echo "Last commit : $VERSION_GITCOMMIT"
# Check consistency between the git tag and the searx/version.py file
# /!\ HACK : parse Python file with bash /!\
# otherwise it is not possible build the docker image without all Python dependencies ( version.py loads __init__.py )
# SEARX_PYTHON_VERSION=$(python3 -c "import six; import searx.version; six.print_(searx.version.VERSION_STRING)")
SEARX_PYTHON_VERSION=$(cat searx/version.py | grep "\(VERSION_MAJOR\|VERSION_MINOR\|VERSION_BUILD\) =" | cut -d\= -f2 | sed -e 's/^[[:space:]]*//' | paste -sd "." -)
if [ $(echo "$SEARX_GIT_VERSION" | cut -d- -f1) != "$SEARX_PYTHON_VERSION" ]; then
echo "Inconsistency between the last git tag and the searx/version.py file"
echo "git tag: $SEARX_GIT_VERSION"
echo "searx/version.py: $SEARX_PYTHON_VERSION"
exit 1
fi
# define the docker image name
GITHUB_USER=$(echo "${GIT_URL}" | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
SEARX_IMAGE_NAME="${SEARX_IMAGE_NAME:-${GITHUB_USER:-searxng}/searxng}"
# build Docker image
echo "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
sudo docker build \
--build-arg GIT_URL="${GIT_URL}" \
--build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \
--build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \
--build-arg LABEL_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \
--build-arg LABEL_VCS_URL="${GIT_URL}" \
--build-arg TIMESTAMP_SETTINGS=$(git log -1 --format="%cd" --date=unix -- searx/settings.yml) \
--build-arg TIMESTAMP_UWSGI=$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini) \
-t ${SEARX_IMAGE_NAME}:latest -t ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION} .
if [ "$1" = "push" ]; then
sudo docker push ${SEARX_IMAGE_NAME}:latest
sudo docker push ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}
fi
}
#
# Help
#
help() {
[ -z "$1" ] || printf 'Error: %s\n' "$1"
echo "Searx manage.sh help
Commands
========
help - This text
Build requirements
------------------
update_packages - Check & update production dependency changes
update_dev_packages - Check & update development and production dependency changes
install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests)
npm_packages - Download & install npm dependencies
Build
-----
locales - Compile locales
Environment:
GIT_URL: ${GIT_URL}
ISSUE_URL: ${ISSUE_URL}
SEARX_URL: ${SEARX_URL}
DOCS_URL: ${DOCS_URL}
PUBLIC_INSTANCES: ${PUBLIC_INSTANCES}
"
}
[ "$(command -V "$ACTION" | grep ' function$')" = "" ] \
&& help "action not found" \
|| "$ACTION" "$2"


@ -86,7 +86,7 @@ set_terminal_colors() {
_Red='\e[0;31m'
_Green='\e[0;32m'
_Yellow='\e[0;33m'
_Blue='\e[0;34m'
_Blue='\e[0;94m'
_Violet='\e[0;35m'
_Cyan='\e[0;36m'
@ -95,12 +95,12 @@ set_terminal_colors() {
_BRed='\e[1;31m'
_BGreen='\e[1;32m'
_BYellow='\e[1;33m'
_BBlue='\e[1;34m'
_BBlue='\e[1;94m'
_BPurple='\e[1;35m'
_BCyan='\e[1;36m'
}
if [ ! -p /dev/stdout ]; then
if [ ! -p /dev/stdout ] && [ ! "$TERM" = 'dumb' ] && [ ! "$TERM" = 'unknown' ]; then
set_terminal_colors
fi
@ -152,6 +152,22 @@ err_msg() { echo -e "${_BRed}ERROR:${_creset} $*" >&2; }
warn_msg() { echo -e "${_BBlue}WARN:${_creset} $*" >&2; }
info_msg() { echo -e "${_BYellow}INFO:${_creset} $*" >&2; }
build_msg() {
local tag="$1 "
shift
echo -e "${_Blue}${tag:0:10}${_creset}$*"
}
dump_return() {
# Use this as last command in your function to prompt an ERROR message if
# the exit code is not zero.
local err=$1
[ "$err" -ne "0" ] && err_msg "${FUNCNAME[1]} exit with error ($err)"
return "$err"
}
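# Typical usage of the two helpers above (illustrative):
#
#   build_msg PYENV "[install] OK"   # prefix the message with a colored tag
#   some_build_step                  # hypothetical command
#   dump_return $?                   # report an error if the step failed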
clean_stdin() {
if [[ $(uname -s) != 'Darwin' ]]; then
while read -r -n1 -t 0.1; do : ; done
@ -496,6 +512,295 @@ service_is_available() {
return "$exit_val"
}
# python
# ------
PY="${PY:=3}"
PYTHON="${PYTHON:=python$PY}"
PY_ENV="${PY_ENV:=local/py${PY}}"
PY_ENV_BIN="${PY_ENV}/bin"
PY_ENV_REQ="${PY_ENV_REQ:=${REPO_ROOT}/requirements*.txt}"
# List of python packages (folders) or modules (files) installed by command:
# pyenv.install
PYOBJECTS="${PYOBJECTS:=.}"
# folder where the python distribution takes place
PYDIST="${PYDIST:=dist}"
# folder where the intermediate build files take place
PYBUILD="${PYBUILD:=build/py${PY}}"
# https://www.python.org/dev/peps/pep-0508/#extras
#PY_SETUP_EXTRAS='[develop,test]'
PY_SETUP_EXTRAS="${PY_SETUP_EXTRAS:=[develop,test]}"
PIP_BOILERPLATE=( pip wheel setuptools )
# shellcheck disable=SC2120
pyenv() {
# usage: pyenv [vtenv_opts ...]
#
# vtenv_opts: see 'pip install --help'
#
# Builds virtualenv with 'requirements*.txt' (PY_ENV_REQ) installed. The
# virtualenv will be reused by validating sha256sum of the requirement
# files.
required_commands \
sha256sum "${PYTHON}" \
|| exit
local pip_req=()
if ! pyenv.OK > /dev/null; then
rm -f "${PY_ENV}/${PY_ENV_REQ}.sha256"
pyenv.drop > /dev/null
build_msg PYENV "[virtualenv] installing ${PY_ENV_REQ} into ${PY_ENV}"
"${PYTHON}" -m venv "$@" "${PY_ENV}"
"${PY_ENV_BIN}/python" -m pip install -U "${PIP_BOILERPLATE[@]}"
for i in ${PY_ENV_REQ}; do
pip_req=( "${pip_req[@]}" "-r" "$i" )
done
(
[ "$VERBOSE" = "1" ] && set -x
# shellcheck disable=SC2086
"${PY_ENV_BIN}/python" -m pip install "${pip_req[@]}" \
&& sha256sum ${PY_ENV_REQ} > "${PY_ENV}/requirements.sha256"
)
fi
pyenv.OK
}
_pyenv_OK=''
pyenv.OK() {
# probes if pyenv exists and runs the script from pyenv.check
[ "$_pyenv_OK" == "OK" ] && return 0
if [ ! -f "${PY_ENV_BIN}/python" ]; then
build_msg PYENV "[virtualenv] missing ${PY_ENV_BIN}/python"
return 1
fi
if [ ! -f "${PY_ENV}/requirements.sha256" ] \
|| ! sha256sum --check --status <"${PY_ENV}/requirements.sha256" 2>/dev/null; then
build_msg PYENV "[virtualenv] requirements.sha256 failed"
sed 's/^/ [virtualenv] - /' <"${PY_ENV}/requirements.sha256"
return 1
fi
pyenv.check \
| "${PY_ENV_BIN}/python" 2>&1 \
| prefix_stdout "${_Blue}PYENV ${_creset}[check] "
local err=${PIPESTATUS[1]}
if [ "$err" -ne "0" ]; then
build_msg PYENV "[check] python test failed"
return "$err"
fi
build_msg PYENV "OK"
_pyenv_OK="OK"
return 0
}
pyenv.drop() {
build_msg PYENV "[virtualenv] drop ${PY_ENV}"
rm -rf "${PY_ENV}"
_pyenv_OK=''
}
pyenv.check() {
# Prints a python script (additional checks) to stdout. Used by pyenv.OK to
# check if the virtualenv is ready to install python objects. This function
# should be overwritten by the application script.
local imp=""
for i in "${PIP_BOILERPLATE[@]}"; do
imp="$imp, $i"
done
cat <<EOF
import ${imp#,*}
EOF
}
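# Note: the new ./manage script overrides this hook with its own pyenv.check()
# that additionally probes "import yaml" (see the manage script above).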
pyenv.install() {
if ! pyenv.OK; then
py.clean > /dev/null
fi
if ! pyenv.install.OK > /dev/null; then
build_msg PYENV "[install] ${PYOBJECTS}"
if ! pyenv.OK >/dev/null; then
pyenv
fi
for i in ${PYOBJECTS}; do
build_msg PYENV "[install] pip install -e '$i${PY_SETUP_EXTRAS}'"
"${PY_ENV_BIN}/python" -m pip install -e "$i${PY_SETUP_EXTRAS}"
done
fi
pyenv.install.OK
}
_pyenv_install_OK=''
pyenv.install.OK() {
[ "$_pyenv_install_OK" == "OK" ] && return 0
local imp=""
local err=""
if [ "." = "${PYOBJECTS}" ]; then
imp="import $(basename "$(pwd)")"
else
# shellcheck disable=SC2086
for i in ${PYOBJECTS}; do imp="$imp, $i"; done
imp="import ${imp#,*} "
fi
(
[ "$VERBOSE" = "1" ] && set -x
"${PY_ENV_BIN}/python" -c "import sys; sys.path.pop(0); $imp;" 2>/dev/null
)
err=$?
if [ "$err" -ne "0" ]; then
build_msg PYENV "[install] python installation test failed"
return "$err"
fi
build_msg PYENV "[install] OK"
_pyenv_install_OK="OK"
return 0
}
pyenv.uninstall() {
build_msg PYENV "[uninstall] ${PYOBJECTS}"
if [ "." = "${PYOBJECTS}" ]; then
pyenv.cmd python setup.py develop --uninstall 2>&1 \
| prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
else
pyenv.cmd python -m pip uninstall --yes ${PYOBJECTS} 2>&1 \
| prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
fi
}
pyenv.cmd() {
pyenv.install
( set -e
# shellcheck source=/dev/null
source "${PY_ENV_BIN}/activate"
[ "$VERBOSE" = "1" ] && set -x
"$@"
)
}
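# Example (illustrative): docs.html below runs "pyenv.cmd sphinx-build ..."
# through this wrapper; from the command line the same mechanism is exposed as
# "./manage pyenv.cmd <command> ...".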
# Sphinx doc
# ----------
GH_PAGES="build/gh-pages"
DOCS_DIST="${DOCS_DIST:=dist/docs}"
DOCS_BUILD="${DOCS_BUILD:=build/docs}"
docs.html() {
build_msg SPHINX "HTML ./docs --> file://$(readlink -e "$(pwd)/$DOCS_DIST")"
pyenv.install
docs.prebuild
# shellcheck disable=SC2086
PATH="${PY_ENV_BIN}:${PATH}" pyenv.cmd sphinx-build \
${SPHINX_VERBOSE} ${SPHINXOPTS} \
-b html -c ./docs -d "${DOCS_BUILD}/.doctrees" ./docs "${DOCS_DIST}"
dump_return $?
}
docs.live() {
build_msg SPHINX "autobuild ./docs --> file://$(readlink -e "$(pwd)/$DOCS_DIST")"
pyenv.install
docs.prebuild
# shellcheck disable=SC2086
PATH="${PY_ENV_BIN}:${PATH}" pyenv.cmd sphinx-autobuild \
${SPHINX_VERBOSE} ${SPHINXOPTS} --open-browser --host 0.0.0.0 \
-b html -c ./docs -d "${DOCS_BUILD}/.doctrees" ./docs "${DOCS_DIST}"
dump_return $?
}
docs.clean() {
build_msg CLEAN "docs -- ${DOCS_BUILD} ${DOCS_DIST}"
# shellcheck disable=SC2115
rm -rf "${GH_PAGES}" "${DOCS_BUILD}" "${DOCS_DIST}"
dump_return $?
}
docs.prebuild() {
# Dummy function to run some actions before sphinx-doc build gets started.
# This function needs to be overwritten by the application script.
true
dump_return $?
}
# shellcheck disable=SC2155
docs.gh-pages() {
# The commit history in the gh-pages branch makes no sense, the history only
# inflates the repository unnecessarily. Therefore a *new orphan* branch
# is created each time we deploy on the gh-pages branch.
docs.clean
docs.prebuild
docs.html
[ "$VERBOSE" = "1" ] && set -x
local head="$(git rev-parse HEAD)"
local branch="$(git name-rev --name-only HEAD)"
local remote="$(git config branch."${branch}".remote)"
local remote_url="$(git config remote."${remote}".url)"
build_msg GH-PAGES "prepare folder: ${GH_PAGES}"
build_msg GH-PAGES "remote of the gh-pages branch: ${remote} / ${remote_url}"
build_msg GH-PAGES "current branch: ${branch}"
# prepare the *orphan* gh-pages working tree
(
git worktree remove -f "${GH_PAGES}"
git branch -D gh-pages
) &> /dev/null || true
git worktree add --no-checkout "${GH_PAGES}" "${remote}/master"
pushd "${GH_PAGES}" &> /dev/null
git checkout --orphan gh-pages
git rm -rfq .
popd &> /dev/null
cp -r "${DOCS_DIST}"/* "${GH_PAGES}"/
touch "${GH_PAGES}/.nojekyll"
cat > "${GH_PAGES}/404.html" <<EOF
<html><head><META http-equiv='refresh' content='0;URL=index.html'></head></html>
EOF
pushd "${GH_PAGES}" &> /dev/null
git add --all .
git commit -q -m "gh-pages build from: ${branch}@${head} (${remote_url})"
git push -f "${remote}" gh-pages
popd &> /dev/null
set +x
build_msg GH-PAGES "deployed"
}
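# Note: the CI workflow above deploys the same ${DOCS_DIST} output with the
# github-pages-deploy action; locally this function is reached via
# "make docs.clean docs.gh-pages".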
# golang
# ------
@ -1382,6 +1687,12 @@ LXC_ENV_FOLDER=
if in_container; then
# shellcheck disable=SC2034
LXC_ENV_FOLDER="lxc-env/$(hostname)/"
PY_ENV="${LXC_ENV_FOLDER}${PY_ENV}"
PY_ENV_BIN="${LXC_ENV_FOLDER}${PY_ENV_BIN}"
PYDIST="${LXC_ENV_FOLDER}${PYDIST}"
PYBUILD="${LXC_ENV_FOLDER}${PYBUILD}"
DOCS_DIST="${LXC_ENV_FOLDER}${DOCS_DIST}"
DOCS_BUILD="${LXC_ENV_FOLDER}${DOCS_BUILD}"
fi
lxc_init_container_env() {


@ -142,7 +142,7 @@ main() {
local _usage="unknown or missing $1 command $2"
# don't check prerequisite when in recursion
if [[ ! $1 == __* ]]; then
if [[ ! $1 == __* ]] && [[ ! $1 == --help ]]; then
if ! in_container; then
! required_commands lxc && lxd_info && exit 42
fi


@ -1,4 +1,5 @@
# -*- coding: utf-8; mode: makefile-gmake -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
ifeq (,$(wildcard /.lxcenv.mk))
PHONY += lxc-activate lxc-purge
@ -10,60 +11,26 @@ else
include /.lxcenv.mk
endif
PHONY += make-help
ifeq (,$(wildcard /.lxcenv.mk))
make-help:
else
make-help: lxc-help
endif
@echo 'options:'
@echo ' make V=0|1 [targets] 0 => quiet build (default), 1 => verbose build'
@echo ' make V=2 [targets] 2 => give reason for rebuild of target'
quiet_cmd_common_clean = CLEAN $@
cmd_common_clean = \
find . -name '*.orig' -exec rm -f {} + ;\
find . -name '*.rej' -exec rm -f {} + ;\
find . -name '*~' -exec rm -f {} + ;\
find . -name '*.bak' -exec rm -f {} + ;\
FMT = cat
ifeq ($(shell which fmt >/dev/null 2>&1; echo $$?), 0)
FMT = fmt
endif
# MS-Windows
#
# For a minimal *make-environment*, I'm using the gnu-tools from:
#
# - GNU MCU Eclipse Windows Build Tools, which brings 'make', 'rm' etc.
# https://github.com/gnu-mcu-eclipse/windows-build-tools/releases
#
# - git for Windows, which brings 'find', 'grep' etc.
# https://git-scm.com/download/win
# normpath
#
# System-dependent normalization of the path name
#
# usage: $(call normpath,/path/to/file)
normpath = $1
ifeq ($(OS),Windows_NT)
normpath = $(subst /,\,$1)
endif
# stolen from linux/Makefile
#
ifeq ("$(origin V)", "command line")
KBUILD_VERBOSE = $(V)
VERBOSE = $(V)
endif
ifndef KBUILD_VERBOSE
KBUILD_VERBOSE = 0
ifndef VERBOSE
VERBOSE = 0
endif
ifeq ($(KBUILD_VERBOSE),1)
export VERBOSE
ifeq ($(VERBOSE),1)
quiet =
Q =
else
@ -75,14 +42,8 @@ endif
#
# Convenient variables
comma := ,
quote := "
#" this comment is only for emacs highlighting
squote := '
#' this comment is only for emacs highlighting
empty :=
space := $(empty) $(empty)
space_escape := _-_SPACE_-_
# Find any prerequisites that are newer than the target or that do not exist.
# PHONY targets are skipped in both cases.
@ -107,7 +68,7 @@ any-prereq = $(filter-out $(PHONY),$?) $(filter-out $(PHONY) $(wildcard $^),$^)
# (5) No dir/.target.cmd file (used to store command line)
# (6) No dir/.target.cmd file and target not listed in $(targets)
# This is a good hint that there is a bug in the kbuild file
ifeq ($(KBUILD_VERBOSE),2)
ifeq ($(VERBOSE),2)
why = \
$(if $(filter $@, $(PHONY)),- due to target is PHONY, \
$(if $(wildcard $@), \

View File

@ -1,269 +0,0 @@
# -*- coding: utf-8; mode: makefile-gmake -*-
# list of python packages (folders) or modules (files) of this build
PYOBJECTS ?=
SITE_PYTHON ?=$(dir $(abspath $(lastword $(MAKEFILE_LIST))))site-python
export PYTHONPATH := $(SITE_PYTHON):$$PYTHONPATH
export PY_ENV PYDIST PYBUILD
# folder where the python distribution takes place
PYDIST = ./$(LXC_ENV_FOLDER)dist
# folder where the python intermediate build files take place
PYBUILD = ./$(LXC_ENV_FOLDER)build
# python version to use
PY ?=3
# $(PYTHON) points to the python interpreter from the OS! The python from the
# OS is needed e.g. to create a virtualenv. For tasks inside the virtualenv the
# interpreter from '$(PY_ENV_BIN)/python' is used.
PYTHON ?= python$(PY)
PIP ?= pip$(PY)
PIP_INST ?= --user
# https://www.python.org/dev/peps/pep-0508/#extras
#PY_SETUP_EXTRAS ?= \[develop,test\]
PY_SETUP_EXTRAS ?=
PYDEBUG ?= --pdb
PYLINT_RC ?= .pylintrc
TEST_FOLDER ?= ./tests
TEST ?= .
PY_ENV = ./$(LXC_ENV_FOLDER)local/py$(PY)
PY_ENV_BIN = $(PY_ENV)/bin
PY_ENV_ACT = . $(PY_ENV_BIN)/activate
ifeq ($(OS),Windows_NT)
PYTHON = python
PY_ENV_BIN = $(PY_ENV)/Scripts
PY_ENV_ACT = $(PY_ENV_BIN)/activate
endif
VTENV_OPTS ?=
python-help::
@echo 'makefile.python:'
@echo ' pyenv | pyenv[un]install'
@echo ' build $(PY_ENV) & [un]install python objects'
@echo ' targets using pyenv $(PY_ENV):'
@echo ' pylint - run pylint *linting*'
@echo ' pytest - run *tox* test on python objects'
@echo ' pydebug - run tests within a PDB debug session'
@echo ' pybuild - build python packages ($(PYDIST) $(PYBUILD))'
@echo ' pyclean - clean intermediate python objects'
@echo ' targets using the system user environment:'
@echo ' py[un]install - [un]install python objects in editable mode'
@echo ' upload-pypi - upload $(PYDIST)/* files to PyPi'
@echo 'options:'
@echo ' make PY=3.7 [targets] => to eval targets with python 3.7 ($(PY))'
@echo ' make PIP_INST= => to set/unset pip install options ($(PIP_INST))'
@echo ' make TEST=. => choose test from $(TEST_FOLDER) (default "." runs all)'
@echo ' make DEBUG= => target "debug": do not invoke PDB on errors'
@echo ' make PY_SETUP_EXTRAS => also install extras_require from setup.py \[develop,test\]'
@echo ' when using target "pydebug", set breakpoints within py-source by adding::'
@echo ' DEBUG()'
# ------------------------------------------------------------------------------
# OS requirements
# ------------------------------------------------------------------------------
PHONY += msg-python-exe python-exe
msg-python-exe:
@echo "\n $(PYTHON) is required\n\n\
Make sure you have $(PYTHON) installed, grab it from\n\
https://www.python.org or install it from your package\n\
manager. On debian based OS these requirements are\n\
installed by::\n\n\
sudo -H add-apt-repository ppa:deadsnakes/ppa\n\
sudo -H apt update\n\
sudo -H apt-get install $(PYTHON) $(PYTHON)-venv\n"
ifeq ($(shell which $(PYTHON) >/dev/null 2>&1; echo $$?), 1)
python-exe: msg-python-exe
$(error The '$(PYTHON)' command was not found)
else
python-exe:
@:
endif
msg-pip-exe:
@echo "\n $(PIP) is required\n\n\
Make sure you have updated pip installed, grab it from\n\
https://pip.pypa.io or install it from your package\n\
manager. On debian based OS these requirements are\n\
installed by::\n\n\
sudo -H apt-get install python$(PY)-pip\n" | $(FMT)
ifeq ($(shell which $(PIP) >/dev/null 2>&1; echo $$?), 1)
pip-exe: msg-pip-exe
$(error The '$(PIP)' command was not found)
else
pip-exe:
@:
endif
# ------------------------------------------------------------------------------
# commands
# ------------------------------------------------------------------------------
# $2 path to folder with setup.py, this uses pip from the OS
quiet_cmd_pyinstall = INSTALL $2
cmd_pyinstall = $(PIP) $(PIP_VERBOSE) install $(PIP_INST) -e $2$(PY_SETUP_EXTRAS)
# $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
quiet_cmd_pyenvinstall = PYENV install $2
cmd_pyenvinstall = \
if ! cat $(PY_ENV)/requirements.sha256 2>/dev/null | sha256sum --check --status 2>/dev/null; then \
rm -f $(PY_ENV)/requirements.sha256; \
$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install -e $2$(PY_SETUP_EXTRAS) &&\
sha256sum requirements*.txt > $(PY_ENV)/requirements.sha256 ;\
else \
echo "PYENV $2 already installed"; \
fi
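The checksum guard above is the interesting part: the editable install is repeated only when the requirements files changed. The new ./manage script keeps an equivalent guard in shell; the snippet below is an illustrative re-implementation of the idea (function name and paths are assumptions, not taken from this commit):

    pyenv_install_if_needed() {
        # skip the (slow) editable install while requirements*.txt are unchanged
        if sha256sum --check --status local/py3/requirements.sha256 2>/dev/null; then
            echo "PYENV already installed"
            return 0
        fi
        rm -f local/py3/requirements.sha256
        local/py3/bin/python -m pip install -e ".[test]" \
            && sha256sum requirements*.txt > local/py3/requirements.sha256
    }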
# Uninstall the package. Since pip does not uninstall dependencies that are no
# longer needed (there is nothing like autoremove), those dependencies remain.
# $2 package name to uninstall; this uses pip from the OS.
quiet_cmd_pyuninstall = UNINSTALL $2
cmd_pyuninstall = $(PIP) $(PIP_VERBOSE) uninstall --yes $2
# $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
quiet_cmd_pyenvuninstall = PYENV uninstall $2
cmd_pyenvuninstall = $(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) uninstall --yes $2
# $2 path to folder where the virtualenv takes place
quiet_cmd_virtualenv = PYENV usage: $ source ./$@/bin/activate
cmd_virtualenv = \
if [ -d "./$(PY_ENV)" -a -x "./$(PY_ENV_BIN)/python" ]; then \
echo "PYENV using virtualenv from $2"; \
else \
$(PYTHON) -m venv $(VTENV_OPTS) $2; \
$(PY_ENV_BIN)/python -m pip install $(PIP_VERBOSE) -U pip wheel setuptools; \
$(PY_ENV_BIN)/python -m pip install $(PIP_VERBOSE) -r requirements.txt; \
fi
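Expressed as plain shell, the bootstrap above boils down to three steps: the OS interpreter only creates the virtualenv, everything afterwards runs the interpreter inside it (the local/py3 layout matches the defaults used elsewhere in this commit):

    python3 -m venv ./local/py3
    ./local/py3/bin/python -m pip install -U pip wheel setuptools
    ./local/py3/bin/python -m pip install -r requirements.txt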
# $2 path to lint
quiet_cmd_pylint = LINT $@
cmd_pylint = $(PY_ENV_BIN)/python -m pylint -j 0 --rcfile $(PYLINT_RC) $2
quiet_cmd_pytest = TEST $@
cmd_pytest = $(PY_ENV_BIN)/python -m tox -vv
# setuptools, pip, easy_install: it's a mess full of cracks, a documentation hell
# and broken by design ... it all sucks, I really, really hate all this ... aaargh!
#
# About python packaging see `Python Packaging Authority`_. Most of the names
# here are mapped to ``setup(<name1>=..., <name2>=...)`` arguments in
# ``setup.py``. See `Packaging and distributing projects`_ about ``setup(...)``
# arguments. If this is all new for you, start with `PyPI Quick and Dirty`_.
#
# Further read:
#
# - pythonwheels_
# - setuptools_
# - packaging_
# - sdist_
# - installing_
#
# .. _`Python Packaging Authority`: https://www.pypa.io
# .. _`Packaging and distributing projects`: https://packaging.python.org/guides/distributing-packages-using-setuptools/
# .. _`PyPI Quick and Dirty`: https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/
# .. _pythonwheels: https://pythonwheels.com/
# .. _setuptools: https://setuptools.readthedocs.io/en/latest/setuptools.html
# .. _packaging: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-and-distributing-projects
# .. _sdist: https://packaging.python.org/guides/distributing-packages-using-setuptools/#source-distributions
# .. _bdist_wheel: https://packaging.python.org/guides/distributing-packages-using-setuptools/#pure-python-wheels
# .. _installing: https://packaging.python.org/tutorials/installing-packages/
#
quiet_cmd_pybuild = BUILD $@
cmd_pybuild = $(PY_ENV_BIN)/python setup.py \
sdist -d $(PYDIST) \
bdist_wheel --bdist-dir $(PYBUILD) -d $(PYDIST)
quiet_cmd_pyclean = CLEAN $@
# remove the 'build' folder since bdist_wheel does not honor the --bdist-dir
cmd_pyclean = \
rm -rf $(PYDIST) $(PYBUILD) $(PY_ENV) ./.tox *.egg-info ;\
find . -name '*.pyc' -exec rm -f {} + ;\
find . -name '*.pyo' -exec rm -f {} + ;\
find . -name __pycache__ -exec rm -rf {} +
# ------------------------------------------------------------------------------
# targets
# ------------------------------------------------------------------------------
# for installation use the pip from the OS!
PHONY += pyinstall
pyinstall: pip-exe
$(call cmd,pyinstall,.)
PHONY += pyuninstall
pyuninstall: pip-exe
$(call cmd,pyuninstall,$(PYOBJECTS))
# for installation use the pip from PY_ENV (not the OS)!
PHONY += pyenvinstall
pyenvinstall: $(PY_ENV)
$(call cmd,pyenvinstall,.)
PHONY += pyenvuninstall
pyenvuninstall: $(PY_ENV)
$(call cmd,pyenvuninstall,$(PYOBJECTS))
PHONY += pyclean
pyclean:
$(call cmd,pyclean)
# to build *local* environment, python from the OS is needed!
pyenv: $(PY_ENV)
$(PY_ENV): python-exe
$(call cmd,virtualenv,$(PY_ENV))
PHONY += pylint-exe
pylint-exe: $(PY_ENV)
@$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install pylint
PHONY += pylint
pylint: pylint-exe
$(call cmd,pylint,$(PYOBJECTS))
PHONY += pybuild
pybuild: $(PY_ENV)
$(call cmd,pybuild)
PHONY += pytest
pytest: $(PY_ENV)
$(call cmd,pytest)
PHONY += pydebug
# set breakpoint with:
# DEBUG()
# e.g. to run tests in debug mode in emacs use:
# 'M-x pdb' ... 'make pydebug'
pydebug: $(PY_ENV)
DEBUG=$(DEBUG) $(PY_ENV_BIN)/pytest $(DEBUG) -v $(TEST_FOLDER)/$(TEST)
# runs python interpreter from ./local/py<N>/bin/python
pyenv-python: pyenvinstall
$(PY_ENV_BIN)/python -i
# With 'dependency_links=' setuptools supports dependencies on packages hosted
# on repositories other than PyPI, see "Packages Not On PyPI" [1]. The big
# drawback is that, for security reasons (I don't know where the security gate
# on PyPI is), this feature is not supported by pip [2]. That's why an upload to
# PyPI is required, and since uploads via setuptools are not recommended, we
# have to install / use twine ... it's really a mess.
#
# [1] https://python-packaging.readthedocs.io/en/latest/dependencies.html#packages-not-on-pypi
# [2] https://github.com/pypa/pip/pull/1519
# https://github.com/pypa/twine
PHONY += upload-pypi upload-pypi-test
upload-pypi: pyclean pyenvinstall pybuild
@$(PY_ENV_BIN)/twine upload $(PYDIST)/*
upload-pypi-test: pyclean pyenvinstall pybuild
@$(PY_ENV_BIN)/twine upload -r testpypi $(PYDIST)/*
.PHONY: $(PHONY)

View File

@ -1,199 +0,0 @@
# -*- coding: utf-8; mode: makefile-gmake -*-
export DOCS_FOLDER DOCS_BUILD DOCS_DIST BOOKS_FOLDER BOOKS_DIST
# You can set these variables from the command line.
SPHINXOPTS ?=
SPHINXBUILD ?= $(PY_ENV_BIN)/sphinx-build
SPHINX_CONF ?= conf.py
DOCS_FOLDER = ./docs
DOCS_BUILD = ./$(LXC_ENV_FOLDER)build/docs
DOCS_DIST = ./$(LXC_ENV_FOLDER)dist/docs
GH_PAGES ?= build/gh-pages
BOOKS_FOLDER = ./docs
BOOKS_DIST = ./$(LXC_ENV_FOLDER)dist/books
ifeq ($(KBUILD_VERBOSE),1)
SPHINX_VERBOSE = "-v"
else
SPHINX_VERBOSE =
endif
docs-help:
@echo 'makefile.sphinx:'
@echo ' docs-clean - clean intermediate doc objects'
@echo ' $(GH_PAGES) - create & upload github pages'
@echo ' sphinx-pdf - run sphinx latex & pdf targets'
@echo ''
@echo ' books/{name}.html : build only the HTML of document {name}'
@echo ' valid values for books/{name}.html are:'
@echo ' $(BOOKS_HTML)' | $(FMT)
@echo ' books/{name}.pdf : build only the PDF of document {name}'
@echo ' valid values for books/{name}.pdf are:'
@echo ' $(BOOKS_PDF) ' | $(FMT)
# ------------------------------------------------------------------------------
# requirements
# ------------------------------------------------------------------------------
PHONY += msg-texlive texlive
ifeq ($(shell which xelatex >/dev/null 2>&1; echo $$?), 1)
texlive: msg-texlive
$(error The 'xelatex' command was not found)
else
texlive:
@:
endif
msg-texlive:
$(Q)echo "\n\
The TeX/PDF output and the *math* extension require TexLive and latexmk:\n\n\
Make sure you have an updated TeXLive with the XeTeX engine installed; grab\n\
it from https://www.tug.org/texlive or install it from your package manager.\n\n\
Install latexmk from your package manager or visit https://ctan.org/pkg/latexmk\n\n\
Sphinx-doc produces (Xe)LaTeX files which might use additional TeX packages\n\
and fonts. To process these LaTeX files, a TexLive installation with the\n\
additional packages is required. On debian based OS these requirements\n\
are installed by::\n\n\
sudo -H apt-get install\n\
latexmk\n\
texlive-base texlive-xetex texlive-latex-recommended\n\
texlive-extra-utils dvipng ttf-dejavu\n"
# ------------------------------------------------------------------------------
# commands
# ------------------------------------------------------------------------------
# $2 sphinx builder e.g. "html"
# $3 path where configuration file (conf.py) is located
# $4 sourcedir
# $5 dest subfolder e.g. "man" for man pages at $(DOCS_DIST)/man
quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
cmd_sphinx = SPHINX_CONF=$(abspath $4/$(SPHINX_CONF))\
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
quiet_cmd_sphinx_autobuild = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
cmd_sphinx_autobuild = PATH="$(PY_ENV_BIN):$(PATH)" $(PY_ENV_BIN)/sphinx-autobuild $(SPHINX_VERBOSE) --open-browser --host 0.0.0.0 $(SPHINXOPTS)\
-b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
quiet_cmd_sphinx_clean = CLEAN $@
cmd_sphinx_clean = rm -rf $(DOCS_BUILD) $(DOCS_DIST) $(GH_PAGES)/* $(GH_PAGES)/.buildinfo
# ------------------------------------------------------------------------------
# targets
# ------------------------------------------------------------------------------
# build PDF of whole documentation in: $(DOCS_DIST)/pdf
PHONY += sphinx-pdf
sphinx-pdf: sphinx-latex
$(Q)cd $(DOCS_BUILD)/latex/; make all-pdf
$(Q)mkdir -p $(DOCS_DIST)/pdf
$(Q)cp $(DOCS_BUILD)/latex/*.pdf $(DOCS_DIST)/pdf
@echo "SPHINX *.pdf --> file://$(abspath $(DOCS_DIST)/pdf)"
PHONY += sphinx-latex
sphinx-latex: pyenvinstall texlive
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b latex \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/.doctrees \
$(DOCS_FOLDER) \
$(DOCS_BUILD)/latex
# Sphinx projects, which we call *books* (the more common term). Books are
# folders under $(BOOKS_FOLDER) containing a conf.py file. The HTML output goes
# to folder $(BOOKS_DIST)/<name> while the PDF is placed in
# $(BOOKS_DIST)/<name>/pdf (a usage example follows the target list below).
BOOKS=$(patsubst $(BOOKS_FOLDER)/%/conf.py,books/%,$(wildcard $(BOOKS_FOLDER)/*/conf.py))
# fine grained targets
BOOKS_HTML = $(patsubst %,%.html,$(BOOKS))
BOOKS_CLEAN = $(patsubst %,%.clean,$(BOOKS))
BOOKS_LATEX = $(patsubst %,%.latex,$(BOOKS))
BOOKS_PDF = $(patsubst %,%.pdf,$(BOOKS))
BOOKS_LIVE = $(patsubst %,%.live,$(BOOKS))
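For example, a (hypothetical) Sphinx project located at ./docs/mybook/conf.py would be driven by the fine-grained targets like this:

    make books/mybook.html    # HTML --> ./dist/books/mybook
    make books/mybook.pdf     # PDF  --> ./dist/books/mybook/pdf
    make books/mybook.clean   # drop the build & dist artefacts of this book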
$(BOOKS_DIST):
mkdir -p $(BOOKS_DIST)
PHONY += $(BOOKS_HTML)
$(BOOKS_HTML): pyenvinstall | $(BOOKS_DIST)
SPHINX_CONF=$(patsubst books/%.html,%,$@)/conf.py \
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b html \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/books/$(patsubst books/%.html,%,$@)/.doctrees \
$(BOOKS_FOLDER)/$(patsubst books/%.html,%,$@) \
$(BOOKS_DIST)/$(patsubst books/%.html,%,$@)
@echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.html,%,$@))"
PHONY += $(BOOKS_LIVE)
$(BOOKS_LIVE): pyenvinstall | $(BOOKS_DIST)
PATH="$(PY_ENV_BIN):$(PATH)" \
SPHINX_CONF=$(patsubst books/%.live,%,$@)/conf.py \
$(PY_ENV_BIN)/sphinx-autobuild --poll -B --host 0.0.0.0 --port 8080 $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b html \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/books/$(patsubst books/%.live,%,$@)/.doctrees \
$(BOOKS_FOLDER)/$(patsubst books/%.live,%,$@) \
$(BOOKS_DIST)/$(patsubst books/%.live,%,$@)
$(BOOKS_PDF): %.pdf : %.latex
$(Q)cd $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@); make all-pdf
$(Q)mkdir -p $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
$(Q)cp -v $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@)/*.pdf $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
@echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@))/pdf"
PHONY += $(BOOKS_LATEX)
$(BOOKS_LATEX): pyenvinstall | $(BOOKS_DIST)
SPHINX_CONF=$(patsubst books/%.latex,%,$@)/conf.py \
$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
-b latex \
-c $(DOCS_FOLDER) \
-d $(DOCS_BUILD)/books/$(patsubst books/%.latex,%,$@)/.doctrees \
$(BOOKS_FOLDER)/$(patsubst books/%.latex,%,$@) \
$(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@)
@echo "SPHINX $@ --> file://$(abspath $(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@))"
$(BOOKS_CLEAN):
$(Q)rm -rf $(BOOKS_DIST)/$(patsubst books/%.clean,%,$@) \
$(DOCS_BUILD)/books/$(patsubst books/%.clean,%,$@) \
$(DOCS_BUILD)/latex/$(patsubst books/%.clean,%,$@)
# github pages
PHONY += prepare-gh-pages
prepare-gh-pages:
cp -r $(DOCS_DIST)/* $(GH_PAGES)/
touch $(GH_PAGES)/.nojekyll
echo "<html><head><META http-equiv='refresh' content='0;URL=index.html'></head></html>" > $(GH_PAGES)/404.html
PHONY += gh-pages
gh-pages: docs-clean docs
- git worktree remove -f $(GH_PAGES) || exit 0
- git branch -D gh-pages || exit 0
git worktree add --no-checkout $(GH_PAGES) master
cd $(GH_PAGES); git checkout --orphan gh-pages && git rm -rfq .
$(MAKE) prepare-gh-pages
cd $(GH_PAGES);\
git add --all . ;\
git commit -q -m "make gh-pages: from $(shell git config --get remote.origin.url)@$(shell git rev-parse HEAD)" ;\
git push -f origin gh-pages
PHONY += ci-gh-pages
ci-gh-pages: docs-clean docs
rm -Rf $(GH_PAGES)
mkdir -p $(GH_PAGES)
$(MAKE) prepare-gh-pages
PHONY += docs-clean
docs-clean: $(BOOKS_CLEAN)
$(call cmd,sphinx_clean)
.PHONY: $(PHONY)

View File

@ -418,9 +418,9 @@ install_settings() {
err_msg "you have to install searx first"
exit 42
fi
mkdir -p "$(dirname ${SEARX_SETTINGS_PATH})"
mkdir -p "$(dirname "${SEARX_SETTINGS_PATH}")"
if [[ ! -f ${SEARX_SETTINGS_PATH} ]]; then
if [[ ! -f "${SEARX_SETTINGS_PATH}" ]]; then
info_msg "install settings ${SEARX_SETTINGS_TEMPLATE}"
info_msg " --> ${SEARX_SETTINGS_PATH}"
cp "${SEARX_SETTINGS_TEMPLATE}" "${SEARX_SETTINGS_PATH}"
@ -481,7 +481,7 @@ pyenv_is_available() {
create_pyenv() {
rst_title "Create virtualenv (python)" section
echo
if [[ ! -f "${SEARX_SRC}/manage.sh" ]]; then
if [[ ! -f "${SEARX_SRC}/manage" ]]; then
err_msg "to create pyenv for searx, searx has to be cloned first"
return 42
fi

View File

@ -1,48 +0,0 @@
# -*- coding: utf-8; mode: python -*-
"""Implement some sphinx-build tools.
"""
import os
import sys
from sphinx.util.pycompat import execfile_
# ------------------------------------------------------------------------------
def load_sphinx_config(namespace):
# ------------------------------------------------------------------------------
u"""Load an additional configuration file into *namespace*.
The name of the configuration file is taken from the environment variable
``SPHINX_CONF``. The external configuration file extends (or overwrites) the
configuration values of the original ``conf.py``. With this you are able to
maintain *build themes*. To your docs/conf.py add::
from sphinx_build_tools import load_sphinx_config
...
# Since load_sphinx_config overwrites settings in the global namespace, it has
# to be the last statement in the conf.py file
load_sphinx_config(globals())
"""
config_file = os.environ.get("SPHINX_CONF", None)
if (config_file is not None
and os.path.normpath(namespace["__file__"]) != os.path.normpath(config_file) ):
config_file = os.path.abspath(config_file)
if os.path.isfile(config_file):
sys.stdout.write(
"load additional sphinx-config: %s\n"
% config_file)
config = namespace.copy()
config['__file__'] = config_file
execfile_(config_file, config)
del config['__file__']
namespace.update(config)
else:
sys.stderr.write(
"WARNING: additional sphinx-config not found: %s\n"
% config_file)