Mirror of https://github.com/searxng/searxng.git (synced 2025-08-04 19:06:44 +02:00)

[fix] sh: apply format

Related: https://github.com/searxng/searxng/issues/4803

parent 2311d16497
commit e9ecdcc350

18 changed files with 644 additions and 548 deletions
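The whitespace and layout changes below are consistent with the default style of a shell formatter such as shfmt (redirections written without a space, pipes moved to line ends, case branches and subshells re-indented); the exact tool and options used for this commit are an assumption, they are not named in the commit message. A minimal sketch of such a formatting pass over the repository's shell scripts:

# Assumption: shfmt is the formatter; the file list and flags are illustrative only.
# Show what would change (prints a diff, modifies nothing):
shfmt -d manage utils/*.sh
# Rewrite the files in place:
shfmt -w manage utils/*.sh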
@@ -37,16 +37,16 @@ setup_ownership() {
local type="$2"

case "$type" in
file | directory) ;;
*)
cat <<EOF
!!!
!!! ERROR
!!! "$type" is not a valid type, exiting...
!!!
EOF
exit 1
;;
esac

target_ownership=$(stat -c %U:%G "$target")
manage (58 changed lines)
@@ -65,7 +65,7 @@ while IFS= read -r line; do
if [ "$line" != "tests/unit/settings/syntaxerror_settings.yml" ]; then
YAMLLINT_FILES+=("$line")
fi
-done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml' '.github/*.yml' '.github/*/*.yml')"
+done <<<"$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml' '.github/*.yml' '.github/*/*.yml')"

RST_FILES=(
'README.rst'

@@ -113,7 +113,6 @@ environment ...
EOF
}

if [ "$VERBOSE" = "1" ]; then
SPHINX_VERBOSE="-v"
PYLINT_VERBOSE="-v"

@@ -126,14 +125,14 @@ webapp.run() {
local parent_proc="$$"
(
if [ "${LIVE_THEME}" ]; then
-( themes.live "${LIVE_THEME}" )
+(themes.live "${LIVE_THEME}")
kill $parent_proc
fi
-)&
+) &
(
sleep 3
xdg-open http://127.0.0.1:8888/
-)&
+) &
SEARXNG_DEBUG=1 pyenv.cmd python -m searx.webapp
}

@@ -143,10 +142,11 @@ gecko.driver() {

build_msg INSTALL "gecko.driver"
# run installation in a subprocess and activate pyenv
-( set -e
+(
+set -e
pyenv.activate

-INSTALLED_VERSION=$(geckodriver -V 2> /dev/null | head -1 | awk '{ print "v" $2}') || INSTALLED_VERSION=""
+INSTALLED_VERSION=$(geckodriver -V 2>/dev/null | head -1 | awk '{ print "v" $2}') || INSTALLED_VERSION=""
set +e
if [ "${INSTALLED_VERSION}" = "${GECKODRIVER_VERSION}" ]; then
build_msg INSTALL "geckodriver already installed"

@@ -154,13 +154,13 @@ gecko.driver() {
fi
PLATFORM="$(python -c 'import platform; print(platform.system().lower(), platform.architecture()[0])')"
case "$PLATFORM" in
-"linux 32bit" | "linux2 32bit") ARCH="linux32";;
-"linux 64bit" | "linux2 64bit") ARCH="linux64";;
-"windows 32 bit") ARCH="win32";;
-"windows 64 bit") ARCH="win64";;
-"mac 64bit") ARCH="macos";;
+"linux 32bit" | "linux2 32bit") ARCH="linux32" ;;
+"linux 64bit" | "linux2 64bit") ARCH="linux64" ;;
+"windows 32 bit") ARCH="win32" ;;
+"windows 64 bit") ARCH="win64" ;;
+"mac 64bit") ARCH="macos" ;;
esac
-GECKODRIVER_URL="https://github.com/mozilla/geckodriver/releases/download/$GECKODRIVER_VERSION/geckodriver-$GECKODRIVER_VERSION-$ARCH.tar.gz";
+GECKODRIVER_URL="https://github.com/mozilla/geckodriver/releases/download/$GECKODRIVER_VERSION/geckodriver-$GECKODRIVER_VERSION-$ARCH.tar.gz"

build_msg GECKO "Installing ${PY_ENV_BIN}/geckodriver from $GECKODRIVER_URL"

@@ -175,13 +175,14 @@ gecko.driver() {
py.build() {
build_msg BUILD "python package ${PYDIST}"
pyenv.cmd python setup.py \
sdist -d "${PYDIST}" \
bdist_wheel --bdist-dir "${PYBUILD}" -d "${PYDIST}"
}

py.clean() {
build_msg CLEAN pyenv
-( set -e
+(
+set -e
pyenv.drop
[ "$VERBOSE" = "1" ] && set -x
rm -rf "${PYDIST}" "${PYBUILD}" "${PY_ENV}" ./.tox ./*.egg-info

@@ -192,7 +193,7 @@ py.clean() {
}

pyenv.check() {
cat <<EOF
import yaml
print('import yaml --> OK')
EOF

@@ -201,13 +202,14 @@ EOF
pyenv.install() {

if ! pyenv.OK; then
-py.clean > /dev/null
+py.clean >/dev/null
fi
-if pyenv.install.OK > /dev/null; then
+if pyenv.install.OK >/dev/null; then
return 0
fi

-( set -e
+(
+set -e
pyenv
build_msg PYENV "[install] pip install --use-pep517 --no-build-isolation -e 'searx${PY_SETUP_EXTRAS}'"
"${PY_ENV_BIN}/python" -m pip install --use-pep517 --no-build-isolation -e ".${PY_SETUP_EXTRAS}"

@@ -220,8 +222,8 @@ pyenv.install() {

pyenv.uninstall() {
build_msg PYENV "[pyenv.uninstall] uninstall packages: ${PYOBJECTS}"
-pyenv.cmd python setup.py develop --uninstall 2>&1 \
-| prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
+pyenv.cmd python setup.py develop --uninstall 2>&1 |
+prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "

}

@@ -243,7 +245,7 @@ docs.prebuild() {
set -e
[ "$VERBOSE" = "1" ] && set -x
mkdir -p "${DOCS_BUILD}/includes"
-./utils/searxng.sh searxng.doc.rst > "${DOCS_BUILD}/includes/searxng.rst"
+./utils/searxng.sh searxng.doc.rst >"${DOCS_BUILD}/includes/searxng.rst"
pyenv.cmd searxng_extra/docs_prebuild
)
dump_return $?

@@ -253,7 +255,8 @@ docs.prebuild() {
main() {

local _type
-local cmd="$1"; shift
+local cmd="$1"
+shift

if [ "$cmd" == "" ]; then
help

@@ -262,8 +265,11 @@ main() {
fi

case "$cmd" in
---getenv) var="$1"; echo "${!var}";;
---help) help;;
+--getenv)
+var="$1"
+echo "${!var}"
+;;
+--help) help ;;
--*)
help
err_msg "unknown option $cmd"
@@ -8,11 +8,11 @@ build.env.export() {
GIT_BRANCH="$(git branch | grep '\*' | cut -d' ' -f2-)"
GIT_REMOTE="$(git config "branch.${GIT_BRANCH}.remote")"
GIT_URL="$(git config --get "remote.${GIT_REMOTE}.url")"
-if [[ "${GIT_URL}" == git@* ]]; then
+if [[ ${GIT_URL} == git@* ]]; then
GIT_URL="${GIT_URL/://}"
GIT_URL="${GIT_URL/git@/https://}"
fi
-if [[ "${GIT_URL}" == *.git ]]; then
+if [[ ${GIT_URL} == *.git ]]; then
GIT_URL="${GIT_URL%.git}"
fi

@@ -27,6 +27,6 @@ build.env.export() {

}

-pushd "${REPO_ROOT}" &> /dev/null
+pushd "${REPO_ROOT}" &>/dev/null
build.env.export
-popd &> /dev/null
+popd &>/dev/null
utils/lib.sh (379 changed lines): file diff suppressed because it is too large
@@ -23,14 +23,14 @@
# configure golang environment
# ----------------------------

-[[ -z "${GO_VERSION}" ]] && GO_VERSION="go1.17.3"
+[[ -z ${GO_VERSION} ]] && GO_VERSION="go1.17.3"

GO_DL_URL="https://golang.org/dl"

# implement go functions
# -----------------------

-go.help(){
+go.help() {
cat <<EOF
go.:
ls : list golang binary archives (stable)

@@ -40,7 +40,7 @@ go.:
EOF
}

-go.ls(){
+go.ls() {
python <<EOF
import sys, json, requests
resp = requests.get("${GO_DL_URL}/?mode=json&include=all")

@@ -54,7 +54,7 @@ for ver in json.loads(resp.text):
EOF
}

-go.ver_info(){
+go.ver_info() {

# print information about a golang distribution. To print filename
# sha256 and size of the archive that fits to your OS and host:

@@ -84,15 +84,15 @@ EOF
}

go.os() {
local OS
case "$(command uname -a)xx" in
Linux\ *) OS=linux ;;
Darwin\ *) OS=darwin ;;
FreeBSD\ *) OS=freebsd ;;
CYGWIN* | MSYS* | MINGW*) OS=windows ;;
*) die 42 "OS is unknown: $(command uname -a)" ;;
esac
echo "${OS}"
}

go.arch() {

@@ -104,7 +104,7 @@ go.arch() {
"armv8") ARCH=arm64 ;;
.*386.*) ARCH=386 ;;
ppc64*) ARCH=ppc64le ;;
*) die 42 "ARCH is unknown: $(command uname -m)" ;;
esac
echo "${ARCH}"
}

@@ -140,14 +140,14 @@ go.golang() {
info_msg "Download go binary ${fname} (${size}B)"
cache_download "${GO_DL_URL}/${fname}" "${fname}"

-pushd "${CACHE}" &> /dev/null
-echo "${sha} ${fname}" > "${fname}.sha256"
+pushd "${CACHE}" &>/dev/null
+echo "${sha} ${fname}" >"${fname}.sha256"
if ! sha256sum -c "${fname}.sha256" >/dev/null; then
die 42 "downloaded file ${fname} checksum does not match"
else
info_msg "${fname} checksum OK"
fi
-popd &> /dev/null
+popd &>/dev/null

info_msg "install golang"
tee_stderr 0.1 <<EOF | sudo -i -u "${user}" | prefix_stdout "${userpr}"

@@ -201,7 +201,7 @@ go.bash() {
sudo -i -u "${user}" bash --init-file "~${user}/.go_env"
}

-go.version(){
+go.version() {
local user
user="${1:-${USERNAME}}"
sudo -i -u "${user}" <<EOF
@@ -17,8 +17,8 @@ declare main_cmd

NVM_LOCAL_FOLDER=.nvm

-[[ -z "${NVM_GIT_URL}" ]] && NVM_GIT_URL="https://github.com/nvm-sh/nvm.git"
-[[ -z "${NVM_MIN_NODE_VER}" ]] && NVM_MIN_NODE_VER="16.13.0"
+[[ -z ${NVM_GIT_URL} ]] && NVM_GIT_URL="https://github.com/nvm-sh/nvm.git"
+[[ -z ${NVM_MIN_NODE_VER} ]] && NVM_MIN_NODE_VER="16.13.0"

# initialize nvm environment
# -------------------------

@@ -35,7 +35,7 @@ nvm.is_installed() {
[[ -f "${NVM_DIR}/nvm.sh" ]]
}

-if [[ -z "${NVM_DIR}" ]]; then
+if [[ -z ${NVM_DIR} ]]; then
# nvm is not pre-installed in $HOME. Prepare for using nvm from <repo-root>
NVM_DIR="$(git rev-parse --show-toplevel)/${NVM_LOCAL_FOLDER}"
fi

@@ -105,7 +105,7 @@ nvm.install() {
info_msg "install (update) NVM at ${NVM_DIR}"
if nvm.is_installed; then
info_msg "already cloned at: ${NVM_DIR}"
-pushd "${NVM_DIR}" &> /dev/null
+pushd "${NVM_DIR}" &>/dev/null
git fetch --all | prefix_stdout " ${_Yellow}||${_creset} "
else
# delete any leftovers from previous installations

@@ -114,14 +114,14 @@ nvm.install() {
fi
info_msg "clone: ${NVM_GIT_URL}"
git clone "${NVM_GIT_URL}" "${NVM_DIR}" 2>&1 | prefix_stdout " ${_Yellow}||${_creset} "
-pushd "${NVM_DIR}" &> /dev/null
+pushd "${NVM_DIR}" &>/dev/null
git config --local advice.detachedHead false
fi
NVM_VERSION_TAG="$(git rev-list --tags --max-count=1)"
NVM_VERSION_TAG="$(git describe --abbrev=0 --tags --match "v[0-9]*" "${NVM_VERSION_TAG}")"
info_msg "checkout ${NVM_VERSION_TAG}"
git checkout "${NVM_VERSION_TAG}" 2>&1 | prefix_stdout " ${_Yellow}||${_creset} "
-popd &> /dev/null
+popd &>/dev/null
if [ -f "${REPO_ROOT}/.nvm_packages" ]; then
cp "${REPO_ROOT}/.nvm_packages" "${NVM_DIR}/default-packages"
fi
@@ -12,7 +12,7 @@ REDIS_GROUP="searxng-redis"
REDIS_SERVICE_NAME="searxng-redis"
REDIS_SYSTEMD_UNIT="${SYSTEMD_UNITS}/${REDIS_SERVICE_NAME}.service"

-redis.help(){
+redis.help() {
cat <<EOF
redis.:
remove : delete user (${REDIS_USER}) and remove service (${REDIS_SERVICE_NAME})

@@ -21,7 +21,6 @@ redis.:
EOF
}


redis.remove() {
sudo_or_exit
(

@@ -36,7 +35,6 @@ redis.shell() {
interactive_shell "${REDIS_USER}"
}


redis.userdel() {
sudo_or_exit
drop_service_account "${REDIS_USER}"
@@ -41,28 +41,28 @@ container.build() {

# Setup arch specific
case $parch in
"X64" | "x86_64" | "amd64")
dockerfile="Dockerfile"
arch="amd64"
variant=""
platform="linux/$arch"
;;
"ARM64" | "aarch64" | "arm64")
dockerfile="Dockerfile"
arch="arm64"
variant=""
platform="linux/$arch"
;;
"ARMV7" | "armhf" | "armv7l" | "armv7")
dockerfile="Dockerfile"
arch="arm"
variant="v7"
platform="linux/$arch/$variant"
;;
*)
err_msg "Unsupported architecture; $parch"
exit 1
;;
esac
info_msg "Selected platform: $platform"

@@ -117,7 +117,7 @@ container.build() {
--tag="localhost/$CONTAINER_IMAGE_ORGANIZATION/$CONTAINER_IMAGE_NAME:builder" \
--file="./container/$dockerfile" \
.
-build_msg CONTAINER "Image \"builder\" built"
+build_msg CONTAINER 'Image "builder" built'

# shellcheck disable=SC2086
"$container_engine" $params_build \

@@ -161,25 +161,25 @@ container.test() {

# Setup arch specific
case $parch in
"X64" | "x86_64" | "amd64")
arch="amd64"
variant=""
platform="linux/$arch"
;;
"ARM64" | "aarch64" | "arm64")
arch="arm64"
variant=""
platform="linux/$arch"
;;
"ARMV7" | "armhf" | "armv7l" | "armv7")
arch="arm"
variant="v7"
platform="linux/$arch/$variant"
;;
*)
err_msg "Unsupported architecture; $parch"
exit 1
;;
esac
build_msg CONTAINER "Selected platform: $platform"

@@ -224,25 +224,25 @@ container.push() {

for arch in "${release_archs[@]}"; do
case $arch in
"X64" | "x86_64" | "amd64")
archs+=("amd64")
variants+=("")
platforms+=("linux/${archs[-1]}")
;;
"ARM64" | "aarch64" | "arm64")
archs+=("arm64")
variants+=("")
platforms+=("linux/${archs[-1]}")
;;
"ARMV7" | "armv7" | "armhf" | "arm")
archs+=("arm")
variants+=("v7")
platforms+=("linux/${archs[-1]}/${variants[-1]}")
;;
*)
err_msg "Unsupported architecture; $arch"
exit 1
;;
esac
done
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: AGPL-3.0-or-later

-data.help(){
+data.help() {
cat <<EOF
data.:
all : update searx/sxng_locales.py and searx/data/*

@@ -13,12 +13,13 @@ EOF
}

data.all() {
-( set -e
+(
+set -e

pyenv.activate
data.traits
data.useragents
data.locales

build_msg DATA "update searx/data/osm_keys_tags.json"
pyenv.cmd python searxng_extra/update/update_osm_keys_tags.py

@@ -35,9 +36,9 @@ data.all() {
)
}


data.traits() {
-( set -e
+(
+set -e
pyenv.activate
build_msg DATA "update searx/data/engine_traits.json"
python searxng_extra/update/update_engine_traits.py

@@ -53,7 +54,8 @@ data.useragents() {
}

data.locales() {
-( set -e
+(
+set -e
pyenv.activate
build_msg DATA "update searx/data/locales.json"
python searxng_extra/update/update_locales.py

@@ -61,8 +63,9 @@ data.locales() {
dump_return $?
}

-data.currencies(){
-( set -e
+data.currencies() {
+(
+set -e
pyenv.activate
build_msg DATA "update searx/data/currencies.json"
python searxng_extra/update/update_currencies.py
@@ -6,7 +6,7 @@ declare _creset

export NODE_MINIMUM_VERSION="18.17.0"

-node.help(){
+node.help() {
cat <<EOF
node.:
env : download & install SearXNG's npm dependencies locally

@@ -24,7 +24,8 @@ nodejs.ensure() {

node.env() {
nodejs.ensure
-( set -e
+(
+set -e
build_msg INSTALL "[npm] ./client/simple/package.json"
npm --prefix client/simple install
)

@@ -43,20 +44,22 @@ node.clean() {
return 0
fi
build_msg CLEAN "themes -- locally installed npm dependencies"
-( set -e
-npm --prefix client/simple run clean \
-| prefix_stdout "${_Blue}CLEAN ${_creset} "
+(
+set -e
+npm --prefix client/simple run clean |
+prefix_stdout "${_Blue}CLEAN ${_creset} "
if [ "${PIPESTATUS[0]}" -ne "0" ]; then
return 1
fi
)
build_msg CLEAN "locally installed developer and CI tools"
-( set -e
-npm --prefix . run clean \
-| prefix_stdout "${_Blue}CLEAN ${_creset} "
+(
+set -e
+npm --prefix . run clean |
+prefix_stdout "${_Blue}CLEAN ${_creset} "
if [ "${PIPESTATUS[0]}" -ne "0" ]; then
return 1
fi
)
dump_return $?
}
@@ -1,7 +1,6 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: AGPL-3.0-or-later


STATIC_BUILD_COMMIT="[build] /static"
STATIC_BUILT_PATHS=(
'searx/templates/simple/icons.html'

@@ -9,7 +8,7 @@ STATIC_BUILT_PATHS=(
'client/simple/package-lock.json'
)

-static.help(){
+static.help() {
cat <<EOF
static.build.: ${STATIC_BUILD_COMMIT}
commit : build & commit /static folder

@@ -57,8 +56,8 @@ static.build.drop() {

# get only last (option -n1) local commit not in remotes
branch="$(git branch --show-current)"
-last_commit_id="$(git log -n1 "${branch}" --pretty=format:'%h'\
+last_commit_id="$(git log -n1 "${branch}" --pretty=format:'%h' \
--not --exclude="${branch}" --branches --remotes)"

if [ -z "${last_commit_id}" ]; then
err_msg "there are no local commits"

@@ -96,7 +95,8 @@ static.build.commit() {
# drop existing commit from previous build
static.build.drop &>/dev/null

-( set -e
+(
+set -e
# fix & build the themes
themes.fix
themes.lint
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: AGPL-3.0-or-later

-test.help(){
+test.help() {
cat <<EOF
test.:
yamllint : lint YAML files (YAMLLINT_FILES)

@@ -22,13 +22,14 @@ if [ "$VERBOSE" = "1" ]; then
fi

test.yamllint() {
-build_msg TEST "[yamllint] \$YAMLLINT_FILES"
+build_msg TEST "[yamllint] ${YAMLLINT_FILES[*]}"
pyenv.cmd yamllint --strict --format parsable "${YAMLLINT_FILES[@]}"
dump_return $?
}

test.pylint() {
-( set -e
+(
+set -e
pyenv.activate
PYLINT_OPTIONS="--rcfile .pylintrc"

@@ -41,10 +42,10 @@ test.pylint() {
build_msg TEST "[pylint] ./searx ./searxng_extra ./tests"
# shellcheck disable=SC2086
pylint ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
--ignore=searx/engines \
searx searx/searxng.msg \
searxng_extra searxng_extra/docs_prebuild \
tests
)
dump_return $?
}

@@ -63,13 +64,13 @@ test.types.dev() {
build_msg TEST "[pyright/types] suppress warnings related to intentional monkey patching"
# We run Pyright in the virtual environment because pyright executes
# "python" to determine the Python version.
-pyenv.cmd npx --no-install pyright -p pyrightconfig.json \
-| grep -E '\.py:[0-9]+:[0-9]+'\
-| grep -v '/engines/.*.py.* - warning: "logger" is not defined'\
-| grep -v '/plugins/.*.py.* - error: "logger" is not defined'\
-| grep -v '/engines/.*.py.* - warning: "supported_languages" is not defined' \
-| grep -v '/engines/.*.py.* - warning: "language_aliases" is not defined' \
-| grep -v '/engines/.*.py.* - warning: "categories" is not defined'
+pyenv.cmd npx --no-install pyright -p pyrightconfig.json |
+grep -E '\.py:[0-9]+:[0-9]+' |
+grep -v '/engines/.*.py.* - warning: "logger" is not defined' |
+grep -v '/plugins/.*.py.* - error: "logger" is not defined' |
+grep -v '/engines/.*.py.* - warning: "supported_languages" is not defined' |
+grep -v '/engines/.*.py.* - warning: "language_aliases" is not defined' |
+grep -v '/engines/.*.py.* - warning: "categories" is not defined'
# ignore exit value from pyright
# dump_return ${PIPESTATUS[0]}
return 0

@@ -88,13 +89,13 @@ test.types.ci() {
build_msg TEST "[pyright] suppress warnings related to intentional monkey patching"
# We run Pyright in the virtual environment because pyright executes
# "python" to determine the Python version.
-pyenv.cmd npx --no-install pyright -p pyrightconfig-ci.json \
-| grep -E '\.py:[0-9]+:[0-9]+'\
-| grep -v '/engines/.*.py.* - warning: "logger" is not defined'\
-| grep -v '/plugins/.*.py.* - error: "logger" is not defined'\
-| grep -v '/engines/.*.py.* - warning: "supported_languages" is not defined' \
-| grep -v '/engines/.*.py.* - warning: "language_aliases" is not defined' \
-| grep -v '/engines/.*.py.* - warning: "categories" is not defined'
+pyenv.cmd npx --no-install pyright -p pyrightconfig-ci.json |
+grep -E '\.py:[0-9]+:[0-9]+' |
+grep -v '/engines/.*.py.* - warning: "logger" is not defined' |
+grep -v '/plugins/.*.py.* - error: "logger" is not defined' |
+grep -v '/engines/.*.py.* - warning: "supported_languages" is not defined' |
+grep -v '/engines/.*.py.* - warning: "language_aliases" is not defined' |
+grep -v '/engines/.*.py.* - warning: "categories" is not defined'
# ignore exit value from pyright
# dump_return ${PIPESTATUS[0]}
return 0

@@ -121,7 +122,8 @@ test.unit() {

test.coverage() {
build_msg TEST 'unit test coverage'
-( set -e
+(
+set -e
pyenv.activate
# shellcheck disable=SC2086
python -m nose2 ${TEST_NOSE2_VERBOSE} -C --log-capture --with-coverage --coverage searx -s tests/unit

@@ -142,7 +144,7 @@ test.rst() {
build_msg TEST "[reST markup] ${RST_FILES[*]}"

for rst in "${RST_FILES[@]}"; do
-pyenv.cmd rst2html --halt error "$rst" > /dev/null || die 42 "fix issue in $rst"
+pyenv.cmd rst2html --halt error "$rst" >/dev/null || die 42 "fix issue in $rst"
done
}

@@ -160,7 +162,7 @@ test.pybabel() {
}

test.clean() {
build_msg CLEAN "test stuff"
rm -rf geckodriver.log .coverage coverage/
dump_return $?
}
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: AGPL-3.0-or-later

-themes.help(){
+themes.help() {
cat <<EOF
themes.:
all : test & build all themes

@@ -13,14 +13,16 @@ EOF
}

themes.all() {
-( set -e
+(
+set -e
vite.simple.build
)
dump_return $?
}

themes.simple() {
-( set -e
+(
+set -e
build_msg SIMPLE "theme: run build (simple)"
vite.simple.build
)

@@ -28,7 +30,8 @@ themes.simple() {
}

themes.fix() {
-( set -e
+(
+set -e
build_msg SIMPLE "theme: fix (all themes)"
vite.simple.fix
)

@@ -36,7 +39,8 @@ themes.fix() {
}

themes.lint() {
-( set -e
+(
+set -e
build_msg SIMPLE "theme: lint (all themes)"
vite.simple.lint
)

@@ -44,7 +48,8 @@ themes.lint() {
}

themes.test() {
-( set -e
+(
+set -e
# we run a build to test (in CI)
build_msg SIMPLE "theme: run build (to test)"
vite.simple.build
@@ -4,7 +4,7 @@
declare _Blue
declare _creset

-vite.help(){
+vite.help() {
cat <<EOF
vite.: .. to be done ..
simple.:

@@ -30,7 +30,8 @@ VITE_SIMPLE_THEME="${REPO_ROOT}/client/simple"
# }

vite.simple.build() {
-( set -e
+(
+set -e
templates.simple.pygments

node.env

@@ -39,19 +40,21 @@ vite.simple.build() {
pushd "${VITE_SIMPLE_THEME}"
npm install
npm run build
-popd &> /dev/null
+popd &>/dev/null
)
}

vite.simple.fix() {
-( set -e
+(
+set -e
node.env
npm --prefix client/simple run fix
)
}

vite.simple.lint() {
-( set -e
+(
+set -e
node.env
npm --prefix client/simple run lint
)

@@ -59,8 +62,8 @@ vite.simple.lint() {

templates.simple.pygments() {
build_msg PYGMENTS "searxng_extra/update/update_pygments.py"
-pyenv.cmd python searxng_extra/update/update_pygments.py \
-| prefix_stdout "${_Blue}PYGMENTS ${_creset} "
+pyenv.cmd python searxng_extra/update/update_pygments.py |
+prefix_stdout "${_Blue}PYGMENTS ${_creset} "
if [ "${PIPESTATUS[0]}" -ne "0" ]; then
build_msg PYGMENTS "building LESS files for pygments failed"
return 1
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# SPDX-License-Identifier: AGPL-3.0-or-later

-weblate.help(){
+weblate.help() {
cat <<EOF
weblate.:
push.translations: push translation changes from SearXNG to Weblate's counterpart

@@ -19,8 +19,9 @@ weblate.translations.worktree() {
#
# remote weblate https://translate.codeberg.org/git/searxng/searxng/

-( set -e
-if ! git remote get-url weblate 2> /dev/null; then
+(
+set -e
+if ! git remote get-url weblate 2>/dev/null; then
git remote add weblate https://translate.codeberg.org/git/searxng/searxng/
fi
if [ -d "${TRANSLATIONS_WORKTREE}" ]; then

@@ -49,7 +50,8 @@ weblate.to.translations() {
# 4. In translations worktree, merge changes of branch 'translations' from
# remote 'weblate' and push it on branch 'translations' of 'origin'

-( set -e
+(
+set -e
pyenv.activate
if [ "$(wlc lock-status)" != "locked: True" ]; then
die 1 "weblate must be locked, currently: $(wlc lock-status)"

@@ -77,14 +79,18 @@ weblate.translations.commit() {
# create a commit in the local branch (master)

local existing_commit_hash commit_body commit_message exitcode
-( set -e
+(
+set -e
pyenv.activate
# lock change on weblate
wlc lock

# get translations branch in git worktree (TRANSLATIONS_WORKTREE)
weblate.translations.worktree
-existing_commit_hash=$(cd "${TRANSLATIONS_WORKTREE}"; git log -n1 --pretty=format:'%h')
+existing_commit_hash=$(
+cd "${TRANSLATIONS_WORKTREE}"
+git log -n1 --pretty=format:'%h'
+)

# pull weblate commits
weblate.to.translations

@@ -95,20 +101,23 @@ weblate.translations.commit() {
# compile translations
build_msg BABEL 'compile translation catalogs into binary MO files'
pybabel compile --statistics \
-d "searx/translations"

# update searx/data/translation_labels.json
data.locales

# git add/commit (no push)
-commit_body=$(cd "${TRANSLATIONS_WORKTREE}"; git log --pretty=format:'%h - %as - %aN <%ae>' "${existing_commit_hash}..HEAD")
+commit_body=$(
+cd "${TRANSLATIONS_WORKTREE}"
+git log --pretty=format:'%h - %as - %aN <%ae>' "${existing_commit_hash}..HEAD"
+)
commit_message=$(echo -e "[l10n] update translations from Weblate\n\n${commit_body}")
git add searx/translations
git add searx/data/locales.json
git commit -m "${commit_message}"
)
exitcode=$?
( # make sure to always unlock weblate
set -e
pyenv.cmd wlc unlock
)

@@ -133,9 +142,10 @@ weblate.push.translations() {
# 5. Notify Weblate to pull updated 'master' & 'translations' branch.

local messages_pot diff_messages_pot last_commit_hash last_commit_detail \
exitcode
messages_pot="${TRANSLATIONS_WORKTREE}/searx/translations/messages.pot"
-( set -e
+(
+set -e
pyenv.activate
# get translations branch in git worktree (TRANSLATIONS_WORKTREE)
weblate.translations.worktree

@@ -143,12 +153,14 @@ weblate.push.translations() {
# update messages.pot in the master branch
build_msg BABEL 'extract messages from source files and generate POT file'
pybabel extract -F babel.cfg \
-o "${messages_pot}" \
"searx/"

# stop if there is no meaningful change in the master branch
-diff_messages_pot=$(cd "${TRANSLATIONS_WORKTREE}";\
-git diff -- "searx/translations/messages.pot")
+diff_messages_pot=$(
+cd "${TRANSLATIONS_WORKTREE}"
+git diff -- "searx/translations/messages.pot"
+)
if ! echo "$diff_messages_pot" | grep -qE "[\+\-](msgid|msgstr)"; then
build_msg BABEL 'no changes detected, exiting'
return 42

@@ -160,7 +172,7 @@ weblate.push.translations() {
return 0
fi
if [ "$exitcode" -gt 0 ]; then
return $exitcode
fi
(
set -e

@@ -192,7 +204,7 @@ weblate.push.translations() {
-d "${TRANSLATIONS_WORKTREE}/searx/translations"

# git add/commit/push
last_commit_hash=$(git log -n1 --pretty=format:'%h')
last_commit_detail=$(git log -n1 --pretty=format:'%h - %as - %aN <%ae>' "${last_commit_hash}")

pushd "${TRANSLATIONS_WORKTREE}"

@@ -207,7 +219,7 @@ weblate.push.translations() {
wlc pull
)
exitcode=$?
( # make sure to always unlock weblate
set -e
pyenv.activate
wlc unlock
@@ -5,10 +5,10 @@ valkey.distro.setup() {
# shellcheck disable=SC2034

case $DIST_ID in
-ubuntu|debian)
+ubuntu | debian)
VALKEY_PACKAGES="valkey-server"
;;
-arch|fedora|centos)
+arch | fedora | centos)
VALKEY_PACKAGES="valkey"
;;
*)

@@ -36,13 +36,13 @@ valkey.backports() {
esac
}

-valkey.install(){
+valkey.install() {
info_msg "installing valkey ..."
valkey.distro.setup

case $DIST_ID in
-debian|ubuntu)
-apt-cache show "${VALKEY_PACKAGES}" &> /dev/null || valkey.backports
+debian | ubuntu)
+apt-cache show "${VALKEY_PACKAGES}" &>/dev/null || valkey.backports
pkg_install "${VALKEY_PACKAGES}"

# do some fix ...

@@ -54,7 +54,7 @@ valkey.install(){

systemd_activate_service valkey-server
;;
-arch|fedora|centos)
+arch | fedora | centos)
pkg_install "${VALKEY_PACKAGES}"
systemd_activate_service valkey
;;
utils/lxc.sh (152 changed lines)
@@ -60,19 +60,17 @@ REMOTE_IMAGES=()
CONTAINERS=()
LOCAL_IMAGES=()

-for ((i=0; i<${#LXC_SUITE[@]}; i+=2)); do
+for ((i = 0; i < ${#LXC_SUITE[@]}; i += 2)); do
REMOTE_IMAGES=("${REMOTE_IMAGES[@]}" "${LXC_SUITE[i]}")
-CONTAINERS=("${CONTAINERS[@]}" "${LXC_HOST_PREFIX}-${LXC_SUITE[i+1]}")
-LOCAL_IMAGES=("${LOCAL_IMAGES[@]}" "${LXC_SUITE[i+1]}")
+CONTAINERS=("${CONTAINERS[@]}" "${LXC_HOST_PREFIX}-${LXC_SUITE[i + 1]}")
+LOCAL_IMAGES=("${LOCAL_IMAGES[@]}" "${LXC_SUITE[i + 1]}")
done

HOST_USER="${SUDO_USER:-$USER}"
HOST_USER_ID=$(id -u "${HOST_USER}")
HOST_GROUP_ID=$(id -g "${HOST_USER}")

-# ----------------------------------------------------------------------------
usage() {
-# ----------------------------------------------------------------------------
_cmd="$(basename "$0")"
cat <<EOF
usage::

@@ -110,12 +108,12 @@ install

EOF
usage_containers
[ -n "${1+x}" ] && err_msg "$1"
}

usage_containers() {
lxc_suite_install_info
[ -n "${1+x}" ] && err_msg "$1"
}

lxd_info() {

@@ -138,7 +136,7 @@ main() {
lxc_distro_setup

# don't check prerequisite when in recursion
-if [[ ! $1 == __* ]] && [[ ! $1 == --help ]]; then
+if [[ $1 != __* ]] && [[ $1 != --help ]]; then
if ! in_container; then
! required_commands lxc && lxd_info && exit 42
fi

@@ -146,27 +144,40 @@ main() {
fi

case $1 in
---getenv) var="$2"; echo "${!var}"; exit 0;;
--h|--help) usage; exit 0;;
+--getenv)
+var="$2"
+echo "${!var}"
+exit 0
+;;
+-h | --help)
+usage
+exit 0
+;;

build)
sudo_or_exit
case $2 in
${LXC_HOST_PREFIX}-*) build_container "$2" ;;
-''|--|containers) build_all_containers ;;
+'' | -- | containers) build_all_containers ;;
-*) usage "$_usage"; exit 42;;
+*)
+usage "$_usage"
+exit 42
+;;
esac
;;
copy)
case $2 in
-''|images) lxc_copy_images_locally;;
+'' | images) lxc_copy_images_locally ;;
-*) usage "$_usage"; exit 42;;
+*)
+usage "$_usage"
+exit 42
+;;
esac
;;
remove)
sudo_or_exit
case $2 in
-''|--|containers) remove_containers ;;
+'' | -- | containers) remove_containers ;;
images) lxc_delete_images_locally ;;
${LXC_HOST_PREFIX}-*)
! lxc_exists "$2" && warn_msg "container not yet exists: $2" && exit 0

@@ -174,19 +185,25 @@ main() {
lxc_delete_container "$2"
fi
;;
-*) usage "unknown or missing container <name> $2"; exit 42;;
+*)
+usage "unknown or missing container <name> $2"
+exit 42
+;;
esac
;;
-start|stop)
+start | stop)
sudo_or_exit
case $2 in
-''|--|containers) lxc_cmd "$1" ;;
+'' | -- | containers) lxc_cmd "$1" ;;
${LXC_HOST_PREFIX}-*)
! lxc_exists "$2" && usage_containers "unknown container: $2" && exit 42
info_msg "lxc $1 $2"
lxc "$1" "$2" | prefix_stdout "[${_BBlue}${i}${_creset}] "
;;
-*) usage "unknown or missing container <name> $2"; exit 42;;
+*)
+usage "unknown or missing container <name> $2"
+exit 42
+;;
esac
;;
show)

@@ -195,10 +212,10 @@ main() {
suite)
case $3 in
${LXC_HOST_PREFIX}-*)
-lxc exec -t "$3" -- "${LXC_REPO_ROOT}/utils/lxc.sh" __show suite \
-| prefix_stdout "[${_BBlue}$3${_creset}] "
+lxc exec -t "$3" -- "${LXC_REPO_ROOT}/utils/lxc.sh" __show suite |
+prefix_stdout "[${_BBlue}$3${_creset}] "
;;
-*) show_suite;;
+*) show_suite ;;
esac
;;
images) show_images ;;

@@ -207,7 +224,7 @@ main() {
${LXC_HOST_PREFIX}-*)
! lxc_exists "$3" && usage_containers "unknown container: $3" && exit 42
lxc config show "$3" | prefix_stdout "[${_BBlue}${3}${_creset}] "
;;
*)
rst_title "container configurations"
echo

@@ -230,7 +247,10 @@ main() {
;;
esac
;;
-*) usage "$_usage"; exit 42;;
+*)
+usage "$_usage"
+exit 42
+;;
esac
;;
__show)

@@ -243,30 +263,36 @@ main() {
sudo_or_exit
shift
case $1 in
---) shift; lxc_exec "$@" ;;
+--)
+shift
+lxc_exec "$@"
+;;
${LXC_HOST_PREFIX}-*)
! lxc_exists "$1" && usage_containers "unknown container: $1" && exit 42
local name=$1
shift
lxc_exec_cmd "${name}" "$@"
;;
-*) usage_containers "unknown container: $1" && exit 42
+*) usage_containers "unknown container: $1" && exit 42 ;;
esac
;;
install)
sudo_or_exit
case $2 in
-suite|base)
+suite | base)
case $3 in
${LXC_HOST_PREFIX}-*)
! lxc_exists "$3" && usage_containers "unknown container: $3" && exit 42
lxc_exec_cmd "$3" "${LXC_REPO_ROOT}/utils/lxc.sh" __install "$2"
;;
-''|--) lxc_exec "${LXC_REPO_ROOT}/utils/lxc.sh" __install "$2" ;;
-*) usage_containers "unknown container: $3" && exit 42
+'' | --) lxc_exec "${LXC_REPO_ROOT}/utils/lxc.sh" __install "$2" ;;
+*) usage_containers "unknown container: $3" && exit 42 ;;
esac
;;
-*) usage "$_usage"; exit 42 ;;
+*)
+usage "$_usage"
+exit 42
+;;
esac
;;
__install)

@@ -281,12 +307,17 @@ main() {
echo
echo ".. generic utils/lxc.sh documentation"
;;
--*) usage "unknown option $1"; exit 42;;
-*) usage "unknown or missing command $1"; exit 42;;
+-*)
+usage "unknown option $1"
+exit 42
+;;
+*)
+usage "unknown or missing command $1"
+exit 42
+;;
esac
}


build_all_containers() {
rst_title "Build all LXC containers of suite"
echo

@@ -310,11 +341,11 @@ build_container() {
local image
local boilerplate_script

-for ((i=0; i<${#LXC_SUITE[@]}; i+=2)); do
-if [ "${LXC_HOST_PREFIX}-${LXC_SUITE[i+1]}" = "$1" ]; then
+for ((i = 0; i < ${#LXC_SUITE[@]}; i += 2)); do
+if [ "${LXC_HOST_PREFIX}-${LXC_SUITE[i + 1]}" = "$1" ]; then
remote_image="${LXC_SUITE[i]}"
-container="${LXC_HOST_PREFIX}-${LXC_SUITE[i+1]}"
-image="${LXC_SUITE[i+1]}"
+container="${LXC_HOST_PREFIX}-${LXC_SUITE[i + 1]}"
+image="${LXC_SUITE[i + 1]}"
boilerplate_script="${image}_boilerplate"
boilerplate_script="${!boilerplate_script}"
break

@@ -335,8 +366,8 @@ build_container() {
lxc_install_boilerplate "${container}" "$boilerplate_script"
echo
rst_title "install LXC base packages" section
-lxc_exec_cmd "${container}" "${LXC_REPO_ROOT}/utils/lxc.sh" __install base \
-| prefix_stdout "[${_BBlue}${container}${_creset}] "
+lxc_exec_cmd "${container}" "${LXC_REPO_ROOT}/utils/lxc.sh" __install base |
+prefix_stdout "[${_BBlue}${container}${_creset}] "
echo
lxc list "$container"
}

@@ -348,7 +379,7 @@ remove_containers() {
lxc list "$LXC_HOST_PREFIX-"
echo -en "\\n${_BRed}LXC containers to delete::${_creset}\\n\\n ${CONTAINERS[*]}\\n" | $FMT
local default=Ny
-[[ $FORCE_TIMEOUT = 0 ]] && default=Yn
+[[ $FORCE_TIMEOUT == 0 ]] && default=Yn
if ask_yn "Do you really want to delete these containers" $default; then
for i in "${CONTAINERS[@]}"; do
lxc_delete_container "$i"

@@ -363,8 +394,8 @@ remove_containers() {

lxc_copy_images_locally() {
rst_title "copy images" section
-for ((i=0; i<${#LXC_SUITE[@]}; i+=2)); do
-lxc_image_copy "${LXC_SUITE[i]}" "${LXC_SUITE[i+1]}"
+for ((i = 0; i < ${#LXC_SUITE[@]}; i += 2)); do
+lxc_image_copy "${LXC_SUITE[i]}" "${LXC_SUITE[i + 1]}"
done
# lxc image list local: && wait_key
}

@@ -391,7 +422,7 @@ lxc_delete_images_locally() {
lxc image list local:
}

-show_images(){
+show_images() {
rst_title "local images"
echo
lxc image list local:

@@ -408,11 +439,10 @@ show_images(){

}


# container
# ---------

-show_suite(){
+show_suite() {
rst_title "LXC suite ($LXC_HOST_PREFIX-*)"
echo
lxc list "$LXC_HOST_PREFIX-"

@@ -421,8 +451,8 @@ show_suite(){
if ! lxc_exists "$i"; then
warn_msg "container ${_BBlue}$i${_creset} does not yet exists"
else
-lxc exec -t "${i}" -- "${LXC_REPO_ROOT}/utils/lxc.sh" __show suite \
-| prefix_stdout "[${_BBlue}${i}${_creset}] "
+lxc exec -t "${i}" -- "${LXC_REPO_ROOT}/utils/lxc.sh" __show suite |
+prefix_stdout "[${_BBlue}${i}${_creset}] "
echo
fi
done
|
@ -469,8 +499,8 @@ lxc_init_all_containers() {
|
||||||
local image_name
|
local image_name
|
||||||
local container_name
|
local container_name
|
||||||
|
|
||||||
for ((i=0; i<${#LXC_SUITE[@]}; i+=2)); do
|
for ((i = 0; i < ${#LXC_SUITE[@]}; i += 2)); do
|
||||||
lxc_init_container "${LXC_SUITE[i+1]}" "${LXC_HOST_PREFIX}-${LXC_SUITE[i+1]}"
|
lxc_init_container "${LXC_SUITE[i + 1]}" "${LXC_HOST_PREFIX}-${LXC_SUITE[i + 1]}"
|
||||||
done
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -487,8 +517,8 @@ lxc_config_container() {
|
||||||
|
|
||||||
info_msg "[${_BBlue}$1${_creset}] map uid/gid from host to container"
|
info_msg "[${_BBlue}$1${_creset}] map uid/gid from host to container"
|
||||||
# https://lxd.readthedocs.io/en/latest/userns-idmap/#custom-idmaps
|
# https://lxd.readthedocs.io/en/latest/userns-idmap/#custom-idmaps
|
||||||
echo -e -n "uid $HOST_USER_ID 0\\ngid $HOST_GROUP_ID 0"\
|
echo -e -n "uid $HOST_USER_ID 0\\ngid $HOST_GROUP_ID 0" |
|
||||||
| lxc config set "$1" raw.idmap -
|
lxc config set "$1" raw.idmap -
|
||||||
|
|
||||||
info_msg "[${_BBlue}$1${_creset}] share ${REPO_ROOT} (repo_share) from HOST into container"
|
info_msg "[${_BBlue}$1${_creset}] share ${REPO_ROOT} (repo_share) from HOST into container"
|
||||||
# https://lxd.readthedocs.io/en/latest/instances/#type-disk
|
# https://lxd.readthedocs.io/en/latest/instances/#type-disk
|
||||||
|
@ -504,15 +534,15 @@ lxc_boilerplate_all_containers() {
|
||||||
local boilerplate_script
|
local boilerplate_script
|
||||||
local image_name
|
local image_name
|
||||||
|
|
||||||
for ((i=0; i<${#LXC_SUITE[@]}; i+=2)); do
|
for ((i = 0; i < ${#LXC_SUITE[@]}; i += 2)); do
|
||||||
|
|
||||||
image_name="${LXC_SUITE[i+1]}"
|
image_name="${LXC_SUITE[i + 1]}"
|
||||||
boilerplate_script="${image_name}_boilerplate"
|
boilerplate_script="${image_name}_boilerplate"
|
||||||
boilerplate_script="${!boilerplate_script}"
|
boilerplate_script="${!boilerplate_script}"
|
||||||
|
|
||||||
lxc_install_boilerplate "${LXC_HOST_PREFIX}-${image_name}" "$boilerplate_script"
|
lxc_install_boilerplate "${LXC_HOST_PREFIX}-${image_name}" "$boilerplate_script"
|
||||||
|
|
||||||
if [[ -z "${boilerplate_script}" ]]; then
|
if [[ -z ${boilerplate_script} ]]; then
|
||||||
err_msg "[${_BBlue}${container_name}${_creset}] no boilerplate for image '${image_name}'"
|
err_msg "[${_BBlue}${container_name}${_creset}] no boilerplate for image '${image_name}'"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
@ -546,10 +576,10 @@ EOF
|
||||||
if lxc start -q "${container_name}" &>/dev/null; then
|
if lxc start -q "${container_name}" &>/dev/null; then
|
||||||
sleep 5 # guest needs some time to come up and get an IP
|
sleep 5 # guest needs some time to come up and get an IP
|
||||||
fi
|
fi
|
||||||
if [[ -n "${boilerplate_script}" ]]; then
|
if [[ -n ${boilerplate_script} ]]; then
|
||||||
echo "${boilerplate_script}" \
|
echo "${boilerplate_script}" |
|
||||||
| lxc exec "${container_name}" -- bash \
|
lxc exec "${container_name}" -- bash |
|
||||||
| prefix_stdout "[${_BBlue}${container_name}${_creset}] "
|
prefix_stdout "[${_BBlue}${container_name}${_creset}] "
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -568,6 +598,4 @@ check_connectivity() {
|
||||||
return $ret_val
|
return $ret_val
|
||||||
}
|
}
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------
|
|
||||||
main "$@"
|
main "$@"
|
||||||
# ----------------------------------------------------------------------------
|
|
||||||
|
|
211 utils/searxng.sh
@@ -46,7 +46,7 @@ if in_container; then
SEARXNG_URL="http://$(primary_ip)/searxng"
fi
SEARXNG_URL_PATH="$(echo "${SEARXNG_URL}" | sed -e 's,^.*://[^/]*\(/.*\),\1,g')"
- [[ "${SEARXNG_URL_PATH}" == "${SEARXNG_URL}" ]] && SEARXNG_URL_PATH=/
+ [[ ${SEARXNG_URL_PATH} == "${SEARXNG_URL}" ]] && SEARXNG_URL_PATH=/

# Apache settings

@@ -98,7 +98,7 @@ case $DIST_ID-$DIST_VERS in
SEARXNG_BUILD_PACKAGES="${SEARXNG_BUILD_PACKAGES_debian}"
APACHE_PACKAGES="$APACHE_PACKAGES libapache2-mod-proxy-uwsgi"
;;
- ubuntu-*|debian-*)
+ ubuntu-* | debian-*)
SEARXNG_PACKAGES="${SEARXNG_PACKAGES_debian} python-is-python3"
SEARXNG_BUILD_PACKAGES="${SEARXNG_BUILD_PACKAGES_debian}"
;;
@@ -114,9 +114,7 @@ esac

_service_prefix=" ${_Yellow}|${SERVICE_USER}|${_creset} "

- # ----------------------------------------------------------------------------
usage() {
- # ----------------------------------------------------------------------------

# shellcheck disable=SC1117
cat <<EOF
@@ -148,7 +146,7 @@ instance:
cmd : run command in SearXNG instance's environment (e.g. bash)
EOF
searxng.instance.env
[[ -n ${1} ]] && err_msg "$1"
}

searxng.instance.env() {
@@ -170,52 +168,68 @@ EOF

main() {
case $1 in
- install|remove|instance)
+ install | remove | instance)
nginx_distro_setup
apache_distro_setup
uWSGI_distro_setup
required_commands \
- sudo systemctl install git wget curl \
- || exit
+ sudo systemctl install git wget curl ||
+ exit
;;
esac

local _usage="unknown or missing $1 command $2"

case $1 in
- --getenv) var="$2"; echo "${!var}"; exit 0;;
- --cmd) shift; "$@";;
- -h|--help) usage; exit 0;;
+ --getenv)
+ var="$2"
+ echo "${!var}"
+ exit 0
+ ;;
+ --cmd)
+ shift
+ "$@"
+ ;;
+ -h | --help)
+ usage
+ exit 0
+ ;;
install)
sudo_or_exit
case $2 in
- all) searxng.install.all;;
- user) searxng.install.user;;
- pyenv) searxng.install.pyenv;;
- searxng-src) searxng.install.clone;;
- settings) searxng.install.settings;;
- uwsgi) searxng.install.uwsgi;;
- packages) searxng.install.packages;;
- buildhost) searxng.install.buildhost;;
- nginx) searxng.nginx.install;;
- apache) searxng.apache.install;;
- valkey) searxng.install.valkey;;
- *) usage "$_usage"; exit 42;;
+ all) searxng.install.all ;;
+ user) searxng.install.user ;;
+ pyenv) searxng.install.pyenv ;;
+ searxng-src) searxng.install.clone ;;
+ settings) searxng.install.settings ;;
+ uwsgi) searxng.install.uwsgi ;;
+ packages) searxng.install.packages ;;
+ buildhost) searxng.install.buildhost ;;
+ nginx) searxng.nginx.install ;;
+ apache) searxng.apache.install ;;
+ valkey) searxng.install.valkey ;;
+ *)
+ usage "$_usage"
+ exit 42
+ ;;
esac
;;
remove)
sudo_or_exit
case $2 in
- all) searxng.remove.all;;
- user) drop_service_account "${SERVICE_USER}";;
- pyenv) searxng.remove.pyenv;;
- settings) searxng.remove.settings;;
- uwsgi) searxng.remove.uwsgi;;
- apache) searxng.apache.remove;;
- remove) searxng.nginx.remove;;
- valkey) searxng.remove.valkey;;
- redis) searxng.remove.redis;;
- *) usage "$_usage"; exit 42;;
+ all) searxng.remove.all ;;
+ user) drop_service_account "${SERVICE_USER}" ;;
+ pyenv) searxng.remove.pyenv ;;
+ settings) searxng.remove.settings ;;
+ uwsgi) searxng.remove.uwsgi ;;
+ apache) searxng.apache.remove ;;
+ remove) searxng.nginx.remove ;;
+ valkey) searxng.remove.valkey ;;
+ redis) searxng.remove.redis ;;
+ *)
+ usage "$_usage"
+ exit 42
+ ;;
esac
;;
instance)
@@ -234,19 +248,30 @@ main() {
;;
cmd)
sudo_or_exit
- shift; shift; searxng.instance.exec "$@"
+ shift
+ shift
+ searxng.instance.exec "$@"
;;
get_setting)
- shift; shift; searxng.instance.get_setting "$@"
+ shift
+ shift
+ searxng.instance.get_setting "$@"
;;
call)
# call a function in instance's environment
- shift; shift; searxng.instance.self.call "$@"
+ shift
+ shift
+ searxng.instance.self.call "$@"
;;
_call)
- shift; shift; "$@"
+ shift
+ shift
+ "$@"
+ ;;
+ *)
+ usage "$_usage"
+ exit 42
;;
- *) usage "$_usage"; exit 42;;
esac
;;
*)
@@ -314,7 +339,7 @@ In your instance, valkey DB connector is configured at:
fi
fi

- if ! [[ ${valkey_url} = valkey://localhost:6379/* ]]; then
+ if ! [[ ${valkey_url} == valkey://localhost:6379/* ]]; then
err_msg "SearXNG instance can't connect valkey DB / check valkey & your settings"
return
fi
@@ -352,7 +377,7 @@ searxng.remove.all() {
fi

valkey_url=$(searxng.instance.get_setting valkey.url)
- if ! [[ ${valkey_url} = unix://${VALKEY_HOME}/run/valkey.sock* ]]; then
+ if ! [[ ${valkey_url} == unix://${VALKEY_HOME}/run/valkey.sock* ]]; then
searxng.remove.valkey
fi

@@ -369,9 +394,9 @@ searxng.remove.all() {
searxng.install.user() {
rst_title "SearXNG -- install user" section
echo
- if getent passwd "${SERVICE_USER}" > /dev/null; then
+ if getent passwd "${SERVICE_USER}" >/dev/null; then
echo "user already exists"
return 0
fi

tee_stderr 1 <<EOF | bash | prefix_stdout
@@ -390,7 +415,7 @@ searxng.install.packages() {

searxng.install.buildhost() {
TITLE="SearXNG -- install buildhost packages" pkg_install \
"${SEARXNG_PACKAGES} ${SEARXNG_BUILD_PACKAGES}"
}

searxng.install.clone() {
@@ -399,11 +424,11 @@ searxng.install.clone() {
die 42 "To clone SearXNG, first install user ${SERVICE_USER}."
fi
echo
- if ! sudo -i -u "${SERVICE_USER}" ls -d "$REPO_ROOT" > /dev/null; then
+ if ! sudo -i -u "${SERVICE_USER}" ls -d "$REPO_ROOT" >/dev/null; then
die 42 "user '${SERVICE_USER}' missed read permission: $REPO_ROOT"
fi
# SERVICE_HOME="$(sudo -i -u "${SERVICE_USER}" echo \$HOME 2>/dev/null)"
- if [[ ! "${SERVICE_HOME}" ]]; then
+ if [[ ! ${SERVICE_HOME} ]]; then
err_msg "to clone SearXNG sources, user ${SERVICE_USER} hast to be created first"
return 42
fi
@@ -412,7 +437,7 @@ searxng.install.clone() {
info_msg "create local branch ${GIT_BRANCH} from start point: origin/${GIT_BRANCH}"
git branch "${GIT_BRANCH}" "origin/${GIT_BRANCH}"
fi
- if [[ ! $(git rev-parse --abbrev-ref HEAD) == "${GIT_BRANCH}" ]]; then
+ if [[ $(git rev-parse --abbrev-ref HEAD) != "${GIT_BRANCH}" ]]; then
warn_msg "take into account, installing branch $GIT_BRANCH while current branch is $(git rev-parse --abbrev-ref HEAD)"
fi
# export SERVICE_HOME
@@ -421,10 +446,10 @@ searxng.install.clone() {
# https://github.com/searxng/searxng/issues/1251
git config --system --add safe.directory "${REPO_ROOT}/.git"
git_clone "$REPO_ROOT" "${SEARXNG_SRC}" \
"$GIT_BRANCH" "${SERVICE_USER}"
git config --system --add safe.directory "${SEARXNG_SRC}"

- pushd "${SEARXNG_SRC}" > /dev/null
+ pushd "${SEARXNG_SRC}" >/dev/null
tee_stderr 0.1 <<EOF | sudo -H -u "${SERVICE_USER}" -i 2>&1 | prefix_stdout "$_service_prefix"
cd "${SEARXNG_SRC}"
git remote set-url origin ${GIT_URL}
@@ -432,7 +457,7 @@ git config user.email "${ADMIN_EMAIL}"
git config user.name "${ADMIN_NAME}"
git config --list
EOF
- popd > /dev/null
+ popd >/dev/null
}

searxng.install.link_src() {
@@ -482,7 +507,7 @@ searxng.remove.pyenv() {
return
fi
info_msg "remove pyenv activation from ~/.profile"
tee_stderr 0.1 <<EOF | sudo -H -u "${SERVICE_USER}" -i 2>&1 | prefix_stdout "$_service_prefix"
grep -v 'source ${SEARXNG_PYENV}/bin/activate' ~/.profile > ~/.profile.##
mv ~/.profile.## ~/.profile
EOF
@@ -499,9 +524,9 @@ searxng.install.settings() {
mkdir -p "$(dirname "${SEARXNG_SETTINGS_PATH}")"

DEFAULT_SELECT=1 \
install_template --no-eval \
"${SEARXNG_SETTINGS_PATH}" \
"${SERVICE_USER}" "${SERVICE_GROUP}"

tee_stderr 0.1 <<EOF | sudo -H -i 2>&1 | prefix_stdout "root"
sed -i -e "s/ultrasecretkey/$(openssl rand -hex 16)/g" "${SEARXNG_SETTINGS_PATH}"
@@ -535,9 +560,9 @@ pip install -U --use-pep517 --no-build-isolation -e .
EOF
rst_para "update instance's settings.yml from ${SEARXNG_SETTINGS_PATH}"
DEFAULT_SELECT=2 \
install_template --no-eval \
"${SEARXNG_SETTINGS_PATH}" \
"${SERVICE_USER}" "${SERVICE_GROUP}"

sudo -H -i <<EOF
sed -i -e "s/ultrasecretkey/$(openssl rand -hex 16)/g" "${SEARXNG_SETTINGS_PATH}"
@@ -574,10 +599,10 @@ searxng.install.uwsgi.socket() {
# Emperor will run the vassal using the UID/GID of the vassal
# configuration file [1] (user and group of the app .ini file).
# [1] https://uwsgi-docs.readthedocs.io/en/latest/Emperor.html#tyrant-mode-secure-multi-user-hosting
uWSGI_install_app --variant=socket "${SEARXNG_UWSGI_APP}" "${SERVICE_USER}" "${SERVICE_GROUP}"
;;
*)
uWSGI_install_app --variant=socket "${SEARXNG_UWSGI_APP}"
;;
esac
sleep 5
@@ -588,9 +613,9 @@ searxng.install.uwsgi.socket() {

searxng.uwsgi.available() {
if [[ ${SEARXNG_UWSGI_USE_SOCKET} == true ]]; then
- [[ -S "${SEARXNG_UWSGI_SOCKET}" ]]
+ [[ -S ${SEARXNG_UWSGI_SOCKET} ]]
exit_val=$?
- if [[ $exit_val = 0 ]]; then
+ if [[ $exit_val == 0 ]]; then
info_msg "uWSGI socket is located at: ${SEARXNG_UWSGI_SOCKET}"
fi
else
@@ -617,11 +642,10 @@ searxng.install.valkey() {
valkey.install
}


searxng.instance.localtest() {
rst_title "Test SearXNG instance locally" section
- rst_para "Activate debug mode, start a minimal SearXNG "\
+ rst_para "Activate debug mode, start a minimal SearXNG " \
"service and debug a HTTP request/response cycle."

if service_is_available "http://${SEARXNG_INTERNAL_HTTP}" &>/dev/null; then
err_msg "URL/port http://${SEARXNG_INTERNAL_HTTP} is already in use, you"
@@ -632,7 +656,7 @@ searxng.instance.localtest() {
fi
echo
searxng.instance.debug.on
tee_stderr 0.1 <<EOF | sudo -H -u "${SERVICE_USER}" -i 2>&1 | prefix_stdout "$_service_prefix"
export SEARXNG_SETTINGS_PATH="${SEARXNG_SETTINGS_PATH}"
cd ${SEARXNG_SRC}
timeout 10 python searx/webapp.py &
@@ -714,7 +738,7 @@ This installs SearXNG's uWSGI app as Nginx site. The Nginx site is located at:
${NGINX_APPS_AVAILABLE}/${NGINX_SEARXNG_SITE} and requires a uWSGI."
searxng.install.http.pre

- if ! nginx_is_installed ; then
+ if ! nginx_is_installed; then
err_msg "Nginx packages are not installed"
if ! ask_yn "Do you really want to continue and install Nginx packages?" Yn; then
return
@@ -755,8 +779,8 @@ searxng.instance.exec() {
die 42 "can't execute: instance does not exist (missed account ${SERVICE_USER})"
fi
sudo -H -i -u "${SERVICE_USER}" \
SEARXNG_UWSGI_USE_SOCKET="${SEARXNG_UWSGI_USE_SOCKET}" \
"$@"
}

searxng.instance.self.call() {
@@ -775,7 +799,7 @@ EOF
searxng.instance.debug.on() {
warn_msg "Do not enable debug in a production environment!"
info_msg "try to enable debug mode ..."
tee_stderr 0.1 <<EOF | sudo -H -i 2>&1 | prefix_stdout "$_service_prefix"
cd ${SEARXNG_SRC}
sed -i -e "s/debug: false/debug: true/g" "$SEARXNG_SETTINGS_PATH"
EOF
@@ -784,7 +808,7 @@ EOF

searxng.instance.debug.off() {
info_msg "try to disable debug mode ..."
tee_stderr 0.1 <<EOF | sudo -H -i 2>&1 | prefix_stdout "$_service_prefix"
cd ${SEARXNG_SRC}
sed -i -e "s/debug: true/debug: false/g" "$SEARXNG_SETTINGS_PATH"
EOF
@@ -805,7 +829,7 @@ searxng.instance.inspect() {
echo

case $DIST_ID-$DIST_VERS in
- ubuntu-*|debian-*)
+ ubuntu-* | debian-*)
# For uWSGI debian uses the LSB init process; for each configuration
# file new uWSGI daemon instance is started with additional option.
service uwsgi status "${SERVICE_NAME}"
@@ -818,16 +842,16 @@ searxng.instance.inspect() {
;;
esac

echo -e "// use ${_BCyan}CTRL-C${_creset} to stop monitoring the log"
read -r -s -n1 -t 5
echo

while true; do
trap break 2
case $DIST_ID-$DIST_VERS in
- ubuntu-*|debian-*) tail -f "/var/log/uwsgi/app/${SERVICE_NAME%.*}.log" ;;
+ ubuntu-* | debian-*) tail -f "/var/log/uwsgi/app/${SERVICE_NAME%.*}.log" ;;
arch-*) journalctl -f -u "uwsgi@${SERVICE_NAME%.*}" ;;
fedora-*) journalctl -f -u uwsgi ;;
esac
done

@@ -870,10 +894,10 @@ searxng.doc.rst() {
local arch_build="${SEARXNG_BUILD_PACKAGES_arch}"
local fedora_build="${SEARXNG_BUILD_PACKAGES_fedora}"
debian="$(echo "${debian}" | sed 's/.*/ & \\/' | sed '$ s/.$//')"
arch="$(echo "${arch}" | sed 's/.*/ & \\/' | sed '$ s/.$//')"
fedora="$(echo "${fedora}" | sed 's/.*/ & \\/' | sed '$ s/.$//')"
debian_build="$(echo "${debian_build}" | sed 's/.*/ & \\/' | sed '$ s/.$//')"
arch_build="$(echo "${arch_build}" | sed 's/.*/ & \\/' | sed '$ s/.$//')"
fedora_build="$(echo "${fedora_build}" | sed 's/.*/ & \\/' | sed '$ s/.$//')"

if [[ ${SEARXNG_UWSGI_USE_SOCKET} == true ]]; then
@@ -882,7 +906,7 @@ searxng.doc.rst() {
uwsgi_variant=':socket'
fi

- eval "echo \"$(< "${REPO_ROOT}/docs/build-templates/searxng.rst")\""
+ eval "echo \"$(<"${REPO_ROOT}/docs/build-templates/searxng.rst")\""

# I use ubuntu-20.04 here to demonstrate that versions are also supported,
# normally debian-* and ubuntu-* are most the same.
@@ -897,7 +921,8 @@ searxng.doc.rst() {
echo -e "\n.. START searxng uwsgi-description $DIST_NAME"

case $DIST_ID-$DIST_VERS in
- ubuntu-*|debian-*) cat <<EOF
+ ubuntu-* | debian-*)
+ cat <<EOF

.. code:: bash

@@ -913,8 +938,9 @@ searxng.doc.rst() {
disable: sudo -H rm ${uWSGI_APPS_ENABLED}/${SEARXNG_UWSGI_APP}

EOF
;;
- arch-*) cat <<EOF
+ arch-*)
+ cat <<EOF

.. code:: bash

@@ -931,8 +957,9 @@ EOF
disable: sudo -H systemctl disable uwsgi@${SEARXNG_UWSGI_APP%.*}

EOF
;;
- fedora-*|centos-7) cat <<EOF
+ fedora-* | centos-7)
+ cat <<EOF

.. code:: bash

@@ -945,46 +972,44 @@ EOF
disable: sudo -H rm ${uWSGI_APPS_ENABLED}/${SEARXNG_UWSGI_APP}

EOF
;;
esac
echo -e ".. END searxng uwsgi-description $DIST_NAME"

local _show_cursor="" # prevent from prefix_stdout's trailing show-cursor

echo -e "\n.. START searxng uwsgi-appini $DIST_NAME"
echo ".. code:: bash"
echo
- eval "echo \"$(< "${TEMPLATES}/${uWSGI_APPS_AVAILABLE}/${SEARXNG_UWSGI_APP}${uwsgi_variant}")\"" | prefix_stdout " "
+ eval "echo \"$(<"${TEMPLATES}/${uWSGI_APPS_AVAILABLE}/${SEARXNG_UWSGI_APP}${uwsgi_variant}")\"" | prefix_stdout " "
echo -e "\n.. END searxng uwsgi-appini $DIST_NAME"

echo -e "\n.. START nginx socket"
echo ".. code:: nginx"
echo
- eval "echo \"$(< "${TEMPLATES}/${NGINX_APPS_AVAILABLE}/${NGINX_SEARXNG_SITE}:socket")\"" | prefix_stdout " "
+ eval "echo \"$(<"${TEMPLATES}/${NGINX_APPS_AVAILABLE}/${NGINX_SEARXNG_SITE}:socket")\"" | prefix_stdout " "
echo -e "\n.. END nginx socket"

echo -e "\n.. START nginx http"
echo ".. code:: nginx"
echo
- eval "echo \"$(< "${TEMPLATES}/${NGINX_APPS_AVAILABLE}/${NGINX_SEARXNG_SITE}")\"" | prefix_stdout " "
+ eval "echo \"$(<"${TEMPLATES}/${NGINX_APPS_AVAILABLE}/${NGINX_SEARXNG_SITE}")\"" | prefix_stdout " "
echo -e "\n.. END nginx http"

echo -e "\n.. START apache socket"
echo ".. code:: apache"
echo
- eval "echo \"$(< "${TEMPLATES}/${APACHE_SITES_AVAILABLE}/${APACHE_SEARXNG_SITE}:socket")\"" | prefix_stdout " "
+ eval "echo \"$(<"${TEMPLATES}/${APACHE_SITES_AVAILABLE}/${APACHE_SEARXNG_SITE}:socket")\"" | prefix_stdout " "
echo -e "\n.. END apache socket"

echo -e "\n.. START apache http"
echo ".. code:: apache"
echo
- eval "echo \"$(< "${TEMPLATES}/${APACHE_SITES_AVAILABLE}/${APACHE_SEARXNG_SITE}")\"" | prefix_stdout " "
+ eval "echo \"$(<"${TEMPLATES}/${APACHE_SITES_AVAILABLE}/${APACHE_SEARXNG_SITE}")\"" | prefix_stdout " "
echo -e "\n.. END apache http"
)
done

}

- # ----------------------------------------------------------------------------
main "$@"
- # ----------------------------------------------------------------------------
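The changes above (space-padded case patterns, pipes moved to line ends, redirects without a space, split one-liner case branches) are consistent with output from a shell formatter. A minimal sketch of how such a pass might be reproduced locally, assuming shfmt is the tool; the indent width and file list here are illustrative, not taken from this commit:

    # assumption: shfmt is installed and these are the files to format
    shfmt -i 4 -ci -w utils/lxc.sh utils/searxng.sh manage
    # -i 4 : indent with four spaces, -ci : indent case branches, -w : rewrite files in place

Run with -d instead of -w, shfmt only prints a diff when a file's formatting differs, which is useful as a non-destructive check.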