diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index d9a548b3b..01b917f6e 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -6,6 +6,8 @@ on: pull_request: # The branches below must be a subset of the branches above branches: [ "dev" ] + paths-ignore: + - 'src/tests/**' schedule: - cron: '43 12 * * 3' diff --git a/.github/workflows/n_updater.sh b/.github/workflows/n_updater.sh deleted file mode 100644 index a8b0b0eec..000000000 --- a/.github/workflows/n_updater.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash - -#================================================= -# N UPDATING HELPER -#================================================= - -# This script is meant to be run by GitHub Actions. -# It is derived from the Updater script from the YunoHost-Apps organization. -# It aims to automate the update of `n`, the Node version management system. - -#================================================= -# FETCHING LATEST RELEASE AND ITS ASSETS -#================================================= - -# Fetching information -source helpers/nodejs -current_version="$n_version" -repo="tj/n" -# Some jq magic is needed, because the latest upstream release is not always the latest version (e.g. security patches for older versions) -version=$(curl --silent "https://api.github.com/repos/$repo/releases" | jq -r '.[] | select( .prerelease != true ) | .tag_name' | sort -V | tail -1) - -# Later down the script, we assume the version has only digits and dots -# Sometimes the release name starts with a "v", so let's filter it out. -if [[ ${version:0:1} == "v" || ${version:0:1} == "V" ]]; then - version=${version:1} -fi - -# Setting up the environment variables -echo "Current version: $current_version" -echo "Latest release from upstream: $version" -echo "VERSION=$version" >> $GITHUB_ENV -# For the time being, let's assume the script will fail -echo "PROCEED=false" >> $GITHUB_ENV - -# Proceed only if the retrieved version is greater than the current one -if ! 
dpkg --compare-versions "$current_version" "lt" "$version" ; then - echo "::warning ::No new version available" - exit 0 -# Proceed only if a PR for this new version does not already exist -elif git ls-remote -q --exit-code --heads https://github.com/${GITHUB_REPOSITORY:-YunoHost/yunohost}.git ci-auto-update-n-v$version ; then - echo "::warning ::A branch already exists for this update" - exit 0 -fi - -#================================================= -# UPDATE SOURCE FILES -#================================================= - -asset_url="https://github.com/tj/n/archive/v${version}.tar.gz" - -echo "Handling asset at $asset_url" - -# Create the temporary directory -tempdir="$(mktemp -d)" - -# Download sources and calculate checksum -filename=${asset_url##*/} -curl --silent -4 -L $asset_url -o "$tempdir/$filename" -checksum=$(sha256sum "$tempdir/$filename" | head -c 64) - -# Delete temporary directory -rm -rf $tempdir - -echo "Calculated checksum for n v${version} is $checksum" - -#================================================= -# GENERIC FINALIZATION -#================================================= - -# Replace new version in helper -sed -i -E "s/^n_version=.*$/n_version=$version/" helpers/nodejs - -# Replace checksum in helper -sed -i -E "s/^n_checksum=.*$/n_checksum=$checksum/" helpers/nodejs - -# The Action will proceed only if the PROCEED environment variable is set to true -echo "PROCEED=true" >> $GITHUB_ENV -exit 0 diff --git a/.github/workflows/n_updater.yml b/.github/workflows/n_updater.yml index 4c422c14c..ce3e9c925 100644 --- a/.github/workflows/n_updater.yml +++ b/.github/workflows/n_updater.yml @@ -21,7 +21,8 @@ jobs: git config --global user.name 'yunohost-bot' git config --global user.email 'yunohost-bot@users.noreply.github.com' # Run the updater script - /bin/bash .github/workflows/n_updater.sh + wget https://raw.githubusercontent.com/tj/n/master/bin/n --output-document=helpers/vendor/n/n + [[ -z "$(git diff helpers/vendor/n/n)" ]] || echo "PROCEED=true" >> $GITHUB_ENV - name: Commit changes id: commit if: ${{ env.PROCEED == 'true' }} diff --git a/.gitlab/ci/doc.gitlab-ci.yml b/.gitlab/ci/doc.gitlab-ci.yml index 4f6ea6ba1..183d153a4 100644 --- a/.gitlab/ci/doc.gitlab-ci.yml +++ b/.gitlab/ci/doc.gitlab-ci.yml @@ -7,7 +7,6 @@ generate-helpers-doc: image: "before-install" needs: [] before_script: - - apt-get update -y && apt-get install git hub -y - git config --global user.email "yunohost@yunohost.org" - git config --global user.name "$GITHUB_USER" script: diff --git a/.gitlab/ci/lint.gitlab-ci.yml b/.gitlab/ci/lint.gitlab-ci.yml index 7a8fbf1fb..1eeb71eab 100644 --- a/.gitlab/ci/lint.gitlab-ci.yml +++ b/.gitlab/ci/lint.gitlab-ci.yml @@ -3,34 +3,33 @@ ######################################## # later we must fix lint and format-check jobs and remove "allow_failure" -lint39: +lint311: stage: lint image: "before-install" needs: [] allow_failure: true script: - - tox -e py39-lint + - tox -e py311-lint -invalidcode39: +invalidcode311: stage: lint image: "before-install" needs: [] script: - - tox -e py39-invalidcode + - tox -e py311-invalidcode mypy: stage: lint image: "before-install" needs: [] script: - - tox -e py39-mypy + - tox -e py311-mypy black: stage: lint image: "before-install" needs: [] before_script: - - apt-get update -y && apt-get install git hub -y - git config --global user.email "yunohost@yunohost.org" - git config --global user.name "$GITHUB_USER" - hub clone --branch ${CI_COMMIT_REF_NAME} 
"https://$GITHUB_TOKEN:x-oauth-basic@github.com/YunoHost/yunohost.git" github_repo @@ -38,7 +37,7 @@ black: script: # create a local branch that will overwrite distant one - git checkout -b "ci-format-${CI_COMMIT_REF_NAME}" --no-track - - tox -e py39-black-run + - tox -e py311-black-run - '[ $(git diff | wc -l) != 0 ] || exit 0' # stop if there is nothing to commit - git commit -am "[CI] Format code with Black" || true - git push -f origin "ci-format-${CI_COMMIT_REF_NAME}":"ci-format-${CI_COMMIT_REF_NAME}" diff --git a/.gitlab/ci/test.gitlab-ci.yml b/.gitlab/ci/test.gitlab-ci.yml index b0ffd3db5..2c6e1717d 100644 --- a/.gitlab/ci/test.gitlab-ci.yml +++ b/.gitlab/ci/test.gitlab-ci.yml @@ -1,7 +1,6 @@ .install_debs: &install_debs - apt-get update -o Acquire::Retries=3 - DEBIAN_FRONTEND=noninteractive SUDO_FORCE_REMOVE=yes apt --assume-yes -o Dpkg::Options::="--force-confold" --allow-downgrades install ${CI_PROJECT_DIR}/*.deb - - pip3 install -U mock pip pytest pytest-cov pytest-mock pytest-sugar requests-mock tox ansi2html black jinja2 "packaging<22" .test-stage: stage: test diff --git a/.gitlab/ci/translation.gitlab-ci.yml b/.gitlab/ci/translation.gitlab-ci.yml index 83db2b5a4..387860e40 100644 --- a/.gitlab/ci/translation.gitlab-ci.yml +++ b/.gitlab/ci/translation.gitlab-ci.yml @@ -16,7 +16,6 @@ autofix-translated-strings: image: "before-install" needs: [] before_script: - - apt-get update -y && apt-get install git hub -y - git config --global user.email "yunohost@yunohost.org" - git config --global user.name "$GITHUB_USER" - hub clone --branch ${CI_COMMIT_REF_NAME} "https://$GITHUB_TOKEN:x-oauth-basic@github.com/YunoHost/yunohost.git" github_repo diff --git a/bin/yunomdns b/bin/yunomdns index 1bdcf88ca..da9233045 100755 --- a/bin/yunomdns +++ b/bin/yunomdns @@ -132,12 +132,8 @@ def main() -> bool: ) continue - # Only broadcast IPv4 because IPv6 is buggy ... because we ain't using python3-ifaddr >= 0.1.7 - # Buster only ships 0.1.6 - # Bullseye ships 0.1.7 - # To be re-enabled once we're on bullseye... 
- # ips: List[str] = interfaces[interface]["ipv4"] + interfaces[interface]["ipv6"] - ips: List[str] = interfaces[interface]["ipv4"] + # Broadcast IPv4 and IPv6 + ips: List[str] = interfaces[interface]["ipv4"] + interfaces[interface]["ipv6"] # If at least one IP is listed if not ips: diff --git a/conf/dovecot/dovecot.conf b/conf/dovecot/dovecot.conf index e614c3796..14d407563 100644 --- a/conf/dovecot/dovecot.conf +++ b/conf/dovecot/dovecot.conf @@ -13,9 +13,8 @@ protocols = imap sieve {% if pop3_enabled == "True" %}pop3{% endif %} mail_plugins = $mail_plugins quota notify push_notification ############################################################################### - -# generated 2020-08-18, Mozilla Guideline v5.6, Dovecot 2.3.4, OpenSSL 1.1.1d, intermediate configuration -# https://ssl-config.mozilla.org/#server=dovecot&version=2.3.4&config=intermediate&openssl=1.1.1d&guideline=5.6 +# generated 2023-06-13, Mozilla Guideline v5.7, Dovecot 2.3.19, OpenSSL 3.0.9, intermediate configuration +# https://ssl-config.mozilla.org/#server=dovecot&version=2.3.19&config=intermediate&openssl=3.0.9&guideline=5.7 ssl = required @@ -32,7 +31,7 @@ ssl_dh = = 1024 bits smtpd_tls_dh1024_param_file = /usr/share/yunohost/ffdhe2048.pem -tls_medium_cipherlist = ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384 +tls_medium_cipherlist = ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305 {% else %} -# generated 2020-08-18, Mozilla Guideline v5.6, Postfix 3.4.14, OpenSSL 1.1.1d, modern configuration -# https://ssl-config.mozilla.org/#server=postfix&version=3.4.14&config=modern&openssl=1.1.1d&guideline=5.6 +# generated 2023-06-13, Mozilla Guideline v5.7, Postfix 3.7.5, OpenSSL 3.0.9, modern configuration +# https://ssl-config.mozilla.org/#server=postfix&version=3.7.5&config=modern&openssl=3.0.9&guideline=5.7 smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1, !TLSv1.2 smtpd_tls_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1, !TLSv1.2 diff --git a/debian/changelog b/debian/changelog index 14f2ee73d..b368e0768 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,167 @@ +yunohost (12.0.0) unstable; urgency=low + + - Tmp changelog to prepare Bookworm + + -- Alexandre Aubin Thu, 04 May 2023 20:30:19 +0200 + +yunohost (11.1.21.3) stable; urgency=low + + - Fix again /var/www/.well-known/ynh-diagnosis/ perms which are too broad and could be exploited to serve malicious files x_x (84984ad8) + + -- Alexandre Aubin Mon, 12 Jun 2023 17:41:26 +0200 + +yunohost (11.1.21.2) stable; urgency=low + + - Aleks loves xargs syntax >_> (313a1647) + + -- Alexandre Aubin Mon, 12 Jun 2023 00:25:44 +0200 + +yunohost (11.1.21.1) stable; urgency=low + + - Fix stupid issue with code that changes /dev/null perms... 
(e6f134bc) + + -- Alexandre Aubin Mon, 12 Jun 2023 00:02:47 +0200 + +yunohost (11.1.21) stable; urgency=low + + - users: more verbose logs for user_group_update operations ([#1668](https://github.com/yunohost/yunohost/pull/1668)) + - apps: fix auto-catalog update cron job which was broken because --apps doesnt exist anymore (1552944f) + - apps: Add a 'yunohost app shell' command to open a shell into an app environment ([#1656](https://github.com/yunohost/yunohost/pull/1656)) + - security/regenconf: fix security issue where apps' system conf would be owned by the app, which can enable priviledge escalation (daf51e94) + - security/regenconf: force systemd, nginx, php and fail2ban conf to be owned by root (e649c092) + - security/nginx: use /var/www/.well-known folder for ynh diagnosis and acme challenge, because /tmp/ could be manipulated by user to serve maliciously crafted files (d42c9983) + - i18n: Translations updated for French, Polish, Ukrainian + + Thanks to all contributors <3 ! (Kay0u, Kuba Bazan, ppr, sudo, Tagada, tituspijean, Tymofii-Lytvynenko) + + -- Alexandre Aubin Sun, 11 Jun 2023 19:20:27 +0200 + +yunohost (11.1.20) stable; urgency=low + + - appsv2: fix funky current_version not being defined when hydrating pre-upgrade notifications (8fa823b4) + - helpers: using YNH_APP_ID instead of YNH_APP_INSTANCE_NAME during ynh_setup_source download, for more consistency and because tests was actually failing since a while because of this (e59a4f84) + - helpers: improve error message for corrupt source in ynh_setup_source, it's more relevant to cite the source url rather than the downloaded output path (d698c4c3) + - nginx: Update "worker" Content-Security-Policy header when in experimental security mode ([#1664](https://github.com/yunohost/yunohost/pull/1664)) + - i18n: Translations updated for French, Indonesian, Russian, Slovak + + Thanks to all contributors <3 ! (axolotle, Éric Gaspar, Ilya, Jose Riha, Neko Nekowazarashi, Yann Autissier) + + -- Alexandre Aubin Sat, 20 May 2023 18:57:26 +0200 + +yunohost (11.1.19) stable; urgency=low + + - helpers: Upgrade n to version 9.1.0 ([#1646](https://github.com/yunohost/yunohost/pull/1646)) + - appsv2: in perm resource, fix handling of additional urls containing vars to replace (8fbdd228) + - appsv2: fix version-specific upgrade notification hydration ([#1655](https://github.com/yunohost/yunohost/pull/1655)) + - appsv2/regenconf: prevent set -u to be enabled during regen-conf triggered from inside appsv2 scripts (a7350a7e) + - refactoring: various renaming in configpanel ([#1649](https://github.com/yunohost/yunohost/pull/1649)) + - i18n: Translations updated for Arabic, Basque, Indonesian + + Thanks to all contributors <3 ! 
(axolotle, ButterflyOfFire, Kayou, Neko Nekowazarashi, tituspijean, xabirequejo) + + -- Alexandre Aubin Mon, 08 May 2023 16:04:06 +0200 + +yunohost (11.1.18) stable; urgency=low + + - appsv2: always set an 'app' setting equal to app id to be able to use __APP__ in markdown templates ([#1645](https://github.com/yunohost/yunohost/pull/1645)) + - appsv2: fix edge-case when validating packager-provided infos for permissions resource (aa43e6c2) + - appsv2: Support using any variables/setting in permissions declaration ([#1637](https://github.com/yunohost/yunohost/pull/1637)) + - dns: Add support for Porkbun through Lexicon ([#1638](https://github.com/yunohost/yunohost/pull/1638)) + - diagnosis: Report out-of-catalog/broken/bad quality apps as warning instead of error ([#1641](https://github.com/yunohost/yunohost/pull/1641)) + - user: .ssh directory should be executable ([#1642](https://github.com/yunohost/yunohost/pull/1642)) + - i18n: Translations updated for Arabic, Basque, French, Galician + + Thanks to all contributors <3 ! (ButterflyOfFire, José M, ppr, tituspijean, xabirequejo) + + -- Alexandre Aubin Fri, 14 Apr 2023 17:20:58 +0200 + +yunohost (11.1.17) stable; urgency=low + + - domains: fix autodns for gandi root domain ([#1634](https://github.com/yunohost/yunohost/pull/1634)) + - helpers: fix previous change about using YNH_APP_ACTION ... which is not defined in config panel context (8c25aa9b) + - appsv2: for the dir/subdirs of data_dir, create parent folders if they don't exist (9a4267ff) + - quality: Split utils/config.py ([#1635](https://github.com/yunohost/yunohost/pull/1635)) + - quality: Rework questions/options tests ([#1629](https://github.com/yunohost/yunohost/pull/1629)) + + Thanks to all contributors <3 ! (axolotle, Kayou) + + -- Alexandre Aubin Wed, 05 Apr 2023 16:00:09 +0200 + +yunohost (11.1.16) stable; urgency=low + + - apps: fix i18n panel+section names ([#1630](https://github.com/yunohost/yunohost/pull/1630)) + - appsv2: don't remove yhh-deps virtual package if it doesn't exist. Otherwise when apt fails to install dependency, we end up with another error about failing to remove the ynh-deps package (3656c199) + - appsv2: add validation for expected types for permissions stuff (b2596f32) + - appsv2: add support for subdirs property in data_dir (4b46f322) + - appsv2: various fixes regarding sources toml parsing/caching (14bf2ee4) + - appsv2: add documentation about the new 'autoupdate' mechanism for app sources (63981aac) + - ynh_setup_source: fix buggy checksum mismatch handling, can't compute the sha256sum after we delete the file @_@ (1b2fa91f) + - users: fix quota parsing being wrong by a factor 1000 ... doveadm returns kilos, not bytes (821aedef) + - backup: fix boring issue where archive is a broken symlink... (a95d10e5) + + Thanks to all contributors <3 ! (axolotle) + + -- Alexandre Aubin Sun, 02 Apr 2023 20:29:33 +0200 + +yunohost (11.1.15) stable; urgency=low + + - doc: Fix version number in autogenerated resource doc (5b58e0e6) + - helpers: Fix documentation for ynh_setup_source (7491dd4c) + - helpers: fix ynh_setup_source, 'source_id' may contain slashes x_x (eaf7a290) + - helpers/nodejs: simplify 'n' script install and maintenance ([#1627](https://github.com/yunohost/yunohost/pull/1627)) + + -- Alexandre Aubin Sat, 11 Mar 2023 16:50:50 +0100 + +yunohost (11.1.14) stable; urgency=low + + - helpers: simplify --time display option for ynh_script_progression .. 
we don't care about displaying time when below 10 sc (8731f77a) + - appsv2: add support for a 'sources' app resources to modernize and replace app.src format ([#1615](https://github.com/yunohost/yunohost/pull/1615)) + - i18n: Translations updated for Arabic, Polish, Ukrainian + + Thanks to all contributors <3 ! (ButterflyOfFire, Grzegorz Cichocki, Tymofii-Lytvynenko) + + -- Alexandre Aubin Thu, 09 Mar 2023 15:34:17 +0100 + +yunohost (11.1.13) stable; urgency=low + + - appsv2: fix port already used detection ([#1622](https://github.com/yunohost/yunohost/pull/1622)) + - appsv2: when hydrating template, the data may be not-string, eg ports are int (72986842) + - [i18n] Translations updated for Arabic, French, Galician, German, Occitan + + Thanks to all contributors <3 ! (ButterflyOfFire, Christian Wehrli, José M, Kay0u, ppr) + + -- Alexandre Aubin Fri, 03 Mar 2023 22:57:14 +0100 + +yunohost (11.1.12.2) stable; urgency=low + + - helpers: omg base64 wraps the output by default :| (d04f2085) + + -- Alexandre Aubin Wed, 01 Mar 2023 22:12:51 +0100 + +yunohost (11.1.12.1) stable; urgency=low + + - helper: fix previous tweak about debugging diff for manually modified files on the CI @_@ (fd304008) + + -- Alexandre Aubin Wed, 01 Mar 2023 08:08:55 +0100 + +yunohost (11.1.12) stable; urgency=low + + - apps: add '--continue-on-failure' to 'yunohost app upgrade ([#1602](https://github.com/yunohost/yunohost/pull/1602)) + - appsv2: Create parent dirs when provisioning install_dir ([#1609](https://github.com/yunohost/yunohost/pull/1609)) + - appsv2: set `w` as default permission on `install_dir` folder ([#1611](https://github.com/yunohost/yunohost/pull/1611)) + - appsv2: Handle undefined main permission url ([#1620](https://github.com/yunohost/yunohost/pull/1620)) + - apps/helpers: tweak behavior of checksum helper in CI context to help debug why file appear as 'manually modified' ([#1618](https://github.com/yunohost/yunohost/pull/1618)) + - apps/helpers: more robust way to grep that the service correctly started ? ([#1617](https://github.com/yunohost/yunohost/pull/1617)) + - regenconf: sometimes ntp doesnt exist (97c0128c) + - nginx/security: fix empty webadmin allowlist breaking nginx conf... (e458d881) + - misc: automatic get rid of /etc/profile.d/check_yunohost_is_installed.sh when yunohost is postinstalled (20e8805e) + - settings: Fix pop3_enabled parsing returning 0/1 instead of True/False ... (b40c0de3) + - [i18n] Translations updated for French, Galician, Italian, Polish + + Thanks to all contributors <3 ! 
(Éric Gaspar, John Schmidt, José M, Krakinou, Kuba Bazan, Laurent Peuch, ppr, tituspijean) + + -- Alexandre Aubin Tue, 28 Feb 2023 23:08:02 +0100 + yunohost (11.1.11.2) stable; urgency=low - Rebump version to flag as stable, not testing >_> diff --git a/debian/control b/debian/control index 0258eaac7..3674a62a4 100644 --- a/debian/control +++ b/debian/control @@ -2,7 +2,7 @@ Source: yunohost Section: utils Priority: extra Maintainer: YunoHost Contributors -Build-Depends: debhelper (>=9), debhelper-compat (= 13), dh-python, python3-all (>= 3.7), python3-yaml, python3-jinja2 +Build-Depends: debhelper (>=9), debhelper-compat (= 13), dh-python, python3-all (>= 3.11), python3-yaml, python3-jinja2 Standards-Version: 3.9.6 Homepage: https://yunohost.org/ @@ -14,9 +14,9 @@ Depends: ${python3:Depends}, ${misc:Depends} , python3-psutil, python3-requests, python3-dnspython, python3-openssl , python3-miniupnpc, python3-dbus, python3-jinja2 , python3-toml, python3-packaging, python3-publicsuffix2 - , python3-ldap, python3-zeroconf (>= 0.36), python3-lexicon, + , python3-ldap, python3-zeroconf (>=0.47), python3-lexicon, , python-is-python3 - , nginx, nginx-extras (>=1.18) + , nginx, nginx-extras (>=1.22) , apt, apt-transport-https, apt-utils, dirmngr , openssh-server, iptables, fail2ban, bind9-dnsutils , openssl, ca-certificates, netcat-openbsd, iproute2 @@ -32,23 +32,18 @@ Depends: ${python3:Depends}, ${misc:Depends} Recommends: yunohost-admin , ntp, inetutils-ping | iputils-ping , bash-completion, rsyslog - , php7.4-common, php7.4-fpm, php7.4-ldap, php7.4-intl - , mariadb-server, php7.4-mysql - , php7.4-gd, php7.4-curl, php-php-gettext - , python3-pip , unattended-upgrades , libdbd-ldap-perl, libnet-dns-perl - , metronome (>=3.14.0) Conflicts: iptables-persistent , apache2 , bind9 - , nginx-extras (>= 1.19) - , openssl (>= 1.1.1o-0) - , slapd (>= 2.4.58) - , dovecot-core (>= 1:2.3.14) - , redis-server (>= 5:6.1) - , fail2ban (>= 0.11.3) - , iptables (>= 1.8.8) + , nginx-extras (>= 1.23) + , openssl (>= 3.1) + , slapd (>= 2.6) + , dovecot-core (>= 1:2.4) + , redis-server (>= 5:7.1) + , fail2ban (>= 1.1) + , iptables (>= 1.8.10) Description: manageable and configured self-hosting server YunoHost aims to make self-hosting accessible to everyone. It configures an email, Web and IM server alongside a LDAP base. It also provides diff --git a/doc/generate_resource_doc.py b/doc/generate_resource_doc.py index 272845104..201d25265 100644 --- a/doc/generate_resource_doc.py +++ b/doc/generate_resource_doc.py @@ -2,7 +2,7 @@ import ast import datetime import subprocess -version = (open("../debian/changelog").readlines()[0].split()[1].strip("()"),) +version = open("../debian/changelog").readlines()[0].split()[1].strip("()") today = datetime.datetime.now().strftime("%d/%m/%Y") diff --git a/helpers/apps b/helpers/apps index 85b74de15..4b253ff90 100644 --- a/helpers/apps +++ b/helpers/apps @@ -111,3 +111,95 @@ ynh_remove_apps() { done fi } + +# Spawn a Bash shell with the app environment loaded +# +# usage: ynh_spawn_app_shell --app="app" +# | arg: -a, --app= - the app ID +# +# examples: +# ynh_spawn_app_shell --app="APP" <<< 'echo "$USER"' +# ynh_spawn_app_shell --app="APP" < /tmp/some_script.bash +# +# Requires YunoHost version 11.0.* or higher, and that the app relies on packaging v2 or higher. +# The spawned shell will have environment variables loaded and environment files sourced +# from the app's service configuration file (defaults to $app.service, overridable by the packager with `service` setting). 
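A minimal sketch of where that environment comes from (the `systemctl show` queries below are the ones the helper itself runs; the service name `myapp` is hypothetical):

```bash
# Where ynh_spawn_app_shell picks up its environment (hypothetical service "myapp"):
service=myapp
systemctl show "$service.service" -p Environment --value       # exported as-is into the shell
systemctl show "$service.service" -p EnvironmentFiles --value  # absolute paths listed here get sourced
systemctl show "$service.service" -p WorkingDirectory --value  # becomes the shell's starting directory
```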
+# If the app relies on a specific PHP version, then `php` will be aliased that version. +ynh_spawn_app_shell() { + # Declare an array to define the options of this helper. + local legacy_args=a + local -A args_array=([a]=app=) + local app + # Manage arguments with getopts + ynh_handle_getopts_args "$@" + + # Force Bash to be used to run this helper + if [[ ! $0 =~ \/?bash$ ]] + then + ynh_print_err --message="Please use Bash as shell" + exit 1 + fi + + # Make sure the app is installed + local installed_apps_list=($(yunohost app list --output-as json --quiet | jq -r .apps[].id)) + if [[ " ${installed_apps_list[*]} " != *" ${app} "* ]] + then + ynh_print_err --message="$app is not in the apps list" + exit 1 + fi + + # Make sure the app has its own user + if ! id -u "$app" &>/dev/null; then + ynh_print_err --message="There is no \"$app\" system user" + exit 1 + fi + + # Make sure the app has an install_dir setting + local install_dir=$(ynh_app_setting_get --app=$app --key=install_dir) + if [ -z "$install_dir" ] + then + ynh_print_err --message="$app has no install_dir setting (does it use packaging format >=2?)" + exit 1 + fi + + # Load the app's service name, or default to $app + local service=$(ynh_app_setting_get --app=$app --key=service) + [ -z "$service" ] && service=$app; + + # Export HOME variable + export HOME=$install_dir; + + # Load the Environment variables from the app's service + local env_var=$(systemctl show $service.service -p "Environment" --value) + [ -n "$env_var" ] && export $env_var; + + # Force `php` to its intended version + # We use `eval`+`export` since `alias` is not propagated to subshells, even with `export` + local phpversion=$(ynh_app_setting_get --app=$app --key=phpversion) + if [ -n "$phpversion" ] + then + eval "php() { php${phpversion} \"\$@\"; }" + export -f php + fi + + # Source the EnvironmentFiles from the app's service + local env_files=($(systemctl show $service.service -p "EnvironmentFiles" --value)) + if [ ${#env_files[*]} -gt 0 ] + then + # set -/+a enables and disables new variables being automatically exported. Needed when using `source`. + set -a + for file in ${env_files[*]} + do + [[ $file = /* ]] && source $file + done + set +a + fi + + # cd into the WorkingDirectory set in the service, or default to the install_dir + local env_dir=$(systemctl show $service.service -p "WorkingDirectory" --value) + [ -z $env_dir ] && env_dir=$install_dir; + cd $env_dir + + # Spawn the app shell + su -s /bin/bash $app +} diff --git a/helpers/apt b/helpers/apt index c36f4aa27..a2f2d3de8 100644 --- a/helpers/apt +++ b/helpers/apt @@ -370,7 +370,13 @@ ynh_remove_app_dependencies() { apt-mark unhold ${dep_app}-ynh-deps fi - ynh_package_autopurge ${dep_app}-ynh-deps # Remove the fake package and its dependencies if they not still used. + # Remove the fake package and its dependencies if they not still used. + # (except if dpkg doesn't know anything about the package, + # which should be symptomatic of a failed install, and we don't want bash to report an error) + if dpkg-query --show ${dep_app}-ynh-deps &>/dev/null + then + ynh_package_autopurge ${dep_app}-ynh-deps + fi } # Install packages from an extra repository properly. diff --git a/helpers/backup b/helpers/backup index 1aa43240c..ade3ce5e5 100644 --- a/helpers/backup +++ b/helpers/backup @@ -329,7 +329,8 @@ ynh_store_file_checksum() { if [ ${PACKAGE_CHECK_EXEC:-0} -eq 1 ]; then # Using a base64 is in fact more reversible than "replace / and space by _" ... 
So we can in fact obtain the original file path in an easy reliable way ... - local file_path_base64=$(echo "$file" | base64) + local file_path_base64=$(echo "$file" | base64 -w0) + mkdir -p /var/cache/yunohost/appconfbackup/ cat $file > /var/cache/yunohost/appconfbackup/original_${file_path_base64} fi @@ -374,7 +375,7 @@ ynh_backup_if_checksum_is_different() { ynh_print_warn "File $file has been manually modified since the installation or last upgrade. So it has been duplicated in $backup_file_checksum" echo "$backup_file_checksum" # Return the name of the backup file if [ ${PACKAGE_CHECK_EXEC:-0} -eq 1 ]; then - local file_path_base64=$(echo "$file" | base64) + local file_path_base64=$(echo "$file" | base64 -w0) if test -e /var/cache/yunohost/appconfbackup/original_${file_path_base64} then ynh_print_warn "Diff with the original file:" diff --git a/helpers/logging b/helpers/logging index 4601e0b39..ab5d564aa 100644 --- a/helpers/logging +++ b/helpers/logging @@ -308,8 +308,8 @@ ynh_script_progression() { local progression_bar="${progress_string2:0:$effective_progression}${progress_string1:0:$expected_progression}${progress_string0:0:$left_progression}" local print_exec_time="" - if [ $time -eq 1 ]; then - print_exec_time=" [$(date +%Hh%Mm,%Ss --date="0 + $exec_time sec")]" + if [ $time -eq 1 ] && [ "$exec_time" -gt 10 ]; then + print_exec_time=" [$(bc <<< "scale=1; $exec_time / 60" ) minutes]" fi ynh_print_info "[$progression_bar] > ${message}${print_exec_time}" diff --git a/helpers/mysql b/helpers/mysql index a5290f794..eade5804e 100644 --- a/helpers/mysql +++ b/helpers/mysql @@ -210,6 +210,9 @@ ynh_mysql_setup_db() { # If $db_pwd is not provided, use new_db_pwd instead for db_pwd db_pwd="${db_pwd:-$new_db_pwd}" + # Dirty patch for super-legacy apps + dpkg --list | grep -q "^ii mariadb-server" || { ynh_print_warn "Packager: you called ynh_mysql_setup_db without declaring a dependency to mariadb-server. Please add it to your apt dependencies !"; ynh_apt install mariadb-server; } + ynh_mysql_create_db "$db_name" "$db_user" "$db_pwd" ynh_app_setting_set --app=$app --key=mysqlpwd --value=$db_pwd } diff --git a/helpers/nodejs b/helpers/nodejs index b692bfc70..e3ccf82dd 100644 --- a/helpers/nodejs +++ b/helpers/nodejs @@ -1,32 +1,10 @@ #!/bin/bash -n_version=9.0.1 -n_checksum=ad305e8ee9111aa5b08e6dbde23f01109401ad2d25deecacd880b3f9ea45702b n_install_dir="/opt/node_n" node_version_path="$n_install_dir/n/versions/node" # N_PREFIX is the directory of n, it needs to be loaded as a environment variable. export N_PREFIX="$n_install_dir" -# Install Node version management -# -# [internal] -# -# usage: ynh_install_n -# -# Requires YunoHost version 2.7.12 or higher. -ynh_install_n() { - # Build an app.src for n - echo "SOURCE_URL=https://github.com/tj/n/archive/v${n_version}.tar.gz -SOURCE_SUM=${n_checksum}" >"$YNH_APP_BASEDIR/conf/n.src" - # Download and extract n - ynh_setup_source --dest_dir="$n_install_dir/git" --source_id=n - # Install n - ( - cd "$n_install_dir/git" - PREFIX=$N_PREFIX make install 2>&1 - ) -} - # Load the version of node for an app, and set variables. # # usage: ynh_use_nodejs @@ -133,14 +111,10 @@ ynh_install_nodejs() { test -x /usr/bin/node && mv /usr/bin/node /usr/bin/node_n test -x /usr/bin/npm && mv /usr/bin/npm /usr/bin/npm_n - # If n is not previously setup, install it - if ! 
$n_install_dir/bin/n --version >/dev/null 2>&1; then - ynh_install_n - elif dpkg --compare-versions "$($n_install_dir/bin/n --version)" lt $n_version; then - ynh_install_n - fi - - # Modify the default N_PREFIX in n script + # Install (or update if YunoHost vendor/ folder updated since last install) n + mkdir -p $n_install_dir/bin/ + cp /usr/share/yunohost/helpers.d/vendor/n/n $n_install_dir/bin/n + # Tweak for n to understand it's installed in $N_PREFIX ynh_replace_string --match_string="^N_PREFIX=\${N_PREFIX-.*}$" --replace_string="N_PREFIX=\${N_PREFIX-$N_PREFIX}" --target_file="$n_install_dir/bin/n" # Restore /usr/local/bin in PATH diff --git a/helpers/utils b/helpers/utils index f80c22901..52d7c734f 100644 --- a/helpers/utils +++ b/helpers/utils @@ -22,7 +22,10 @@ YNH_APP_BASEDIR=${YNH_APP_BASEDIR:-$(realpath ..)} ynh_exit_properly() { local exit_code=$? - rm -rf "/var/cache/yunohost/download/" + if [[ "${YNH_APP_ACTION:-}" =~ ^install$|^upgrade$|^restore$ ]] + then + rm -rf "/var/cache/yunohost/download/" + fi if [ "$exit_code" -eq 0 ]; then exit 0 # Exit without error if the script ended correctly @@ -62,7 +65,7 @@ ynh_abort_if_errors() { } # When running an app script with packaging format >= 2, auto-enable ynh_abort_if_errors except for remove script -if dpkg --compare-versions ${YNH_APP_PACKAGING_FORMAT:-0} ge 2 && [[ ${YNH_APP_ACTION} != "remove" ]] +if [[ "${YNH_CONTEXT:-}" != "regenconf" ]] && dpkg --compare-versions ${YNH_APP_PACKAGING_FORMAT:-0} ge 2 && [[ ${YNH_APP_ACTION} != "remove" ]] then ynh_abort_if_errors fi @@ -71,39 +74,79 @@ fi # # usage: ynh_setup_source --dest_dir=dest_dir [--source_id=source_id] [--keep="file1 file2"] [--full_replace] # | arg: -d, --dest_dir= - Directory where to setup sources -# | arg: -s, --source_id= - Name of the source, defaults to `app` +# | arg: -s, --source_id= - Name of the source, defaults to `main` (when the sources resource exists in manifest.toml) or (legacy) `app` otherwise # | arg: -k, --keep= - Space-separated list of files/folders that will be backup/restored in $dest_dir, such as a config file you don't want to overwrite. For example 'conf.json secrets.json logs/' # | arg: -r, --full_replace= - Remove previous sources before installing new sources # +# #### New 'sources' resources +# +# (See also the resources documentation which may be more complete?) +# +# This helper will read infos from the 'sources' resources in the manifest.toml of the app +# and expect a structure like: +# +# ```toml +# [resources.sources] +# [resources.sources.main] +# url = "https://some.address.to/download/the/app/archive" +# sha256 = "0123456789abcdef" # The sha256 sum of the asset obtained from the URL +# ``` +# +# ##### Optional flags +# +# ```text +# format = "tar.gz"/xz/bz2 # automatically guessed from the extension of the URL, but can be set explicitly. Will use `tar` to extract +# "zip" # automatically guessed from the extension of the URL, but can be set explicitly. Will use `unzip` to extract +# "docker" # useful to extract files from an already-built docker image (instead of rebuilding them locally). 
Will use `docker-image-extract` to extract +# "whatever" # an arbitrary value, not really meaningful except to imply that the file won't be extracted +# +# in_subdir = true # default, there's an intermediate subdir in the archive before accessing the actual files +# false # sources are directly in the archive root +# n # (special cases) an integer representing a number of subdirs levels to get rid of +# +# extract = true # default if file is indeed an archive such as .zip, .tar.gz, .tar.bz2, ... +# = false # default if file 'format' is not set and the file is not to be extracted because it is not an archive but a script or binary or whatever asset. +# # in which case the file will only be `mv`ed to the location possibly renamed using the `rename` value +# +# rename = "whatever_your_want" # to be used for convenience when `extract` is false and the default name of the file is not practical +# platform = "linux/amd64" # (defaults to "linux/$YNH_ARCH") to be used in conjonction with `format = "docker"` to specify which architecture to extract for +# ``` +# +# You may also define assets url and checksum per-architectures such as: +# ```toml +# [resources.sources] +# [resources.sources.main] +# amd64.url = "https://some.address.to/download/the/app/archive/when/amd64" +# amd64.sha256 = "0123456789abcdef" +# armhf.url = "https://some.address.to/download/the/app/archive/when/armhf" +# armhf.sha256 = "fedcba9876543210" +# ``` +# +# In which case ynh_setup_source --dest_dir="$install_dir" will automatically pick the appropriate source depending on the arch +# +# +# +# #### Legacy format '.src' +# # This helper will read `conf/${source_id}.src`, download and install the sources. # # The src file need to contains: # ``` # SOURCE_URL=Address to download the app archive -# SOURCE_SUM=Control sum -# # (Optional) Program to check the integrity (sha256sum, md5sum...). Default: sha256 -# SOURCE_SUM_PRG=sha256 -# # (Optional) Archive format. Default: tar.gz +# SOURCE_SUM=Sha256 sum # SOURCE_FORMAT=tar.gz -# # (Optional) Put false if sources are directly in the archive root. Default: true -# # Instead of true, SOURCE_IN_SUBDIR could be the number of sub directories to remove. # SOURCE_IN_SUBDIR=false -# # (Optionnal) Name of the local archive (offline setup support). Default: ${src_id}.${src_format} # SOURCE_FILENAME=example.tar.gz -# # (Optional) If it set as false don't extract the source. Default: true -# # (Useful to get a debian package or a python wheel.) # SOURCE_EXTRACT=(true|false) -# # (Optionnal) Name of the plateform. Default: "linux/$YNH_ARCH" # SOURCE_PLATFORM=linux/arm64/v8 # ``` # # The helper will: -# - Check if there is a local source archive in `/opt/yunohost-apps-src/$APP_ID/$SOURCE_FILENAME` -# - Download `$SOURCE_URL` if there is no local archive -# - Check the integrity with `$SOURCE_SUM_PRG -c --status` +# - Download the specific URL if there is no local archive +# - Check the integrity with the specific sha256 sum # - Uncompress the archive to `$dest_dir`. -# - If `$SOURCE_IN_SUBDIR` is true, the first level directory of the archive will be removed. -# - If `$SOURCE_IN_SUBDIR` is a numeric value, the N first level directories will be removed. +# - If `in_subdir` is true, the first level directory of the archive will be removed. +# - If `in_subdir` is a numeric value, the N first level directories will be removed. 
# - Patches named `sources/patches/${src_id}-*.patch` will be applied to `$dest_dir` # - Extra files in `sources/extra_files/$src_id` will be copied to dest_dir # @@ -118,22 +161,66 @@ ynh_setup_source() { local full_replace # Manage arguments with getopts ynh_handle_getopts_args "$@" - source_id="${source_id:-app}" keep="${keep:-}" full_replace="${full_replace:-0}" - local src_file_path="$YNH_APP_BASEDIR/conf/${source_id}.src" + if test -e $YNH_APP_BASEDIR/manifest.toml && cat $YNH_APP_BASEDIR/manifest.toml | toml_to_json | jq -e '.resources.sources' >/dev/null + then + source_id="${source_id:-main}" + local sources_json=$(cat $YNH_APP_BASEDIR/manifest.toml | toml_to_json | jq ".resources.sources[\"$source_id\"]") + if jq -re ".url" <<< "$sources_json" + then + local arch_prefix="" + else + local arch_prefix=".$YNH_ARCH" + fi - # Load value from configuration file (see above for a small doc about this file - # format) - local src_url=$(grep 'SOURCE_URL=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_sum=$(grep 'SOURCE_SUM=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_sumprg=$(grep 'SOURCE_SUM_PRG=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_format=$(grep 'SOURCE_FORMAT=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_in_subdir=$(grep 'SOURCE_IN_SUBDIR=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_filename=$(grep 'SOURCE_FILENAME=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_extract=$(grep 'SOURCE_EXTRACT=' "$src_file_path" | cut --delimiter='=' --fields=2-) - local src_plateform=$(grep 'SOURCE_PLATFORM=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_url="$(jq -r "$arch_prefix.url" <<< "$sources_json" | sed 's/^null$//')" + local src_sum="$(jq -r "$arch_prefix.sha256" <<< "$sources_json" | sed 's/^null$//')" + local src_sumprg="sha256sum" + local src_format="$(jq -r ".format" <<< "$sources_json" | sed 's/^null$//')" + local src_in_subdir="$(jq -r ".in_subdir" <<< "$sources_json" | sed 's/^null$//')" + local src_extract="$(jq -r ".extract" <<< "$sources_json" | sed 's/^null$//')" + local src_platform="$(jq -r ".platform" <<< "$sources_json" | sed 's/^null$//')" + local src_rename="$(jq -r ".rename" <<< "$sources_json" | sed 's/^null$//')" + + [[ -n "$src_url" ]] || ynh_die "No URL defined for source $source_id$arch_prefix ?" + [[ -n "$src_sum" ]] || ynh_die "No sha256 sum defined for source $source_id$arch_prefix ?" 
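As a stand-alone illustration of the parsing chain used just above (`toml_to_json` piped into `jq`), here is a minimal sketch; the manifest content is hypothetical and it assumes `python3-toml` and `jq` are available, as the helper itself does:

```bash
# Sketch of the toml_to_json | jq chain behind ynh_setup_source
# (hypothetical manifest content, not part of this patch).
cat > /tmp/manifest.toml <<'EOF'
[resources.sources]
[resources.sources.main]
url = "https://example.org/app-1.0.0.tar.gz"
sha256 = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
EOF
sources_json=$(python3 -c 'import toml, json, sys; print(json.dumps(toml.load(sys.stdin)))' \
    < /tmp/manifest.toml | jq '.resources.sources["main"]')
# jq prints "null" for missing keys, hence the same sed cleanup as in the helper.
src_url=$(jq -r '.url' <<< "$sources_json" | sed 's/^null$//')
src_sum=$(jq -r '.sha256' <<< "$sources_json" | sed 's/^null$//')
echo "url=$src_url sha256=$src_sum"
```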
+ + if [[ -z "$src_format" ]] + then + if [[ "$src_url" =~ ^.*\.zip$ ]] || [[ "$src_url" =~ ^.*/zipball/.*$ ]] + then + src_format="zip" + elif [[ "$src_url" =~ ^.*\.tar\.gz$ ]] || [[ "$src_url" =~ ^.*\.tgz$ ]] || [[ "$src_url" =~ ^.*/tar\.gz/.*$ ]] || [[ "$src_url" =~ ^.*/tarball/.*$ ]] + then + src_format="tar.gz" + elif [[ "$src_url" =~ ^.*\.tar\.xz$ ]] + then + src_format="tar.xz" + elif [[ "$src_url" =~ ^.*\.tar\.bz2$ ]] + then + src_format="tar.bz2" + elif [[ -z "$src_extract" ]] + then + src_extract="false" + fi + fi + else + source_id="${source_id:-app}" + local src_file_path="$YNH_APP_BASEDIR/conf/${source_id}.src" + + # Load value from configuration file (see above for a small doc about this file + # format) + local src_url=$(grep 'SOURCE_URL=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_sum=$(grep 'SOURCE_SUM=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_sumprg=$(grep 'SOURCE_SUM_PRG=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_format=$(grep 'SOURCE_FORMAT=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_in_subdir=$(grep 'SOURCE_IN_SUBDIR=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_rename=$(grep 'SOURCE_FILENAME=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_extract=$(grep 'SOURCE_EXTRACT=' "$src_file_path" | cut --delimiter='=' --fields=2-) + local src_platform=$(grep 'SOURCE_PLATFORM=' "$src_file_path" | cut --delimiter='=' --fields=2-) + fi # Default value src_sumprg=${src_sumprg:-sha256sum} @@ -141,33 +228,53 @@ ynh_setup_source() { src_format=${src_format:-tar.gz} src_format=$(echo "$src_format" | tr '[:upper:]' '[:lower:]') src_extract=${src_extract:-true} - if [ "$src_filename" = "" ]; then - src_filename="${source_id}.${src_format}" + + if [[ "$src_extract" != "true" ]] && [[ "$src_extract" != "false" ]] + then + ynh_die "For source $source_id, expected either 'true' or 'false' for the extract parameter" fi - # (Unused?) mecanism where one can have the file in a special local cache to not have to download it... - local local_src="/opt/yunohost-apps-src/${YNH_APP_ID}/${src_filename}" - mkdir -p /var/cache/yunohost/download/${YNH_APP_ID}/ - src_filename="/var/cache/yunohost/download/${YNH_APP_ID}/${src_filename}" + # (Unused?) mecanism where one can have the file in a special local cache to not have to download it... + local local_src="/opt/yunohost-apps-src/${YNH_APP_ID}/${source_id}" + + # Gotta use this trick with 'dirname' because source_id may contain slashes x_x + mkdir -p $(dirname /var/cache/yunohost/download/${YNH_APP_ID}/${source_id}) + src_filename="/var/cache/yunohost/download/${YNH_APP_ID}/${source_id}" if [ "$src_format" = "docker" ]; then - src_plateform="${src_plateform:-"linux/$YNH_ARCH"}" + src_platform="${src_platform:-"linux/$YNH_ARCH"}" elif test -e "$local_src"; then cp $local_src $src_filename else [ -n "$src_url" ] || ynh_die "Couldn't parse SOURCE_URL from $src_file_path ?" - # NB. we have to declare the var as local first, - # otherwise 'local foo=$(false) || echo 'pwet'" does'nt work - # because local always return 0 ... - local out - # Timeout option is here to enforce the timeout on dns query and tcp connect (c.f. man wget) - out=$(wget --tries 3 --no-dns-cache --timeout 900 --no-verbose --output-document=$src_filename $src_url 2>&1) \ - || ynh_die --message="$out" + # If the file was prefetched but somehow doesn't match the sum, rm and redownload it + if [ -e "$src_filename" ] && ! 
echo "${src_sum} ${src_filename}" | ${src_sumprg} --check --status + then + rm -f "$src_filename" + fi + + # Only redownload the file if it wasnt prefetched + if [ ! -e "$src_filename" ] + then + # NB. we have to declare the var as local first, + # otherwise 'local foo=$(false) || echo 'pwet'" does'nt work + # because local always return 0 ... + local out + # Timeout option is here to enforce the timeout on dns query and tcp connect (c.f. man wget) + out=$(wget --tries 3 --no-dns-cache --timeout 900 --no-verbose --output-document=$src_filename $src_url 2>&1) \ + || ynh_die --message="$out" + fi + # Check the control sum - echo "${src_sum} ${src_filename}" | ${src_sumprg} --check --status \ - || ynh_die --message="Corrupt source for ${src_filename}: Expected ${src_sum} but got $(${src_sumprg} ${src_filename} | cut --delimiter=' ' --fields=1) (size: $(du -hs ${src_filename} | cut --delimiter=' ' --fields=1))." + if ! echo "${src_sum} ${src_filename}" | ${src_sumprg} --check --status + then + local actual_sum="$(${src_sumprg} ${src_filename} | cut --delimiter=' ' --fields=1)" + local actual_size="$(du -hs ${src_filename} | cut --fields=1)" + rm -f ${src_filename} + ynh_die --message="Corrupt source for ${src_url}: Expected sha256sum to be ${src_sum} but got ${actual_sum} (size: ${actual_size})." + fi fi # Keep files to be backup/restored at the end of the helper @@ -199,11 +306,16 @@ ynh_setup_source() { _ynh_apply_default_permissions $dest_dir fi - if ! "$src_extract"; then - mv $src_filename $dest_dir - elif [ "$src_format" = "docker" ]; then - /usr/share/yunohost/helpers.d/vendor/docker-image-extract/docker-image-extract -p $src_plateform -o $dest_dir $src_url 2>&1 - elif [ "$src_format" = "zip" ]; then + if [[ "$src_extract" == "false" ]]; then + if [[ -z "$src_rename" ]] + then + mv $src_filename $dest_dir + else + mv $src_filename $dest_dir/$src_rename + fi + elif [[ "$src_format" == "docker" ]]; then + /usr/share/yunohost/helpers.d/vendor/docker-image-extract/docker-image-extract -p $src_platform -o $dest_dir $src_url 2>&1 + elif [[ "$src_format" == "zip" ]]; then # Zip format # Using of a temp directory, because unzip doesn't manage --strip-components if $src_in_subdir; then @@ -959,8 +1071,10 @@ _ynh_apply_default_permissions() { fi fi - # Crons should be owned by root otherwise they probably don't run - if echo "$target" | grep -q '^/etc/cron' + # Crons should be owned by root + # Also we don't want systemd conf, nginx conf or others stuff to be owned by the app, + # otherwise they could self-edit their own systemd conf and escalate privilege + if echo "$target" | grep -q '^/etc/cron\|/etc/php\|/etc/nginx/conf.d\|/etc/fail2ban\|/etc/systemd/system' then chmod 400 $target chown root:root $target @@ -970,3 +1084,7 @@ _ynh_apply_default_permissions() { int_to_bool() { sed -e 's/^1$/True/g' -e 's/^0$/False/g' } + +toml_to_json() { + python3 -c 'import toml, json, sys; print(json.dumps(toml.load(sys.stdin)))' +} diff --git a/helpers/vendor/n/LICENSE b/helpers/vendor/n/LICENSE new file mode 100644 index 000000000..8e04e8467 --- /dev/null +++ b/helpers/vendor/n/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to 
whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/helpers/vendor/n/README.md b/helpers/vendor/n/README.md new file mode 100644 index 000000000..9a29a3936 --- /dev/null +++ b/helpers/vendor/n/README.md @@ -0,0 +1 @@ +This is taken from https://github.com/tj/n/ diff --git a/helpers/vendor/n/n b/helpers/vendor/n/n new file mode 100755 index 000000000..2a877c45b --- /dev/null +++ b/helpers/vendor/n/n @@ -0,0 +1,1635 @@ +#!/usr/bin/env bash +# shellcheck disable=SC2155 +# Disabled "Declare and assign separately to avoid masking return values": https://github.com/koalaman/shellcheck/wiki/SC2155 + +# +# log +# + +log() { + printf " ${SGR_CYAN}%10s${SGR_RESET} : ${SGR_FAINT}%s${SGR_RESET}\n" "$1" "$2" +} + +# +# verbose_log +# Can suppress with --quiet. +# Like log but to stderr rather than stdout, so can also be used from "display" routines. +# + +verbose_log() { + if [[ "${SHOW_VERBOSE_LOG}" == "true" ]]; then + >&2 printf " ${SGR_CYAN}%10s${SGR_RESET} : ${SGR_FAINT}%s${SGR_RESET}\n" "$1" "$2" + fi +} + +# +# Exit with the given +# + +abort() { + >&2 printf "\n ${SGR_RED}Error: %s${SGR_RESET}\n\n" "$*" && exit 1 +} + +# +# Synopsis: trace message ... +# Debugging output to stderr, not used in production code. +# + +function trace() { + >&2 printf "trace: %s\n" "$*" +} + +# +# Synopsis: echo_red message ... +# Highlight message in colour (on stdout). +# + +function echo_red() { + printf "${SGR_RED}%s${SGR_RESET}\n" "$*" +} + +# +# Synopsis: n_grep +# grep wrapper to ensure consistent grep options and circumvent aliases. +# + +function n_grep() { + GREP_OPTIONS='' command grep "$@" +} + +# +# Setup and state +# + +VERSION="v9.1.0" + +N_PREFIX="${N_PREFIX-/usr/local}" +N_PREFIX=${N_PREFIX%/} +readonly N_PREFIX + +N_CACHE_PREFIX="${N_CACHE_PREFIX-${N_PREFIX}}" +N_CACHE_PREFIX=${N_CACHE_PREFIX%/} +CACHE_DIR="${N_CACHE_PREFIX}/n/versions" +readonly N_CACHE_PREFIX CACHE_DIR + +N_NODE_MIRROR=${N_NODE_MIRROR:-${NODE_MIRROR:-https://nodejs.org/dist}} +N_NODE_MIRROR=${N_NODE_MIRROR%/} +readonly N_NODE_MIRROR + +N_NODE_DOWNLOAD_MIRROR=${N_NODE_DOWNLOAD_MIRROR:-https://nodejs.org/download} +N_NODE_DOWNLOAD_MIRROR=${N_NODE_DOWNLOAD_MIRROR%/} +readonly N_NODE_DOWNLOAD_MIRROR + +# Using xz instead of gzip is enabled by default, if xz compatibility checks pass. +# User may set N_USE_XZ to 0 to disable, or set to anything else to enable. +# May also be overridden by command line flags. + +# Normalise external values to true/false +if [[ "${N_USE_XZ}" = "0" ]]; then + N_USE_XZ="false" +elif [[ -n "${N_USE_XZ+defined}" ]]; then + N_USE_XZ="true" +fi +# Not setting to readonly. Overriden by CLI flags, and update_xz_settings_for_version. + +N_MAX_REMOTE_MATCHES=${N_MAX_REMOTE_MATCHES:-20} +# modified by update_mirror_settings_for_version +g_mirror_url=${N_NODE_MIRROR} +g_mirror_folder_name="node" + +# Options for curl and wget. 
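For context on how this vendored copy is consumed (see the helpers/nodejs hunk earlier in this diff), a minimal sketch; the Node version number is hypothetical:

```bash
# Sketch: how the vendored n is installed and used (paths taken from helpers/nodejs).
n_install_dir="/opt/node_n"
export N_PREFIX="$n_install_dir"   # keep everything n does under /opt/node_n
mkdir -p "$n_install_dir/bin"
cp /usr/share/yunohost/helpers.d/vendor/n/n "$n_install_dir/bin/n"
# Installing a (hypothetical) Node version then lands under $N_PREFIX/n/versions/node/
"$n_install_dir/bin/n" install 18.16.0
ls "$n_install_dir/n/versions/node"
```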
+# Defining commands in variables is fraught (https://mywiki.wooledge.org/BashFAQ/050) +# but we can follow the simple case and store arguments in an array. + +GET_SHOWS_PROGRESS="false" +# --location to follow redirects +# --fail to avoid happily downloading error page from web server for 404 et al +# --show-error to show why failed (on stderr) +CURL_OPTIONS=( "--location" "--fail" "--show-error" ) +if [[ -t 1 ]]; then + CURL_OPTIONS+=( "--progress-bar" ) + command -v curl &> /dev/null && GET_SHOWS_PROGRESS="true" +else + CURL_OPTIONS+=( "--silent" ) +fi +WGET_OPTIONS=( "-q" "-O-" ) + +# Legacy support using unprefixed env. No longer documented in README. +if [ -n "$HTTP_USER" ];then + if [ -z "$HTTP_PASSWORD" ]; then + abort "Must specify HTTP_PASSWORD when supplying HTTP_USER" + fi + CURL_OPTIONS+=( "-u $HTTP_USER:$HTTP_PASSWORD" ) + WGET_OPTIONS+=( "--http-password=$HTTP_PASSWORD" + "--http-user=$HTTP_USER" ) +elif [ -n "$HTTP_PASSWORD" ]; then + abort "Must specify HTTP_USER when supplying HTTP_PASSWORD" +fi + +# Set by set_active_node +g_active_node= + +# set by various lookups to allow mixed logging and return value from function, especially for engine and node +g_target_node= + +DOWNLOAD=false # set to opt-out of activate (install), and opt-in to download (run, exec) +ARCH= +SHOW_VERBOSE_LOG="true" + +# ANSI escape codes +# https://en.wikipedia.org/wiki/ANSI_escape_code +# https://no-color.org +# https://bixense.com/clicolors + +USE_COLOR="true" +if [[ -n "${CLICOLOR_FORCE+defined}" && "${CLICOLOR_FORCE}" != "0" ]]; then + USE_COLOR="true" +elif [[ -n "${NO_COLOR+defined}" || "${CLICOLOR}" = "0" || ! -t 1 ]]; then + USE_COLOR="false" +fi +readonly USE_COLOR +# Select Graphic Rendition codes +if [[ "${USE_COLOR}" = "true" ]]; then + # KISS and use codes rather than tput, avoid dealing with missing tput or TERM. + readonly SGR_RESET="\033[0m" + readonly SGR_FAINT="\033[2m" + readonly SGR_RED="\033[31m" + readonly SGR_CYAN="\033[36m" +else + readonly SGR_RESET= + readonly SGR_FAINT= + readonly SGR_RED= + readonly SGR_CYAN= +fi + +# +# set_arch to override $(uname -a) +# + +set_arch() { + if test -n "$1"; then + ARCH="$1" + else + abort "missing -a|--arch value" + fi +} + +# +# Synopsis: set_insecure +# Globals modified: +# - CURL_OPTIONS +# - WGET_OPTIONS +# + +function set_insecure() { + CURL_OPTIONS+=( "--insecure" ) + WGET_OPTIONS+=( "--no-check-certificate" ) +} + +# +# Synposis: display_major_version numeric-version +# +display_major_version() { + local version=$1 + version="${version#v}" + version="${version%%.*}" + echo "${version}" +} + +# +# Synopsis: update_mirror_settings_for_version version +# e.g. means using download mirror and folder is nightly +# Globals modified: +# - g_mirror_url +# - g_mirror_folder_name +# + +function update_mirror_settings_for_version() { + if is_download_folder "$1" ; then + g_mirror_folder_name="$1" + g_mirror_url="${N_NODE_DOWNLOAD_MIRROR}/${g_mirror_folder_name}" + elif is_download_version "$1"; then + [[ "$1" =~ ^([^/]+)/(.*) ]] + local remote_folder="${BASH_REMATCH[1]}" + g_mirror_folder_name="${remote_folder}" + g_mirror_url="${N_NODE_DOWNLOAD_MIRROR}/${g_mirror_folder_name}" + fi +} + +# +# Synopsis: update_xz_settings_for_version numeric-version +# Globals modified: +# - N_USE_XZ +# + +function update_xz_settings_for_version() { + # tarballs in xz format were available in later version of iojs, but KISS and only use xz from v4. 
+ if [[ "${N_USE_XZ}" = "true" ]]; then + local major_version="$(display_major_version "$1")" + if [[ "${major_version}" -lt 4 ]]; then + N_USE_XZ="false" + fi + fi +} + +# +# Synopsis: update_arch_settings_for_version numeric-version +# Globals modified: +# - ARCH +# + +function update_arch_settings_for_version() { + local tarball_platform="$(display_tarball_platform)" + if [[ -z "${ARCH}" && "${tarball_platform}" = "darwin-arm64" ]]; then + # First native builds were for v16, but can use x64 in rosetta for older versions. + local major_version="$(display_major_version "$1")" + if [[ "${major_version}" -lt 16 ]]; then + ARCH=x64 + fi + fi +} + +# +# Synopsis: is_lts_codename version +# + +function is_lts_codename() { + # https://github.com/nodejs/Release/blob/master/CODENAMES.md + # e.g. argon, Boron + [[ "$1" =~ ^([Aa]rgon|[Bb]oron|[Cc]arbon|[Dd]ubnium|[Ee]rbium|[Ff]ermium|[Gg]allium|[Hh]ydrogen|[Ii]ron|[Jj]od)$ ]] +} + +# +# Synopsis: is_download_folder version +# + +function is_download_folder() { + # e.g. nightly + [[ "$1" =~ ^(next-nightly|nightly|rc|release|test|v8-canary)$ ]] +} + +# +# Synopsis: is_download_version version +# + +function is_download_version() { + # e.g. nightly/, nightly/latest, nightly/v11 + if [[ "$1" =~ ^([^/]+)/(.*) ]]; then + local remote_folder="${BASH_REMATCH[1]}" + is_download_folder "${remote_folder}" + return + fi + return 2 +} + +# +# Synopsis: is_numeric_version version +# + +function is_numeric_version() { + # e.g. 6, v7.1, 8.11.3 + [[ "$1" =~ ^[v]{0,1}[0-9]+(\.[0-9]+){0,2}$ ]] +} + +# +# Synopsis: is_exact_numeric_version version +# + +function is_exact_numeric_version() { + # e.g. 6, v7.1, 8.11.3 + [[ "$1" =~ ^[v]{0,1}[0-9]+\.[0-9]+\.[0-9]+$ ]] +} + +# +# Synopsis: is_node_support_version version +# Reference: https://github.com/nodejs/package-maintenance/issues/236#issue-474783582 +# + +function is_node_support_version() { + [[ "$1" =~ ^(active|lts_active|lts_latest|lts|current|supported)$ ]] +} + +# +# Synopsis: display_latest_node_support_alias version +# Map aliases onto existing n aliases, current and lts +# + +function display_latest_node_support_alias() { + case "$1" in + "active") printf "current" ;; + "lts_active") printf "lts" ;; + "lts_latest") printf "lts" ;; + "lts") printf "lts" ;; + "current") printf "current" ;; + "supported") printf "current" ;; + *) printf "unexpected-version" + esac +} + +# +# Functions used when showing versions installed +# + +enter_fullscreen() { + # Set cursor to be invisible + tput civis 2> /dev/null + # Save screen contents + tput smcup 2> /dev/null + stty -echo +} + +leave_fullscreen() { + # Set cursor to normal + tput cnorm 2> /dev/null + # Restore screen contents + tput rmcup 2> /dev/null + stty echo +} + +handle_sigint() { + leave_fullscreen + S="$?" + kill 0 + exit $S +} + +handle_sigtstp() { + leave_fullscreen + kill -s SIGSTOP $$ +} + +# +# Output usage information. +# + +display_help() { + cat <<-EOF + +Usage: n [options] [COMMAND] [args] + +Commands: + + n Display downloaded Node.js versions and install selection + n latest Install the latest Node.js release (downloading if necessary) + n lts Install the latest LTS Node.js release (downloading if necessary) + n Install Node.js (downloading if necessary) + n install Install Node.js (downloading if necessary) + n run [args ...] Execute downloaded Node.js with [args ...] + n which Output path for downloaded node + n exec [args...] 
Execute command with modified PATH, so downloaded node and npm first + n rm Remove the given downloaded version(s) + n prune Remove all downloaded versions except the installed version + n --latest Output the latest Node.js version available + n --lts Output the latest LTS Node.js version available + n ls Output downloaded versions + n ls-remote [version] Output matching versions available for download + n uninstall Remove the installed Node.js + +Options: + + -V, --version Output version of n + -h, --help Display help information + -p, --preserve Preserve npm and npx during install of Node.js + -q, --quiet Disable curl output. Disable log messages processing "auto" and "engine" labels. + -d, --download Download if necessary, and don't make active + -a, --arch Override system architecture + --all ls-remote displays all matches instead of last 20 + --insecure Turn off certificate checking for https requests (may be needed from behind a proxy server) + --use-xz/--no-use-xz Override automatic detection of xz support and enable/disable use of xz compressed node downloads. + +Aliases: + + install: i + latest: current + ls: list + lsr: ls-remote + lts: stable + rm: - + run: use, as + which: bin + +Versions: + + Numeric version numbers can be complete or incomplete, with an optional leading 'v'. + Versions can also be specified by label, or codename, + and other downloadable releases by / + + 4.9.1, 8, v6.1 Numeric versions + lts Newest Long Term Support official release + latest, current Newest official release + auto Read version from file: .n-node-version, .node-version, .nvmrc, or package.json + engine Read version from package.json + boron, carbon Codenames for release streams + lts_latest Node.js support aliases + + and nightly, rc/10 et al + +EOF +} + +err_no_installed_print_help() { + display_help + abort "no downloaded versions yet, see above help for commands" +} + +# +# Synopsis: next_version_installed selected_version +# Output version after selected (which may be blank under some circumstances). +# + +function next_version_installed() { + display_cache_versions | n_grep "$1" -A 1 | tail -n 1 +} + +# +# Synopsis: prev_version_installed selected_version +# Output version before selected (which may be blank under some circumstances). +# + +function prev_version_installed() { + display_cache_versions | n_grep "$1" -B 1 | head -n 1 +} + +# +# Output n version. +# + +display_n_version() { + echo "$VERSION" && exit 0 +} + +# +# Synopsis: set_active_node +# Checks cached downloads for a binary matching the active node. +# Globals modified: +# - g_active_node +# + +function set_active_node() { + g_active_node= + local node_path="$(command -v node)" + if [[ -x "${node_path}" ]]; then + local installed_version=$(node --version) + installed_version=${installed_version#v} + for dir in "${CACHE_DIR}"/*/ ; do + local folder_name="${dir%/}" + folder_name="${folder_name##*/}" + if diff &> /dev/null \ + "${CACHE_DIR}/${folder_name}/${installed_version}/bin/node" \ + "${node_path}" ; then + g_active_node="${folder_name}/${installed_version}" + break + fi + done + fi +} + +# +# Display sorted versions directories paths. +# + +display_versions_paths() { + find "$CACHE_DIR" -maxdepth 2 -type d \ + | sed 's|'"$CACHE_DIR"'/||g' \ + | n_grep -E "/[0-9]+\.[0-9]+\.[0-9]+" \ + | sed 's|/|.|' \ + | sort -k 1,1 -k 2,2n -k 3,3n -k 4,4n -t . 
\ + | sed 's|\.|/|' +} + +# +# Display installed versions with +# + +display_versions_with_selected() { + local selected="$1" + echo + for version in $(display_versions_paths); do + if test "$version" = "$selected"; then + printf " ${SGR_CYAN}ο${SGR_RESET} %s\n" "$version" + else + printf " ${SGR_FAINT}%s${SGR_RESET}\n" "$version" + fi + done + echo + printf "Use up/down arrow keys to select a version, return key to install, d to delete, q to quit" +} + +# +# Synopsis: display_cache_versions +# + +function display_cache_versions() { + for folder_and_version in $(display_versions_paths); do + echo "${folder_and_version}" + done +} + +# +# Display current node --version and others installed. +# + +menu_select_cache_versions() { + enter_fullscreen + set_active_node + local selected="${g_active_node}" + + clear + display_versions_with_selected "${selected}" + + trap handle_sigint INT + trap handle_sigtstp SIGTSTP + + ESCAPE_SEQ=$'\033' + UP=$'A' + DOWN=$'B' + CTRL_P=$'\020' + CTRL_N=$'\016' + + while true; do + read -rsn 1 key + case "$key" in + "$ESCAPE_SEQ") + # Handle ESC sequences followed by other characters, i.e. arrow keys + read -rsn 1 -t 1 tmp + # See "[" if terminal in normal mode, and "0" in application mode + if [[ "$tmp" == "[" || "$tmp" == "O" ]]; then + read -rsn 1 -t 1 arrow + case "$arrow" in + "$UP") + clear + selected="$(prev_version_installed "${selected}")" + display_versions_with_selected "${selected}" + ;; + "$DOWN") + clear + selected="$(next_version_installed "${selected}")" + display_versions_with_selected "${selected}" + ;; + esac + fi + ;; + "d") + if [[ -n "${selected}" ]]; then + clear + # Note: prev/next is constrained to min/max + local after_delete_selection="$(next_version_installed "${selected}")" + if [[ "${after_delete_selection}" == "${selected}" ]]; then + after_delete_selection="$(prev_version_installed "${selected}")" + fi + remove_versions "${selected}" + + if [[ "${after_delete_selection}" == "${selected}" ]]; then + clear + leave_fullscreen + echo "All downloaded versions have been deleted from cache." + exit + fi + + selected="${after_delete_selection}" + display_versions_with_selected "${selected}" + fi + ;; + # Vim or Emacs 'up' key + "k"|"$CTRL_P") + clear + selected="$(prev_version_installed "${selected}")" + display_versions_with_selected "${selected}" + ;; + # Vim or Emacs 'down' key + "j"|"$CTRL_N") + clear + selected="$(next_version_installed "${selected}")" + display_versions_with_selected "${selected}" + ;; + "q") + clear + leave_fullscreen + exit + ;; + "") + # enter key returns empty string + leave_fullscreen + [[ -n "${selected}" ]] && activate "${selected}" + exit + ;; + esac + done +} + +# +# Move up a line and erase. +# + +erase_line() { + printf "\033[1A\033[2K" +} + +# +# Disable PaX mprotect for +# + +disable_pax_mprotect() { + test -z "$1" && abort "binary required" + local binary="$1" + + # try to disable mprotect via XATTR_PAX header + local PAXCTL="$(PATH="/sbin:/usr/sbin:$PATH" command -v paxctl-ng 2>&1)" + local PAXCTL_ERROR=1 + if [ -x "$PAXCTL" ]; then + $PAXCTL -l && $PAXCTL -m "$binary" >/dev/null 2>&1 + PAXCTL_ERROR="$?" 
+ fi + + # try to disable mprotect via PT_PAX header + if [ "$PAXCTL_ERROR" != 0 ]; then + PAXCTL="$(PATH="/sbin:/usr/sbin:$PATH" command -v paxctl 2>&1)" + if [ -x "$PAXCTL" ]; then + $PAXCTL -Cm "$binary" >/dev/null 2>&1 + fi + fi +} + +# +# clean_copy_folder +# + +clean_copy_folder() { + local source="$1" + local target="$2" + if [[ -d "${source}" ]]; then + rm -rf "${target}" + cp -fR "${source}" "${target}" + fi +} + +# +# Activate +# + +activate() { + local version="$1" + local dir="$CACHE_DIR/$version" + local original_node="$(command -v node)" + local installed_node="${N_PREFIX}/bin/node" + log "copying" "$version" + + + # Ideally we would just copy from cache to N_PREFIX, but there are some complications + # - various linux versions use symlinks for folders in /usr/local and also error when copy folder onto symlink + # - we have used cp for years, so keep using it for backwards compatibility (instead of say rsync) + # - we allow preserving npm + # - we want to be somewhat robust to changes in tarball contents, so use find instead of hard-code expected subfolders + # + # This code was purist and concise for a long time. + # Now twice as much code, but using same code path for all uses, and supporting more setups. + + # Copy lib before bin so symlink targets exist. + # lib + mkdir -p "$N_PREFIX/lib" + # Copy everything except node_modules. + find "$dir/lib" -mindepth 1 -maxdepth 1 \! -name node_modules -exec cp -fR "{}" "$N_PREFIX/lib" \; + if [[ -z "${N_PRESERVE_NPM}" ]]; then + mkdir -p "$N_PREFIX/lib/node_modules" + # Copy just npm, skipping possible added global modules after download. Clean copy to avoid version change problems. + clean_copy_folder "$dir/lib/node_modules/npm" "$N_PREFIX/lib/node_modules/npm" + fi + # Takes same steps for corepack (experimental in node 16.9.0) as for npm, to avoid version problems. + if [[ -e "$dir/lib/node_modules/corepack" && -z "${N_PRESERVE_COREPACK}" ]]; then + mkdir -p "$N_PREFIX/lib/node_modules" + clean_copy_folder "$dir/lib/node_modules/corepack" "$N_PREFIX/lib/node_modules/corepack" + fi + + # bin + mkdir -p "$N_PREFIX/bin" + # Remove old node to avoid potential problems with firewall getting confused on Darwin by overwrite. + rm -f "$N_PREFIX/bin/node" + # Copy bin items by hand, in case user has installed global npm modules into cache. + cp -f "$dir/bin/node" "$N_PREFIX/bin" + [[ -e "$dir/bin/node-waf" ]] && cp -f "$dir/bin/node-waf" "$N_PREFIX/bin" # v0.8.x + if [[ -z "${N_PRESERVE_COREPACK}" ]]; then + [[ -e "$dir/bin/corepack" ]] && cp -fR "$dir/bin/corepack" "$N_PREFIX/bin" # from 16.9.0 + fi + if [[ -z "${N_PRESERVE_NPM}" ]]; then + [[ -e "$dir/bin/npm" ]] && cp -fR "$dir/bin/npm" "$N_PREFIX/bin" + [[ -e "$dir/bin/npx" ]] && cp -fR "$dir/bin/npx" "$N_PREFIX/bin" + fi + + # include + mkdir -p "$N_PREFIX/include" + find "$dir/include" -mindepth 1 -maxdepth 1 -exec cp -fR "{}" "$N_PREFIX/include" \; + + # share + mkdir -p "$N_PREFIX/share" + # Copy everything except man, at it is a symlink on some Linux (e.g. archlinux). + find "$dir/share" -mindepth 1 -maxdepth 1 \! -name man -exec cp -fR "{}" "$N_PREFIX/share" \; + mkdir -p "$N_PREFIX/share/man" + find "$dir/share/man" -mindepth 1 -maxdepth 1 -exec cp -fR "{}" "$N_PREFIX/share/man" \; + + disable_pax_mprotect "${installed_node}" + + local active_node="$(command -v node)" + if [[ -e "${active_node}" && -e "${installed_node}" && "${active_node}" != "${installed_node}" ]]; then + # Installed and active are different which might be a PATH problem. List both to give user some clues. 
+ log "installed" "$("${installed_node}" --version) to ${installed_node}" + log "active" "$("${active_node}" --version) at ${active_node}" + else + local npm_version_str="" + local installed_npm="${N_PREFIX}/bin/npm" + local active_npm="$(command -v npm)" + if [[ -z "${N_PRESERVE_NPM}" && -e "${active_npm}" && -e "${installed_npm}" && "${active_npm}" = "${installed_npm}" ]]; then + npm_version_str=" (with npm $(npm --version))" + fi + + log "installed" "$("${installed_node}" --version)${npm_version_str}" + + # Extra tips for changed location. + if [[ -e "${active_node}" && -e "${original_node}" && "${active_node}" != "${original_node}" ]]; then + printf '\nNote: the node command changed location and the old location may be remembered in your current shell.\n' + log old "${original_node}" + log new "${active_node}" + printf 'If "node --version" shows the old version then start a new shell, or reset the location hash with:\nhash -r (for bash, zsh, ash, dash, and ksh)\nrehash (for csh and tcsh)\n' + fi + fi +} + +# +# Install +# + +install() { + [[ -z "$1" ]] && abort "version required" + local version + get_latest_resolved_version "$1" || return 2 + version="${g_target_node}" + [[ -n "${version}" ]] || abort "no version found for '$1'" + update_mirror_settings_for_version "$1" + update_xz_settings_for_version "${version}" + update_arch_settings_for_version "${version}" + + local dir="${CACHE_DIR}/${g_mirror_folder_name}/${version}" + + # Note: decompression flags ignored with default Darwin tar which autodetects. + if test "$N_USE_XZ" = "true"; then + local tarflag="-Jx" + else + local tarflag="-zx" + fi + + if test -d "$dir"; then + if [[ ! -e "$dir/n.lock" ]] ; then + if [[ "$DOWNLOAD" == "false" ]] ; then + activate "${g_mirror_folder_name}/${version}" + fi + exit + fi + fi + + log installing "${g_mirror_folder_name}-v$version" + + local url="$(tarball_url "$version")" + is_ok "${url}" || abort "download preflight failed for '$version' (${url})" + + log mkdir "$dir" + mkdir -p "$dir" || abort "sudo required (or change ownership, or define N_PREFIX)" + touch "$dir/n.lock" + + cd "${dir}" || abort "Failed to cd to ${dir}" + + log fetch "$url" + do_get "${url}" | tar "$tarflag" --strip-components=1 --no-same-owner -f - + pipe_results=( "${PIPESTATUS[@]}" ) + if [[ "${pipe_results[0]}" -ne 0 ]]; then + abort "failed to download archive for $version" + fi + if [[ "${pipe_results[1]}" -ne 0 ]]; then + abort "failed to extract archive for $version" + fi + [ "$GET_SHOWS_PROGRESS" = "true" ] && erase_line + rm -f "$dir/n.lock" + + disable_pax_mprotect bin/node + + if [[ "$DOWNLOAD" == "false" ]]; then + activate "${g_mirror_folder_name}/$version" + fi +} + +# +# Be more silent. +# + +set_quiet() { + SHOW_VERBOSE_LOG="false" + command -v curl > /dev/null && CURL_OPTIONS+=( "--silent" ) && GET_SHOWS_PROGRESS="false" +} + +# +# Synopsis: do_get [option...] url +# Call curl or wget with combination of global and passed options. +# + +function do_get() { + if command -v curl &> /dev/null; then + curl "${CURL_OPTIONS[@]}" "$@" + elif command -v wget &> /dev/null; then + wget "${WGET_OPTIONS[@]}" "$@" + else + abort "curl or wget command required" + fi +} + +# +# Synopsis: do_get_index [option...] url +# Call curl or wget with combination of global and passed options, +# with options tweaked to be more suitable for getting index. 
+# + +function do_get_index() { + if command -v curl &> /dev/null; then + # --silent to suppress progress et al + curl --silent --compressed "${CURL_OPTIONS[@]}" "$@" + elif command -v wget &> /dev/null; then + wget "${WGET_OPTIONS[@]}" "$@" + else + abort "curl or wget command required" + fi +} + +# +# Synopsis: remove_versions version ... +# + +function remove_versions() { + [[ -z "$1" ]] && abort "version(s) required" + while [[ $# -ne 0 ]]; do + local version + get_latest_resolved_version "$1" || break + version="${g_target_node}" + if [[ -n "${version}" ]]; then + update_mirror_settings_for_version "$1" + local dir="${CACHE_DIR}/${g_mirror_folder_name}/${version}" + if [[ -s "${dir}" ]]; then + rm -rf "${dir}" + else + echo "$1 (${version}) not in downloads cache" + fi + else + echo "No version found for '$1'" + fi + shift + done +} + +# +# Synopsis: prune_cache +# + +function prune_cache() { + set_active_node + + for folder_and_version in $(display_versions_paths); do + if [[ "${folder_and_version}" != "${g_active_node}" ]]; then + echo "${folder_and_version}" + rm -rf "${CACHE_DIR:?}/${folder_and_version}" + fi + done +} + +# +# Synopsis: find_cached_version version +# Finds cache directory for resolved version. +# Globals modified: +# - g_cached_version + +function find_cached_version() { + [[ -z "$1" ]] && abort "version required" + local version + get_latest_resolved_version "$1" || exit 1 + version="${g_target_node}" + [[ -n "${version}" ]] || abort "no version found for '$1'" + + update_mirror_settings_for_version "$1" + g_cached_version="${CACHE_DIR}/${g_mirror_folder_name}/${version}" + if [[ ! -d "${g_cached_version}" && "${DOWNLOAD}" == "true" ]]; then + (install "${version}") + fi + [[ -d "${g_cached_version}" ]] || abort "'$1' (${version}) not in downloads cache" +} + + +# +# Synopsis: display_bin_path_for_version version +# + +function display_bin_path_for_version() { + find_cached_version "$1" + echo "${g_cached_version}/bin/node" +} + +# +# Synopsis: run_with_version version [args...] +# Run the given of node with [args ..] +# + +function run_with_version() { + find_cached_version "$1" + shift # remove version from parameters + exec "${g_cached_version}/bin/node" "$@" +} + +# +# Synopsis: exec_with_version command [args...] +# Modify the path to include and execute command. +# + +function exec_with_version() { + find_cached_version "$1" + shift # remove version from parameters + PATH="${g_cached_version}/bin:$PATH" exec "$@" +} + +# +# Synopsis: is_ok url +# Check the HEAD response of . +# + +function is_ok() { + # Note: both curl and wget can follow redirects, as present on some mirrors (e.g. https://npm.taobao.org/mirrors/node). + # The output is complicated with redirects, so keep it simple and use command status rather than parse output. + if command -v curl &> /dev/null; then + do_get --silent --head "$1" > /dev/null || return 1 + else + do_get --spider "$1" > /dev/null || return 1 + fi +} + +# +# Synopsis: can_use_xz +# Test system to see if xz decompression is supported by tar. +# + +function can_use_xz() { + # Be conservative and only enable if xz is likely to work. Unfortunately we can't directly query tar itself. 
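+ # In practice the checks below treat Linux with an xz binary on PATH, or macOS 10.9 and later, as xz-capable; anything else falls through and the gzip tarballs are used instead.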
+ # For research, see https://github.com/shadowspawn/nvh/issues/8 + local uname_s="$(uname -s)" + if [[ "${uname_s}" = "Linux" ]] && command -v xz &> /dev/null ; then + # tar on linux is likely to support xz if it is available as a command + return 0 + elif [[ "${uname_s}" = "Darwin" ]]; then + local macos_version="$(sw_vers -productVersion)" + local macos_major_version="$(echo "${macos_version}" | cut -d '.' -f 1)" + local macos_minor_version="$(echo "${macos_version}" | cut -d '.' -f 2)" + if [[ "${macos_major_version}" -gt 10 || "${macos_minor_version}" -gt 8 ]]; then + # tar on recent Darwin has xz support built-in + return 0 + fi + fi + return 2 # not supported +} + +# +# Synopsis: display_tarball_platform +# + +function display_tarball_platform() { + # https://en.wikipedia.org/wiki/Uname + + local os="unexpected_os" + local uname_a="$(uname -a)" + case "${uname_a}" in + Linux*) os="linux" ;; + Darwin*) os="darwin" ;; + SunOS*) os="sunos" ;; + AIX*) os="aix" ;; + CYGWIN*) >&2 echo_red "Cygwin is not supported by n" ;; + MINGW*) >&2 echo_red "Git BASH (MSYS) is not supported by n" ;; + esac + + local arch="unexpected_arch" + local uname_m="$(uname -m)" + case "${uname_m}" in + x86_64) arch=x64 ;; + i386 | i686) arch="x86" ;; + aarch64) arch=arm64 ;; + armv8l) arch=arm64 ;; # armv8l probably supports arm64, and there is no specific armv8l build so give it a go + *) + # e.g. armv6l, armv7l, arm64 + arch="${uname_m}" + ;; + esac + # Override from command line, or version specific adjustment. + [ -n "$ARCH" ] && arch="$ARCH" + + echo "${os}-${arch}" +} + +# +# Synopsis: display_compatible_file_field +# display for current platform, as per field in index.tab, which is different than actual download +# + +function display_compatible_file_field { + local compatible_file_field="$(display_tarball_platform)" + if [[ -z "${ARCH}" && "${compatible_file_field}" = "darwin-arm64" ]]; then + # Look for arm64 for native but also x64 for older versions which can run in rosetta. + # (Downside is will get an install error if install version above 16 with x64 and not arm64.) 
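+ # Note: this field is later used as an extended-regex alternation in display_remote_versions, so a line matching either osx-arm64-tar or osx-x64-tar is accepted.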
+ compatible_file_field="osx-arm64-tar|osx-x64-tar" + elif [[ "${compatible_file_field}" =~ darwin-(.*) ]]; then + compatible_file_field="osx-${BASH_REMATCH[1]}-tar" + fi + echo "${compatible_file_field}" +} + +# +# Synopsis: tarball_url version +# + +function tarball_url() { + local version="$1" + local ext=gz + [ "$N_USE_XZ" = "true" ] && ext="xz" + echo "${g_mirror_url}/v${version}/node-v${version}-$(display_tarball_platform).tar.${ext}" +} + +# +# Synopsis: get_file_node_version filename +# Sets g_target_node +# + +function get_file_node_version() { + g_target_node= + local filepath="$1" + verbose_log "found" "${filepath}" + # read returns a non-zero status but does still work if there is no line ending + local version + <"${filepath}" read -r version + # trim possible trailing \d from a Windows created file + version="${version%%[[:space:]]}" + verbose_log "read" "${version}" + g_target_node="${version}" +} + +# +# Synopsis: get_package_engine_version\ +# Sets g_target_node +# + +function get_package_engine_version() { + g_target_node= + local filepath="$1" + verbose_log "found" "${filepath}" + command -v node &> /dev/null || abort "an active version of node is required to read 'engines' from package.json" + local range + range="$(node -e "package = require('${filepath}'); if (package && package.engines && package.engines.node) console.log(package.engines.node)")" + verbose_log "read" "${range}" + [[ -n "${range}" ]] || return 2 + if [[ "*" == "${range}" ]]; then + verbose_log "target" "current" + g_target_node="current" + return + fi + + local version + if [[ "${range}" =~ ^([>~^=]|\>\=)?v?([0-9]+(\.[0-9]+){0,2})(.[xX*])?$ ]]; then + local operator="${BASH_REMATCH[1]}" + version="${BASH_REMATCH[2]}" + case "${operator}" in + '' | =) ;; + \> | \>=) version="current" ;; + \~) [[ "${version}" =~ ^([0-9]+\.[0-9]+)\.[0-9]+$ ]] && version="${BASH_REMATCH[1]}" ;; + ^) [[ "${version}" =~ ^([0-9]+) ]] && version="${BASH_REMATCH[1]}" ;; + esac + verbose_log "target" "${version}" + else + command -v npx &> /dev/null || abort "an active version of npx is required to use complex 'engine' ranges from package.json" + verbose_log "resolving" "${range}" + local version_per_line="$(n lsr --all)" + local versions_one_line=$(echo "${version_per_line}" | tr '\n' ' ') + # Using semver@7 so works with older versions of node. 
+ # shellcheck disable=SC2086 + version=$(npm_config_yes=true npx --quiet semver@7 -r "${range}" ${versions_one_line} | tail -n 1) + fi + g_target_node="${version}" +} + +# +# Synopsis: get_nvmrc_version +# Sets g_target_node +# + +function get_nvmrc_version() { + g_target_node= + local filepath="$1" + verbose_log "found" "${filepath}" + local version + <"${filepath}" read -r version + verbose_log "read" "${version}" + # Translate from nvm aliases + case "${version}" in + lts/\*) version="lts" ;; + lts/*) version="${version:4}" ;; + node) version="current" ;; + *) ;; + esac + g_target_node="${version}" +} + +# +# Synopsis: get_engine_version [error-message] +# Sets g_target_node +# + +function get_engine_version() { + g_target_node= + local error_message="${1-package.json not found}" + local parent + parent="${PWD}" + while [[ -n "${parent}" ]]; do + if [[ -e "${parent}/package.json" ]]; then + get_package_engine_version "${parent}/package.json" + else + parent=${parent%/*} + continue + fi + break + done + [[ -n "${parent}" ]] || abort "${error_message}" + [[ -n "${g_target_node}" ]] || abort "did not find supported version of node in 'engines' field of package.json" +} + +# +# Synopsis: get_auto_version +# Sets g_target_node +# + +function get_auto_version() { + g_target_node= + # Search for a version control file first + local parent + parent="${PWD}" + while [[ -n "${parent}" ]]; do + if [[ -e "${parent}/.n-node-version" ]]; then + get_file_node_version "${parent}/.n-node-version" + elif [[ -e "${parent}/.node-version" ]]; then + get_file_node_version "${parent}/.node-version" + elif [[ -e "${parent}/.nvmrc" ]]; then + get_nvmrc_version "${parent}/.nvmrc" + else + parent=${parent%/*} + continue + fi + break + done + # Fallback to package.json + [[ -n "${parent}" ]] || get_engine_version "no file found for auto version (.n-node-version, .node-version, .nvmrc, or package.json)" + [[ -n "${g_target_node}" ]] || abort "file found for auto did not contain target version of node" +} + +# +# Synopsis: get_latest_resolved_version version +# Sets g_target_node +# + +function get_latest_resolved_version() { + g_target_node= + local version=${1} + simple_version=${version#node/} # Only place supporting node/ [sic] + if is_exact_numeric_version "${simple_version}"; then + # Just numbers, already resolved, no need to lookup first. + simple_version="${simple_version#v}" + g_target_node="${simple_version}" + else + # Complicated recognising exact version, KISS and lookup. + g_target_node=$(N_MAX_REMOTE_MATCHES=1 display_remote_versions "$version") + fi +} + +# +# Synopsis: display_remote_index +# index.tab reference: https://github.com/nodejs/nodejs-dist-indexer +# Index fields are: version date files npm v8 uv zlib openssl modules lts security +# KISS and just return fields we currently care about: version files lts +# + +display_remote_index() { + local index_url="${g_mirror_url}/index.tab" + # tail to remove header line + do_get_index "${index_url}" | tail -n +2 | cut -f 1,3,10 + if [[ "${PIPESTATUS[0]}" -ne 0 ]]; then + # Reminder: abort will only exit subshell, but consistent error display + abort "failed to download version index (${index_url})" + fi +} + +# +# Synopsis: display_match_limit limit +# + +function display_match_limit(){ + if [[ "$1" -gt 1 && "$1" -lt 32000 ]]; then + echo "Listing remote... Displaying $1 matches (use --all to see all)." 
+ fi +} + +# +# Synopsis: display_remote_versions version +# + +function display_remote_versions() { + local version="$1" + update_mirror_settings_for_version "${version}" + local match='.' + local match_count="${N_MAX_REMOTE_MATCHES}" + + # Transform some labels before processing further. + if is_node_support_version "${version}"; then + version="$(display_latest_node_support_alias "${version}")" + match_count=1 + elif [[ "${version}" = "auto" ]]; then + # suppress stdout logging so lsr layout same as usual for scripting + get_auto_version || return 2 + version="${g_target_node}" + elif [[ "${version}" = "engine" ]]; then + # suppress stdout logging so lsr layout same as usual for scripting + get_engine_version || return 2 + version="${g_target_node}" + fi + + if [[ -z "${version}" ]]; then + match='.' + elif [[ "${version}" = "lts" || "${version}" = "stable" ]]; then + match_count=1 + # Codename is last field, first one with a name is newest lts + match="${TAB_CHAR}[a-zA-Z]+\$" + elif [[ "${version}" = "latest" || "${version}" = "current" ]]; then + match_count=1 + match='.' + elif is_numeric_version "${version}"; then + version="v${version#v}" + # Avoid restriction message if exact version + is_exact_numeric_version "${version}" && match_count=1 + # Quote any dots in version so they are literal for expression + match="${version//\./\.}" + # Avoid 1.2 matching 1.23 + match="^${match}[^0-9]" + elif is_lts_codename "${version}"; then + # Capitalise (could alternatively make grep case insensitive) + codename="$(echo "${version:0:1}" | tr '[:lower:]' '[:upper:]')${version:1}" + # Codename is last field + match="${TAB_CHAR}${codename}\$" + elif is_download_folder "${version}"; then + match='.' + elif is_download_version "${version}"; then + version="${version#"${g_mirror_folder_name}"/}" + if [[ "${version}" = "latest" || "${version}" = "current" ]]; then + match_count=1 + match='.' + else + version="v${version#v}" + match="${version//\./\.}" + match="^${match}" # prefix + if is_numeric_version "${version}"; then + # Exact numeric match + match="${match}[^0-9]" + fi + fi + else + abort "invalid version '$1'" + fi + display_match_limit "${match_count}" + + # Implementation notes: + # - using awk rather than head so do not close pipe early on curl + # - restrict search to compatible files as not always available, or not at same time + # - return status of curl command (i.e. PIPESTATUS[0]) + display_remote_index \ + | n_grep -E "$(display_compatible_file_field)" \ + | n_grep -E "${match}" \ + | awk "NR<=${match_count}" \ + | cut -f 1 \ + | n_grep -E -o '[^v].*' + return "${PIPESTATUS[0]}" +} + +# +# Synopsis: delete_with_echo target +# + +function delete_with_echo() { + if [[ -e "$1" ]]; then + echo "$1" + rm -rf "$1" + fi +} + +# +# Synopsis: uninstall_installed +# Uninstall the installed node and npm (leaving alone the cache), +# so undo install, and may expose possible system installed versions. +# + +uninstall_installed() { + # npm: https://docs.npmjs.com/misc/removing-npm + # rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/npm* + # node: https://stackabuse.com/how-to-uninstall-node-js-from-mac-osx/ + # Doing it by hand rather than scanning cache, so still works if cache deleted first. + # This covers tarballs for at least node 4 through 10. + + while true; do + read -r -p "Do you wish to delete node and npm from ${N_PREFIX}? 
" yn + case $yn in + [Yy]* ) break ;; + [Nn]* ) exit ;; + * ) echo "Please answer yes or no.";; + esac + done + + echo "" + echo "Uninstalling node and npm" + delete_with_echo "${N_PREFIX}/bin/node" + delete_with_echo "${N_PREFIX}/bin/npm" + delete_with_echo "${N_PREFIX}/bin/npx" + delete_with_echo "${N_PREFIX}/bin/corepack" + delete_with_echo "${N_PREFIX}/include/node" + delete_with_echo "${N_PREFIX}/lib/dtrace/node.d" + delete_with_echo "${N_PREFIX}/lib/node_modules/npm" + delete_with_echo "${N_PREFIX}/lib/node_modules/corepack" + delete_with_echo "${N_PREFIX}/share/doc/node" + delete_with_echo "${N_PREFIX}/share/man/man1/node.1" + delete_with_echo "${N_PREFIX}/share/systemtap/tapset/node.stp" +} + +# +# Synopsis: show_permission_suggestions +# + +function show_permission_suggestions() { + echo "Suggestions:" + echo "- run n with sudo, or" + echo "- define N_PREFIX to a writeable location, or" +} + +# +# Synopsis: show_diagnostics +# Show environment and check for common problems. +# + +function show_diagnostics() { + echo "This information is to help you diagnose issues, and useful when reporting an issue." + echo "Note: some output may contain passwords. Redact before sharing." + + printf "\n\nCOMMAND LOCATIONS AND VERSIONS\n" + + printf "\nbash\n" + command -v bash && bash --version + + printf "\nn\n" + command -v n && n --version + + printf "\nnode\n" + if command -v node &> /dev/null; then + command -v node && node --version + node -e 'if (process.versions.v8) console.log("JavaScript engine: v8");' + + printf "\nnpm\n" + command -v npm && npm --version + fi + + printf "\ntar\n" + if command -v tar &> /dev/null; then + command -v tar && tar --version + else + echo_red "tar not found. Needed for extracting downloads." + fi + + printf "\ncurl or wget\n" + if command -v curl &> /dev/null; then + command -v curl && curl --version + elif command -v wget &> /dev/null; then + command -v wget && wget --version + else + echo_red "Neither curl nor wget found. Need one of them for downloads." + fi + + printf "\nuname\n" + uname -a + + printf "\n\nSETTINGS\n" + + printf "\nn\n" + echo "node mirror: ${N_NODE_MIRROR}" + echo "node downloads mirror: ${N_NODE_DOWNLOAD_MIRROR}" + echo "install destination: ${N_PREFIX}" + [[ -n "${N_PREFIX}" ]] && echo "PATH: ${PATH}" + echo "ls-remote max matches: ${N_MAX_REMOTE_MATCHES}" + [[ -n "${N_PRESERVE_NPM}" ]] && echo "installs preserve npm by default" + [[ -n "${N_PRESERVE_COREPACK}" ]] && echo "installs preserve corepack by default" + + printf "\nProxy\n" + # disable "var is referenced but not assigned": https://github.com/koalaman/shellcheck/wiki/SC2154 + # shellcheck disable=SC2154 + [[ -n "${http_proxy}" ]] && echo "http_proxy: ${http_proxy}" + # shellcheck disable=SC2154 + [[ -n "${https_proxy}" ]] && echo "https_proxy: ${https_proxy}" + if command -v curl &> /dev/null; then + # curl supports lower case and upper case! 
+ # shellcheck disable=SC2154 + [[ -n "${all_proxy}" ]] && echo "all_proxy: ${all_proxy}" + [[ -n "${ALL_PROXY}" ]] && echo "ALL_PROXY: ${ALL_PROXY}" + [[ -n "${HTTP_PROXY}" ]] && echo "HTTP_PROXY: ${HTTP_PROXY}" + [[ -n "${HTTPS_PROXY}" ]] && echo "HTTPS_PROXY: ${HTTPS_PROXY}" + if [[ -e "${CURL_HOME}/.curlrc" ]]; then + echo "have \$CURL_HOME/.curlrc" + elif [[ -e "${HOME}/.curlrc" ]]; then + echo "have \$HOME/.curlrc" + fi + elif command -v wget &> /dev/null; then + if [[ -e "${WGETRC}" ]]; then + echo "have \$WGETRC" + elif [[ -e "${HOME}/.wgetrc" ]]; then + echo "have \$HOME/.wgetrc" + fi + fi + + printf "\n\nCHECKS\n" + + printf "\nChecking n install destination is in PATH...\n" + local install_bin="${N_PREFIX}/bin" + local path_wth_guards=":${PATH}:" + if [[ "${path_wth_guards}" =~ :${install_bin}/?: ]]; then + printf "good\n" + else + echo_red "'${install_bin}' is not in PATH" + fi + if command -v node &> /dev/null; then + printf "\nChecking n install destination priority in PATH...\n" + local node_dir="$(dirname "$(command -v node)")" + + local index=0 + local path_entry + local path_entries + local install_bin_index=0 + local node_index=999 + IFS=':' read -ra path_entries <<< "${PATH}" + for path_entry in "${path_entries[@]}"; do + (( index++ )) + [[ "${path_entry}" =~ ^${node_dir}/?$ ]] && node_index="${index}" + [[ "${path_entry}" =~ ^${install_bin}/?$ ]] && install_bin_index="${index}" + done + if [[ "${node_index}" -lt "${install_bin_index}" ]]; then + echo_red "There is a version of node installed which will be found in PATH before the n installed version." + else + printf "good\n" + fi + fi + + # Check npm too. Simpler check than for PATH and node, more like the runtime logging for active/installed node. + if [[ -z "${N_PRESERVE_NPM}" ]]; then + printf "\nChecking npm install destination...\n" + local installed_npm="${N_PREFIX}/bin/npm" + local active_npm="$(command -v npm)" + if [[ -e "${active_npm}" && -e "${installed_npm}" && "${active_npm}" != "${installed_npm}" ]]; then + echo_red "There is an active version of npm shadowing the version installed by n. Check order of entries in PATH." + log "installed" "${installed_npm}" + log "active" "${active_npm}" + else + printf "good\n" + fi + fi + + printf "\nChecking permissions for cache folder...\n" + # Most likely problem is ownership rather than than permissions as such. + local cache_root="${N_PREFIX}/n" + if [[ -e "${N_PREFIX}" && ! -w "${N_PREFIX}" && ! -e "${cache_root}" ]]; then + echo_red "You do not have write permission to create: ${cache_root}" + show_permission_suggestions + echo "- make a folder you own:" + echo " sudo mkdir -p \"${cache_root}\"" + echo " sudo chown $(whoami) \"${cache_root}\"" + elif [[ -e "${cache_root}" && ! -w "${cache_root}" ]]; then + echo_red "You do not have write permission to: ${cache_root}" + show_permission_suggestions + echo "- change folder ownership to yourself:" + echo " sudo chown -R $(whoami) \"${cache_root}\"" + elif [[ ! -e "${cache_root}" ]]; then + echo "Cache folder does not exist: ${cache_root}" + echo "This is normal if you have not done an install yet, as cache is only created when needed." + elif [[ -e "${CACHE_DIR}" && ! 
-w "${CACHE_DIR}" ]]; then + echo_red "You do not have write permission to: ${CACHE_DIR}" + show_permission_suggestions + echo "- change folder ownership to yourself:" + echo " sudo chown -R $(whoami) \"${CACHE_DIR}\"" + else + echo "good" + fi + + if [[ -e "${N_PREFIX}" ]]; then + # Most likely problem is ownership rather than than permissions as such. + printf "\nChecking permissions for install folders...\n" + local install_writeable="true" + for subdir in bin lib include share; do + if [[ -e "${N_PREFIX}/${subdir}" && ! -w "${N_PREFIX}/${subdir}" ]]; then + install_writeable="false" + echo_red "You do not have write permission to: ${N_PREFIX}/${subdir}" + break + fi + done + if [[ "${install_writeable}" = "true" ]]; then + echo "good" + else + show_permission_suggestions + echo "- change folder ownerships to yourself:" + echo " (cd \"${N_PREFIX}\" && sudo chown -R $(whoami) bin lib include share)" + fi + fi + + printf "\nChecking mirror is reachable...\n" + if is_ok "${N_NODE_MIRROR}/"; then + printf "good\n" + else + echo_red "mirror not reachable" + printf "Showing failing command and output\n" + if command -v curl &> /dev/null; then + ( set -x; do_get --head "${N_NODE_MIRROR}/" ) + else + ( set -x; do_get --spider "${N_NODE_MIRROR}/" ) + printf "\n" + fi + fi +} + +# +# Handle arguments. +# + +# First pass. Process the options so they can come before or after commands, +# particularly for `n lsr --all` and `n install --arch x686` +# which feel pretty natural. + +unprocessed_args=() +positional_arg="false" + +while [[ $# -ne 0 ]]; do + case "$1" in + --all) N_MAX_REMOTE_MATCHES=32000 ;; + -V|--version) display_n_version ;; + -h|--help|help) display_help; exit ;; + -q|--quiet) set_quiet ;; + -d|--download) DOWNLOAD="true" ;; + --insecure) set_insecure ;; + -p|--preserve) N_PRESERVE_NPM="true" N_PRESERVE_COREPACK="true" ;; + --no-preserve) N_PRESERVE_NPM="" N_PRESERVE_COREPACK="" ;; + --use-xz) N_USE_XZ="true" ;; + --no-use-xz) N_USE_XZ="false" ;; + --latest) display_remote_versions latest; exit ;; + --stable) display_remote_versions lts; exit ;; # [sic] old terminology + --lts) display_remote_versions lts; exit ;; + -a|--arch) shift; set_arch "$1";; # set arch and continue + exec|run|as|use) + unprocessed_args+=( "$1" ) + positional_arg="true" + ;; + *) + if [[ "${positional_arg}" == "true" ]]; then + unprocessed_args+=( "$@" ) + break + fi + unprocessed_args+=( "$1" ) + ;; + esac + shift +done + +if [[ -z "${N_USE_XZ+defined}" ]]; then + N_USE_XZ="true" # Default to using xz + can_use_xz || N_USE_XZ="false" +fi + +set -- "${unprocessed_args[@]}" + +if test $# -eq 0; then + test -z "$(display_versions_paths)" && err_no_installed_print_help + menu_select_cache_versions +else + while test $# -ne 0; do + case "$1" in + bin|which) display_bin_path_for_version "$2"; exit ;; + run|as|use) shift; run_with_version "$@"; exit ;; + exec) shift; exec_with_version "$@"; exit ;; + doctor) show_diagnostics; exit ;; + rm|-) shift; remove_versions "$@"; exit ;; + prune) prune_cache; exit ;; + latest) install latest; exit ;; + stable) install stable; exit ;; + lts) install lts; exit ;; + ls|list) display_versions_paths; exit ;; + lsr|ls-remote|list-remote) shift; display_remote_versions "$1"; exit ;; + uninstall) uninstall_installed; exit ;; + i|install) shift; install "$1"; exit ;; + N_TEST_DISPLAY_LATEST_RESOLVED_VERSION) shift; get_latest_resolved_version "$1" > /dev/null || exit 2; echo "${g_target_node}"; exit ;; + *) install "$1"; exit ;; + esac + shift + done +fi diff --git 
a/hooks/backup/27-data_xmpp b/hooks/backup/27-data_xmpp index 2cd93e02b..253aacdc2 100644 --- a/hooks/backup/27-data_xmpp +++ b/hooks/backup/27-data_xmpp @@ -9,5 +9,5 @@ source /usr/share/yunohost/helpers # Backup destination backup_dir="${1}/data/xmpp" -ynh_backup /var/lib/metronome "${backup_dir}/var_lib_metronome" -ynh_backup /var/xmpp-upload/ "${backup_dir}/var_xmpp-upload" +[[ ! -d /var/lib/metronome ]] || ynh_backup /var/lib/metronome "${backup_dir}/var_lib_metronome" --not_mandatory +[[ ! -d /var/xmpp-upload ]] || ynh_backup /var/xmpp-upload/ "${backup_dir}/var_xmpp-upload" --not_mandatory diff --git a/hooks/conf_regen/01-yunohost b/hooks/conf_regen/01-yunohost index d0e6fb783..1d7a449e4 100755 --- a/hooks/conf_regen/01-yunohost +++ b/hooks/conf_regen/01-yunohost @@ -97,7 +97,7 @@ EOF # Cron job that upgrade the app list everyday cat >$pending_dir/etc/cron.daily/yunohost-fetch-apps-catalog < /dev/null) & +sleep \$((RANDOM%3600)); yunohost tools update apps > /dev/null EOF # Cron job that renew lets encrypt certificates if there's any that needs renewal @@ -178,9 +178,20 @@ do_post_regen() { chown root:admins /home/yunohost.backup/archives chown root:root /var/cache/yunohost + [ ! -e /var/www/.well-known/ynh-diagnosis/ ] || chmod 775 /var/www/.well-known/ynh-diagnosis/ + # NB: x permission for 'others' is important for ssl-cert (and maybe mdns), otherwise slapd will fail to start because can't access the certs chmod 755 /etc/yunohost + find /etc/systemd/system/*.service -type f | xargs -r chown root:root + find /etc/systemd/system/*.service -type f | xargs -r chmod 0644 + + if ls -l /etc/php/*/fpm/pool.d/*.conf + then + chown root:root /etc/php/*/fpm/pool.d/*.conf + chmod 644 /etc/php/*/fpm/pool.d/*.conf + fi + # Certs # We do this with find because there could be a lot of them... chown -R root:ssl-cert /etc/yunohost/certs diff --git a/hooks/conf_regen/15-nginx b/hooks/conf_regen/15-nginx index 28d9e90fb..9eabcd8b7 100755 --- a/hooks/conf_regen/15-nginx +++ b/hooks/conf_regen/15-nginx @@ -144,6 +144,12 @@ do_pre_regen() { do_post_regen() { regen_conf_files=$1 + if ls -l /etc/nginx/conf.d/*.d/*.conf + then + chown root:root /etc/nginx/conf.d/*.d/*.conf + chmod 644 /etc/nginx/conf.d/*.d/*.conf + fi + [ -z "$regen_conf_files" ] && exit 0 # create NGINX conf directories for domains diff --git a/hooks/conf_regen/52-fail2ban b/hooks/conf_regen/52-fail2ban index d463892c7..db3cf0da7 100755 --- a/hooks/conf_regen/52-fail2ban +++ b/hooks/conf_regen/52-fail2ban @@ -24,6 +24,12 @@ do_pre_regen() { do_post_regen() { regen_conf_files=$1 + if ls -l /etc/fail2ban/jail.d/*.conf + then + chown root:root /etc/fail2ban/jail.d/*.conf + chmod 644 /etc/fail2ban/jail.d/*.conf + fi + [[ -z "$regen_conf_files" ]] \ || systemctl reload fail2ban } diff --git a/hooks/restore/27-data_xmpp b/hooks/restore/27-data_xmpp index 02a4c6703..f07ac6a33 100644 --- a/hooks/restore/27-data_xmpp +++ b/hooks/restore/27-data_xmpp @@ -1,4 +1,11 @@ backup_dir="$1/data/xmpp" -cp -a $backup_dir/var_lib_metronome/. /var/lib/metronome -cp -a $backup_dir/var_xmpp-upload/. /var/xmpp-upload +if [[ -e $backup_dir/var_lib_metronome/ ]] +then + cp -a $backup_dir/var_lib_metronome/. /var/lib/metronome +fi + +if [[ -e $backup_dir/var_xmpp-upload ]] +then + cp -a $backup_dir/var_xmpp-upload/. 
/var/xmpp-upload +fi diff --git a/locales/ar.json b/locales/ar.json index e34bb810b..2d3e1381e 100644 --- a/locales/ar.json +++ b/locales/ar.json @@ -245,5 +245,17 @@ "migration_0021_main_upgrade": "بداية التحديث الرئيسي…", "migration_0021_patching_sources_list": "تحديث ملف sources.lists…", "pattern_firstname": "يجب أن يكون اسماً أولياً صالحاً (على الأقل 3 حروف)", - "yunohost_configured": "تم إعداد YunoHost الآن" + "yunohost_configured": "تم إعداد YunoHost الآن", + "global_settings_setting_backup_compress_tar_archives": "ضغط النُسخ الاحتياطية", + "diagnosis_description_apps": "التطبيقات", + "danger": "خطر:", + "diagnosis_basesystem_hardware": "بنية الخادم هي {virt} {arch}", + "diagnosis_basesystem_hardware_model": "طراز الخادم {model}", + "diagnosis_mail_queue_ok": "هناك {nb_pending} رسائل بريد إلكتروني معلقة في قوائم انتظار البريد", + "diagnosis_mail_ehlo_ok": "يمكن الوصول إلى خادم بريد SMTP من الخارج وبالتالي فهو قادر على استقبال رسائل البريد الإلكتروني!", + "diagnosis_dns_good_conf": "تم إعداد سجلات نظام أسماء النطاقات DNS بشكل صحيح للنطاق {domain} (category {category})", + "diagnosis_ip_dnsresolution_working": "تحليل اسم النطاق يعمل!", + "diagnosis_mail_blacklist_listed_by": "إن عنوان الـ IP الخاص بك أو نطاقك {item} مُدرَج ضمن قائمة سوداء على {blacklist_name}", + "diagnosis_mail_outgoing_port_25_ok": "خادم بريد SMTP قادر على إرسال رسائل البريد الإلكتروني (منفذ البريد الصادر 25 غير محظور).", + "user_already_exists": "المستخدم '{user}' موجود مِن قَبل" } \ No newline at end of file diff --git a/locales/de.json b/locales/de.json index 8eefa7cd9..b61d0a431 100644 --- a/locales/de.json +++ b/locales/de.json @@ -692,5 +692,17 @@ "domain_config_cert_summary_abouttoexpire": "Das aktuelle Zertifikat läuft bald ab. Es sollte bald automatisch erneuert werden.", "domain_config_cert_summary_expired": "ACHTUNG: Das aktuelle Zertifikat ist nicht gültig! HTTPS wird gar nicht funktionieren!", "domain_config_cert_summary_letsencrypt": "Toll! Sie benutzen ein gültiges Let's Encrypt-Zertifikat!", - "domain_config_cert_summary_ok": "Gut, das aktuelle Zertifikat sieht gut aus!" + "domain_config_cert_summary_ok": "Gut, das aktuelle Zertifikat sieht gut aus!", + "app_change_url_require_full_domain": "{app} kann nicht auf diese neue URL verschoben werden, weil sie eine vollständige eigene Domäne benötigt (z.B. mit Pfad = /)", + "app_not_upgraded_broken_system_continue": "Die App '{failed_app}' konnte nicht aktualisiert werden und hat Ihr System in einen beschädigten Zustand versetzt (folglich wird --continue-on-failure ignoriert) und als Konsequenz wurde die Aktualisierung der folgenden Apps abgelehnt: {apps}", + "app_yunohost_version_not_supported": "Diese App setzt YunoHost >= {required} voraus aber die gegenwärtig installierte Version ist {current}", + "app_failed_to_upgrade_but_continue": "Die App {failed_app} konnte nicht aktualisiert werden und es wird anforderungsgemäss zur nächsten Aktualisierung fortgefahren. 
Starten sie 'yunohost log show {operation_logger_name}' um den Fehlerbericht zu sehen", + "app_not_upgraded_broken_system": "Die App '{failed_app}' konnte nicht aktualisiert werden und hat Ihr System in einen beschädigten Zustand versetzt und als Konzequenz wurde die Aktualisierung der folgenden Apps abgelehnt: {apps}", + "apps_failed_to_upgrade": "Diese Apps konnten nicht aktualisiert werden: {apps}", + "app_arch_not_supported": "Diese App kann nur auf bestimmten Architekturen {required} installiert werden, aber Ihre gegenwärtige Serverarchitektur ist {current}", + "app_not_enough_disk": "Diese App benötigt {required} freien Speicherplatz.", + "app_not_enough_ram": "Diese App benötigt {required} RAM um installiert/aktualisiert zu werden, aber es sind aktuell nur {current} verfügbar.", + "app_change_url_failed": "Kann die URL für {app} nicht ändern: {error}", + "app_change_url_script_failed": "Es ist ein Fehler im URL-Änderungs-Script aufgetreten", + "app_resource_failed": "Automatische Ressourcen-Allokation (provisioning), die Unterbindung des Zugriffts auf Ressourcen (deprovisioning) oder die Aktualisierung der Ressourcen für {app} schlug fehl: {error}" } \ No newline at end of file diff --git a/locales/en.json b/locales/en.json index 01be5d906..af5b5795e 100644 --- a/locales/en.json +++ b/locales/en.json @@ -26,7 +26,9 @@ "app_change_url_success": "{app} URL is now {domain}{path}", "app_config_unable_to_apply": "Failed to apply config panel values.", "app_config_unable_to_read": "Failed to read config panel values.", + "app_corrupt_source": "YunoHost was able to download the asset '{source_id}' ({url}) for {app}, but the asset doesn't match the expected checksum. This could mean that some temporary network failure happened on your server, OR the asset was somehow changed by the upstream maintainer (or a malicious actor?) and YunoHost packagers need to investigate and update the app manifest to reflect this change.\n Expected sha256 checksum: {expected_sha256}\n Downloaded sha256 checksum: {computed_sha256}\n Downloaded file size: {size}", "app_extraction_failed": "Could not extract the installation files", + "app_failed_to_download_asset": "Failed to download asset '{source_id}' ({url}) for {app}: {out}", "app_failed_to_upgrade_but_continue": "App {failed_app} failed to upgrade, continue to next upgrades as requested. Run 'yunohost log show {operation_logger_name}' to see failure log", "app_full_domain_unavailable": "Sorry, this app must be installed on a domain of its own, but other apps are already installed on the domain '{domain}'. 
You could use a subdomain dedicated to this app instead.", "app_id_invalid": "Invalid app ID", @@ -465,13 +467,17 @@ "group_creation_failed": "Could not create the group '{group}': {error}", "group_deleted": "Group '{group}' deleted", "group_deletion_failed": "Could not delete the group '{group}': {error}", + "group_mailalias_add": "The email alias '{mail}' will be added to the group '{group}'", + "group_mailalias_remove": "The email alias '{mail}' will be removed from the group '{group}'", "group_no_change": "Nothing to change for group '{group}'", "group_unknown": "The group '{group}' is unknown", "group_update_aliases": "Updating aliases for group '{group}'", "group_update_failed": "Could not update the group '{group}': {error}", "group_updated": "Group '{group}' updated", + "group_user_add": "The user '{user}' will be added to the group '{group}'", "group_user_already_in_group": "User {user} is already in group {group}", "group_user_not_in_group": "User {user} is not in group {group}", + "group_user_remove": "The user '{user}' will be removed from the group '{group}'", "hook_exec_failed": "Could not run script: {path}", "hook_exec_not_terminated": "Script did not finish properly: {path}", "hook_json_return_error": "Could not read return from hook {path}. Error: {msg}. Raw content: {raw_content}", @@ -570,7 +576,20 @@ "migration_0024_rebuild_python_venv_disclaimer_rebuild": "Rebuilding the virtualenv will be attempted for the following apps (NB: the operation may take some time!): {rebuild_apps}", "migration_0024_rebuild_python_venv_failed": "Failed to rebuild the Python virtualenv for {app}. The app may not work as long as this is not resolved. You should fix the situation by forcing the upgrade of this app using `yunohost app upgrade --force {app}`.", "migration_0024_rebuild_python_venv_in_progress": "Now attempting to rebuild the Python virtualenv for `{app}`", - "migration_description_0021_migrate_to_bullseye": "Upgrade the system to Debian Bullseye and YunoHost 11.x", + "migration_0027_cleaning_up": "Cleaning up cache and packages not useful anymore...", + "migration_0027_general_warning": "Please note that this migration is a delicate operation. The YunoHost team did its best to review and test it, but the migration might still break parts of the system or its apps.\n\nTherefore, it is recommended to:\n - Perform a backup of any critical data or app. More info on https://yunohost.org/backup;\n - Be patient after launching the migration: Depending on your Internet connection and hardware, it might take up to a few hours for everything to upgrade.", + "migration_0027_main_upgrade": "Starting main upgrade...", + "migration_0027_modified_files": "Please note that the following files were found to be manually modified and might be overwritten following the upgrade: {manually_modified_files}", + "migration_0027_not_buster2": "The current Debian distribution is not Bullseye! If you already ran the Bullseye->Bookworm migration, then this error is symptomatic of the fact that the migration procedure was not 100% successful (otherwise YunoHost would have flagged it as completed). It is recommended to investigate what happened with the support team, who will need the **full** log of the `migration`, which can be found in Tools > Logs in the webadmin.", + "migration_0027_not_enough_free_space": "Free space is pretty low in /var/!
You should have at least 1GB free to run this migration.", + "migration_0027_patch_yunohost_conflicts": "Applying patch to workaround conflict issue...", + "migration_0027_patching_sources_list": "Patching the sources.lists...", + "migration_0027_problematic_apps_warning": "Please note that the following possibly problematic installed apps were detected. It looks like those were not installed from the YunoHost app catalog, or are not flagged as 'working'. Consequently, it cannot be guaranteed that they will still work after the upgrade: {problematic_apps}", + "migration_0027_start": "Starting migration to Bookworm", + "migration_0027_still_on_buster_after_main_upgrade": "Something went wrong during the main upgrade, the system appears to still be on Debian Bullseye", + "migration_0027_system_not_fully_up_to_date": "Your system is not fully up-to-date. Please perform a regular upgrade before running the migration to Bookworm.", + "migration_0027_yunohost_upgrade": "Starting YunoHost core upgrade...", + "migration_description_0021_migrate_to_bullseye": "Upgrade the system to Debian Bookworm and YunoHost 12", "migration_description_0022_php73_to_php74_pools": "Migrate php7.3-fpm 'pool' conf files to php7.4", "migration_description_0023_postgresql_11_to_13": "Migrate databases from PostgreSQL 11 to 13", "migration_description_0024_rebuild_python_venv": "Repair Python app after bullseye migration", diff --git a/locales/eu.json b/locales/eu.json index 675449fd3..0d424e6ca 100644 --- a/locales/eu.json +++ b/locales/eu.json @@ -168,7 +168,7 @@ "diagnosis_failed": "Ezinezkoa izan da '{category}' ataleko diagnostikoa lortzea: {error}", "diagnosis_ip_weird_resolvconf": "DNS ebazpena badabilela dirudi, baina antza denez moldatutako /etc/resolv.conf fitxategia erabiltzen ari zara.", "diagnosis_dns_bad_conf": "DNS balio batzuk falta dira edo ez dira zuzenak {domain} domeinurako ({category} atala)", - "diagnosis_diskusage_ok": "{mountpoint} fitxategi-sistemak ({device} euskarrian) edukieraren {free} (%{free_percent}a) ditu erabilgarri oraindik ({total} orotara)!", + "diagnosis_diskusage_ok": "{mountpoint} fitxategi-sistemak ({device} euskarrian) edukieraren {free} (%{free_percent}a) ditu oraindik erabilgarri ({total} orotara)!", "apps_catalog_update_success": "Aplikazioen katalogoa eguneratu da!", "certmanager_warning_subdomain_dns_record": "'{subdomain}' azpidomeinuak ez dauka '{domain}'(e)k duen IP bera. Ezaugarri batzuk ez dira erabilgarri egongo hau zuzendu arte eta ziurtagiri bat birsortu arte.", "app_argument_choice_invalid": "Hautatu ({choices}) aukeretako bat '{name}' argumenturako: '{value}' ez dago aukera horien artean", @@ -230,7 +230,7 @@ "certmanager_attempt_to_replace_valid_cert": "{domain} domeinurako egokia eta baliogarria den ziurtagiri bat ordezkatzen saiatzen ari zara! (Erabili --force mezu hau deuseztatu eta ziurtagiria ordezkatzeko)", "diagnosis_backports_in_sources_list": "Dirudienez apt (pakete kudeatzailea) backports biltegia erabiltzeko konfiguratuta dago. 
Zertan ari zaren ez badakizu, ez zenuke backports biltegietako aplikaziorik instalatu beharko, ezegonkortasun eta gatazkak eragin ditzaketelako sistemarekin.", "app_restore_failed": "Ezinezkoa izan da {app} lehengoratzea: {error}", - "diagnosis_apps_allgood": "Instalatutako aplikazioek oinarrizko pakete-jarraibideekin bat egiten dute", + "diagnosis_apps_allgood": "Instalatutako aplikazioak bat datoz oinarrizko pakete-jarraibideekin", "diagnosis_apps_bad_quality": "Aplikazio hau hondatuta dagoela dio YunoHosten aplikazioen katalogoak. Agian behin-behineko kontua da arduradunak arazoa konpondu bitartean. Oraingoz, ezin da aplikazioa eguneratu.", "diagnosis_apps_broken": "Aplikazio hau YunoHosten aplikazioen katalogoan hondatuta dagoela ageri da. Agian behin-behineko kontua da arduradunak konpondu bitartean. Oraingoz, ezin da aplikazioa eguneratu.", "diagnosis_apps_deprecated_practices": "Instalatutako aplikazio honen bertsioak oraindik darabiltza zaharkitutako pakete-jarraibideak. Eguneratzea hausnartu beharko zenuke.", @@ -711,7 +711,7 @@ "domain_config_cert_summary": "Ziurtagiriaren egoera", "domain_config_cert_summary_abouttoexpire": "Uneko ziurtagiria iraungitzear dago. Aurki berritu beharko litzateke automatikoki.", "domain_config_cert_summary_letsencrypt": "Primeran! Baliozko Let's Encrypt zirutagiria erabiltzen ari zara!", - "domain_config_cert_summary_ok": "Ados, uneko ziurtagiriak itzura ona du!", + "domain_config_cert_summary_ok": "Ados, uneko ziurtagiriak itxura ona du!", "domain_config_cert_validity": "Balizokotasuna", "global_settings_setting_admin_strength_help": "Betekizun hauek pasahitza lehenbizikoz sortzerakoan edo aldatzerakoan baino ez dira bete behar", "global_settings_setting_nginx_compatibility": "NGINXekin bateragarritasuna", @@ -752,5 +752,15 @@ "global_settings_setting_dns_exposure": "DNS ezarpenetan eta diagnostikoan kontuan hartzeko IP bertsioak", "global_settings_setting_dns_exposure_help": "Ohart ongi: honek gomendatutako DNS ezarpenei eta diagnostikoari eragiten die soilik. Ez du eraginik sistemaren ezarpenetan.", "diagnosis_ip_no_ipv6_tip_important": "IPv6 automatikoki ezarri ohi du sistemak edo hornitzaileak erabilgarri baldin badago. Bestela eskuz ezarri beharko dituzu aukera batzuk ondorengo dokumentazioan azaldu bezala: https://yunohost.org/#/ipv6.", - "pattern_fullname": "Baliozko izen oso bat izan behar da (gutxienez hiru karaktere)" + "pattern_fullname": "Baliozko izen oso bat izan behar da (gutxienez hiru karaktere)", + "app_change_url_failed": "Ezin izan da {app} aplikazioaren URLa aldatu: {error}", + "app_change_url_require_full_domain": "Ezin da {app} aplikazioa URL berri honetara aldatu domeinu oso bat behar duelako (i.e. with path = /)", + "app_change_url_script_failed": "Errorea gertatu da URLa aldatzeko aginduaren barnean", + "app_corrupt_source": "YunoHostek deskargatu du {app} aplikaziorako '{source_id}' ({url}) baliabidea baina ez dator bat espero zen 'checksum'arekin. Agian zerbitzariak interneteko konexioa galdu du tarte batez, EDO baliabidea nolabait moldatua izan da arduradunaren aldetik (edo partehartzaile maltzur batetik?) 
eta YunoHosten arduradunek egoera aztertu eta aplikazioaren manifestua eguneratu behar dute aldaketa hau kontuan hartzeko.\n Espero zen sha256 checksuma: {expected_sha256}\n Deskargatutakoaren sha256 checksuma: {computed_sha256}\n Deskargatutako fitxategiaren tamaina: {size}", + "app_failed_to_upgrade_but_continue": "{failed_app} aplikazioaren bertsio-berritzeak huts egin du, jarraitu hurrengo bertsio-berritzeetara eskatu bezala. Exekutatu 'yunohost log show {operation_logger_name}' errorearen erregistroa ikusteko", + "app_not_upgraded_broken_system": "{failed_app} aplikazioaren bertsio-berritzeak huts egin du eta sistema hondatu du, beraz, ondorengo aplikazioen bertsio-berritzeak ezeztatu dira: {apps}", + "app_not_upgraded_broken_system_continue": "{failed_app} aplikazioaren bertsio-berritzeak huts egin du eta sistema hondatu du (beraz, --continue-on-failure aukerari muzin egin zaio) eta ondorengo aplikazioen bertsio-berritzeak ezeztatu dira: {apps}", + "app_failed_to_download_asset": "{app} aplikaziorako '{source_id}' ({url}) baliabidea deskargatzeak huts egin du: {out}", + "apps_failed_to_upgrade": "Aplikazio hauen bertsio-berritzeak huts egin du: {apps}", + "apps_failed_to_upgrade_line": "\n * {app_id} (dagokion erregistroa ikusteko, exekutatu 'yunohost log show {operation_logger_name}')" } \ No newline at end of file diff --git a/locales/fr.json b/locales/fr.json index cf50488cc..f98470c99 100644 --- a/locales/fr.json +++ b/locales/fr.json @@ -755,5 +755,16 @@ "domain_config_xmpp_help": "NB : certaines fonctions XMPP nécessiteront la mise à jour de vos enregistrements DNS et la régénération de votre certificat Lets Encrypt pour être activées", "app_change_url_failed": "Impossible de modifier l'url de {app} : {error}", "app_change_url_require_full_domain": "{app} ne peut pas être déplacée vers cette nouvelle URL car elle nécessite un domaine complet (c'est-à-dire avec un chemin = /)", - "app_change_url_script_failed": "Une erreur s'est produite dans le script de modification de l'url" -} \ No newline at end of file + "app_change_url_script_failed": "Une erreur s'est produite dans le script de modification de l'url", + "app_failed_to_upgrade_but_continue": "La mise à jour de l'application {failed_app} a échoué, mais YunoHost va continuer avec les mises à jour suivantes comme demandé. Lancez 'yunohost log show {operation_logger_name}' pour voir le journal des échecs", + "app_not_upgraded_broken_system_continue": "L'application '{failed_app}' n'a pas réussi à se mettre à jour et a mis le système dans un état alternatif car quelque chose est au moins momentanément \"cassé\" (le paramètre --continue-on-failure est donc ignoré). La conséquence est que les mises à jour des applications suivantes ont été annulées : {apps}", + "app_not_upgraded_broken_system": "L'application '{failed_app}' n'a pas réussi à se mettre à jour et a mis le système dans un état de panne. Par conséquent, les mises à niveau des applications suivantes ont été annulées : {apps}", + "apps_failed_to_upgrade": "Ces applications n'ont pas pu être mises à jour : {apps}", + "apps_failed_to_upgrade_line": "\n * {app_id} (pour voir le journal correspondant, faites un 'yunohost log show {operation_logger_name}')", + "app_failed_to_download_asset": "Échec du téléchargement de la ressource '{source_id}' ({url}) pour {app} : {out}", + "app_corrupt_source": "YunoHost a pu télécharger la ressource '{source_id}' ({url}) pour {app}, malheureusement celle-ci ne correspond pas à la somme de contrôle attendue. 
Cela peut signifier qu'une défaillance temporaire du réseau s'est produite sur votre serveur, OU que la ressource a été modifiée par le mainteneur de l'application en amont (ou un acteur malveillant ?) et que les responsables du paquet de cette application pour YunoHost doivent investiguer et mettre à jour le manifeste de l'application pour refléter ce changement.\n Somme de contrôle sha256 attendue : {expected_sha256}\n Somme de contrôle sha256 téléchargée : {computed_sha256}\n Taille du fichier téléchargé : {size}", + "group_mailalias_add": "L'alias de courrier électronique '{mail}' sera ajouté au groupe '{group}'", + "group_user_add": "L'utilisateur '{user}' sera ajouté au groupe '{group}'", + "group_user_remove": "L'utilisateur '{user}' sera retiré du groupe '{group}'", + "group_mailalias_remove": "L'alias de courrier électronique '{mail}' sera supprimé du groupe '{group}'" +} diff --git a/locales/gl.json b/locales/gl.json index 80a94407f..b8b6e5cd0 100644 --- a/locales/gl.json +++ b/locales/gl.json @@ -752,5 +752,15 @@ "domain_config_xmpp_help": "Nota: algunhas características de XMPP para ser utilizadas precisan que teñas ao día os rexistros DNS e rexeneres os certificados Lets Encrypt", "app_change_url_failed": "Non se cambiou o url para {app}: {error}", "app_change_url_require_full_domain": "{app} non se pode mover a este novo URL porque require un dominio completo propio (ex. con ruta = /)", - "app_change_url_script_failed": "Algo fallou ao executar o script de cambio de url" + "app_change_url_script_failed": "Algo fallou ao executar o script de cambio de url", + "apps_failed_to_upgrade_line": "\n * {app_id} (para ver o rexistro correspondente executa 'yunohost log show {operation_logger_name}')", + "app_failed_to_upgrade_but_continue": "Fallou a actualización de {failed_app}, seguimos coas demáis actualizacións. Executa 'yunohost log show {operation_logger_name}' para ver o rexistro do fallo", + "app_not_upgraded_broken_system": "Fallou a actualización de '{failed_app}' e estragou o sistema, como consecuencia cancelouse a actualización das seguintes apps: {apps}", + "app_not_upgraded_broken_system_continue": "Fallou a actualización de '{failed_app}' e estragou o sistema (polo que ignórase --continue-on-failure), como consecuencia cancelouse a actualización das seguintes apps: {apps}", + "apps_failed_to_upgrade": "Fallou a actualización das seguintes aplicacións:{apps}", + "invalid_shell": "Intérprete de ordes non válido: {shell}", + "log_resource_snippet": "Aprovisionamento/desaprovisionamento/actualización dun recurso", + "app_resource_failed": "Fallou o aprovisionamento, desaprovisionamento ou actualización de recursos para {app}: {error}", + "app_failed_to_download_asset": "Fallou a descarga do recurso '{source_id}' ({url}) para {app}: {out}", + "app_corrupt_source": "YunoHost foi quen de descargar o recurso '{source_id}' ({url}) para {app}, pero a suma de comprobación para o recurso non concorda. 
Pode significar que houbo un fallo temporal na conexión do servidor á rede, OU que o recurso sufreu, dalgún xeito, cambios desde que os desenvolvedores orixinais (ou unha terceira parte maliciosa?), o equipo de YunoHost ten que investigar e actualizar o manifesto da app para mostrar este cambio.\n Suma sha256 agardada: {expected_sha256} \n Suma sha256 do descargado: {computed_sha256}\n Tamaño do ficheiro: {size}" } \ No newline at end of file diff --git a/locales/id.json b/locales/id.json index 722d88dd2..c6b023102 100644 --- a/locales/id.json +++ b/locales/id.json @@ -5,19 +5,19 @@ "app_already_installed": "{app} sudah terpasang", "app_already_up_to_date": "{app} sudah dalam versi mutakhir", "app_argument_required": "Argumen '{name}' dibutuhkan", - "app_change_url_identical_domains": "Domain)url_path yang lama dan baru identik ('{domain}{path}'), tak ada yang perlu dilakukan.", + "app_change_url_identical_domains": "Domain/url_path yang lama dan baru identik ('{domain}{path}'), tak ada yang perlu dilakukan.", "app_change_url_no_script": "Aplikasi '{app_name}' belum mendukung pengubahan URL. Mungkin Anda harus memperbaruinya.", "app_change_url_success": "URL {app} sekarang adalah {domain}{path}", "app_id_invalid": "ID aplikasi tidak sah", "app_install_failed": "Tidak dapat memasang {app}: {error}", - "app_install_files_invalid": "Berkas-berkas ini tidak dapat dipasang", - "app_install_script_failed": "Sebuah kesalahan terjadi pada script pemasangan aplikasi", + "app_install_files_invalid": "Berkas ini tidak dapat dipasang", + "app_install_script_failed": "Sebuah kesalahan terjadi pada skrip pemasangan aplikasi", "app_manifest_install_ask_admin": "Pilih seorang administrator untuk aplikasi ini", "app_manifest_install_ask_domain": "Pilih di domain mana aplikasi ini harus dipasang", "app_not_installed": "Tidak dapat menemukan {app} di daftar aplikasi yang terpasang: {all_apps}", - "app_not_properly_removed": "{app} belum dihapus dengan benar", - "app_remove_after_failed_install": "Menghapus aplikasi mengikuti kegagalan pemasangan...", - "app_removed": "{app} dihapus", + "app_not_properly_removed": "{app} belum dilepas dengan benar", + "app_remove_after_failed_install": "Melepas aplikasi setelah kegagalan pemasangan...", + "app_removed": "{app} dilepas", "app_restore_failed": "Tidak dapat memulihkan {app}: {error}", "app_upgrade_some_app_failed": "Beberapa aplikasi tidak dapat diperbarui", "app_upgraded": "{app} diperbarui", @@ -35,30 +35,361 @@ "app_upgrade_app_name": "Memperbarui {app}...", "app_upgrade_failed": "Tidak dapat memperbarui {app}: {error}", "app_start_install": "Memasang {app}...", - "app_start_remove": "Menghapus {app}...", + "app_start_remove": "Melepas {app}...", "app_manifest_install_ask_password": "Pilih kata sandi administrasi untuk aplikasi ini", - "app_upgrade_several_apps": "Aplikasi-aplikasi berikut akan diperbarui: {apps}", + "app_upgrade_several_apps": "Aplikasi berikut akan diperbarui: {apps}", "backup_app_failed": "Tidak dapat mencadangkan {app}", "backup_archive_name_exists": "Arsip cadangan dengan nama ini sudah ada.", - "backup_created": "Cadangan dibuat", + "backup_created": "Cadangan dibuat: {name}", "backup_creation_failed": "Tidak dapat membuat arsip cadangan", "backup_delete_error": "Tidak dapat menghapus '{path}'", - "backup_deleted": "Cadangan dihapus", + "backup_deleted": "Cadangan dihapus: {name}", "diagnosis_apps_issue": "Sebuah masalah ditemukan pada aplikasi {app}", "backup_applying_method_tar": "Membuat arsip TAR cadangan...", - 
"backup_method_tar_finished": "Arsip TAR cadanagan dibuat", + "backup_method_tar_finished": "Arsip TAR cadangan dibuat", "backup_nothings_done": "Tak ada yang harus disimpan", "certmanager_cert_install_success": "Sertifikat Let's Encrypt sekarang sudah terpasang pada domain '{domain}'", "backup_mount_archive_for_restore": "Menyiapkan arsip untuk pemulihan...", "aborting": "Membatalkan.", - "action_invalid": "Tindakan tidak sah '{action}'", + "action_invalid": "Tindakan tidak valid '{action}'", "app_action_cannot_be_ran_because_required_services_down": "Layanan yang dibutuhkan ini harus aktif untuk menjalankan tindakan ini: {services}. Coba memulai ulang layanan tersebut untuk melanjutkan (dan mungkin melakukan penyelidikan mengapa layanan tersebut nonaktif).", - "app_argument_choice_invalid": "Pilih nilai yang sah untuk argumen '{name}': '{value}' tidak termasuk pada pilihan yang tersedia ({choices})", - "app_argument_invalid": "Pilih nilai yang sah untuk argumen '{name}': {error}", + "app_argument_choice_invalid": "Pilih yang valid untuk argumen '{name}': '{value}' tidak termasuk pada pilihan yang tersedia ({choices})", + "app_argument_invalid": "Pilih yang valid untuk argumen '{name}': {error}", "app_extraction_failed": "Tidak dapat mengekstrak berkas pemasangan", "app_full_domain_unavailable": "Maaf, aplikasi ini harus dipasang pada domain sendiri, namun aplikasi lain sudah terpasang pada domain '{domain}'. Anda dapat menggunakan subdomain hanya untuk aplikasi ini.", - "app_location_unavailable": "URL ini mungkin tidak tersedia, atau terjadi konflik dengan aplikasi yang telah terpasang:\n{apps}", - "app_not_upgraded": "Aplikasi '{failed_app}' gagal diperbarui, oleh karena itu aplikasi-aplikasi berikut juga dibatalkan: {apps}", + "app_location_unavailable": "URL ini mungkin tidak tersedia atau terjadi konflik dengan aplikasi yang telah terpasang:\n{apps}", + "app_not_upgraded": "Aplikasi '{failed_app}' gagal diperbarui, oleh karena itu pembaruan aplikasi berikut juga dibatalkan: {apps}", "app_config_unable_to_apply": "Gagal menerapkan nilai-nilai panel konfigurasi.", - "app_config_unable_to_read": "Gagal membaca nilai-nilai panel konfigurasi." -} \ No newline at end of file + "app_config_unable_to_read": "Gagal membaca nilai-nilai panel konfigurasi.", + "permission_cannot_remove_main": "Menghapus izin utama tidak diperbolehkan", + "service_description_postgresql": "Menyimpan data aplikasi (basis data SQL)", + "restore_already_installed_app": "Aplikasi dengan ID '{app}' telah terpasang", + "app_change_url_require_full_domain": "{app} tidak dapat dipindah ke URL baru ini karena ini memerlukan domain penuh (tanpa jalur = /)", + "app_change_url_script_failed": "Galat terjadi di skrip pengubahan URL", + "app_not_enough_disk": "Aplikasi ini memerlukan {required} ruang kosong.", + "app_not_enough_ram": "Aplikasi ini memerlukan {required} RAM untuk pemasangan/pembaruan, tapi sekarang hanya tersedia {current} saja.", + "app_packaging_format_not_supported": "Aplikasi ini tidak dapat dipasang karena format pengemasan tidak didukung oleh YunoHost versi Anda. 
Anda sebaiknya memperbarui sistem Anda.", + "ask_admin_username": "Nama pengguna admin", + "backup_archive_broken_link": "Tidak dapat mengakses arsip cadangan (tautan rusak untuk {path})", + "backup_archive_open_failed": "Tidak dapat membuka arsip cadangan", + "certmanager_cert_install_success_selfsigned": "Sertifikat ditandai sendiri sekarang terpasang untuk '{domain}'", + "certmanager_cert_renew_failed": "Pembaruan ulang sertifikat Let's Encrypt gagal untuk {domains}", + "certmanager_cert_renew_success": "Sertifikat Let's Encrypt diperbarui untuk domain '{domain}'", + "diagnosis_apps_allgood": "Semua aplikasi yang dipasang mengikuti panduan penyusunan yang baik", + "diagnosis_basesystem_kernel": "Peladen memakai kernel Linux {kernel_version}", + "diagnosis_cache_still_valid": "(Tembolok masih valid untuk diagnosis {category}. Belum akan didiagnosis ulang!)", + "diagnosis_description_dnsrecords": "Rekaman DNS", + "diagnosis_description_ip": "Konektivitas internet", + "diagnosis_description_web": "Web", + "diagnosis_domain_expiration_error": "Beberapa domain akan kedaluwarsa SEGERA!", + "diagnosis_domain_expiration_not_found_details": "Informasi WHOIS untuk domain {domain} sepertinya tidak mengandung informasi tentang tanggal kedaluwarsa?", + "diagnosis_domain_expiration_warning": "Beberapa domain akan kedaluwarsa!", + "diagnosis_domain_expires_in": "{domain} kedaluwarsa dalam {days} hari.", + "diagnosis_everything_ok": "Sepertinya semuanya bagus untuk {category}!", + "diagnosis_ip_no_ipv6_tip": "Memiliki IPv6 tidaklah wajib agar sistem Anda bekerja, tapi itu akan membuat internet lebih sehat. IPv6 biasanya secara otomatis akan dikonfigurasikan oleh sistem atau penyedia peladen Anda jika tersedia. Jika belum dikonfigurasi, Anda mungkin harus mengonfigurasi beberapa hal secara manual seperti yang dijelaskan di dokumentasi di sini: https://yunohost.org/#/ipv6. Jika Anda tidak dapat mengaktifkan IPv6 atau terlalu rumit buat Anda, Anda bisa mengabaikan peringatan ini.", + "diagnosis_ip_no_ipv6_tip_important": "IPv6 biasanya secara otomatis akan dikonfigurasikan oleh sistem atau penyedia peladen Anda jika tersedia. 
Jika belum dikonfigurasi, Anda mungkin harus mengonfigurasi beberapa hal secara manual seperti yang dijelaskan di dokumentasi di sini: https://yunohost.org/#/ipv6.", + "diagnosis_ip_not_connected_at_all": "Peladen ini sepertinya tidak terhubung dengan internet sama sekali?", + "diagnosis_mail_queue_unavailable_details": "Galat: {error}", + "global_settings_setting_root_password_confirm": "Kata sandi root baru (konfirmasi)", + "global_settings_setting_smtp_allow_ipv6": "Perbolehkan IPv6", + "global_settings_setting_ssh_port": "Porta SSH", + "log_app_change_url": "Mengubah URL untuk aplikasi '{}'", + "log_app_config_set": "Menerapkan konfigurasi untuk aplikasi '{}'", + "log_app_install": "Memasang aplikasi '{}'", + "log_app_makedefault": "Membuat '{}' sebagai aplikasi baku", + "log_app_remove": "Melepas aplikasi '{}'", + "log_app_upgrade": "Memperbarui aplikasi '{}'", + "log_available_on_yunopaste": "Log ini sekarang sudah tersedia di {url}", + "log_backup_create": "Membuat arsip cadangan", + "log_backup_restore_app": "Memulihkan '{}' dari arsip cadangan", + "log_backup_restore_system": "Memulihkan sistem dari arsip cadangan", + "log_corrupted_md_file": "Berkas metadata YAML yang terkait dengan log rusak: '{md_file}\nGalat: {error}'", + "log_domain_config_set": "Memperbarui konfigurasi untuk domain '{}'", + "log_domain_main_domain": "Atur '{}' sebagai domain utama", + "log_domain_remove": "Hapus domain '{}' dari konfigurasi sistem", + "log_link_to_log": "Log penuh untuk tindakan ini: '{desc}'", + "log_settings_reset": "Atur ulang pengaturan", + "log_tools_migrations_migrate_forward": "Menjalankan migrasi", + "log_tools_reboot": "Mulai ulang peladen Anda", + "log_tools_shutdown": "Matikan peladen Anda", + "log_tools_upgrade": "Perbarui paket sistem", + "migration_0021_main_upgrade": "Memulai pembaruan utama...", + "migration_0021_start": "Memulai migrasi ke Bullseye", + "migration_0021_yunohost_upgrade": "Memulai pembaruan YunoHost Core...", + "permission_updated": "Izin '{permission}' diperbarui", + "registrar_infos": "Info registrar", + "restore_already_installed_apps": "Aplikasi berikut tidak dapat dipulihkan karena mereka sudah terpasang: {apps}", + "restore_backup_too_old": "Arsip cadangan ini tidak dapat dipulihkan karena ini dihasilkan dari YunoHost dengan versi yang terlalu tua.", + "restore_failed": "Tidak dapat memulihkan sistem", + "restore_nothings_done": "Tidak ada yang dipulihkan", + "restore_running_app_script": "Memulihkan aplikasi {app}...", + "root_password_changed": "kata sandi root telah diubah", + "root_password_desynchronized": "Kata sandi administrasi telah diubah tapi YunoHost tidak dapat mengubahnya menjadi kata sandi root!", + "server_reboot_confirm": "Peladen akan dimulai ulang segera, apakan Anda yakin [{answers}]", + "server_shutdown": "Peladen akan dimatikan", + "server_shutdown_confirm": "Peladen akan dimatikan segera, apakah Anda yakin? 
[{answers}]", + "service_add_failed": "Tidak dapat menambahkan layanan '{service}'", + "service_added": "Layanan '{service}' ditambahkan", + "service_already_stopped": "Layanan '{service}' telah dihentikan", + "service_cmd_exec_failed": "Tidak dapat menjalankan perintah '{command}'", + "service_description_dnsmasq": "Mengurus DNS", + "service_description_dovecot": "Digunakan untuk memperbolehkan klien surel mengakses surel (via IMAP dan POP3)", + "service_description_metronome": "Mengelola akun XMPP", + "service_description_postfix": "Digunakan untuk mengirim dan menerima surel", + "service_description_slapd": "Menyimpan info terkait pengguna, domain, dan sejenisnya", + "service_description_ssh": "Memperbolehkan Anda untuk terhubung secara jarak jauh dengan peladen Anda via terminal (protokol SSH)", + "service_description_yunohost-firewall": "Mengelola pembukaan dan penutupan porta koneksi ke layanan", + "unbackup_app": "{app} tidak akan disimpan", + "user_deleted": "Pengguna dihapus", + "user_deletion_failed": "Tidak dapat menghapus pengguna {user}: {error}", + "user_import_bad_file": "Berkas CSV Anda tidak secara benar diformat, akan diabaikan untuk menghindari potensi data hilang", + "yunohost_postinstall_end_tip": "Proses pasca-pemasangan sudah selesai! Untuk menyelesaikan pengaturan Anda, pertimbangkan:\n - diagnosis masalah yang mungkin lewat bagian 'Diagnosis' di webadmin (atau 'yunohost diagnosis run' di cmd);\n - baca bagian 'Finalizing your setup' dan 'Getting to know YunoHost' di dokumentasi admin: https://yunohost.org/admindoc.", + "app_already_installed_cant_change_url": "Aplikasi ini sudah terpasang. URL tidak dapat diubah hanya dengan ini. Periksa `app changeurl` jika tersedia.", + "app_requirements_checking": "Memeriksa persyaratan untuk {app}...", + "backup_create_size_estimation": "Arsip ini akan mengandung data dengan ukuran {size}.", + "certmanager_certificate_fetching_or_enabling_failed": "Mencoba untuk menggunakan sertifikat baru untuk {domain} tidak bisa...", + "certmanager_no_cert_file": "Tidak dapat membuka berkas sertifikat untuk domain {domain} (berkas: {file})", + "diagnosis_basesystem_hardware": "Arsitektur perangkat keras peladen adalah {virt} {arch}", + "diagnosis_basesystem_ynh_inconsistent_versions": "Anda menjalankan versi paket YunoHost yang tidak konsisten... sepertinya karena pembaruan yang gagal.", + "diagnosis_basesystem_ynh_single_version": "versi {package}: {version} ({repo})", + "diagnosis_description_services": "Status layanan", + "diagnosis_description_systemresources": "Sumber daya sistem", + "diagnosis_domain_not_found_details": "Domain {domain} tidak ada di basis data WHOIS atau sudah kedaluwarsa!", + "diagnosis_http_ok": "Domain {domain} bisa dicapai dengan HTTP dari luar jaringan lokal.", + "diagnosis_ip_connected_ipv4": "Peladen ini terhubung ke internet lewat IPv4!", + "diagnosis_ip_no_ipv6": "Peladen ini sepertinya tidak memiliki IPv6.", + "domain_cert_gen_failed": "Tidak dapat membuat sertifikat", + "done": "Selesai", + "log_domain_add": "Menambahkan domain '{}' ke konfigurasi sistem", + "main_domain_changed": "Domain utama telah diubah", + "service_already_started": "Layanan '{service}' telah berjalan", + "service_description_fail2ban": "Melindungi dari berbagai macam serangan dari Internet", + "service_description_yunohost-api": "Mengelola interaksi antara antarmuka web YunoHost dengan sistem", + "this_action_broke_dpkg": "Tindakan ini merusak dpkg/APT (pengelola paket sistem)... 
Anda bisa mencoba menyelesaikan masalah ini dengan masuk lewat SSH dan menjalankan `sudo apt install --fix-broken` dan/atau `sudo dpkg --configure -a`.", + "app_manifest_install_ask_init_admin_permission": "Siapa yang boleh mengakses fitur admin untuk aplikasi ini? (Ini bisa diubah nanti)", + "admins": "Admin", + "all_users": "Semua pengguna YunoHost", + "app_action_failed": "Gagal menjalankan tindakan {action} untuk aplikasi {app}", + "unrestore_app": "{app} akan dipulihkan", + "user_already_exists": "Pengguna '{user}' telah ada", + "user_created": "Pengguna dibuat", + "user_creation_failed": "Tidak dapat membuat pengguna {user}: {error}", + "user_home_creation_failed": "Tidak dapat membuat folder home '{home}' untuk pengguna", + "app_manifest_install_ask_init_main_permission": "Siapa yang boleh mengakses aplikasi ini? (Ini bisa diubah nanti)", + "ask_admin_fullname": "Nama lengkap admin", + "ask_fullname": "Nama lengkap", + "backup_abstract_method": "Metode pencadangan ini belum diimplementasikan", + "backup_csv_addition_failed": "Tidak dapat menambahkan berkas ke cadangan dengan berkas CSV", + "config_action_failed": "Gagal menjalankan tindakan '{action}': {error}", + "config_validate_color": "Harus warna heksadesimal RGB yang valid", + "danger": "Peringatan:", + "diagnosis_basesystem_host": "Peladen memakai Debian {debian_version}", + "diagnosis_domain_expiration_not_found": "Tidak dapat memeriksa tanggal kedaluwarsa untuk beberapa domain", + "diagnosis_http_could_not_diagnose_details": "Galat: {error}", + "app_manifest_install_ask_path": "Pilih jalur URL (setelah domain) dimana aplikasi ini akan dipasang", + "certmanager_cert_signing_failed": "Tidak dapat memverifikasi sertifikat baru", + "config_validate_url": "Harus URL web yang valid", + "diagnosis_description_ports": "Penyingkapan porta", + "diagnosis_failed_for_category": "Diagnosis gagal untuk kategori '{category}': {error}", + "mail_unavailable": "Alamat surel ini hanya untuk kelompok admin", + "main_domain_change_failed": "Tidak dapat mengubah domain utama", + "diagnosis_ip_global": "IP Global: {global}", + "diagnosis_ip_dnsresolution_working": "Resolusi nama domain bekerja!", + "diagnosis_ip_local": "IP Lokal: {local}", + "diagnosis_ip_no_ipv4": "Peladen ini sepertinya tidak memiliki IPv4.", + "diagnosis_mail_ehlo_could_not_diagnose_details": "Galat: {error}", + "global_settings_setting_ssh_password_authentication_help": "Izinkan autentikasi kata sandi untuk SSH", + "password_listed": "Kata sandi ini termasuk dalam daftar kata sandi yang sering digunakan di dunia. 
Mohon untuk memilih yang lebih unik.", + "permission_not_found": "Izin '{permission}' tidak ditemukan", + "restore_not_enough_disk_space": "Ruang tidak cukup (ruang: {free_space} B, ruang yang dibutuhkan: {needed_space} B, margin aman: {margin} B)", + "server_reboot": "Peladen akan dimulai ulang", + "service_description_nginx": "Menyediakan akses untuk semua situs yang dihos di peladen Anda", + "service_description_rspamd": "Filter spam dan fitur terkait surel lainnya", + "service_remove_failed": "Tidak dapat menghapus layanan '{service}'", + "user_unknown": "Pengguna tidak diketahui: {user}", + "user_update_failed": "Tidak dapat memperbarui pengguna {user}: {error}", + "yunohost_configured": "YunoHost sudah terkonfigurasi", + "global_settings_setting_pop3_enabled": "Aktifkan POP3", + "log_user_import": "Mengimpor pengguna", + "app_start_backup": "Mengumpulkan berkas untuk dicadangkan untuk {app}...", + "app_upgrade_script_failed": "Galat terjadi di skrip pembaruan aplikasi", + "backup_csv_creation_failed": "Tidak dapat membuat berkas CSV yang dibutuhkan untuk pemulihan", + "certmanager_attempt_to_renew_valid_cert": "Sertifikat untuk domain '{domain}' belum akan kedaluwarsa! (Anda bisa menggunakan --force jika Anda tahu apa yang Anda lakukan)", + "extracting": "Mengekstrak...", + "system_username_exists": "Nama pengguna telah ada di daftar pengguna sistem", + "upgrade_complete": "Pembaruan selesai", + "upgrading_packages": "Memperbarui paket...", + "diagnosis_description_apps": "Aplikasi", + "diagnosis_description_basesystem": "Basis sistem", + "global_settings_setting_pop3_enabled_help": "Aktifkan protokol POP3 untuk peladen surel", + "password_confirmation_not_the_same": "Kata sandi dan untuk konfirmasinya tidak sama", + "restore_complete": "Pemulihan selesai", + "user_import_success": "Pengguna berhasil diimpor", + "user_updated": "Informasi pengguna diubah", + "visitors": "Pengunjung", + "yunohost_already_installed": "YunoHost sudah terpasang", + "yunohost_installing": "Memasang YunoHost...", + "yunohost_not_installed": "YunoHost tidak terpasang dengan benar. 
Jalankan 'yunohost tools postinstall'", + "restore_removing_tmp_dir_failed": "Tidak dapat menghapus direktori sementara yang dulu", + "app_sources_fetch_failed": "Tidak dapat mengambil berkas sumber, apakah URL-nya benar?", + "installation_complete": "Pemasangan selesai", + "app_arch_not_supported": "Aplikasi ini hanya bisa dipasang pada arsitektur {required}, tapi arsitektur peladen Anda adalah {current}", + "diagnosis_basesystem_hardware_model": "Model peladen adalah {model}", + "app_yunohost_version_not_supported": "Aplikasi ini memerlukan YunoHost >= {required}, tapi versi yang terpasang adalah {current}", + "ask_new_path": "Jalur baru", + "backup_cleaning_failed": "Tidak dapat menghapus folder cadangan sementara", + "diagnosis_description_mail": "Surel", + "diagnosis_description_regenconf": "Konfigurasi sistem", + "diagnosis_display_tip": "Untuk melihat masalah yang ditemukan, Anda bisa ke bagian Diagnosis di administrasi web atau jalankan 'yunohost diagnosis show --issues --human-readable'.", + "diagnosis_domain_expiration_success": "Domain Anda sudah terdaftar dan belum akan kedaluwarsa dalam waktu dekat.", + "diagnosis_failed": "Gagal mengambil hasil diagnosis untuk kategori '{category}': {error}", + "global_settings_setting_portal_theme": "Tema portal", + "global_settings_setting_portal_theme_help": "Informasi lebih lanjut tentang tema portal kustom ada di https://yunohost.org/theming", + "global_settings_setting_ssh_password_authentication": "Autentikasi kata sandi", + "certmanager_attempt_to_renew_nonLE_cert": "Sertifikat untuk domain '{domain}' tidak diterbitkan oleh Let's Encrypt. Tidak dapat memperbarui secara otomatis!", + "certmanager_cert_install_failed": "Pemasangan sertifikat Let's Encrypt gagal untuk {domains}", + "certmanager_cert_install_failed_selfsigned": "Pemasangan sertifikat ditandai sendiri (self-signed) gagal untuk {domains}", + "config_validate_email": "Harus surel yang valid", + "config_apply_failed": "Gagal menerapkan konfigurasi baru: {error}", + "diagnosis_basesystem_ynh_main_version": "Peladen memakai YunoHost {main_version} ({repo})", + "diagnosis_cant_run_because_of_dep": "Tidak dapat menjalankan diagnosis untuk {category} ketika ada masalah utama yang terkait dengan {dep}.", + "diagnosis_services_conf_broken": "Konfigurasi rusak untuk layanan {service}!", + "diagnosis_services_running": "Layanan {service} berjalan!", + "diagnosis_swap_ok": "Sistem ini memiliki {total} swap!", + "downloading": "Mengunduh...", + "pattern_password": "Harus paling tidak 3 karakter", + "pattern_password_app": "Maaf, kata sandi tidak dapat mengandung karakter berikut: {forbidden_chars}", + "pattern_port_or_range": "Harus angka porta yang valid (cth. 0-65535) atau jangkauan porta (cth. 
100:200)", + "permission_already_exist": "Izin '{permission}' sudah ada", + "permission_cant_add_to_all_users": "Izin '{permission}' tidak dapat ditambahkan ke semua pengguna.", + "permission_created": "Izin '{permission}' dibuat", + "permission_creation_failed": "Tidak dapat membuat izin '{permission}': {error}", + "permission_deleted": "Izin '{permission}' dihapus", + "service_description_mysql": "Menyimpan data aplikasi (basis data SQL)", + "mailbox_disabled": "Surel dimatikan untuk pengguna {user}", + "log_user_update": "Memperbarui informasi untuk pengguna '{}'", + "apps_catalog_obsolete_cache": "Tembolok katalog aplikasi kosong atau sudah tua.", + "backup_actually_backuping": "Membuat arsip cadangan dari berkas yang dikumpulkan...", + "backup_applying_method_copy": "Menyalin semua berkas ke cadangan...", + "backup_archive_app_not_found": "Tidak dapat menemukan {app} di arsip cadangan", + "config_validate_date": "Harus tanggal yang valid seperti format YYYY-MM-DD", + "config_validate_time": "Harus waktu yang valid seperti HH:MM", + "diagnosis_ip_connected_ipv6": "Peladen ini terhubung ke internet lewat IPv6!", + "diagnosis_services_bad_status": "Layanan {service} {status} :(", + "global_settings_setting_root_password": "Kata sandi root baru", + "log_app_action_run": "Menjalankan tindakan dari aplikasi '{}'", + "log_settings_reset_all": "Atur ulang semua pengaturan", + "log_settings_set": "Terapkan pengaturan", + "service_removed": "Layanan '{service}' dihapus", + "service_restart_failed": "Tidak dapat memulai ulang layanan '{service}'\n\nLog layanan baru-baru ini:{logs}", + "ssowat_conf_generated": "Konfigurasi SSOwat diperbarui", + "system_upgraded": "Sistem diperbarui", + "tools_upgrade": "Memperbarui paket sistem", + "upnp_dev_not_found": "Tidak ada perangkat UPnP yang ditemukan", + "upnp_enabled": "UPnP dinyalakan", + "upnp_port_open_failed": "Tidak dapat membuka porta lewat UPnP", + "app_change_url_failed": "Tidak dapat mengubah URL untuk {app}: {error}", + "app_restore_script_failed": "Galat terjadi di skrip pemulihan aplikasi", + "app_label_deprecated": "Perintah ini sudah usang! Silakan untuk menggunakan perintah baru 'yunohost user permission update' untuk mengelola label aplikasi.", + "app_make_default_location_already_used": "Tidak dapat membuat '{app}' menjadi aplikasi baku untuk domain, '{domain}' telah dipakai oleh '{other_app}'", + "app_manifest_install_ask_is_public": "Bolehkan aplikasi ini dibuka untuk pengunjung awanama?", + "upnp_disabled": "UPnP dimatikan", + "global_settings_setting_smtp_allow_ipv6_help": "Perbolehkan penggunaan IPv6 untuk menerima dan mengirim surel", + "domain_config_default_app": "Aplikasi baku", + "diagnosis_diskusage_verylow": "Penyimpanan {mountpoint} (di perangkat {device}) hanya tinggal memiliki {free} ({free_percent}%) ruang kosong yang tersedia (dari {total}). Direkomendasikan untuk membersihkan ruang penyimpanan!", + "domain_config_api_protocol": "Protokol API", + "domain_config_cert_summary_letsencrypt": "Bagus! Anda menggunakan sertifikat Let's Encrypt yang valid!", + "domain_config_mail_out": "Surel keluar", + "domain_deletion_failed": "Tidak dapat menghapus domain {domain}: {error}", + "backup_copying_to_organize_the_archive": "Menyalin {size}MB untuk menyusun arsip", + "backup_method_copy_finished": "Salinan cadangan telah selesai", + "certmanager_domain_cert_not_selfsigned": "Sertifikat untuk domain {domain} bukan disertifikasi sendiri. Apakah Anda yakin ingin mengubahnya? 
(Gunakan '--force' jika iya)", + "diagnosis_diskusage_ok": "Penyimpanan {mountpoint} (di perangkat {device}) masih memiliki {free} ({free_percent}%) ruang kosong yang tersedia (dari {total})!", + "diagnosis_http_nginx_conf_not_up_to_date": "Konfigurasi nginx domain ini sepertinya diubah secara manual, itu mencegah YunoHost untuk mendiagnosis apakah domain ini terhubung ke HTTP.", + "domain_created": "Domain dibuat", + "migrations_running_forward": "Menjalankan migrasi {id}...", + "permission_deletion_failed": "Tidak dapat menghapus izin '{permission}': {error}", + "domain_config_cert_no_checks": "Abaikan pemeriksaan diagnosis", + "domain_config_cert_renew": "Perbarui sertifikat Let's Encrypt", + "domain_config_cert_summary": "Status sertifikat", + "domain_config_cert_summary_expired": "PENTING: Sertifikat saat ini tidak valid! HTTPS tidak akan bekerja sama sekali!", + "port_already_opened": "Porta {port} telah dibuka untuk koneksi {ip_version}", + "migrations_success_forward": "Migrasi {id} selesai", + "not_enough_disk_space": "Ruang kosong tidak cukup di '{path}'", + "password_too_long": "Pilih kata sandi yang lebih pendek dari 127 karakter", + "regenconf_file_backed_up": "Berkas konfigurasi '{conf}' dicadangkan ke '{backup}'", + "domain_creation_failed": "Tidak dapat membuat domain {domain}: {error}", + "domain_deleted": "Domain dihapus", + "regex_with_only_domain": "Anda tidak dapat menggunakan regex untuk domain, hanya untuk jalur", + "diagnosis_diskusage_low": "Penyimpanan {mountpoint} (di perangkat {device}) hanya tinggal memiliki {free} ({free_percent}%) ruang kosong yang tersedia (dari {total}).", + "domain_config_cert_summary_ok": "Oke, sertifikat saat ini terlihat bagus!", + "app_failed_to_upgrade_but_continue": "Gagal memperbarui aplikasi {failed_app}, melanjutkan pembaruan berikutnya seperti yang diminta. Jalankan 'yunohost log show {operation_logger_name}' untuk melihat log kegagalan", + "certmanager_attempt_to_replace_valid_cert": "Anda sedang mencoba untuk menimpa sertifikat yang valid untuk domain {domain}! (Gunakan --force untuk melewati ini)", + "permission_protected": "Izin {permission} dilindungi. Anda tidak dapat menambahkan atau menghapus kelompok pengunjung ke/dari izin ini.", + "permission_require_account": "Izin {permission} hanya masuk akal untuk pengguna yang memiliki akun, maka ini tidak dapat diaktifkan untuk pengunjung.", + "permission_update_failed": "Tidak dapat memperbarui izin '{permission}': {error}", + "apps_failed_to_upgrade": "Aplikasi berikut gagal untuk diperbarui:{apps}", + "backup_archive_name_unknown": "Arsip cadangan lokal tidak diketahui yang bernama '{name}'", + "diagnosis_http_nginx_conf_not_up_to_date_details": "Untuk memperbaiki ini, periksa perbedaannya dari CLI menggunakan yunohost tools regen-conf nginx --dry-run --with-diff dan jika Anda sudah, terapkan perubahannya menggunakan yunohost tools regen-conf nginx --force.", + "domain_config_auth_token": "Token autentikasi", + "domain_config_cert_install": "Pasang sertifikat Let's Encrypt", + "domain_config_cert_summary_abouttoexpire": "Sertifikat saat ini akan kedaluwarsa. 
Akan secara otomatis diperbarui secepatnya.", + "domain_config_mail_in": "Surel datang", + "password_too_simple_1": "Panjang kata sandi harus paling tidak 8 karakter", + "password_too_simple_2": "Panjang kata sandi harus paling tidak 8 karakter dan mengandung digit, huruf kapital, dan huruf kecil", + "password_too_simple_3": "Panjang kata sandi harus paling tidak 8 karakter dan mengandung digit, huruf kapital, huruf kecil, dan karakter khusus", + "password_too_simple_4": "Panjang kata sandi harus paling tidak 12 karakter dan mengandung digit, huruf kapital, huruf kecil, dan karakter khusus", + "port_already_closed": "Porta {port} telah ditutup untuk koneksi {ip_version}", + "service_description_yunomdns": "Membuat Anda bisa menemukan peladen Anda menggunakan 'yunohost.local' di jaringan lokal Anda", + "regenconf_file_copy_failed": "Tidak dapat menyalin berkas konfigurasi baru '{new}' ke '{conf}'", + "regenconf_file_kept_back": "Berkas konfigurasi '{conf}' seharusnya dihapus oleh regen-conf (kategori {category}) tapi tidak jadi.", + "regenconf_file_manually_modified": "Berkas konfigurasi '{conf}' telah diubah secara manual dan tidak akan diperbarui", + "regenconf_file_manually_removed": "Berkas konfigurasi '{conf}' telah dihapus secara manual dan tidak akan dibuat", + "regenconf_file_remove_failed": "Tidak dapat menghapus berkas konfigurasi '{conf}'", + "regenconf_file_removed": "Berkas konfigurasi '{conf}' dihapus", + "regenconf_file_updated": "Berkas konfigurasi '{conf}' diperbarui", + "regenconf_now_managed_by_yunohost": "Berkas konfigurasi '{conf}' sekarang dikelola oleh YunoHost (kategori {category})", + "regenconf_updated": "Konfigurasi diperbarui untuk '{category}'", + "log_user_group_delete": "Menghapus kelompok '{}'", + "backup_archive_cant_retrieve_info_json": "Tidak dapat memuat info untuk arsip '{archive}'... Berkas info.json tidak dapat didapatkan (atau bukan json yang valid).", + "diagnosis_mail_blacklist_reason": "Alasan pendaftarhitaman adalah: {reason}", + "diagnosis_ports_unreachable": "Porta {port} tidak tercapai dari luar.", + "diagnosis_ram_verylow": "Sistem hanya memiliki {available} ({available_percent}%) RAM yang tersedia! (dari {total})", + "diagnosis_regenconf_allgood": "Semua berkas konfigurasi sesuai dengan rekomendasi konfigurasi!", + "diagnosis_security_vulnerable_to_meltdown": "Sepertinya sistem Anda rentan terhadap kerentanan keamanan Meltdown", + "diagnosis_security_vulnerable_to_meltdown_details": "Untuk memperbaiki ini, sebaiknya perbarui sistem Anda dan mulai ulang untuk memuat kernel linux yang baru (atau hubungi penyedia peladen Anda jika itu tidak bekerja). Kunjungi https://meltdownattack.com/ untuk informasi lebih lanjut.", + "domain_exists": "Domain telah ada", + "domain_uninstall_app_first": "Aplikasi berikut masih terpasang di domain Anda:\n{apps}\n\nSilakan lepas mereka menggunakan 'yunohost app remove id_aplikasi' atau pindahkan ke domain lain menggunakan 'yunohost app change-url id_aplikasi' sebelum melanjutkan ke penghapusan domain", + "group_creation_failed": "Tidak dapat membuat kelompok '{group}': {error}", + "group_deleted": "Kelompok '{group}' dihapus", + "log_letsencrypt_cert_install": "Memasang sertifikat Let's Encrypt di domain '{}'", + "log_permission_create": "Membuat izin '{}'", + "log_permission_delete": "Menghapus izin '{}'", + "backup_with_no_backup_script_for_app": "Aplikasi '{app}' tidak memiliki skrip pencadangan. 
Mengabaikan.", + "backup_system_part_failed": "Tidak dapat mencadangkan bagian '{part}' sistem", + "log_user_create": "Menambahkan pengguna '{}'", + "log_user_delete": "Menghapus pengguna '{}'", + "log_user_group_create": "Membuat kelompok '{}'", + "log_user_group_update": "Memperbarui kelompok '{}'", + "log_user_permission_update": "Memperbarui akses untuk izin '{}'", + "mail_alias_remove_failed": "Tidak dapat menghapus alias surel '{mail}'", + "diagnosis_mail_blacklist_ok": "IP dan domain yang digunakan oleh peladen ini sepertinya tidak didaftarhitamkan", + "diagnosis_dns_point_to_doc": "Silakan periksa dokumentasi di https://yunohost.org/dns_config jika Anda masih membutuhkan bantuan untuk mengatur rekaman DNS.", + "diagnosis_regenconf_manually_modified": "Berkas konfigurasi {file} sepertinya telah diubah manual.", + "backup_with_no_restore_script_for_app": "{app} tidak memiliki skrip pemulihan, Anda tidak akan bisa secara otomatis memulihkan cadangan aplikasi ini.", + "config_no_panel": "Tidak dapat menemukan panel konfigurasi.", + "confirm_app_install_warning": "Peringatan: Aplikasi ini mungkin masih bisa bekerja, tapi tidak terintegrasi dengan baik dengan YunoHost. Beberapa fitur seperti SSO dan pencadangan mungkin tidak tersedia. Tetap pasang? [{answers}] ", + "diagnosis_ports_ok": "Porta {port} tercapai dari luar.", + "diagnosis_ports_partially_unreachable": "Porta {port} tidak tercapai dari luar lewat IPv{failed}.", + "domain_remove_confirm_apps_removal": "Menghapus domain ini akan melepas aplikasi berikut:\n{apps}\n\nApakah Anda yakin? [{answers}]", + "domains_available": "Domain yang tersedia:", + "global_settings_reset_success": "Atur ulang pengaturan global", + "group_created": "Kelompok '{group}' dibuat", + "group_deletion_failed": "Tidak dapat menghapus kelompok '{group}': {error}", + "group_updated": "Kelompok '{group}' diperbarui", + "invalid_credentials": "Nama pengguna atau kata sandi salah", + "log_letsencrypt_cert_renew": "Memperbarui sertifikat Let's Encrypt '{}'", + "log_selfsigned_cert_install": "Memasang sertifikat ditandai sendiri pada domain '{}'", + "log_user_permission_reset": "Mengatur ulang izin '{}'", + "domain_config_xmpp": "Pesan Langsung (XMPP)" +} diff --git a/locales/oc.json b/locales/oc.json index eb142879c..1c13fc6b5 100644 --- a/locales/oc.json +++ b/locales/oc.json @@ -379,7 +379,7 @@ "diagnosis_services_bad_status": "Lo servici {service} es {status} :(", "diagnosis_swap_ok": "Lo sistèma a {total} d’escambi !", "diagnosis_regenconf_allgood": "Totes los fichièrs de configuracion son confòrmes a la configuracion recomandada !", - "diagnosis_regenconf_manually_modified": "Lo fichièr de configuracion {file} foguèt modificat manualament.", + "diagnosis_regenconf_manually_modified": "Lo fichièr de configuracion {file} foguèt modificat manualament.", "diagnosis_regenconf_manually_modified_details": "Es probablament bon tan que sabètz çò que fasètz ;) !", "diagnosis_security_vulnerable_to_meltdown": "Semblatz èsser vulnerable a la vulnerabilitat de seguretat critica de Meltdown", "diagnosis_description_basesystem": "Sistèma de basa", diff --git a/locales/pl.json b/locales/pl.json index 2631b42ca..0b3dc5e73 100644 --- a/locales/pl.json +++ b/locales/pl.json @@ -80,7 +80,7 @@ "app_already_installed_cant_change_url": "Ta aplikacja jest już zainstalowana. URL nie może zostać zmieniony przy użyciu tej funkcji. 
Sprawdź czy można zmienić w `app changeurl`", "app_id_invalid": "Nieprawidłowy identyfikator aplikacji(ID)", "app_change_url_require_full_domain": "Nie można przenieść aplikacji {app} na nowy adres URL, ponieważ wymaga ona pełnej domeny (tj. ze ścieżką = /)", - "app_install_files_invalid": "Tych plików nie można zainstalować", + "app_install_files_invalid": "Te pliki nie mogą zostać zainstalowane.", "app_make_default_location_already_used": "Nie można ustawić '{app}' jako domyślnej aplikacji w domenie '{domain}' ponieważ jest już używana przez '{other_app}'", "app_change_url_identical_domains": "Stara i nowa domena/ścieżka_url są identyczne („{domain}{path}”), nic nie trzeba robić.", "app_config_unable_to_read": "Nie udało się odczytać wartości panelu konfiguracji.", @@ -136,7 +136,7 @@ "backup_archive_corrupted": "Wygląda na to, że archiwum kopii zapasowej '{archive}' jest uszkodzone: {error}", "backup_cleaning_failed": "Nie udało się wyczyścić folderu tymczasowej kopii zapasowej", "backup_create_size_estimation": "Archiwum będzie zawierać około {size} danych.", - "app_location_unavailable": "Ten adres URL jest niedostępny lub powoduje konflikt z już zainstalowanymi aplikacja(mi):\n{apps}", + "app_location_unavailable": "Ten adres URL jest niedostępny lub koliduje z już zainstalowanymi aplikacjami:\n{apps}", "app_restore_failed": "Nie można przywrócić {app}: {error}", "app_restore_script_failed": "Wystąpił błąd w skrypcie przywracania aplikacji", "app_full_domain_unavailable": "Przepraszamy, ta aplikacja musi być zainstalowana we własnej domenie, ale inna aplikacja jest już zainstalowana w tej domenie „{domain}”. Zamiast tego możesz użyć subdomeny dedykowanej tej aplikacji.", @@ -155,5 +155,64 @@ "app_manifest_install_ask_init_main_permission": "Kto powinien mieć dostęp do tej aplikacji? (Można to później zmienić)", "ask_admin_fullname": "Pełne imię i nazwisko administratora", "app_change_url_failed": "Nie udało się zmienić adresu URL aplikacji {app}: {error}", - "app_change_url_script_failed": "Wystąpił błąd w skrypcie zmiany adresu URL" -} \ No newline at end of file + "app_change_url_script_failed": "Wystąpił błąd w skrypcie zmiany adresu URL", + "app_failed_to_upgrade_but_continue": "Nie udało zaktualizować się aplikacji {failed_app}, przechodzenie do następnych aktualizacji według żądania. Uruchom komendę 'yunohost log show {operation_logger_name}', aby sprawdzić logi dotyczące błędów", + "certmanager_cert_signing_failed": "Nie udało się zarejestrować nowego certyfikatu", + "certmanager_cert_renew_success": "Pomyślne odnowienie certyfikatu Let's Encrypt dla domeny '{domain}'", + "backup_delete_error": "Nie udało się usunąć '{path}'", + "certmanager_attempt_to_renew_nonLE_cert": "Certyfikat dla domeny '{domain}' nie został wystawiony przez Let's Encrypt. Automatyczne odnowienie jest niemożliwe!", + "backup_archive_cant_retrieve_info_json": "Nieudane wczytanie informacji dla archiwum '{archive}'... 
Plik info.json nie może zostać odzyskany (lub jest niepoprawny).", + "backup_method_custom_finished": "Tworzenie kopii zapasowej według własnej metody '{method}' zakończone", + "backup_nothings_done": "Brak danych do zapisania", + "app_unsupported_remote_type": "Niewspierany typ zdalny użyty w aplikacji", + "backup_archive_name_unknown": "Nieznane, lokalne archiwum kopii zapasowej o nazwie '{name}'", + "backup_output_directory_not_empty": "Należy wybrać pusty katalog dla danych wyjściowych", + "certmanager_attempt_to_renew_valid_cert": "Certyfikat dla domeny '{domain}' nie jest bliski wygaśnięciu! (Możesz użyć komendy z dopiskiem --force jeśli wiesz co robisz)", + "certmanager_cert_install_success": "Pomyślna instalacja certyfikatu Let's Encrypt dla domeny '{domain}'", + "certmanager_attempt_to_replace_valid_cert": "Właśnie zamierzasz nadpisać dobry i poprawny certyfikat dla domeny '{domain}'! (Użyj komendy z dopiskiem --force, aby ominąć)", + "backup_method_copy_finished": "Zakończono tworzenie kopii zapasowej", + "certmanager_certificate_fetching_or_enabling_failed": "Próba użycia nowego certyfikatu dla {domain} zakończyła się niepowodzeniem...", + "backup_method_tar_finished": "Utworzono archiwum kopii zapasowej TAR", + "backup_mount_archive_for_restore": "Przygotowywanie archiwum do przywrócenia...", + "certmanager_cert_install_failed": "Nieudana instalacja certyfikatu Let's Encrypt dla {domains}", + "certmanager_cert_install_failed_selfsigned": "Nieudana instalacja certyfikatu self-signed dla {domains}", + "certmanager_cert_install_success_selfsigned": "Pomyślna instalacja certyfikatu self-signed dla domeny '{domain}'", + "certmanager_cert_renew_failed": "Nieudane odnowienie certyfikatu Let's Encrypt dla {domains}", + "apps_failed_to_upgrade": "Nieudana aktualizacja aplikacji: {apps}", + "backup_output_directory_required": "Musisz wybrać katalog dla kopii zapasowej", + "app_failed_to_download_asset": "Nie udało się pobrać zasobu '{source_id}' ({url}) dla {app}: {out}", + "backup_with_no_backup_script_for_app": "Aplikacja '{app}' nie posiada skryptu kopii zapasowej. Ignorowanie.", + "backup_with_no_restore_script_for_app": "Aplikacja {app} nie posiada skryptu przywracania, co oznacza, że nie będzie można automatycznie przywrócić kopii zapasowej tej aplikacji.", + "certmanager_acme_not_configured_for_domain": "Wyzwanie ACME nie może zostać uruchomione dla domeny {domain}, ponieważ jej konfiguracja nginx nie zawiera odpowiedniego fragmentu kodu... Upewnij się, że konfiguracja nginx jest aktualna, używając polecenia yunohost tools regen-conf nginx --dry-run --with-diff.", + "certmanager_domain_dns_ip_differs_from_public_ip": "Rekordy DNS dla domeny '{domain}' różnią się od adresu IP tego serwera. Sprawdź kategorię 'Rekordy DNS' (podstawowe) w diagnozie, aby uzyskać więcej informacji. Jeśli niedawno dokonałeś zmiany rekordu A, poczekaj, aż zostanie on zaktualizowany (można skorzystać z narzędzi online do sprawdzania propagacji DNS). (Jeśli wiesz, co robisz, użyj opcji '--no-checks', aby wyłączyć te sprawdzania.)", + "confirm_app_install_danger": "UWAGA! Ta aplikacja jest wciąż w fazie eksperymentalnej (jeśli nie działa jawnie)! Prawdopodobnie NIE powinieneś jej instalować, chyba że wiesz, co robisz. NIE ZOSTANIE udzielone wsparcie, jeśli ta aplikacja nie będzie działać poprawnie lub spowoduje uszkodzenie systemu... Jeśli mimo to jesteś gotów podjąć to ryzyko, wpisz '{answers}'", + "confirm_app_install_thirdparty": "UWAGA! Ta aplikacja nie jest częścią katalogu aplikacji YunoHost. 
Instalowanie aplikacji innych firm może naruszyć integralność i bezpieczeństwo systemu. Prawdopodobnie NIE powinieneś jej instalować, chyba że wiesz, co robisz. NIE ZOSTANIE udzielone wsparcie, jeśli ta aplikacja nie będzie działać poprawnie lub spowoduje uszkodzenie systemu... Jeśli mimo to jesteś gotów podjąć to ryzyko, wpisz '{answers}'", + "config_apply_failed": "Nie udało się zastosować nowej konfiguracji: {error}", + "config_cant_set_value_on_section": "Nie można ustawić pojedynczej wartości dla całej sekcji konfiguracji.", + "config_no_panel": "Nie znaleziono panelu konfiguracji.", + "config_unknown_filter_key": "Klucz filtru '{filter_key}' jest niepoprawny.", + "config_validate_email": "Proszę podać poprawny adres e-mail", + "backup_hook_unknown": "Nieznany jest hook kopii zapasowej '{hook}'.", + "backup_no_uncompress_archive_dir": "Nie istnieje taki katalog nieskompresowanego archiwum.", + "backup_output_symlink_dir_broken": "Twój katalog archiwum '{path}' to uszkodzone dowiązanie symboliczne. Być może zapomniałeś o ponownym zamontowaniu lub podłączeniu nośnika przechowującego, do którego on wskazuje.", + "backup_system_part_failed": "Nie można wykonać kopii zapasowej części systemu '{part}'", + "config_validate_color": "Powinien być poprawnym szesnastkowym kodem koloru RGB.", + "config_validate_date": "Data powinna być poprawna w formacie RRRR-MM-DD", + "config_validate_time": "Podaj poprawny czas w formacie GG:MM", + "certmanager_domain_not_diagnosed_yet": "Nie ma jeszcze wyników diagnozy dla domeny {domain}. Proszę ponownie uruchomić diagnozę dla kategorii 'Rekordy DNS' i 'Strona internetowa' w sekcji diagnozy, aby sprawdzić, czy domena jest gotowa do użycia Let's Encrypt. (Jeśli wiesz, co robisz, użyj opcji '--no-checks', aby wyłączyć te sprawdzania.)", + "certmanager_cannot_read_cert": "Wystąpił problem podczas próby otwarcia bieżącego certyfikatu dla domeny {domain} (plik: {file}), przyczyna: {reason}", + "certmanager_no_cert_file": "Nie można odczytać pliku certyfikatu dla domeny {domain} (plik: {file}).", + "certmanager_self_ca_conf_file_not_found": "Nie można znaleźć pliku konfiguracyjnego dla autorytetu samopodpisującego (plik: {file})", + "backup_running_hooks": "Uruchamianie hooków kopii zapasowej...", + "backup_permission": "Uprawnienia kopii zapasowej dla aplikacji {app}", + "certmanager_domain_cert_not_selfsigned": "Certyfikat dla domeny {domain} nie jest samopodpisany. Czy na pewno chcesz go zastąpić? (Użyj opcji '--force', aby to zrobić.)", + "config_action_disabled": "Nie można uruchomić akcji '{action}', ponieważ jest ona wyłączona. Upewnij się, że spełnione są jej ograniczenia. Pomoc: {help}", + "config_action_failed": "Nie udało się uruchomić akcji '{action}': {error}", + "config_forbidden_readonly_type": "Typ '{type}' nie może być ustawiony jako tylko do odczytu. Użyj innego typu, aby wyświetlić tę wartość (odpowiednie ID argumentu: '{id}')", + "config_forbidden_keyword": "Słowo kluczowe '{keyword}' jest zastrzeżone. Nie można tworzyć ani używać panelu konfiguracji z pytaniem o tym identyfikatorze.", + "backup_output_directory_forbidden": "Wybierz inną ścieżkę docelową. Kopie zapasowe nie mogą być tworzone w podfolderach /bin, /boot, /dev, /etc, /lib, /root, /run, /sbin, /sys, /usr, /var ani /home/yunohost.backup/archives", + "confirm_app_insufficient_ram": "UWAGA! Ta aplikacja wymaga {required} pamięci RAM do zainstalowania/aktualizacji, a obecnie dostępne jest tylko {current}. 
Nawet jeśli aplikacja mogłaby działać, proces instalacji/aktualizacji wymaga dużej ilości pamięci RAM, więc serwer może się zawiesić i niepowodzenie może być katastrofalne. Jeśli mimo to jesteś gotów podjąć to ryzyko, wpisz '{answers}'", + "app_not_upgraded_broken_system": "Aplikacja '{failed_app}' nie powiodła się w procesie aktualizacji i spowodowała uszkodzenie systemu. W rezultacie anulowane zostały aktualizacje następujących aplikacji: {apps}", + "app_not_upgraded_broken_system_continue": "Aplikacja '{failed_app}' nie powiodła się w procesie aktualizacji i spowodowała uszkodzenie systemu (parametr --continue-on-failure jest ignorowany). W rezultacie anulowane zostały aktualizacje następujących aplikacji: {apps}", + "certmanager_domain_http_not_working": "Domena {domain} wydaje się niedostępna przez HTTP. Sprawdź kategorię 'Strona internetowa' diagnostyki, aby uzyskać więcej informacji. (Jeśli wiesz, co robisz, użyj opcji '--no-checks', aby wyłączyć te sprawdzania.)" +} diff --git a/locales/ru.json b/locales/ru.json index 2c4e703da..a9c9da3f1 100644 --- a/locales/ru.json +++ b/locales/ru.json @@ -325,5 +325,8 @@ "global_settings_setting_ssh_port": "SSH порт", "global_settings_setting_webadmin_allowlist_help": "IP-адреса, разрешенные для доступа к веб-интерфейсу администратора. Разделенные запятыми.", "global_settings_setting_webadmin_allowlist_enabled_help": "Разрешите доступ к веб-интерфейсу администратора только некоторым IP-адресам.", - "global_settings_setting_smtp_allow_ipv6_help": "Разрешить использование IPv6 для получения и отправки почты" -} \ No newline at end of file + "global_settings_setting_smtp_allow_ipv6_help": "Разрешить использование IPv6 для получения и отправки почты", + "admins": "Администраторы", + "all_users": "Все пользователи YunoHost", + "app_action_failed": "Не удалось выполнить действие {action} для приложения {app}" +} diff --git a/locales/sk.json b/locales/sk.json index 359b2e562..bead46713 100644 --- a/locales/sk.json +++ b/locales/sk.json @@ -248,5 +248,16 @@ "ask_fullname": "Celé meno", "all_users": "Všetci používatelia YunoHost", "app_manifest_install_ask_init_main_permission": "Kto má mať prístup k tejto aplikácii? (Nastavenie môžete neskôr zmeniť)", - "certmanager_cert_install_failed": "Inštalácia Let's Encrypt certifikátu pre {domains} skončila s chybou" -} \ No newline at end of file + "certmanager_cert_install_failed": "Inštalácia Let's Encrypt certifikátu pre {domains} skončila s chybou", + "app_arch_not_supported": "Túto aplikáciu možno nainštalovať iba na architektúrach {required}, ale Váš server beží na architektúre {current}", + "log_help_to_get_failed_log": "Akciu '{desc}' sa nepodarilo dokončiť. Ak potrebujete pomoc, zdieľajte, prosím, úplný záznam tejto operácie pomocou príkazu 'yunohost log share {name}'", + "operation_interrupted": "Bola akcia manuálne prerušená?", + "log_link_to_failed_log": "Akciu '{desc}' sa nepodarilo dokončiť. 
Ak potrebujete pomoc, poskytnite, prosím, úplný záznam tejto operácie kliknutím sem", + "app_change_url_failed": "Nepodarilo sa zmeniť URL adresu aplikácie {app}: {error}", + "app_yunohost_version_not_supported": "Táto aplikácia vyžaduje YunoHost >= {required}, ale aktuálne nainštalovaná verzia je {current}", + "config_action_failed": "Nepodarilo sa spustiť operáciu '{action}': {error}", + "app_change_url_script_failed": "Vo skripte na zmenu URL adresy sa vyskytla chyba", + "app_not_enough_disk": "Táto aplikácia vyžaduje {required} voľného miesta.", + "app_not_enough_ram": "Táto aplikácia vyžaduje {required} pamäte na inštaláciu/aktualizáciu, ale k dispozícii je momentálne iba {current}.", + "apps_failed_to_upgrade": "Nasledovné aplikácie nebolo možné aktualizovať: {apps}" +} diff --git a/locales/uk.json b/locales/uk.json index 0cac77575..07cbfe6da 100644 --- a/locales/uk.json +++ b/locales/uk.json @@ -234,7 +234,7 @@ "group_already_exist_on_system": "Група {group} вже існує в групах системи", "group_already_exist": "Група {group} вже існує", "good_practices_about_user_password": "Зараз ви збираєтеся поставити новий пароль користувача. Пароль повинен складатися не менше ніж з 8 символів, але хорошою практикою є використання більш довгого пароля (тобто гасла) і/або використання різних символів (великих, малих, цифр і спеціальних символів).", - "good_practices_about_admin_password": "Зараз ви збираєтеся поставити новий пароль адмініструванні. Пароль повинен складатися не менше ніж з 8 символів, але хорошою практикою є використання більш довгого пароля (тобто парольного гасла) і/або використання різних символів (великих, малих, цифр і спеціальних символів).", + "good_practices_about_admin_password": "Зараз ви збираєтеся поставити новий пароль адміністрування. Пароль повинен складатися не менше ніж з 8 символів, але хорошою практикою є використання більш довгого пароля (тобто парольної фрази) і/або використання різних символів (великих, малих, цифр і спеціальних символів).", "global_settings_setting_smtp_relay_password": "Пароль SMTP-ретрансляції", "global_settings_setting_smtp_relay_user": "Користувач SMTP-ретрансляції", "global_settings_setting_smtp_relay_port": "Порт SMTP-ретрансляції", @@ -278,7 +278,7 @@ "domain_cannot_remove_main": "Ви не можете вилучити '{domain}', бо це основний домен, спочатку вам потрібно встановити інший домен в якості основного за допомогою 'yunohost domain main-domain -n '; ось список доменів-кандидатів: {other_domains}", "disk_space_not_sufficient_update": "Недостатньо місця на диску для оновлення цього застосунку", "disk_space_not_sufficient_install": "Недостатньо місця на диску для встановлення цього застосунку", - "diagnosis_sshd_config_inconsistent_details": "Будь ласка, виконайте команду yunohost settings set security.ssh.ssh port -v YOUR_SSH_PORT, щоб визначити порт SSH, і перевіртеyunohost tools regen-conf ssh --dry-run --with-diff і yunohost tools regen-conf ssh --force, щоб скинути ваш конфіг на рекомендований YunoHost.", + "diagnosis_sshd_config_inconsistent_details": "Будь ласка, виконайте команду yunohost settings set security.ssh.ssh port -v ВАШ_SSH_ПОРТ, щоб визначити порт SSH, і перевіртеyunohost tools regen-conf ssh --dry-run --with-diff і yunohost tools regen-conf ssh --force, щоб скинути ваш конфіг на рекомендований YunoHost.", "diagnosis_sshd_config_inconsistent": "Схоже, що порт SSH був уручну змінений в /etc/ssh/sshd_config. 
Починаючи з версії YunoHost 4.2, доступний новий глобальний параметр 'security.ssh.ssh port', що дозволяє уникнути ручного редагування конфігурації.", "diagnosis_sshd_config_insecure": "Схоже, що конфігурація SSH була змінена вручну і є небезпечною, оскільки не містить директив 'AllowGroups' або 'AllowUsers' для обмеження доступу авторизованих користувачів.", "diagnosis_processes_killed_by_oom_reaper": "Деякі процеси було недавно вбито системою через брак пам'яті. Зазвичай це є симптомом нестачі пам'яті в системі або процесу, який з'їв дуже багато пам'яті. Зведення убитих процесів:\n{kills_summary}", @@ -346,7 +346,7 @@ "diagnosis_mail_outgoing_port_25_blocked_details": "Спочатку спробуйте розблокувати вихідний порт 25 в інтерфейсі вашого інтернет-маршрутизатора або в інтерфейсі вашого хостинг-провайдера. (Деякі хостинг-провайдери можуть вимагати, щоб ви відправили їм заявку в службу підтримки).", "diagnosis_mail_outgoing_port_25_blocked": "Поштовий сервер SMTP не може відправляти електронні листи на інші сервери, оскільки вихідний порт 25 заблоковано в IPv{ipversion}.", "app_manifest_install_ask_path": "Оберіть шлях URL (після домену), за яким має бути встановлено цей застосунок", - "yunohost_postinstall_end_tip": "Післявстановлення завершено! Щоб завершити доналаштування, будь ласка, розгляньте наступні варіанти:\n - додавання першого користувача через розділ 'Користувачі' вебадмініструванні (або 'yunohost user create ' в командному рядку);\n - діагностика можливих проблем через розділ 'Діагностика' вебадмініструванні (або 'yunohost diagnosis run' в командному рядку);\n - прочитання розділів 'Завершення встановлення' і 'Знайомство з YunoHost' у документації адміністратора: https://yunohost.org/admindoc.", + "yunohost_postinstall_end_tip": "Післявстановлення завершено! Щоб завершити доналаштування, будь ласка, розгляньте наступні варіанти:\n - діагностика можливих проблем через розділ 'Діагностика' вебадмініструванні (або 'yunohost diagnosis run' в командному рядку);\n - прочитання розділів 'Завершення встановлення' і 'Знайомство з YunoHost' у документації адміністратора: https://yunohost.org/admindoc.", "yunohost_not_installed": "YunoHost установлений неправильно. 
Будь ласка, запустіть 'yunohost tools postinstall'", "yunohost_installing": "Установлення YunoHost...", "yunohost_configured": "YunoHost вже налаштовано", @@ -488,7 +488,7 @@ "backup_method_custom_finished": "Користувацький спосіб резервного копіювання '{method}' завершено", "backup_method_copy_finished": "Резервне копіювання завершено", "backup_hook_unknown": "Гачок (hook) резервного копіювання '{hook}' невідомий", - "backup_deleted": "Резервна копія видалена", + "backup_deleted": "Резервна копія '{name}' видалена", "backup_delete_error": "Не вдалося видалити '{path}'", "backup_custom_mount_error": "Користувацький спосіб резервного копіювання не зміг пройти етап 'монтування'", "backup_custom_backup_error": "Користувацький спосіб резервного копіювання не зміг пройти етап 'резервне копіювання'", @@ -496,7 +496,7 @@ "backup_csv_addition_failed": "Не вдалося додати файли для резервного копіювання в CSV-файл", "backup_creation_failed": "Не вдалося створити архів резервного копіювання", "backup_create_size_estimation": "Архів буде містити близько {size} даних.", - "backup_created": "Резервна копія створена", + "backup_created": "Резервна копія '{name}' створена", "backup_couldnt_bind": "Не вдалося зв'язати {src} з {dest}.", "backup_copying_to_organize_the_archive": "Копіювання {size} МБ для організації архіву", "backup_cleaning_failed": "Не вдалося очистити тимчасовий каталог резервного копіювання", @@ -654,7 +654,7 @@ "global_settings_setting_admin_strength": "Надійність пароля адміністратора", "global_settings_setting_user_strength": "Надійність пароля користувача", "global_settings_setting_postfix_compatibility_help": "Компроміс між сумісністю і безпекою для сервера Postfix. Впливає на шифри (і інші аспекти, пов'язані з безпекою)", - "global_settings_setting_ssh_compatibility_help": "Компроміс між сумісністю і безпекою для SSH-сервера. Впливає на шифри (і інші аспекти, пов'язані з безпекою)", + "global_settings_setting_ssh_compatibility_help": "Компроміс між сумісністю і безпекою для SSH-сервера. Впливає на шифри (і інші аспекти, пов'язані з безпекою).", "global_settings_setting_ssh_password_authentication_help": "Дозволити автентифікацію паролем для SSH", "global_settings_setting_ssh_port": "SSH-порт", "global_settings_setting_webadmin_allowlist_help": "IP-адреси, яким дозволений доступ до вебадмініструванні. Через кому.", @@ -735,5 +735,36 @@ "visitors": "Відвідувачі", "password_confirmation_not_the_same": "Пароль і його підтвердження не збігаються", "password_too_long": "Будь ласка, виберіть пароль коротший за 127 символів", - "pattern_fullname": "Має бути дійсне повне ім’я (принаймні 3 символи)" -} \ No newline at end of file + "pattern_fullname": "Має бути дійсне повне ім’я (принаймні 3 символи)", + "app_failed_to_upgrade_but_continue": "Застосунок {failed_app} не вдалося оновити, продовжуйте наступні оновлення відповідно до запиту. Запустіть 'yunohost log show {operation_logger_name}', щоб побачити журнал помилок", + "app_not_upgraded_broken_system": "Застосунок '{failed_app}' не зміг оновитися і перевів систему в неробочий стан, і як наслідок, оновлення наступних застосунків було скасовано: {apps}", + "app_not_upgraded_broken_system_continue": "Застосунок '{failed_app}' не зміг оновитися і перевів систему у неробочий стан (тому --continue-on-failure ігнорується), і як наслідок, оновлення наступних застосунків було скасовано: {apps}", + "confirm_app_insufficient_ram": "НЕБЕЗПЕКА! 
Цей застосунок вимагає {required} оперативної пам'яті для встановлення/оновлення, але зараз доступно лише {current}. Навіть якби цей застосунок можна було б запустити, процес його встановлення/оновлення вимагає великої кількості оперативної пам'яті, тому ваш сервер може зависнути і вийти з ладу. Якщо ви все одно готові піти на цей ризик, введіть '{answers}'", + "invalid_shell": "Недійсна оболонка: {shell}", + "domain_config_default_app_help": "Користувачі будуть автоматично перенаправлятися на цей застосунок при відкритті цього домену. Якщо застосунок не вказано, люди будуть перенаправлені на форму входу на портал користувача.", + "domain_config_xmpp_help": "Примітка: для ввімкнення деяких функцій XMPP потрібно оновити записи DNS та відновити сертифікат Lets Encrypt", + "global_settings_setting_dns_exposure_help": "Примітка: Це стосується лише рекомендованої конфігурації DNS і діагностичних перевірок. Це не впливає на конфігурацію системи.", + "global_settings_setting_passwordless_sudo": "Дозвіл адміністраторам використовувати \"sudo\" без повторного введення пароля", + "app_change_url_failed": "Не вдалося змінити url для {app}: {error}", + "app_change_url_require_full_domain": "{app} не може бути переміщено на цю нову URL-адресу, оскільки для цього потрібен повний домен (тобто зі шляхом = /)", + "app_change_url_script_failed": "Виникла помилка всередині скрипта зміни URL-адреси", + "app_yunohost_version_not_supported": "Для роботи застосунку потрібен YunoHost мінімум версії {required}, але поточна встановлена версія {current}", + "app_arch_not_supported": "Цей застосунок можна встановити лише на архітектурах {required}, але архітектура вашого сервера {current}", + "global_settings_setting_dns_exposure": "Версії IP, які слід враховувати при конфігурації та діагностиці DNS", + "domain_cannot_add_muc_upload": "Ви не можете додавати домени, що починаються на 'muc.'. Такі імена зарезервовані для багатокористувацького чату XMPP, інтегрованого в YunoHost.", + "confirm_notifications_read": "ПОПЕРЕДЖЕННЯ: Перш ніж продовжити, перевірте сповіщення застосунку вище, там можуть бути важливі повідомлення. [{answers}]", + "global_settings_setting_portal_theme": "Тема порталу", + "global_settings_setting_portal_theme_help": "Подробиці щодо створення користувацьких тем порталу на https://yunohost.org/theming", + "diagnosis_ip_no_ipv6_tip_important": "Зазвичай IPv6 має бути автоматично налаштований системою або вашим провайдером, якщо він доступний. 
В іншому випадку, можливо, вам доведеться налаштувати деякі речі вручну, як описано в документації тут: https://yunohost.org/#/ipv6.", + "app_not_enough_disk": "Цей застосунок вимагає {required} вільного місця.", + "app_not_enough_ram": "Для встановлення/оновлення цього застосунку потрібно {required} оперативної пам'яті, але наразі доступно лише {current}.", + "app_resource_failed": "Не вдалося надати, позбавити або оновити ресурси для {app}: {error}", + "apps_failed_to_upgrade": "Ці застосунки не вдалося оновити:{apps}", + "apps_failed_to_upgrade_line": "\n * {app_id} (щоб побачити відповідний журнал, виконайте 'yunohost log show {operation_logger_name}')", + "group_mailalias_add": "Псевдонім електронної пошти '{mail}' буде додано до групи '{group}'", + "group_mailalias_remove": "Псевдонім електронної пошти '{mail}' буде вилучено з групи '{group}'", + "group_user_add": "Користувача '{user}' буде додано до групи '{group}'", + "group_user_remove": "Користувача '{user}' буде вилучено з групи '{group}'", + "app_corrupt_source": "YunoHost зміг завантажити ресурс '{source_id}' ({url}) для {app}, але він не відповідає очікуваній контрольній сумі. Це може означати, що на вашому сервері стався тимчасовий збій мережі, АБО ресурс був якимось чином змінений висхідним супровідником (або зловмисником?), і пакувальникам YunoHost потрібно дослідити і оновити маніфест застосунку, щоб відобразити цю зміну.\n Очікувана контрольна сума sha256: {expected_sha256}\n Обчислена контрольна сума sha256: {computed_sha256}\n Розмір завантаженого файлу: {size}", + "app_failed_to_download_asset": "Не вдалося завантажити ресурс '{source_id}' ({url}) для {app}: {out}" +} diff --git a/share/actionsmap.yml b/share/actionsmap.yml index 58787790c..e1de66bc8 100644 --- a/share/actionsmap.yml +++ b/share/actionsmap.yml @@ -954,6 +954,12 @@ app: help: Delete the key action: store_true + ### app_shell() + shell: + action_help: Open an interactive shell with the app environment already loaded + arguments: + app: + help: App ID ### app_register_url() register-url: diff --git a/share/registrar_list.toml b/share/registrar_list.toml index 01906becd..3f478a03f 100644 --- a/share/registrar_list.toml +++ b/share/registrar_list.toml @@ -501,6 +501,15 @@ [pointhq.auth_token] type = "string" redact = true + +[porkbun] + [porkbun.auth_key] + type = "string" + redact = true + + [porkbun.auth_secret] + type = "string" + redact = true [powerdns] [powerdns.auth_token] diff --git a/src/app.py b/src/app.py index 65402a024..55d351a84 100644 --- a/src/app.py +++ b/src/app.py @@ -48,11 +48,10 @@ from moulinette.utils.filesystem import ( chmod, ) -from yunohost.utils.config import ( - ConfigPanel, - ask_questions_and_parse_answers, - DomainQuestion, - PathQuestion, +from yunohost.utils.configpanel import ConfigPanel, ask_questions_and_parse_answers +from yunohost.utils.form import ( + DomainOption, + WebPathOption, hydrate_questions_with_choices, ) from yunohost.utils.i18n import _value_for_locale @@ -431,10 +430,10 @@ def app_change_url(operation_logger, app, domain, path): # Normalize path and domain format - domain = DomainQuestion.normalize(domain) - old_domain = DomainQuestion.normalize(old_domain) - path = PathQuestion.normalize(path) - old_path = PathQuestion.normalize(old_path) + domain = DomainOption.normalize(domain) + old_domain = DomainOption.normalize(old_domain) + path = WebPathOption.normalize(path) + old_path = WebPathOption.normalize(old_path) if (domain, path) == (old_domain, old_path): raise YunohostValidationError( @@ -534,7 
+533,14 @@ def app_change_url(operation_logger, app, domain, path): hook_callback("post_app_change_url", env=env_dict) -def app_upgrade(app=[], url=None, file=None, force=False, no_safety_backup=False, continue_on_failure=False): +def app_upgrade( + app=[], + url=None, + file=None, + force=False, + no_safety_backup=False, + continue_on_failure=False, +): """ Upgrade app @@ -748,6 +754,7 @@ def app_upgrade(app=[], url=None, file=None, force=False, no_safety_backup=False ).apply( rollback_and_raise_exception_if_failure=True, operation_logger=operation_logger, + action="upgrade", ) # Boring stuff : the resource upgrade may have added/remove/updated setting @@ -857,8 +864,16 @@ def app_upgrade(app=[], url=None, file=None, force=False, no_safety_backup=False else: operation_logger.close() - logger.error(m18n.n("app_failed_to_upgrade_but_continue", failed_app=app_instance_name, operation_logger_name=operation_logger.name)) - failed_to_upgrade_apps.append((app_instance_name, operation_logger.name)) + logger.error( + m18n.n( + "app_failed_to_upgrade_but_continue", + failed_app=app_instance_name, + operation_logger_name=operation_logger.name, + ) + ) + failed_to_upgrade_apps.append( + (app_instance_name, operation_logger.name) + ) # Otherwise we're good and keep going ! now = int(time.time()) @@ -923,7 +938,11 @@ def app_upgrade(app=[], url=None, file=None, force=False, no_safety_backup=False if failed_to_upgrade_apps: apps = "" for app_id, operation_logger_name in failed_to_upgrade_apps: - apps += m18n.n("apps_failed_to_upgrade_line", app_id=app_id, operation_logger_name=operation_logger_name) + apps += m18n.n( + "apps_failed_to_upgrade_line", + app_id=app_id, + operation_logger_name=operation_logger_name, + ) logger.warning(m18n.n("apps_failed_to_upgrade", apps=apps)) @@ -1153,6 +1172,7 @@ def app_install( AppResourceManager(app_instance_name, wanted=manifest, current={}).apply( rollback_and_raise_exception_if_failure=True, operation_logger=operation_logger, + action="install", ) except (KeyboardInterrupt, EOFError, Exception) as e: shutil.rmtree(app_setting_path) @@ -1284,7 +1304,7 @@ def app_install( AppResourceManager( app_instance_name, wanted={}, current=manifest - ).apply(rollback_and_raise_exception_if_failure=False) + ).apply(rollback_and_raise_exception_if_failure=False, action="remove") else: # Remove all permission in LDAP for permission_name in user_permission_list()["permissions"].keys(): @@ -1423,7 +1443,9 @@ def app_remove(operation_logger, app, purge=False, force_workdir=None): from yunohost.utils.resources import AppResourceManager AppResourceManager(app, wanted={}, current=manifest).apply( - rollback_and_raise_exception_if_failure=False, purge_data_dir=purge + rollback_and_raise_exception_if_failure=False, + purge_data_dir=purge, + action="remove", ) else: # Remove all permission in LDAP @@ -1508,6 +1530,23 @@ def app_setting(app, key, value=None, delete=False): _set_app_settings(app, app_settings) +def app_shell(app): + """ + Open an interactive shell with the app environment already loaded + + Keyword argument: + app -- App ID + + """ + subprocess.run( + [ + "/bin/bash", + "-c", + "source /usr/share/yunohost/helpers && ynh_spawn_app_shell " + app, + ] + ) + + def app_register_url(app, domain, path): """ Book/register a web path for a given app @@ -1523,8 +1562,8 @@ def app_register_url(app, domain, path): permission_sync_to_user, ) - domain = DomainQuestion.normalize(domain) - path = PathQuestion.normalize(path) + domain = DomainOption.normalize(domain) + path = 
WebPathOption.normalize(path) # We cannot change the url of an app already installed simply by changing # the settings... @@ -1741,13 +1780,13 @@ class AppConfigPanel(ConfigPanel): save_path_tpl = os.path.join(APPS_SETTING_PATH, "{entity}/settings.yml") config_path_tpl = os.path.join(APPS_SETTING_PATH, "{entity}/config_panel.toml") - def _load_current_values(self): - self.values = self._call_config_script("show") - def _run_action(self, action): env = {key: str(value) for key, value in self.new_values.items()} self._call_config_script(action, env=env) + def _get_raw_settings(self): + self.values = self._call_config_script("show") + def _apply(self): env = {key: str(value) for key, value in self.new_values.items()} return_content = self._call_config_script("apply", env=env) @@ -1862,19 +1901,8 @@ def _get_app_settings(app): logger.error(m18n.n("app_not_correctly_installed", app=app)) return {} - # Stupid fix for legacy bullshit - # In the past, some setups did not have proper normalization for app domain/path - # Meaning some setups (as of January 2021) still have path=/foobar/ (with a trailing slash) - # resulting in stupid issue unless apps using ynh_app_normalize_path_stuff - # So we yolofix the settings if such an issue is found >_> - # A simple call to `yunohost app list` (which happens quite often) should be enough - # to migrate all app settings ... so this can probably be removed once we're past Bullseye... - if settings.get("path") != "/" and ( - settings.get("path", "").endswith("/") - or not settings.get("path", "/").startswith("/") - ): - settings["path"] = "/" + settings["path"].strip("/") - _set_app_settings(app, settings) + # Make the app id available as $app too + settings["app"] = app if app == settings["id"]: return settings @@ -2099,7 +2127,7 @@ def _hydrate_app_template(template, data): varname = stuff.strip("_").lower() if varname in data: - template = template.replace(stuff, data[varname]) + template = template.replace(stuff, str(data[varname])) return template @@ -2713,8 +2741,8 @@ def _get_conflicting_apps(domain, path, ignore_app=None): from yunohost.domain import _assert_domain_exists - domain = DomainQuestion.normalize(domain) - path = PathQuestion.normalize(path) + domain = DomainOption.normalize(domain) + path = WebPathOption.normalize(path) # Abort if domain is unknown _assert_domain_exists(domain) @@ -2904,10 +2932,10 @@ def _assert_system_is_sane_for_app(manifest, when): services = manifest.get("services", []) - # Some apps use php-fpm, php5-fpm or php7.x-fpm which is now php7.4-fpm + # Some apps use php-fpm, php5-fpm or php7.x-fpm which is now php8.2-fpm def replace_alias(service): - if service in ["php-fpm", "php5-fpm", "php7.0-fpm", "php7.3-fpm"]: - return "php7.4-fpm" + if service in ["php-fpm", "php5-fpm", "php7.0-fpm", "php7.3-fpm", "php7.4-fpm"]: + return "php8.2-fpm" else: return service @@ -2916,7 +2944,7 @@ def _assert_system_is_sane_for_app(manifest, when): # We only check those, mostly to ignore "custom" services # (added by apps) and because those are the most popular # services - service_filter = ["nginx", "php7.4-fpm", "mysql", "postfix"] + service_filter = ["nginx", "php8.2-fpm", "mysql", "postfix"] services = [str(s) for s in services if s in service_filter] if "nginx" not in services: @@ -2990,7 +3018,8 @@ def _notification_is_dismissed(name, settings): def _filter_and_hydrate_notifications(notifications, current_version=None, data={}): - def is_version_more_recent_than_current_version(name): + def 
is_version_more_recent_than_current_version(name, current_version): + current_version = str(current_version) # Boring code to handle the fact that "0.1 < 9999~ynh1" is False if "~" in name: @@ -3004,7 +3033,7 @@ def _filter_and_hydrate_notifications(notifications, current_version=None, data= for name, content_per_lang in notifications.items() if current_version is None or name == "main" - or is_version_more_recent_than_current_version(name) + or is_version_more_recent_than_current_version(name, current_version) } diff --git a/src/backup.py b/src/backup.py index 0cf73c4ae..a23a8d8e0 100644 --- a/src/backup.py +++ b/src/backup.py @@ -1204,7 +1204,7 @@ class RestoreManager: def _patch_legacy_php_versions_in_csv_file(self): """ - Apply dirty patch to redirect php5 and php7.0 files to php7.4 + Apply dirty patch to redirect php5 and php7.x files to php8.2 """ from yunohost.utils.legacy import LEGACY_PHP_VERSION_REPLACEMENTS @@ -1528,6 +1528,7 @@ class RestoreManager: AppResourceManager(app_instance_name, wanted=manifest, current={}).apply( rollback_and_raise_exception_if_failure=True, operation_logger=operation_logger, + action="restore", ) # Execute the app install script @@ -2375,6 +2376,7 @@ def backup_list(with_info=False, human_readable=False): # (we do a realpath() to resolve symlinks) archives = glob(f"{ARCHIVES_PATH}/*.tar.gz") + glob(f"{ARCHIVES_PATH}/*.tar") archives = {os.path.realpath(archive) for archive in archives} + archives = {archive for archive in archives if os.path.exists(archive)} archives = sorted(archives, key=lambda x: os.path.getctime(x)) # Extract only filename without the extension diff --git a/src/certificate.py b/src/certificate.py index 52e0d8c1b..76d3f32b7 100644 --- a/src/certificate.py +++ b/src/certificate.py @@ -41,8 +41,8 @@ from yunohost.log import OperationLogger logger = getActionLogger("yunohost.certmanager") CERT_FOLDER = "/etc/yunohost/certs/" -TMP_FOLDER = "/tmp/acme-challenge-private/" -WEBROOT_FOLDER = "/tmp/acme-challenge-public/" +TMP_FOLDER = "/var/www/.well-known/acme-challenge-private/" +WEBROOT_FOLDER = "/var/www/.well-known/acme-challenge-public/" SELF_CA_FILE = "/etc/ssl/certs/ca-yunohost_crt.pem" ACCOUNT_KEY_FILE = "/etc/yunohost/letsencrypt_account.pem" diff --git a/src/diagnosers/21-web.py b/src/diagnosers/21-web.py index 2050cd658..cc6edd7dc 100644 --- a/src/diagnosers/21-web.py +++ b/src/diagnosers/21-web.py @@ -60,9 +60,9 @@ class MyDiagnoser(Diagnoser): domains_to_check.append(domain) self.nonce = "".join(random.choice("0123456789abcedf") for i in range(16)) - rm("/tmp/.well-known/ynh-diagnosis/", recursive=True, force=True) - mkdir("/tmp/.well-known/ynh-diagnosis/", parents=True) - os.system("touch /tmp/.well-known/ynh-diagnosis/%s" % self.nonce) + rm("/var/www/.well-known/ynh-diagnosis/", recursive=True, force=True) + mkdir("/var/www/.well-known/ynh-diagnosis/", parents=True, mode=0o0775) + os.system("touch /var/www/.well-known/ynh-diagnosis/%s" % self.nonce) if not domains_to_check: return diff --git a/src/diagnosers/80-apps.py b/src/diagnosers/80-apps.py index 44ce86bcc..93cefeaaf 100644 --- a/src/diagnosers/80-apps.py +++ b/src/diagnosers/80-apps.py @@ -62,12 +62,12 @@ class MyDiagnoser(Diagnoser): # Check quality level in catalog if not app.get("from_catalog") or app["from_catalog"].get("state") != "working": - yield ("error", "diagnosis_apps_not_in_app_catalog") + yield ("warning", "diagnosis_apps_not_in_app_catalog") elif ( not isinstance(app["from_catalog"].get("level"), int) or app["from_catalog"]["level"] == 0 ): - yield 
("error", "diagnosis_apps_broken") + yield ("warning", "diagnosis_apps_broken") elif app["from_catalog"]["level"] <= 4: yield ("warning", "diagnosis_apps_bad_quality") diff --git a/src/dns.py b/src/dns.py index 3a5e654ec..e3a26044c 100644 --- a/src/dns.py +++ b/src/dns.py @@ -960,6 +960,9 @@ def domain_dns_push(operation_logger, domain, dry_run=False, force=False, purge= f"Pushing {record['type']} records is not properly supported by Lexicon/Godaddy." ) continue + elif registrar == "gandi": + if record["name"] == base_dns_zone: + record["name"] = "@." + record["name"] record["action"] = action query = ( diff --git a/src/domain.py b/src/domain.py index 7839b988d..4f96d08c4 100644 --- a/src/domain.py +++ b/src/domain.py @@ -33,7 +33,8 @@ from yunohost.app import ( _get_conflicting_apps, ) from yunohost.regenconf import regen_conf, _force_clear_hashes, _process_regen_conf -from yunohost.utils.config import ConfigPanel, Question +from yunohost.utils.configpanel import ConfigPanel +from yunohost.utils.form import BaseOption from yunohost.utils.error import YunohostError, YunohostValidationError from yunohost.log import is_unit_operation @@ -527,7 +528,7 @@ def domain_config_set( """ Apply a new domain configuration """ - Question.operation_logger = operation_logger + BaseOption.operation_logger = operation_logger config = DomainConfigPanel(domain) return config.set(key, value, args, args_file, operation_logger=operation_logger) @@ -537,6 +538,83 @@ class DomainConfigPanel(ConfigPanel): save_path_tpl = f"{DOMAIN_SETTINGS_DIR}/{{entity}}.yml" save_mode = "diff" + def get(self, key="", mode="classic"): + result = super().get(key=key, mode=mode) + + if mode == "full": + for panel, section, option in self._iterate(): + # This injects: + # i18n: domain_config_cert_renew_help + # i18n: domain_config_default_app_help + # i18n: domain_config_xmpp_help + if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + "_help"): + option["help"] = m18n.n( + self.config["i18n"] + "_" + option["id"] + "_help" + ) + return self.config + + return result + + def _get_raw_config(self): + toml = super()._get_raw_config() + + toml["feature"]["xmpp"]["xmpp"]["default"] = ( + 1 if self.entity == _get_maindomain() else 0 + ) + + # Optimize wether or not to load the DNS section, + # e.g. we don't want to trigger the whole _get_registary_config_section + # when just getting the current value from the feature section + filter_key = self.filter_key.split(".") if self.filter_key != "" else [] + if not filter_key or filter_key[0] == "dns": + from yunohost.dns import _get_registrar_config_section + + toml["dns"]["registrar"] = _get_registrar_config_section(self.entity) + + # FIXME: Ugly hack to save the registar id/value and reinject it in _get_raw_settings ... 
+ self.registar_id = toml["dns"]["registrar"]["registrar"]["value"] + del toml["dns"]["registrar"]["registrar"]["value"] + + # Cert stuff + if not filter_key or filter_key[0] == "cert": + from yunohost.certificate import certificate_status + + status = certificate_status([self.entity], full=True)["certificates"][ + self.entity + ] + + toml["cert"]["cert"]["cert_summary"]["style"] = status["style"] + + # i18n: domain_config_cert_summary_expired + # i18n: domain_config_cert_summary_selfsigned + # i18n: domain_config_cert_summary_abouttoexpire + # i18n: domain_config_cert_summary_ok + # i18n: domain_config_cert_summary_letsencrypt + toml["cert"]["cert"]["cert_summary"]["ask"] = m18n.n( + f"domain_config_cert_summary_{status['summary']}" + ) + + # FIXME: Ugly hack to save the cert status and reinject it in _get_raw_settings ... + self.cert_status = status + + return toml + + def _get_raw_settings(self): + # TODO add mechanism to share some settings with other domains on the same zone + super()._get_raw_settings() + + # FIXME: Ugly hack to save the registar id/value and reinject it in _get_raw_settings ... + filter_key = self.filter_key.split(".") if self.filter_key != "" else [] + if not filter_key or filter_key[0] == "dns": + self.values["registrar"] = self.registar_id + + # FIXME: Ugly hack to save the cert status and reinject it in _get_raw_settings ... + if not filter_key or filter_key[0] == "cert": + self.values["cert_validity"] = self.cert_status["validity"] + self.values["cert_issuer"] = self.cert_status["CA_type"] + self.values["acme_eligible"] = self.cert_status["ACME_eligible"] + self.values["summary"] = self.cert_status["summary"] + def _apply(self): if ( "default_app" in self.future_values @@ -585,83 +663,6 @@ class DomainConfigPanel(ConfigPanel): if stuff_to_regen_conf: regen_conf(names=stuff_to_regen_conf) - def _get_toml(self): - toml = super()._get_toml() - - toml["feature"]["xmpp"]["xmpp"]["default"] = ( - 1 if self.entity == _get_maindomain() else 0 - ) - - # Optimize wether or not to load the DNS section, - # e.g. we don't want to trigger the whole _get_registary_config_section - # when just getting the current value from the feature section - filter_key = self.filter_key.split(".") if self.filter_key != "" else [] - if not filter_key or filter_key[0] == "dns": - from yunohost.dns import _get_registrar_config_section - - toml["dns"]["registrar"] = _get_registrar_config_section(self.entity) - - # FIXME: Ugly hack to save the registar id/value and reinject it in _load_current_values ... - self.registar_id = toml["dns"]["registrar"]["registrar"]["value"] - del toml["dns"]["registrar"]["registrar"]["value"] - - # Cert stuff - if not filter_key or filter_key[0] == "cert": - from yunohost.certificate import certificate_status - - status = certificate_status([self.entity], full=True)["certificates"][ - self.entity - ] - - toml["cert"]["cert"]["cert_summary"]["style"] = status["style"] - - # i18n: domain_config_cert_summary_expired - # i18n: domain_config_cert_summary_selfsigned - # i18n: domain_config_cert_summary_abouttoexpire - # i18n: domain_config_cert_summary_ok - # i18n: domain_config_cert_summary_letsencrypt - toml["cert"]["cert"]["cert_summary"]["ask"] = m18n.n( - f"domain_config_cert_summary_{status['summary']}" - ) - - # FIXME: Ugly hack to save the cert status and reinject it in _load_current_values ... 
- self.cert_status = status - - return toml - - def get(self, key="", mode="classic"): - result = super().get(key=key, mode=mode) - - if mode == "full": - for panel, section, option in self._iterate(): - # This injects: - # i18n: domain_config_cert_renew_help - # i18n: domain_config_default_app_help - # i18n: domain_config_xmpp_help - if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + "_help"): - option["help"] = m18n.n( - self.config["i18n"] + "_" + option["id"] + "_help" - ) - return self.config - - return result - - def _load_current_values(self): - # TODO add mechanism to share some settings with other domains on the same zone - super()._load_current_values() - - # FIXME: Ugly hack to save the registar id/value and reinject it in _load_current_values ... - filter_key = self.filter_key.split(".") if self.filter_key != "" else [] - if not filter_key or filter_key[0] == "dns": - self.values["registrar"] = self.registar_id - - # FIXME: Ugly hack to save the cert status and reinject it in _load_current_values ... - if not filter_key or filter_key[0] == "cert": - self.values["cert_validity"] = self.cert_status["validity"] - self.values["cert_issuer"] = self.cert_status["CA_type"] - self.values["acme_eligible"] = self.cert_status["ACME_eligible"] - self.values["summary"] = self.cert_status["summary"] - def domain_action_run(domain, action, args=None): import urllib.parse diff --git a/src/firewall.py b/src/firewall.py index 310d263c6..ccd7e6d88 100644 --- a/src/firewall.py +++ b/src/firewall.py @@ -402,7 +402,13 @@ def firewall_upnp(action="status", no_refresh=False): # Discover UPnP device(s) logger.debug("discovering UPnP devices...") - nb_dev = upnpc.discover() + try: + nb_dev = upnpc.discover() + except Exception: + logger.warning("Failed to find any UPnP device on the network") + nb_dev = -1 + enabled = False + logger.debug("found %d UPnP device(s)", int(nb_dev)) if nb_dev < 1: logger.error(m18n.n("upnp_dev_not_found")) diff --git a/src/hook.py b/src/hook.py index 7f4cc28d4..4b07d1c17 100644 --- a/src/hook.py +++ b/src/hook.py @@ -452,6 +452,8 @@ def _hook_exec_bash(path, args, chdir, env, user, return_format, loggers): logger.debug("Executing command '%s'" % command) _env = os.environ.copy() + if "YNH_CONTEXT" in _env: + del _env["YNH_CONTEXT"] _env.update(env) # Remove the 'HOME' var which is causing some inconsistencies between diff --git a/src/regenconf.py b/src/regenconf.py index 69bedb262..74bbdb17c 100644 --- a/src/regenconf.py +++ b/src/regenconf.py @@ -139,6 +139,7 @@ def regen_conf( env["YNH_MAIN_DOMAINS"] = " ".join( domain_list(exclude_subdomains=True)["domains"] ) + env["YNH_CONTEXT"] = "regenconf" pre_result = hook_callback("conf_regen", names, pre_callback=_pre_call, env=env) diff --git a/src/settings.py b/src/settings.py index 4905049d6..6690ab3fd 100644 --- a/src/settings.py +++ b/src/settings.py @@ -21,7 +21,8 @@ import subprocess from moulinette import m18n from yunohost.utils.error import YunohostError, YunohostValidationError -from yunohost.utils.config import ConfigPanel, Question +from yunohost.utils.configpanel import ConfigPanel +from yunohost.utils.form import BaseOption from moulinette.utils.log import getActionLogger from yunohost.regenconf import regen_conf from yunohost.firewall import firewall_reload @@ -81,7 +82,7 @@ def settings_set(operation_logger, key=None, value=None, args=None, args_file=No value -- New value """ - Question.operation_logger = operation_logger + BaseOption.operation_logger = operation_logger settings = SettingsConfigPanel() 
key = translate_legacy_settings_to_configpanel_settings(key) return settings.set(key, value, args, args_file, operation_logger=operation_logger) @@ -124,6 +125,93 @@ class SettingsConfigPanel(ConfigPanel): def __init__(self, config_path=None, save_path=None, creation=False): super().__init__("settings") + def get(self, key="", mode="classic"): + result = super().get(key=key, mode=mode) + + if mode == "full": + for panel, section, option in self._iterate(): + if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + "_help"): + option["help"] = m18n.n( + self.config["i18n"] + "_" + option["id"] + "_help" + ) + return self.config + + # Dirty hack to let settings_get() to work from a python script + if isinstance(result, str) and result in ["True", "False"]: + result = bool(result == "True") + + return result + + def reset(self, key="", operation_logger=None): + self.filter_key = key + + # Read config panel toml + self._get_config_panel() + + if not self.config: + raise YunohostValidationError("config_no_panel") + + # Replace all values with default values + self.values = self._get_default_values() + + BaseOption.operation_logger = operation_logger + + if operation_logger: + operation_logger.start() + + try: + self._apply() + except YunohostError: + raise + # Script got manually interrupted ... + # N.B. : KeyboardInterrupt does not inherit from Exception + except (KeyboardInterrupt, EOFError): + error = m18n.n("operation_interrupted") + logger.error(m18n.n("config_apply_failed", error=error)) + raise + # Something wrong happened in Yunohost's code (most probably hook_exec) + except Exception: + import traceback + + error = m18n.n("unexpected_error", error="\n" + traceback.format_exc()) + logger.error(m18n.n("config_apply_failed", error=error)) + raise + + logger.success(m18n.n("global_settings_reset_success")) + operation_logger.success() + + def _get_raw_config(self): + toml = super()._get_raw_config() + + # Dynamic choice list for portal themes + THEMEDIR = "/usr/share/ssowat/portal/assets/themes/" + try: + themes = [d for d in os.listdir(THEMEDIR) if os.path.isdir(THEMEDIR + d)] + except Exception: + themes = ["unsplash", "vapor", "light", "default", "clouds"] + toml["misc"]["portal"]["portal_theme"]["choices"] = themes + + return toml + + def _get_raw_settings(self): + super()._get_raw_settings() + + # Specific logic for those settings who are "virtual" settings + # and only meant to have a custom setter mapped to tools_rootpw + self.values["root_password"] = "" + self.values["root_password_confirm"] = "" + + # Specific logic for virtual setting "passwordless_sudo" + try: + from yunohost.utils.ldap import _get_ldap_interface + + ldap = _get_ldap_interface() + self.values["passwordless_sudo"] = "!authenticate" in ldap.search( + "ou=sudo", "cn=admins", ["sudoOption"] + )[0].get("sudoOption", []) + except Exception: + self.values["passwordless_sudo"] = False + def _apply(self): root_password = self.new_values.pop("root_password", None) root_password_confirm = self.new_values.pop("root_password_confirm", None) @@ -169,93 +257,6 @@ class SettingsConfigPanel(ConfigPanel): logger.error(f"Post-change hook for setting failed : {e}") raise - def _get_toml(self): - toml = super()._get_toml() - - # Dynamic choice list for portal themes - THEMEDIR = "/usr/share/ssowat/portal/assets/themes/" - try: - themes = [d for d in os.listdir(THEMEDIR) if os.path.isdir(THEMEDIR + d)] - except Exception: - themes = ["unsplash", "vapor", "light", "default", "clouds"] - 
toml["misc"]["portal"]["portal_theme"]["choices"] = themes - - return toml - - def _load_current_values(self): - super()._load_current_values() - - # Specific logic for those settings who are "virtual" settings - # and only meant to have a custom setter mapped to tools_rootpw - self.values["root_password"] = "" - self.values["root_password_confirm"] = "" - - # Specific logic for virtual setting "passwordless_sudo" - try: - from yunohost.utils.ldap import _get_ldap_interface - - ldap = _get_ldap_interface() - self.values["passwordless_sudo"] = "!authenticate" in ldap.search( - "ou=sudo", "cn=admins", ["sudoOption"] - )[0].get("sudoOption", []) - except Exception: - self.values["passwordless_sudo"] = False - - def get(self, key="", mode="classic"): - result = super().get(key=key, mode=mode) - - if mode == "full": - for panel, section, option in self._iterate(): - if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + "_help"): - option["help"] = m18n.n( - self.config["i18n"] + "_" + option["id"] + "_help" - ) - return self.config - - # Dirty hack to let settings_get() to work from a python script - if isinstance(result, str) and result in ["True", "False"]: - result = bool(result == "True") - - return result - - def reset(self, key="", operation_logger=None): - self.filter_key = key - - # Read config panel toml - self._get_config_panel() - - if not self.config: - raise YunohostValidationError("config_no_panel") - - # Replace all values with default values - self.values = self._get_default_values() - - Question.operation_logger = operation_logger - - if operation_logger: - operation_logger.start() - - try: - self._apply() - except YunohostError: - raise - # Script got manually interrupted ... - # N.B. : KeyboardInterrupt does not inherit from Exception - except (KeyboardInterrupt, EOFError): - error = m18n.n("operation_interrupted") - logger.error(m18n.n("config_apply_failed", error=error)) - raise - # Something wrong happened in Yunohost's code (most probably hook_exec) - except Exception: - import traceback - - error = m18n.n("unexpected_error", error="\n" + traceback.format_exc()) - logger.error(m18n.n("config_apply_failed", error=error)) - raise - - logger.success(m18n.n("global_settings_reset_success")) - operation_logger.success() - # Meant to be a dict of setting_name -> function to call post_change_hooks = {} diff --git a/src/ssh.py b/src/ssh.py index 2ae5ffe46..8526e278f 100644 --- a/src/ssh.py +++ b/src/ssh.py @@ -81,7 +81,7 @@ def user_ssh_add_key(username, key, comment): parents=True, uid=user["uid"][0], ) - chmod(os.path.join(user["homeDirectory"][0], ".ssh"), 0o600) + chmod(os.path.join(user["homeDirectory"][0], ".ssh"), 0o700) # create empty file to set good permissions write_to_file(authorized_keys_file, "") diff --git a/src/tests/conftest.py b/src/tests/conftest.py index 393c33564..abba2ee19 100644 --- a/src/tests/conftest.py +++ b/src/tests/conftest.py @@ -1,5 +1,6 @@ import os import pytest +from unittest.mock import Mock import moulinette from moulinette import m18n, Moulinette @@ -23,11 +24,15 @@ def get_test_apps_dir(): @contextmanager -def message(mocker, key, **kwargs): - mocker.spy(m18n, "n") +def message(key, **kwargs): + m = Mock(wraps=m18n.n) + old_m18n = m18n.n + m18n.n = m yield - m18n.n.assert_any_call(key, **kwargs) - + try: + m.assert_any_call(key, **kwargs) + finally: + m18n.n = old_m18n @contextmanager def raiseYunohostError(mocker, key, **kwargs): diff --git a/src/tests/test_app_catalog.py b/src/tests/test_app_catalog.py index f7363dabe..40daf5873 
100644 --- a/src/tests/test_app_catalog.py +++ b/src/tests/test_app_catalog.py @@ -5,6 +5,8 @@ import requests_mock import glob import shutil +from .conftest import message + from moulinette import m18n from moulinette.utils.filesystem import read_json, write_to_json, write_to_yaml @@ -258,13 +260,12 @@ def test_apps_catalog_load_with_conflicts_between_lists(mocker): assert "bar" in app_dict.keys() -def test_apps_catalog_load_with_oudated_api_version(mocker): +def test_apps_catalog_load_with_outdated_api_version(): # Initialize ... _initialize_apps_catalog_system() # Update with requests_mock.Mocker() as m: - mocker.spy(m18n, "n") m.register_uri("GET", APPS_CATALOG_DEFAULT_URL_FULL, text=DUMMY_APP_CATALOG) _update_apps_catalog() @@ -282,10 +283,8 @@ def test_apps_catalog_load_with_oudated_api_version(mocker): with requests_mock.Mocker() as m: # Mock the server response with a dummy apps catalog m.register_uri("GET", APPS_CATALOG_DEFAULT_URL_FULL, text=DUMMY_APP_CATALOG) - - mocker.spy(m18n, "n") - app_dict = _load_apps_catalog()["apps"] - m18n.n.assert_any_call("apps_catalog_update_success") + with message("apps_catalog_update_success"): + app_dict = _load_apps_catalog()["apps"] assert "foo" in app_dict.keys() assert "bar" in app_dict.keys() diff --git a/src/tests/test_apps.py b/src/tests/test_apps.py index 830aabf61..97254594a 100644 --- a/src/tests/test_apps.py +++ b/src/tests/test_apps.py @@ -112,7 +112,7 @@ def app_expected_files(domain, app): if app.startswith("legacy_app"): yield "/var/www/%s/index.html" % app yield "/etc/yunohost/apps/%s/settings.yml" % app - if "manifestv2" in app: + if "manifestv2" in app or "my_webapp" in app: yield "/etc/yunohost/apps/%s/manifest.toml" % app else: yield "/etc/yunohost/apps/%s/manifest.json" % app @@ -330,7 +330,7 @@ def test_app_from_catalog(): app_install( "my_webapp", - args=f"domain={main_domain}&path=/site&with_sftp=0&password=superpassword&is_public=1&with_mysql=0", + args=f"domain={main_domain}&path=/site&with_sftp=0&password=superpassword&is_public=1&with_mysql=0&phpversion=none", ) app_map_ = app_map(raw=True) assert main_domain in app_map_ @@ -392,9 +392,9 @@ def test_legacy_app_install_private(secondary_domain): assert app_is_not_installed(secondary_domain, "legacy_app") -def test_legacy_app_install_unknown_domain(mocker): +def test_legacy_app_install_unknown_domain(): with pytest.raises(YunohostError): - with message(mocker, "app_argument_invalid"): + with message("app_argument_invalid"): install_legacy_app("whatever.nope", "/legacy") assert app_is_not_installed("whatever.nope", "legacy_app") @@ -421,12 +421,12 @@ def test_legacy_app_install_multiple_instances(secondary_domain): assert app_is_not_installed(secondary_domain, "legacy_app__2") -def test_legacy_app_install_path_unavailable(mocker, secondary_domain): +def test_legacy_app_install_path_unavailable(secondary_domain): # These will be removed in teardown install_legacy_app(secondary_domain, "/legacy") with pytest.raises(YunohostError): - with message(mocker, "app_location_unavailable"): + with message("app_location_unavailable"): install_legacy_app(secondary_domain, "/") assert app_is_installed(secondary_domain, "legacy_app") @@ -442,19 +442,19 @@ def test_legacy_app_install_with_nginx_down(mocker, secondary_domain): install_legacy_app(secondary_domain, "/legacy") -def test_legacy_app_failed_install(mocker, secondary_domain): +def test_legacy_app_failed_install(secondary_domain): # This will conflict with the folder that the app # attempts to create, making the install fail 
mkdir("/var/www/legacy_app/", 0o750) with pytest.raises(YunohostError): - with message(mocker, "app_install_script_failed"): + with message("app_install_script_failed"): install_legacy_app(secondary_domain, "/legacy") assert app_is_not_installed(secondary_domain, "legacy_app") -def test_legacy_app_failed_remove(mocker, secondary_domain): +def test_legacy_app_failed_remove(secondary_domain): install_legacy_app(secondary_domain, "/legacy") # The remove script runs with set -eu and attempt to remove this @@ -486,52 +486,52 @@ def test_full_domain_app_with_conflicts(mocker, secondary_domain): install_full_domain_app(secondary_domain) -def test_systemfuckedup_during_app_install(mocker, secondary_domain): +def test_systemfuckedup_during_app_install(secondary_domain): with pytest.raises(YunohostError): - with message(mocker, "app_install_failed"): - with message(mocker, "app_action_broke_system"): + with message("app_install_failed"): + with message("app_action_broke_system"): install_break_yo_system(secondary_domain, breakwhat="install") assert app_is_not_installed(secondary_domain, "break_yo_system") -def test_systemfuckedup_during_app_remove(mocker, secondary_domain): +def test_systemfuckedup_during_app_remove(secondary_domain): install_break_yo_system(secondary_domain, breakwhat="remove") with pytest.raises(YunohostError): - with message(mocker, "app_action_broke_system"): - with message(mocker, "app_removed"): + with message("app_action_broke_system"): + with message("app_removed"): app_remove("break_yo_system") assert app_is_not_installed(secondary_domain, "break_yo_system") -def test_systemfuckedup_during_app_install_and_remove(mocker, secondary_domain): +def test_systemfuckedup_during_app_install_and_remove(secondary_domain): with pytest.raises(YunohostError): - with message(mocker, "app_install_failed"): - with message(mocker, "app_action_broke_system"): + with message("app_install_failed"): + with message("app_action_broke_system"): install_break_yo_system(secondary_domain, breakwhat="everything") assert app_is_not_installed(secondary_domain, "break_yo_system") -def test_systemfuckedup_during_app_upgrade(mocker, secondary_domain): +def test_systemfuckedup_during_app_upgrade(secondary_domain): install_break_yo_system(secondary_domain, breakwhat="upgrade") with pytest.raises(YunohostError): - with message(mocker, "app_action_broke_system"): + with message("app_action_broke_system"): app_upgrade( "break_yo_system", file=os.path.join(get_test_apps_dir(), "break_yo_system_ynh"), ) -def test_failed_multiple_app_upgrade(mocker, secondary_domain): +def test_failed_multiple_app_upgrade(secondary_domain): install_legacy_app(secondary_domain, "/legacy") install_break_yo_system(secondary_domain, breakwhat="upgrade") with pytest.raises(YunohostError): - with message(mocker, "app_not_upgraded"): + with message("app_not_upgraded"): app_upgrade( ["break_yo_system", "legacy_app"], file={ @@ -548,37 +548,51 @@ class TestMockedAppUpgrade: This class is here to test the logical workflow of app_upgrade and thus mock nearly all side effects """ + def setup_method(self, method): self.apps_list = [] self.upgradable_apps_list = [] def _mock_app_upgrade(self, mocker): # app list - self._installed_apps = mocker.patch("yunohost.app._installed_apps", side_effect=lambda: self.apps_list) + self._installed_apps = mocker.patch( + "yunohost.app._installed_apps", side_effect=lambda: self.apps_list + ) # just check if an app is really installed - mocker.patch("yunohost.app._is_installed", side_effect=lambda app: app in 
self.apps_list) + mocker.patch( + "yunohost.app._is_installed", side_effect=lambda app: app in self.apps_list + ) # app_dict = - mocker.patch("yunohost.app.app_info", side_effect=lambda app, full: { - "upgradable": "yes" if app in self.upgradable_apps_list else "no", - "manifest": {"id": app}, - "version": "?", - }) + mocker.patch( + "yunohost.app.app_info", + side_effect=lambda app, full: { + "upgradable": "yes" if app in self.upgradable_apps_list else "no", + "manifest": {"id": app}, + "version": "?", + }, + ) def custom_extract_app(app): - return ({ - "version": "?", - "packaging_format": 1, - "id": app, - "notifications": {"PRE_UPGRADE": None, "POST_UPGRADE": None}, - }, "MOCKED_BY_TEST") + return ( + { + "version": "?", + "packaging_format": 1, + "id": app, + "notifications": {"PRE_UPGRADE": None, "POST_UPGRADE": None}, + }, + "MOCKED_BY_TEST", + ) # return (manifest, extracted_app_folder) mocker.patch("yunohost.app._extract_app", side_effect=custom_extract_app) # for [(name, passed, values, err), ...] in - mocker.patch("yunohost.app._check_manifest_requirements", return_value=[(None, True, None, None)]) + mocker.patch( + "yunohost.app._check_manifest_requirements", + return_value=[(None, True, None, None)], + ) # raise on failure mocker.patch("yunohost.app._assert_system_is_sane_for_app", return_value=True) @@ -593,12 +607,15 @@ class TestMockedAppUpgrade: mocker.patch("os.path.exists", side_effect=custom_os_path_exists) # manifest = - mocker.patch("yunohost.app.read_toml", return_value={ - "arguments": {"install": []} - }) + mocker.patch( + "yunohost.app.read_toml", return_value={"arguments": {"install": []}} + ) # install_failed, failure_message_with_debug_instructions = - self.hook_exec_with_script_debug_if_failure = mocker.patch("yunohost.hook.hook_exec_with_script_debug_if_failure", return_value=(False, "")) + self.hook_exec_with_script_debug_if_failure = mocker.patch( + "yunohost.hook.hook_exec_with_script_debug_if_failure", + return_value=(False, ""), + ) # settings = mocker.patch("yunohost.app._get_app_settings", return_value={}) # return nothing @@ -644,7 +661,12 @@ class TestMockedAppUpgrade: app_upgrade() self.hook_exec_with_script_debug_if_failure.assert_called_once() - assert self.hook_exec_with_script_debug_if_failure.call_args.kwargs["env"]["YNH_APP_ID"] == "some_app" + assert ( + self.hook_exec_with_script_debug_if_failure.call_args.kwargs["env"][ + "YNH_APP_ID" + ] + == "some_app" + ) def test_app_upgrade_continue_on_failure(self, mocker): self._mock_app_upgrade(mocker) @@ -682,7 +704,10 @@ class TestMockedAppUpgrade: raise Exception() return True - mocker.patch("yunohost.app._assert_system_is_sane_for_app", side_effect=_assert_system_is_sane_for_app) + mocker.patch( + "yunohost.app._assert_system_is_sane_for_app", + side_effect=_assert_system_is_sane_for_app, + ) with pytest.raises(YunohostError): app_upgrade() diff --git a/src/tests/test_appurl.py b/src/tests/test_appurl.py index 351bb4e83..8e5b14d34 100644 --- a/src/tests/test_appurl.py +++ b/src/tests/test_appurl.py @@ -202,10 +202,6 @@ def test_normalize_permission_path_with_bad_regex(): ) # Full Regex - with pytest.raises(YunohostError): - _validate_and_sanitize_permission_url( - "re:" + maindomain + "/yolo?+/", maindomain + "/path", "test_permission" - ) with pytest.raises(YunohostError): _validate_and_sanitize_permission_url( "re:" + maindomain + "/yolo[1-9]**/", diff --git a/src/tests/test_backuprestore.py b/src/tests/test_backuprestore.py index 413d44470..873deec7d 100644 --- 
a/src/tests/test_backuprestore.py +++ b/src/tests/test_backuprestore.py @@ -55,7 +55,7 @@ def setup_function(function): if "with_legacy_app_installed" in markers: assert not app_is_installed("legacy_app") - install_app("legacy_app_ynh", "/yolo") + install_app("legacy_app_ynh", "/yolo", "&is_public=true") assert app_is_installed("legacy_app") if "with_backup_recommended_app_installed" in markers: @@ -236,10 +236,10 @@ def add_archive_system_from_4p2(): # -def test_backup_only_ldap(mocker): +def test_backup_only_ldap(): # Create the backup name = random_ascii(8) - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create(name=name, system=["conf_ldap"], apps=None) archives = backup_list()["archives"] @@ -253,7 +253,7 @@ def test_backup_only_ldap(mocker): def test_backup_system_part_that_does_not_exists(mocker): # Create the backup - with message(mocker, "backup_hook_unknown", hook="doesnt_exist"): + with message("backup_hook_unknown", hook="doesnt_exist"): with raiseYunohostError(mocker, "backup_nothings_done"): backup_create(system=["doesnt_exist"], apps=None) @@ -263,10 +263,10 @@ def test_backup_system_part_that_does_not_exists(mocker): # -def test_backup_and_restore_all_sys(mocker): +def test_backup_and_restore_all_sys(): name = random_ascii(8) # Create the backup - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create(name=name, system=[], apps=None) archives = backup_list()["archives"] @@ -284,7 +284,7 @@ def test_backup_and_restore_all_sys(mocker): assert not os.path.exists("/etc/ssowat/conf.json") # Restore the backup - with message(mocker, "restore_complete"): + with message("restore_complete"): backup_restore(name=archives[0], force=True, system=[], apps=None) # Check ssowat conf is back @@ -297,17 +297,17 @@ def test_backup_and_restore_all_sys(mocker): @pytest.mark.with_system_archive_from_4p2 -def test_restore_system_from_Ynh4p2(monkeypatch, mocker): +def test_restore_system_from_Ynh4p2(monkeypatch): name = random_ascii(8) # Backup current system - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create(name=name, system=[], apps=None) archives = backup_list()["archives"] assert len(archives) == 2 # Restore system archive from 3.8 try: - with message(mocker, "restore_complete"): + with message("restore_complete"): backup_restore( name=backup_list()["archives"][1], system=[], apps=None, force=True ) @@ -336,7 +336,7 @@ def test_backup_script_failure_handling(monkeypatch, mocker): # with the expected error message key monkeypatch.setattr("yunohost.backup.hook_exec", custom_hook_exec) - with message(mocker, "backup_app_failed", app="backup_recommended_app"): + with message("backup_app_failed", app="backup_recommended_app"): with raiseYunohostError(mocker, "backup_nothings_done"): backup_create(system=None, apps=["backup_recommended_app"]) @@ -363,7 +363,7 @@ def test_backup_not_enough_free_space(monkeypatch, mocker): def test_backup_app_not_installed(mocker): assert not _is_installed("wordpress") - with message(mocker, "unbackup_app", app="wordpress"): + with message("unbackup_app", app="wordpress"): with raiseYunohostError(mocker, "backup_nothings_done"): backup_create(system=None, apps=["wordpress"]) @@ -375,14 +375,14 @@ def test_backup_app_with_no_backup_script(mocker): assert not os.path.exists(backup_script) with message( - mocker, "backup_with_no_backup_script_for_app", app="backup_recommended_app" + 
"backup_with_no_backup_script_for_app", app="backup_recommended_app" ): with raiseYunohostError(mocker, "backup_nothings_done"): backup_create(system=None, apps=["backup_recommended_app"]) @pytest.mark.with_backup_recommended_app_installed -def test_backup_app_with_no_restore_script(mocker): +def test_backup_app_with_no_restore_script(): restore_script = "/etc/yunohost/apps/backup_recommended_app/scripts/restore" os.system("rm %s" % restore_script) assert not os.path.exists(restore_script) @@ -391,16 +391,16 @@ def test_backup_app_with_no_restore_script(mocker): # user... with message( - mocker, "backup_with_no_restore_script_for_app", app="backup_recommended_app" + "backup_with_no_restore_script_for_app", app="backup_recommended_app" ): backup_create(system=None, apps=["backup_recommended_app"]) @pytest.mark.clean_opt_dir -def test_backup_with_different_output_directory(mocker): +def test_backup_with_different_output_directory(): name = random_ascii(8) # Create the backup - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create( system=["conf_ynh_settings"], apps=None, @@ -420,10 +420,10 @@ def test_backup_with_different_output_directory(mocker): @pytest.mark.clean_opt_dir -def test_backup_using_copy_method(mocker): +def test_backup_using_copy_method(): # Create the backup name = random_ascii(8) - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create( system=["conf_ynh_settings"], apps=None, @@ -442,8 +442,8 @@ def test_backup_using_copy_method(mocker): @pytest.mark.with_wordpress_archive_from_4p2 @pytest.mark.with_custom_domain("yolo.test") -def test_restore_app_wordpress_from_Ynh4p2(mocker): - with message(mocker, "restore_complete"): +def test_restore_app_wordpress_from_Ynh4p2(): + with message("restore_complete"): backup_restore( system=None, name=backup_list()["archives"][0], apps=["wordpress"] ) @@ -461,7 +461,7 @@ def test_restore_app_script_failure_handling(monkeypatch, mocker): assert not _is_installed("wordpress") - with message(mocker, "app_restore_script_failed"): + with message("app_restore_script_failed"): with raiseYunohostError(mocker, "restore_nothings_done"): backup_restore( system=None, name=backup_list()["archives"][0], apps=["wordpress"] @@ -494,7 +494,7 @@ def test_restore_app_not_in_backup(mocker): assert not _is_installed("wordpress") assert not _is_installed("yoloswag") - with message(mocker, "backup_archive_app_not_found", app="yoloswag"): + with message("backup_archive_app_not_found", app="yoloswag"): with raiseYunohostError(mocker, "restore_nothings_done"): backup_restore( system=None, name=backup_list()["archives"][0], apps=["yoloswag"] @@ -509,7 +509,7 @@ def test_restore_app_not_in_backup(mocker): def test_restore_app_already_installed(mocker): assert not _is_installed("wordpress") - with message(mocker, "restore_complete"): + with message("restore_complete"): backup_restore( system=None, name=backup_list()["archives"][0], apps=["wordpress"] ) @@ -525,22 +525,22 @@ def test_restore_app_already_installed(mocker): @pytest.mark.with_legacy_app_installed -def test_backup_and_restore_legacy_app(mocker): - _test_backup_and_restore_app(mocker, "legacy_app") +def test_backup_and_restore_legacy_app(): + _test_backup_and_restore_app("legacy_app") @pytest.mark.with_backup_recommended_app_installed -def test_backup_and_restore_recommended_app(mocker): - _test_backup_and_restore_app(mocker, "backup_recommended_app") +def 
test_backup_and_restore_recommended_app(): + _test_backup_and_restore_app("backup_recommended_app") @pytest.mark.with_backup_recommended_app_installed_with_ynh_restore -def test_backup_and_restore_with_ynh_restore(mocker): - _test_backup_and_restore_app(mocker, "backup_recommended_app") +def test_backup_and_restore_with_ynh_restore(): + _test_backup_and_restore_app("backup_recommended_app") @pytest.mark.with_permission_app_installed -def test_backup_and_restore_permission_app(mocker): +def test_backup_and_restore_permission_app(): res = user_permission_list(full=True)["permissions"] assert "permissions_app.main" in res assert "permissions_app.admin" in res @@ -554,7 +554,7 @@ def test_backup_and_restore_permission_app(mocker): assert res["permissions_app.admin"]["allowed"] == ["alice"] assert res["permissions_app.dev"]["allowed"] == [] - _test_backup_and_restore_app(mocker, "permissions_app") + _test_backup_and_restore_app("permissions_app") res = user_permission_list(full=True)["permissions"] assert "permissions_app.main" in res @@ -570,10 +570,10 @@ def test_backup_and_restore_permission_app(mocker): assert res["permissions_app.dev"]["allowed"] == [] -def _test_backup_and_restore_app(mocker, app): +def _test_backup_and_restore_app(app): # Create a backup of this app name = random_ascii(8) - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create(name=name, system=None, apps=[app]) archives = backup_list()["archives"] @@ -590,7 +590,7 @@ def _test_backup_and_restore_app(mocker, app): assert app + ".main" not in user_permission_list()["permissions"] # Restore the app - with message(mocker, "restore_complete"): + with message("restore_complete"): backup_restore(system=None, name=archives[0], apps=[app]) assert app_is_installed(app) @@ -631,19 +631,19 @@ def test_restore_archive_with_bad_archive(mocker): clean_tmp_backup_directory() -def test_restore_archive_with_custom_hook(mocker): +def test_restore_archive_with_custom_hook(): custom_restore_hook_folder = os.path.join(CUSTOM_HOOK_FOLDER, "restore") os.system("touch %s/99-yolo" % custom_restore_hook_folder) # Backup with custom hook system name = random_ascii(8) - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create(name=name, system=[], apps=None) archives = backup_list()["archives"] assert len(archives) == 1 # Restore system with custom hook - with message(mocker, "restore_complete"): + with message("restore_complete"): backup_restore( name=backup_list()["archives"][0], system=[], apps=None, force=True ) @@ -651,7 +651,7 @@ def test_restore_archive_with_custom_hook(mocker): os.system("rm %s/99-yolo" % custom_restore_hook_folder) -def test_backup_binds_are_readonly(mocker, monkeypatch): +def test_backup_binds_are_readonly(monkeypatch): def custom_mount_and_backup(self): self._organize_files() @@ -676,5 +676,5 @@ def test_backup_binds_are_readonly(mocker, monkeypatch): # Create the backup name = random_ascii(8) - with message(mocker, "backup_created", name=name): + with message("backup_created", name=name): backup_create(name=name, system=[]) diff --git a/src/tests/test_changeurl.py b/src/tests/test_changeurl.py index 04cb4a1a9..b8ca20355 100644 --- a/src/tests/test_changeurl.py +++ b/src/tests/test_changeurl.py @@ -39,7 +39,7 @@ def check_changeurl_app(path): assert appmap[maindomain][path]["id"] == "change_url_app" r = requests.get( - "https://127.0.0.1%s/" % path, headers={"domain": maindomain}, verify=False + 
"https://127.0.0.1%s/" % path, headers={"Host": maindomain}, verify=False ) assert r.status_code == 200 diff --git a/src/tests/test_ldapauth.py b/src/tests/test_ldapauth.py index 9e3ae36cc..73cb09d27 100644 --- a/src/tests/test_ldapauth.py +++ b/src/tests/test_ldapauth.py @@ -59,7 +59,7 @@ def test_authenticate_with_wrong_password(): assert expected_msg in str(exception) -def test_authenticate_server_down(mocker): +def test_authenticate_server_down(): os.system("systemctl stop slapd && sleep 5") LDAPAuth().authenticate_credentials(credentials="alice:Yunohost") diff --git a/src/tests/test_permission.py b/src/tests/test_permission.py index 10bd018d2..8620e9611 100644 --- a/src/tests/test_permission.py +++ b/src/tests/test_permission.py @@ -435,8 +435,8 @@ def test_permission_list(): # -def test_permission_create_main(mocker): - with message(mocker, "permission_created", permission="site.main"): +def test_permission_create_main(): + with message("permission_created", permission="site.main"): permission_create("site.main", allowed=["all_users"], protected=False) res = user_permission_list(full=True)["permissions"] @@ -446,8 +446,8 @@ def test_permission_create_main(mocker): assert res["site.main"]["protected"] is False -def test_permission_create_extra(mocker): - with message(mocker, "permission_created", permission="site.test"): +def test_permission_create_extra(): + with message("permission_created", permission="site.test"): permission_create("site.test") res = user_permission_list(full=True)["permissions"] @@ -466,8 +466,8 @@ def test_permission_create_with_specific_user(): assert res["site.test"]["allowed"] == ["alice"] -def test_permission_create_with_tile_management(mocker): - with message(mocker, "permission_created", permission="site.main"): +def test_permission_create_with_tile_management(): + with message("permission_created", permission="site.main"): _permission_create_with_dummy_app( "site.main", allowed=["all_users"], @@ -483,8 +483,8 @@ def test_permission_create_with_tile_management(mocker): assert res["site.main"]["show_tile"] is False -def test_permission_create_with_tile_management_with_main_default_value(mocker): - with message(mocker, "permission_created", permission="site.main"): +def test_permission_create_with_tile_management_with_main_default_value(): + with message("permission_created", permission="site.main"): _permission_create_with_dummy_app( "site.main", allowed=["all_users"], @@ -500,8 +500,8 @@ def test_permission_create_with_tile_management_with_main_default_value(mocker): assert res["site.main"]["show_tile"] is True -def test_permission_create_with_tile_management_with_not_main_default_value(mocker): - with message(mocker, "permission_created", permission="wiki.api"): +def test_permission_create_with_tile_management_with_not_main_default_value(): + with message("permission_created", permission="wiki.api"): _permission_create_with_dummy_app( "wiki.api", allowed=["all_users"], @@ -517,8 +517,8 @@ def test_permission_create_with_tile_management_with_not_main_default_value(mock assert res["wiki.api"]["show_tile"] is True -def test_permission_create_with_urls_management_without_url(mocker): - with message(mocker, "permission_created", permission="wiki.api"): +def test_permission_create_with_urls_management_without_url(): + with message("permission_created", permission="wiki.api"): _permission_create_with_dummy_app( "wiki.api", allowed=["all_users"], domain=maindomain, path="/site" ) @@ -530,8 +530,8 @@ def 
test_permission_create_with_urls_management_without_url(mocker): assert res["wiki.api"]["auth_header"] is True -def test_permission_create_with_urls_management_simple_domain(mocker): - with message(mocker, "permission_created", permission="site.main"): +def test_permission_create_with_urls_management_simple_domain(): + with message("permission_created", permission="site.main"): _permission_create_with_dummy_app( "site.main", allowed=["all_users"], @@ -553,8 +553,8 @@ def test_permission_create_with_urls_management_simple_domain(mocker): @pytest.mark.other_domains(number=2) -def test_permission_create_with_urls_management_multiple_domain(mocker): - with message(mocker, "permission_created", permission="site.main"): +def test_permission_create_with_urls_management_multiple_domain(): + with message("permission_created", permission="site.main"): _permission_create_with_dummy_app( "site.main", allowed=["all_users"], @@ -575,14 +575,14 @@ def test_permission_create_with_urls_management_multiple_domain(mocker): assert res["site.main"]["auth_header"] is True -def test_permission_delete(mocker): - with message(mocker, "permission_deleted", permission="wiki.main"): +def test_permission_delete(): + with message("permission_deleted", permission="wiki.main"): permission_delete("wiki.main", force=True) res = user_permission_list()["permissions"] assert "wiki.main" not in res - with message(mocker, "permission_deleted", permission="blog.api"): + with message("permission_deleted", permission="blog.api"): permission_delete("blog.api", force=False) res = user_permission_list()["permissions"] @@ -625,8 +625,8 @@ def test_permission_delete_main_without_force(mocker): # user side functions -def test_permission_add_group(mocker): - with message(mocker, "permission_updated", permission="wiki.main"): +def test_permission_add_group(): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", add="alice") res = user_permission_list(full=True)["permissions"] @@ -634,8 +634,8 @@ def test_permission_add_group(mocker): assert set(res["wiki.main"]["corresponding_users"]) == {"alice", "bob"} -def test_permission_remove_group(mocker): - with message(mocker, "permission_updated", permission="blog.main"): +def test_permission_remove_group(): + with message("permission_updated", permission="blog.main"): user_permission_update("blog.main", remove="alice") res = user_permission_list(full=True)["permissions"] @@ -643,8 +643,8 @@ def test_permission_remove_group(mocker): assert res["blog.main"]["corresponding_users"] == [] -def test_permission_add_and_remove_group(mocker): - with message(mocker, "permission_updated", permission="wiki.main"): +def test_permission_add_and_remove_group(): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", add="alice", remove="all_users") res = user_permission_list(full=True)["permissions"] @@ -652,9 +652,9 @@ def test_permission_add_and_remove_group(mocker): assert res["wiki.main"]["corresponding_users"] == ["alice"] -def test_permission_add_group_already_allowed(mocker): +def test_permission_add_group_already_allowed(): with message( - mocker, "permission_already_allowed", permission="blog.main", group="alice" + "permission_already_allowed", permission="blog.main", group="alice" ): user_permission_update("blog.main", add="alice") @@ -663,9 +663,9 @@ def test_permission_add_group_already_allowed(mocker): assert res["blog.main"]["corresponding_users"] == ["alice"] -def 
test_permission_remove_group_already_not_allowed(mocker): +def test_permission_remove_group_already_not_allowed(): with message( - mocker, "permission_already_disallowed", permission="blog.main", group="bob" + "permission_already_disallowed", permission="blog.main", group="bob" ): user_permission_update("blog.main", remove="bob") @@ -674,8 +674,8 @@ def test_permission_remove_group_already_not_allowed(mocker): assert res["blog.main"]["corresponding_users"] == ["alice"] -def test_permission_reset(mocker): - with message(mocker, "permission_updated", permission="blog.main"): +def test_permission_reset(): + with message("permission_updated", permission="blog.main"): user_permission_reset("blog.main") res = user_permission_list(full=True)["permissions"] @@ -693,42 +693,42 @@ def test_permission_reset_idempotency(): assert set(res["blog.main"]["corresponding_users"]) == {"alice", "bob"} -def test_permission_change_label(mocker): - with message(mocker, "permission_updated", permission="wiki.main"): +def test_permission_change_label(): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", label="New Wiki") res = user_permission_list(full=True)["permissions"] assert res["wiki.main"]["label"] == "New Wiki" -def test_permission_change_label_with_same_value(mocker): - with message(mocker, "permission_updated", permission="wiki.main"): +def test_permission_change_label_with_same_value(): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", label="Wiki") res = user_permission_list(full=True)["permissions"] assert res["wiki.main"]["label"] == "Wiki" -def test_permission_switch_show_tile(mocker): +def test_permission_switch_show_tile(): # Note that from the actionmap the value is passed as string, not as bool # Try with lowercase - with message(mocker, "permission_updated", permission="wiki.main"): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", show_tile="false") res = user_permission_list(full=True)["permissions"] assert res["wiki.main"]["show_tile"] is False # Try with uppercase - with message(mocker, "permission_updated", permission="wiki.main"): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", show_tile="TRUE") res = user_permission_list(full=True)["permissions"] assert res["wiki.main"]["show_tile"] is True -def test_permission_switch_show_tile_with_same_value(mocker): +def test_permission_switch_show_tile_with_same_value(): # Note that from the actionmap the value is passed as string, not as bool - with message(mocker, "permission_updated", permission="wiki.main"): + with message("permission_updated", permission="wiki.main"): user_permission_update("wiki.main", show_tile="True") res = user_permission_list(full=True)["permissions"] @@ -806,7 +806,7 @@ def test_permission_main_url_regex(): def test_permission_main_url_bad_regex(mocker): with raiseYunohostError(mocker, "invalid_regex"): - permission_url("blog.main", url="re:/[a-z]++reboy/.*") + permission_url("blog.main", url="re:/[a-z]+++reboy/.*") @pytest.mark.other_domains(number=1) @@ -837,7 +837,7 @@ def test_permission_add_additional_regex(): def test_permission_add_additional_bad_regex(mocker): with raiseYunohostError(mocker, "invalid_regex"): - permission_url("blog.main", add_url=["re:/[a-z]++reboy/.*"]) + permission_url("blog.main", add_url=["re:/[a-z]+++reboy/.*"]) def test_permission_remove_additional_url(): diff --git a/src/tests/test_questions.py 
b/src/tests/test_questions.py index cf7c3c6e6..e23be9925 100644 --- a/src/tests/test_questions.py +++ b/src/tests/test_questions.py @@ -1,20 +1,28 @@ +import inspect import sys import pytest import os +import tempfile +from contextlib import contextmanager from mock import patch from io import StringIO +from typing import Any, Literal, Sequence, TypedDict, Union + +from _pytest.mark.structures import ParameterSet + from moulinette import Moulinette - -from yunohost import domain, user -from yunohost.utils.config import ( +from yunohost import app, domain, user +from yunohost.utils.form import ( + OPTIONS, ask_questions_and_parse_answers, - PasswordQuestion, - DomainQuestion, - PathQuestion, - BooleanQuestion, - FileQuestion, + DisplayTextOption, + PasswordOption, + DomainOption, + WebPathOption, + BooleanOption, + FileOption, evaluate_simple_js_expression, ) from yunohost.utils.error import YunohostError, YunohostValidationError @@ -44,38 +52,1939 @@ User answers: """ -def test_question_empty(): +# ╭───────────────────────────────────────────────────────╮ +# │ ┌─╮╭─┐╶┬╴╭─╴╷ ╷╶┬╴╭╮╷╭─╮ │ +# │ ├─╯├─┤ │ │ ├─┤ │ ││││╶╮ │ +# │ ╵ ╵ ╵ ╵ ╰─╴╵ ╵╶┴╴╵╰╯╰─╯ │ +# ╰───────────────────────────────────────────────────────╯ + + +@contextmanager +def patch_isatty(isatty): + with patch.object(os, "isatty", return_value=isatty): + yield + + +@contextmanager +def patch_interface(interface: Literal["api", "cli"] = "api"): + with patch.object(Moulinette.interface, "type", interface), patch_isatty( + interface == "cli" + ): + yield + + +@contextmanager +def patch_prompt(return_value): + with patch_interface("cli"), patch.object( + Moulinette, "prompt", return_value=return_value + ) as prompt: + yield prompt + + +@pytest.fixture +def patch_no_tty(): + with patch_isatty(False): + yield + + +@pytest.fixture +def patch_with_tty(): + with patch_isatty(True): + yield + + +# ╭───────────────────────────────────────────────────────╮ +# │ ╭─╴╭─╴┌─╴╭╮╷╭─┐┌─╮╶┬╴╭─╮╭─╴ │ +# │ ╰─╮│ ├─╴│││├─┤├┬╯ │ │ │╰─╮ │ +# │ ╶─╯╰─╴╰─╴╵╰╯╵ ╵╵ ╰╶┴╴╰─╯╶─╯ │ +# ╰───────────────────────────────────────────────────────╯ + + +MinScenario = tuple[Any, Union[Literal["FAIL"], Any]] +PartialScenario = tuple[Any, Union[Literal["FAIL"], Any], dict[str, Any]] +FullScenario = tuple[Any, Union[Literal["FAIL"], Any], dict[str, Any], dict[str, Any]] + +Scenario = Union[ + MinScenario, + PartialScenario, + FullScenario, + "InnerScenario", +] + + +class InnerScenario(TypedDict, total=False): + scenarios: Sequence[Scenario] + raw_options: Sequence[dict[str, Any]] + data: Sequence[dict[str, Any]] + + +# ╭───────────────────────────────────────────────────────╮ +# │ Scenario generators/helpers │ +# ╰───────────────────────────────────────────────────────╯ + + +def get_hydrated_scenarios(raw_options, scenarios, data=[{}]): + """ + Normalize and hydrate a mixed list of scenarios to proper tuple/pytest.param flattened list values. 
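+    Each hydrated entry ends up as a 4-tuple `(intake, output, raw_option, data)`,
+    or an equivalent `pytest.param` carrying its original marks.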
+ + Example:: + scenarios = [ + { + "raw_options": [{}, {"optional": True}], + "scenarios": [ + ("", "value", {"default": "value"}), + *unchanged("value", "other"), + ] + }, + *all_fails(-1, 0, 1, raw_options={"optional": True}), + *xfail(scenarios=[(True, "True"), (False, "False)], reason="..."), + ] + # Is exactly the same as + scenarios = [ + ("", "value", {"default": "value"}), + ("", "value", {"optional": True, "default": "value"}), + ("value", "value", {}), + ("value", "value", {"optional": True}), + ("other", "other", {}), + ("other", "other", {"optional": True}), + (-1, FAIL, {"optional": True}), + (0, FAIL, {"optional": True}), + (1, FAIL, {"optional": True}), + pytest.param(True, "True", {}, marks=pytest.mark.xfail(reason="...")), + pytest.param(False, "False", {}, marks=pytest.mark.xfail(reason="...")), + ] + """ + hydrated_scenarios = [] + for raw_option in raw_options: + for mocked_data in data: + for scenario in scenarios: + if isinstance(scenario, dict): + merged_raw_options = [ + {**raw_option, **raw_opt} + for raw_opt in scenario.get("raw_options", [{}]) + ] + hydrated_scenarios += get_hydrated_scenarios( + merged_raw_options, + scenario["scenarios"], + scenario.get("data", [mocked_data]), + ) + elif isinstance(scenario, ParameterSet): + intake, output, custom_raw_option = ( + scenario.values + if len(scenario.values) == 3 + else (*scenario.values, {}) + ) + merged_raw_option = {**raw_option, **custom_raw_option} + hydrated_scenarios.append( + pytest.param( + intake, + output, + merged_raw_option, + mocked_data, + marks=scenario.marks, + ) + ) + elif isinstance(scenario, tuple): + intake, output, custom_raw_option = ( + scenario if len(scenario) == 3 else (*scenario, {}) + ) + merged_raw_option = {**raw_option, **custom_raw_option} + hydrated_scenarios.append( + (intake, output, merged_raw_option, mocked_data) + ) + else: + raise Exception( + "Test scenario should be tuple(intake, output, raw_option), pytest.param(intake, output, raw_option) or dict(raw_options, scenarios, data)" + ) + + return hydrated_scenarios + + +def generate_test_name(intake, output, raw_option, data): + values_as_str = [] + for value in (intake, output): + if isinstance(value, str) and value != FAIL: + values_as_str.append(f"'{value}'") + elif inspect.isclass(value) and issubclass(value, Exception): + values_as_str.append(value.__name__) + else: + values_as_str.append(value) + name = f"{values_as_str[0]} -> {values_as_str[1]}" + + keys = [ + "=".join( + [ + key, + str(raw_option[key]) + if not isinstance(raw_option[key], str) + else f"'{raw_option[key]}'", + ] + ) + for key in raw_option.keys() + if key not in ("id", "type") + ] + if keys: + name += " (" + ",".join(keys) + ")" + return name + + +def pytest_generate_tests(metafunc): + """ + Pytest test factory that, for each `BaseTest` subclasses, parametrize its + methods if it requires it by checking the method's parameters. + For those and based on their `cls.scenarios`, a series of `pytest.param` are + automaticly injected as test values. 
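+
+    For instance (illustrative values): a `TestNumber` scenario such as `("1", 1)`
+    produces a generated call roughly equivalent to::
+
+        test_scenarios(intake="1", expected_output=1,
+                       raw_option={"type": "number", "id": "number_id"}, data={})
+
+    with a test id like `'1' -> 1` built by `generate_test_name`.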
+ """ + if metafunc.cls and issubclass(metafunc.cls, BaseTest): + argnames = [] + argvalues = [] + ids = [] + fn_params = inspect.signature(metafunc.function).parameters + + for params in [ + ["intake", "expected_output", "raw_option", "data"], + ["intake", "expected_normalized", "raw_option", "data"], + ["intake", "expected_humanized", "raw_option", "data"], + ]: + if all(param in fn_params for param in params): + argnames += params + if params[1] == "expected_output": + # Hydrate scenarios with generic raw_option data + argvalues += get_hydrated_scenarios( + [metafunc.cls.raw_option], metafunc.cls.scenarios + ) + ids += [ + generate_test_name(*args.values) + if isinstance(args, ParameterSet) + else generate_test_name(*args) + for args in argvalues + ] + elif params[1] == "expected_normalized": + argvalues += metafunc.cls.normalized + ids += [ + f"{metafunc.cls.raw_option['type']}-normalize-{scenario[0]}" + for scenario in metafunc.cls.normalized + ] + elif params[1] == "expected_humanized": + argvalues += metafunc.cls.humanized + ids += [ + f"{metafunc.cls.raw_option['type']}-normalize-{scenario[0]}" + for scenario in metafunc.cls.humanized + ] + + metafunc.parametrize(argnames, argvalues, ids=ids) + + +# ╭───────────────────────────────────────────────────────╮ +# │ Scenario helpers │ +# ╰───────────────────────────────────────────────────────╯ + +FAIL = YunohostValidationError + + +def nones( + *nones, output, raw_option: dict[str, Any] = {}, fail_if_required: bool = True +) -> list[PartialScenario]: + """ + Returns common scenarios for ~None values. + - required and required + as default -> `FAIL` + - optional and optional + as default -> `expected_output=None` + """ + return [ + (none, FAIL if fail_if_required else output, base_raw_option | raw_option) # type: ignore + for none in nones + for base_raw_option in ({}, {"default": none}) + ] + [ + (none, output, base_raw_option | raw_option) + for none in nones + for base_raw_option in ({"optional": True}, {"optional": True, "default": none}) + ] + + +def unchanged(*args, raw_option: dict[str, Any] = {}) -> list[PartialScenario]: + """ + Returns a series of params for which output is expected to be the same as its intake + + Example:: + # expect `"value"` to output as `"value"`, etc. + unchanged("value", "yes", "none") + + """ + return [(arg, arg, raw_option.copy()) for arg in args] + + +def all_as(*args, output, raw_option: dict[str, Any] = {}) -> list[PartialScenario]: + """ + Returns a series of params for which output is expected to be the same single value + + Example:: + # expect all values to output as `True` + all_as("y", "yes", 1, True, output=True) + """ + return [(arg, output, raw_option.copy()) for arg in args] + + +def all_fails( + *args, raw_option: dict[str, Any] = {}, error=FAIL +) -> list[PartialScenario]: + """ + Returns a series of params for which output is expected to be failing with validation error + """ + return [(arg, error, raw_option.copy()) for arg in args] + + +def xpass(*, scenarios: list[Scenario], reason="unknown") -> list[Scenario]: + """ + Return a pytest param for which test should have fail but currently passes. + """ + return [ + pytest.param( + *scenario, + marks=pytest.mark.xfail( + reason=f"Currently valid but probably shouldn't. details: {reason}." + ), + ) + for scenario in scenarios + ] + + +def xfail(*, scenarios: list[Scenario], reason="unknown") -> list[Scenario]: + """ + Return a pytest param for which test should have passed but currently fails. 
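+
+    Example::
+        # e.g. expect `"-1"` to parse as `-1` once str -> int coercion is handled
+        xfail(scenarios=[("-1", -1)], reason="str -> int not handled")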
+ """ + return [ + pytest.param( + *scenario, + marks=pytest.mark.xfail( + reason=f"Currently invalid but should probably pass. details: {reason}." + ), + ) + for scenario in scenarios + ] + + +# ╭───────────────────────────────────────────────────────╮ +# │ ╶┬╴┌─╴╭─╴╶┬╴╭─╴ │ +# │ │ ├─╴╰─╮ │ ╰─╮ │ +# │ ╵ ╰─╴╶─╯ ╵ ╶─╯ │ +# ╰───────────────────────────────────────────────────────╯ + + +def _fill_or_prompt_one_option(raw_option, intake): + raw_option = raw_option.copy() + id_ = raw_option.pop("id") + options = {id_: raw_option} + answers = {id_: intake} if intake is not None else {} + + option = ask_questions_and_parse_answers(options, answers)[0] + + return (option, option.value) + + +def _test_value_is_expected_output(value, expected_output): + """ + Properly compares bools and None + """ + if isinstance(expected_output, bool) or expected_output is None: + assert value is expected_output + else: + assert value == expected_output + + +def _test_intake(raw_option, intake, expected_output): + option, value = _fill_or_prompt_one_option(raw_option, intake) + + _test_value_is_expected_output(value, expected_output) + + +def _test_intake_may_fail(raw_option, intake, expected_output): + if inspect.isclass(expected_output) and issubclass(expected_output, Exception): + with pytest.raises(expected_output): + _fill_or_prompt_one_option(raw_option, intake) + else: + _test_intake(raw_option, intake, expected_output) + + +class BaseTest: + raw_option: dict[str, Any] = {} + prefill: dict[Literal["raw_option", "prefill", "intake"], Any] + scenarios: list[Scenario] + + # fmt: off + # scenarios = [ + # *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + # *all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + # *nones(None, "", output=""), + # ] + # fmt: on + # TODO + # - pattern (also on Date for example to see if it override the default pattern) + # - example + # - visible + # - redact + # - regex + # - hooks + + @classmethod + def get_raw_option(cls, raw_option={}, **kwargs): + base_raw_option = cls.raw_option.copy() + base_raw_option.update(**raw_option) + base_raw_option.update(**kwargs) + return base_raw_option + + @classmethod + def _test_basic_attrs(self): + raw_option = self.get_raw_option(optional=True) + id_ = raw_option["id"] + option, value = _fill_or_prompt_one_option(raw_option, None) + + is_special_readonly_option = isinstance(option, DisplayTextOption) + + assert isinstance(option, OPTIONS[raw_option["type"]]) + assert option.type == raw_option["type"] + assert option.name == id_ + assert option.ask == {"en": id_} + assert option.readonly is (True if is_special_readonly_option else False) + assert option.visible is None + # assert option.bind is None + + if is_special_readonly_option: + assert value is None + + return (raw_option, option, value) + + @pytest.mark.usefixtures("patch_no_tty") + def test_basic_attrs(self): + """ + Test basic options factories and BaseOption default attributes values. + """ + # Intermediate method since pytest doesn't like tests that returns something. 
+ # This allow a test class to call `_test_basic_attrs` then do additional checks + self._test_basic_attrs() + + def test_options_prompted_with_ask_help(self, prefill_data=None): + """ + Test that assert that moulinette prompt is called with: + - `message` with translated string and possible choices list + - help` with translated string + - `prefill` is the expected string value from a custom default + - `is_password` is true for `password`s only + - `is_multiline` is true for `text`s only + - `autocomplete` is option choices + + Ran only once with `cls.prefill` data + """ + if prefill_data is None: + prefill_data = self.prefill + + base_raw_option = prefill_data["raw_option"] + prefill = prefill_data["prefill"] + + with patch_prompt("") as prompt: + raw_option = self.get_raw_option( + raw_option=base_raw_option, + ask={"en": "Can i haz question?"}, + help={"en": "Here's help!"}, + ) + option, value = _fill_or_prompt_one_option(raw_option, None) + + expected_message = option.ask["en"] + + if option.choices: + choices = ( + option.choices + if isinstance(option.choices, list) + else option.choices.keys() + ) + expected_message += f" [{' | '.join(choices)}]" + if option.type == "boolean": + expected_message += " [yes | no]" + + prompt.assert_called_with( + message=expected_message, + is_password=option.type == "password", + confirm=False, # FIXME no confirm? + prefill=prefill, + is_multiline=option.type == "text", + autocomplete=option.choices or [], + help=option.help["en"], + ) + + def test_scenarios(self, intake, expected_output, raw_option, data): + with patch_interface("api"): + _test_intake_may_fail( + raw_option, + intake, + expected_output, + ) + + +# ╭───────────────────────────────────────────────────────╮ +# │ DISPLAY_TEXT │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestDisplayText(BaseTest): + raw_option = {"type": "display_text", "id": "display_text_id"} + prefill = { + "raw_option": {}, + "prefill": " custom default", + } + # fmt: off + scenarios = [ + (None, None, {"ask": "Some text\na new line"}), + (None, None, {"ask": {"en": "Some text\na new line", "fr": "Un peu de texte\nune nouvelle ligne"}}), + ] + # fmt: on + + def test_options_prompted_with_ask_help(self, prefill_data=None): + pytest.skip(reason="no prompt for display types") + + def test_scenarios(self, intake, expected_output, raw_option, data): + _id = raw_option.pop("id") + answers = {_id: intake} if intake is not None else {} + options = None + with patch_interface("cli"): + if inspect.isclass(expected_output) and issubclass( + expected_output, Exception + ): + with pytest.raises(expected_output): + ask_questions_and_parse_answers({_id: raw_option}, answers) + else: + with patch.object(sys, "stdout", new_callable=StringIO) as stdout: + options = ask_questions_and_parse_answers( + {_id: raw_option}, answers + ) + assert stdout.getvalue() == f"{options[0].ask['en']}\n" + + +# ╭───────────────────────────────────────────────────────╮ +# │ MARKDOWN │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestMarkdown(TestDisplayText): + raw_option = {"type": "markdown", "id": "markdown_id"} + # in cli this option is exactly the same as "display_text", no markdown support for now + + +# ╭───────────────────────────────────────────────────────╮ +# │ ALERT │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestAlert(TestDisplayText): + raw_option = {"type": "alert", "id": "alert_id"} + prefill = { + "raw_option": {"ask": " Custom info message"}, + 
"prefill": " custom default", + } + # fmt: off + scenarios = [ + (None, None, {"ask": "Some text\na new line"}), + (None, None, {"ask": {"en": "Some text\na new line", "fr": "Un peu de texte\nune nouvelle ligne"}}), + *[(None, None, {"ask": "question", "style": style}) for style in ("success", "info", "warning", "danger")], + *xpass(scenarios=[ + (None, None, {"ask": "question", "style": "nimp"}), + ], reason="Should fail, wrong style"), + ] + # fmt: on + + def test_scenarios(self, intake, expected_output, raw_option, data): + style = raw_option.get("style", "info") + colors = {"danger": "31", "warning": "33", "info": "36", "success": "32"} + answers = {"alert_id": intake} if intake is not None else {} + + with patch_interface("cli"): + if inspect.isclass(expected_output) and issubclass( + expected_output, Exception + ): + with pytest.raises(expected_output): + ask_questions_and_parse_answers( + {"display_text_id": raw_option}, answers + ) + else: + with patch.object(sys, "stdout", new_callable=StringIO) as stdout: + options = ask_questions_and_parse_answers( + {"display_text_id": raw_option}, answers + ) + ask = options[0].ask["en"] + if style in colors: + color = colors[style] + title = style.title() + (":" if style != "success" else "!") + assert ( + stdout.getvalue() + == f"\x1b[{color}m\x1b[1m{title}\x1b[m {ask}\n" + ) + else: + # FIXME should fail + stdout.getvalue() == f"{ask}\n" + + +# ╭───────────────────────────────────────────────────────╮ +# │ BUTTON │ +# ╰───────────────────────────────────────────────────────╯ + + +# TODO + + +# ╭───────────────────────────────────────────────────────╮ +# │ STRING │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestString(BaseTest): + raw_option = {"type": "string", "id": "string_id"} + prefill = { + "raw_option": {"default": " custom default"}, + "prefill": " custom default", + } + # fmt: off + scenarios = [ + *nones(None, "", output=""), + # basic typed values + *unchanged(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), # FIXME should output as str? + *unchanged("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", raw_option={"optional": True}), + *xpass(scenarios=[ + ([], []), + ], reason="Should fail"), + # test strip + ("value", "value"), + ("value\n", "value"), + (" \n value\n", "value"), + (" \\n value\\n", "\\n value\\n"), + (" \tvalue\t", "value"), + (r" ##value \n \tvalue\n ", r"##value \n \tvalue\n"), + *xpass(scenarios=[ + ("value\nvalue", "value\nvalue"), + (" ##value \n \tvalue\n ", "##value \n \tvalue"), + ], reason=r"should fail or without `\n`?"), + # readonly + *xfail(scenarios=[ + ("overwrite", "expected value", {"readonly": True, "default": "expected value"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ TEXT │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestText(BaseTest): + raw_option = {"type": "text", "id": "text_id"} + prefill = { + "raw_option": {"default": "some value\nanother line "}, + "prefill": "some value\nanother line ", + } + # fmt: off + scenarios = [ + *nones(None, "", output=""), + # basic typed values + *unchanged(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), # FIXME should fail or output as str? 
+ *unchanged("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", raw_option={"optional": True}), + *xpass(scenarios=[ + ([], []) + ], reason="Should fail"), + ("value", "value"), + ("value\n value", "value\n value"), + # test no strip + *xpass(scenarios=[ + ("value\n", "value"), + (" \n value\n", "value"), + (" \\n value\\n", "\\n value\\n"), + (" \tvalue\t", "value"), + (" ##value \n \tvalue\n ", "##value \n \tvalue"), + (r" ##value \n \tvalue\n ", r"##value \n \tvalue\n"), + ], reason="Should not be stripped"), + # readonly + *xfail(scenarios=[ + ("overwrite", "expected value", {"readonly": True, "default": "expected value"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ PASSWORD │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestPassword(BaseTest): + raw_option = {"type": "password", "id": "password_id"} + prefill = { + "raw_option": {"default": None, "optional": True}, + "prefill": "", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, -1, 1337, 13.37, raw_option={"optional": True}, error=TypeError), # FIXME those fails with TypeError + *all_fails([], ["one"], {}, raw_option={"optional": True}, error=AttributeError), # FIXME those fails with AttributeError + *all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + *nones(None, "", output=""), + ("s3cr3t!!", FAIL, {"default": "SUPAs3cr3t!!"}), # default is forbidden + *xpass(scenarios=[ + ("s3cr3t!!", "s3cr3t!!", {"example": "SUPAs3cr3t!!"}), # example is forbidden + ], reason="Should fail; example is forbidden"), + *xpass(scenarios=[ + (" value \n moarc0mpl1cat3d\n ", "value \n moarc0mpl1cat3d"), + (" some_ value", "some_ value"), + ], reason="Should output exactly the same"), + ("s3cr3t!!", "s3cr3t!!"), + ("secret", FAIL), + *[("supersecret" + char, FAIL) for char in PasswordOption.forbidden_chars], # FIXME maybe add ` \n` to the list? 
+ # readonly + *xpass(scenarios=[ + ("s3cr3t!!", "s3cr3t!!", {"readonly": True}), + ], reason="Should fail since readonly is forbidden"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ COLOR │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestColor(BaseTest): + raw_option = {"type": "color", "id": "color_id"} + prefill = { + "raw_option": {"default": "#ff0000"}, + "prefill": "#ff0000", + # "intake": "#ff00ff", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + *all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + *nones(None, "", output=""), + # custom valid + ("#000000", "#000000"), + ("#000", "#000"), + ("#fe100", "#fe100"), + (" #fe100 ", "#fe100"), + ("#ABCDEF", "#ABCDEF"), + # custom fail + *xpass(scenarios=[ + ("#feaf", "#feaf"), + ], reason="Should fail; not a legal color value"), + ("000000", FAIL), + ("#12", FAIL), + ("#gggggg", FAIL), + ("#01010101af", FAIL), + *xfail(scenarios=[ + ("red", "#ff0000"), + ("yellow", "#ffff00"), + ], reason="Should work with pydantic"), + # readonly + *xfail(scenarios=[ + ("#ffff00", "#fe100", {"readonly": True, "default": "#fe100"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ NUMBER | RANGE │ +# ╰───────────────────────────────────────────────────────╯ +# Testing only number since "range" is only for webadmin (slider instead of classic intake). + + +class TestNumber(BaseTest): + raw_option = {"type": "number", "id": "number_id"} + prefill = { + "raw_option": {"default": 10}, + "prefill": "10", + } + # fmt: off + scenarios = [ + *all_fails([], ["one"], {}), + *all_fails("none", "_none", "False", "True", "[]", ",", "['one']", "one,two", r"{}", "value"), + + *nones(None, "", output=None), + *unchanged(0, 1, -1, 1337), + *xpass(scenarios=[(False, False)], reason="should fail or output as `0`"), + *xpass(scenarios=[(True, True)], reason="should fail or output as `1`"), + *all_as("0", 0, output=0), + *all_as("1", 1, output=1), + *all_as("1337", 1337, output=1337), + *xfail(scenarios=[ + ("-1", -1) + ], reason="should output as `-1` instead of failing"), + *all_fails(13.37, "13.37"), + + *unchanged(10, 5000, 10000, raw_option={"min": 10, "max": 10000}), + *all_fails(9, 10001, raw_option={"min": 10, "max": 10000}), + + *all_as(None, "", output=0, raw_option={"default": 0}), + *all_as(None, "", output=0, raw_option={"default": 0, "optional": True}), + (-10, -10, {"default": 10}), + (-10, -10, {"default": 10, "optional": True}), + # readonly + *xfail(scenarios=[ + (1337, 10000, {"readonly": True, "default": 10000}), + ], reason="Should not be overwritten"), + ] + # fmt: on + # FIXME should `step` be some kind of "multiple of"? 
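
# A minimal sketch (not part of the patch) of what one hydrated "number" scenario
# boils down to, using the same `ask_questions_and_parse_answers` call that
# `_fill_or_prompt_one_option` wraps; the option id and intake below are illustrative,
# and the real tests run this under `patch_interface("api")`:
#
#     options = {"number_id": {"type": "number", "min": 10, "max": 10000}}
#     option = ask_questions_and_parse_answers(options, {"number_id": "50"})[0]
#     assert option.value == 50  # string intake coerced to int, then checked against min/max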
+ + +# ╭───────────────────────────────────────────────────────╮ +# │ BOOLEAN │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestBoolean(BaseTest): + raw_option = {"type": "boolean", "id": "boolean_id"} + prefill = { + "raw_option": {"default": True}, + "prefill": "yes", + } + # fmt: off + truthy_values = (True, 1, "1", "True", "true", "Yes", "yes", "y", "on") + falsy_values = (False, 0, "0", "False", "false", "No", "no", "n", "off") + scenarios = [ + *all_as(None, "", output=0), + *all_fails("none", "None"), # FIXME should output as `0` (default) like other none values when required? + *all_as(None, "", output=0, raw_option={"optional": True}), # FIXME should output as `None`? + *all_as("none", "None", output=None, raw_option={"optional": True}), + # FIXME even if default is explicity `None|""`, it ends up with class_default `0` + *all_as(None, "", output=0, raw_option={"default": None}), # FIXME this should fail, default is `None` + *all_as(None, "", output=0, raw_option={"optional": True, "default": None}), # FIXME even if default is explicity None, it ends up with class_default + *all_as(None, "", output=0, raw_option={"default": ""}), # FIXME this should fail, default is `""` + *all_as(None, "", output=0, raw_option={"optional": True, "default": ""}), # FIXME even if default is explicity None, it ends up with class_default + # With "none" behavior is ok + *all_fails(None, "", raw_option={"default": "none"}), + *all_as(None, "", output=None, raw_option={"optional": True, "default": "none"}), + # Unhandled types should fail + *all_fails(1337, "1337", "string", [], "[]", ",", "one,two"), + *all_fails(1337, "1337", "string", [], "[]", ",", "one,two", {"optional": True}), + # Required + *all_as(*truthy_values, output=1), + *all_as(*falsy_values, output=0), + # Optional + *all_as(*truthy_values, output=1, raw_option={"optional": True}), + *all_as(*falsy_values, output=0, raw_option={"optional": True}), + # test values as default, as required option without intake + *[(None, 1, {"default": true for true in truthy_values})], + *[(None, 0, {"default": false for false in falsy_values})], + # custom boolean output + ("", "disallow", {"yes": "allow", "no": "disallow"}), # required -> default to False -> `"disallow"` + ("n", "disallow", {"yes": "allow", "no": "disallow"}), + ("y", "allow", {"yes": "allow", "no": "disallow"}), + ("", False, {"yes": True, "no": False}), # required -> default to False -> `False` + ("n", False, {"yes": True, "no": False}), + ("y", True, {"yes": True, "no": False}), + ("", -1, {"yes": 1, "no": -1}), # required -> default to False -> `-1` + ("n", -1, {"yes": 1, "no": -1}), + ("y", 1, {"yes": 1, "no": -1}), + { + "raw_options": [ + {"yes": "no", "no": "yes", "optional": True}, + {"yes": False, "no": True, "optional": True}, + {"yes": "0", "no": "1", "optional": True}, + ], + # "no" for "yes" and "yes" for "no" should fail + "scenarios": all_fails("", "y", "n", error=AssertionError), + }, + # readonly + *xfail(scenarios=[ + (1, 0, {"readonly": True, "default": 0}), + ], reason="Should not be overwritten"), + ] + + +# ╭───────────────────────────────────────────────────────╮ +# │ DATE │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestDate(BaseTest): + raw_option = {"type": "date", "id": "date_id"} + prefill = { + "raw_option": {"default": "2024-12-29"}, + "prefill": "2024-12-29", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + 
*all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + *nones(None, "", output=""), + # custom valid + ("2070-12-31", "2070-12-31"), + ("2024-02-29", "2024-02-29"), + *xfail(scenarios=[ + ("2025-06-15T13:45:30", "2025-06-15"), + ("2025-06-15 13:45:30", "2025-06-15") + ], reason="iso date repr should be valid and extra data striped"), + *xfail(scenarios=[ + (1749938400, "2025-06-15"), + (1749938400.0, "2025-06-15"), + ("1749938400", "2025-06-15"), + ("1749938400.0", "2025-06-15"), + ], reason="timestamp could be an accepted value"), + # custom invalid + ("29-12-2070", FAIL), + ("12-01-10", FAIL), + ("2022-02-29", FAIL), + # readonly + *xfail(scenarios=[ + ("2070-12-31", "2024-02-29", {"readonly": True, "default": "2024-02-29"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ TIME │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestTime(BaseTest): + raw_option = {"type": "time", "id": "time_id"} + prefill = { + "raw_option": {"default": "12:26"}, + "prefill": "12:26", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + *all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + *nones(None, "", output=""), + # custom valid + *unchanged("00:00", "08:00", "12:19", "20:59", "23:59"), + ("3:00", "3:00"), # FIXME should fail or output as `"03:00"`? + *xfail(scenarios=[ + ("22:35:05", "22:35"), + ("22:35:03.514", "22:35"), + ], reason="time as iso format could be valid"), + # custom invalid + ("24:00", FAIL), + ("23:1", FAIL), + ("23:005", FAIL), + # readonly + *xfail(scenarios=[ + ("00:00", "08:00", {"readonly": True, "default": "08:00"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ EMAIL │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestEmail(BaseTest): + raw_option = {"type": "email", "id": "email_id"} + prefill = { + "raw_option": {"default": "Abc@example.tld"}, + "prefill": "Abc@example.tld", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + *all_fails("none", "_none", "False", "True", "0", "1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + + *nones(None, "", output=""), + ("\n Abc@example.tld ", "Abc@example.tld"), + # readonly + *xfail(scenarios=[ + ("Abc@example.tld", "admin@ynh.local", {"readonly": True, "default": "admin@ynh.local"}), + ], reason="Should not be overwritten"), + + # Next examples are from https://github.com/JoshData/python-email-validator/blob/main/tests/test_syntax.py + # valid email values + ("Abc@example.tld", "Abc@example.tld"), + ("Abc.123@test-example.com", "Abc.123@test-example.com"), + ("user+mailbox/department=shipping@example.tld", "user+mailbox/department=shipping@example.tld"), + ("伊昭傑@郵件.商務", "伊昭傑@郵件.商務"), + ("राम@मोहन.ईन्फो", "राम@मोहन.ईन्फो"), + ("юзер@екзампл.ком", "юзер@екзампл.ком"), + ("θσερ@εχαμπλε.ψομ", "θσερ@εχαμπλε.ψομ"), + ("葉士豪@臺網中心.tw", "葉士豪@臺網中心.tw"), + ("jeff@臺網中心.tw", "jeff@臺網中心.tw"), + ("葉士豪@臺網中心.台灣", "葉士豪@臺網中心.台灣"), + ("jeff葉@臺網中心.tw", "jeff葉@臺網中心.tw"), + ("ñoñó@example.tld", 
"ñoñó@example.tld"), + ("甲斐黒川日本@example.tld", "甲斐黒川日本@example.tld"), + ("чебурашкаящик-с-апельсинами.рф@example.tld", "чебурашкаящик-с-апельсинами.рф@example.tld"), + ("उदाहरण.परीक्ष@domain.with.idn.tld", "उदाहरण.परीक्ष@domain.with.idn.tld"), + ("ιωάννης@εεττ.gr", "ιωάννης@εεττ.gr"), + # invalid email (Hiding because our current regex is very permissive) + # ("my@localhost", FAIL), + # ("my@.leadingdot.com", FAIL), + # ("my@.leadingfwdot.com", FAIL), + # ("my@twodots..com", FAIL), + # ("my@twofwdots...com", FAIL), + # ("my@trailingdot.com.", FAIL), + # ("my@trailingfwdot.com.", FAIL), + # ("me@-leadingdash", FAIL), + # ("me@-leadingdashfw", FAIL), + # ("me@trailingdash-", FAIL), + # ("me@trailingdashfw-", FAIL), + # ("my@baddash.-.com", FAIL), + # ("my@baddash.-a.com", FAIL), + # ("my@baddash.b-.com", FAIL), + # ("my@baddashfw.-.com", FAIL), + # ("my@baddashfw.-a.com", FAIL), + # ("my@baddashfw.b-.com", FAIL), + # ("my@example.com\n", FAIL), + # ("my@example\n.com", FAIL), + # ("me@x!", FAIL), + # ("me@x ", FAIL), + # (".leadingdot@domain.com", FAIL), + # ("twodots..here@domain.com", FAIL), + # ("trailingdot.@domain.email", FAIL), + # ("me@⒈wouldbeinvalid.com", FAIL), + ("@example.com", FAIL), + # ("\nmy@example.com", FAIL), + ("m\ny@example.com", FAIL), + ("my\n@example.com", FAIL), + # ("11111111112222222222333333333344444444445555555555666666666677777@example.com", FAIL), + # ("111111111122222222223333333333444444444455555555556666666666777777@example.com", FAIL), + # ("me@1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.111111111122222222223333333333444444444455555555556.com", FAIL), + # ("me@1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555566.com", FAIL), + # ("me@中1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555566.com", FAIL), + # ("my.long.address@1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.11111111112222222222333333333344444.info", FAIL), + # ("my.long.address@λ111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.11111111112222222222333333.info", FAIL), + # ("my.long.address@λ111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444.info", FAIL), + # ("my.λong.address@1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.111111111122222222223333333333444.info", FAIL), + # 
("my.λong.address@1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444444444555555555.6666666666777777777788888888889999999999000000000.1111111111222222222233333333334444.info", FAIL), + # ("me@bad-tld-1", FAIL), + # ("me@bad.tld-2", FAIL), + # ("me@xn--0.tld", FAIL), + # ("me@yy--0.tld", FAIL), + # ("me@yy--0.tld", FAIL), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ PATH │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestWebPath(BaseTest): + raw_option = {"type": "path", "id": "path_id"} + prefill = { + "raw_option": {"default": "some_path"}, + "prefill": "some_path", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + + *nones(None, "", output=""), + # custom valid + ("/", "/"), + ("/one/two", "/one/two"), + *[ + (v, "/" + v) + for v in ("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value") + ], + ("value\n", "/value"), + ("//value", "/value"), + ("///value///", "/value"), + *xpass(scenarios=[ + ("value\nvalue", "/value\nvalue"), + ("value value", "/value value"), + ("value//value", "/value//value"), + ], reason="Should fail"), + *xpass(scenarios=[ + ("./here", "/./here"), + ("../here", "/../here"), + ("/somewhere/../here", "/somewhere/../here"), + ], reason="Should fail or flattened"), + + *xpass(scenarios=[ + ("/one?withquery=ah", "/one?withquery=ah"), + ], reason="Should fail or query string removed"), + *xpass(scenarios=[ + ("https://example.com/folder", "/https://example.com/folder") + ], reason="Should fail or scheme+domain removed"), + # readonly + *xfail(scenarios=[ + ("/overwrite", "/value", {"readonly": True, "default": "/value"}), + ], reason="Should not be overwritten"), + # FIXME should path have forbidden_chars? 
+ ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ URL │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestUrl(BaseTest): + raw_option = {"type": "url", "id": "url_id"} + prefill = { + "raw_option": {"default": "https://domain.tld"}, + "prefill": "https://domain.tld", + } + # fmt: off + scenarios = [ + *all_fails(False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, raw_option={"optional": True}), + *all_fails("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", "value\n", raw_option={"optional": True}), + + *nones(None, "", output=""), + ("http://some.org/folder/file.txt", "http://some.org/folder/file.txt"), + # readonly + *xfail(scenarios=[ + ("https://overwrite.org", "https://example.org", {"readonly": True, "default": "https://example.org"}), + ], reason="Should not be overwritten"), + # rest is taken from https://github.com/pydantic/pydantic/blob/main/tests/test_networks.py + # valid + *unchanged( + # Those are valid but not sure how they will output with pydantic + 'http://example.org', + 'http://test', + 'http://localhost', + 'https://example.org/whatever/next/', + 'https://example.org', + 'http://localhost', + 'http://localhost/', + 'http://localhost:8000', + 'http://localhost:8000/', + 'https://foo_bar.example.com/', + 'http://example.co.jp', + 'http://www.example.com/a%C2%B1b', + 'http://www.example.com/~username/', + 'http://info.example.com?fred', + 'http://info.example.com/?fred', + 'http://xn--mgbh0fb.xn--kgbechtv/', + 'http://example.com/blue/red%3Fand+green', + 'http://www.example.com/?array%5Bkey%5D=value', + 'http://xn--rsum-bpad.example.org/', + 'http://123.45.67.8/', + 'http://123.45.67.8:8329/', + 'http://[2001:db8::ff00:42]:8329', + 'http://[2001::1]:8329', + 'http://[2001:db8::1]/', + 'http://www.example.com:8000/foo', + 'http://www.cwi.nl:80/%7Eguido/Python.html', + 'https://www.python.org/путь', + 'http://андрей@example.com', + 'https://exam_ple.com/', + 'http://twitter.com/@handle/', + 'http://11.11.11.11.example.com/action', + 'http://abc.11.11.11.11.example.com/action', + 'http://example#', + 'http://example/#', + 'http://example/#fragment', + 'http://example/?#', + 'http://example.org/path#', + 'http://example.org/path#fragment', + 'http://example.org/path?query#', + 'http://example.org/path?query#fragment', + ), + # Pydantic default parsing add a final `/` + ('https://foo_bar.example.com/', 'https://foo_bar.example.com/'), + ('https://exam_ple.com/', 'https://exam_ple.com/'), + *xfail(scenarios=[ + (' https://www.example.com \n', 'https://www.example.com/'), + ('HTTP://EXAMPLE.ORG', 'http://example.org/'), + ('https://example.org', 'https://example.org/'), + ('https://example.org?a=1&b=2', 'https://example.org/?a=1&b=2'), + ('https://example.org#a=3;b=3', 'https://example.org/#a=3;b=3'), + ('https://example.xn--p1ai', 'https://example.xn--p1ai/'), + ('https://example.xn--vermgensberatung-pwb', 'https://example.xn--vermgensberatung-pwb/'), + ('https://example.xn--zfr164b', 'https://example.xn--zfr164b/'), + ], reason="pydantic default behavior would append a final `/`"), + + # invalid + *all_fails( + 'ftp://example.com/', + "$https://example.org", + "../icons/logo.gif", + "abc", + "..", + "/", + "+http://example.com/", + "ht*tp://example.com/", + ), + *xpass(scenarios=[ + ("http:///", "http:///"), + ("http://??", "http://??"), + ("https://example.org more", "https://example.org more"), + ("http://2001:db8::ff00:42:8329", 
"http://2001:db8::ff00:42:8329"), + ("http://[192.168.1.1]:8329", "http://[192.168.1.1]:8329"), + ("http://example.com:99999", "http://example.com:99999"), + ], reason="Should fail"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ FILE │ +# ╰───────────────────────────────────────────────────────╯ + + +@pytest.fixture +def file_clean(): + FileOption.clean_upload_dirs() + yield + FileOption.clean_upload_dirs() + + +@contextmanager +def patch_file_cli(intake): + upload_dir = tempfile.mkdtemp(prefix="ynh_test_option_file") + _, filename = tempfile.mkstemp(dir=upload_dir) + with open(filename, "w") as f: + f.write(intake) + + yield filename + os.system(f"rm -f {filename}") + + +@contextmanager +def patch_file_api(intake): + from base64 import b64encode + + with patch_interface("api"): + yield b64encode(intake.encode()) + + +def _test_file_intake_may_fail(raw_option, intake, expected_output): + if inspect.isclass(expected_output) and issubclass(expected_output, Exception): + with pytest.raises(expected_output): + _fill_or_prompt_one_option(raw_option, intake) + + option, value = _fill_or_prompt_one_option(raw_option, intake) + + # The file is supposed to be copied somewhere else + assert value != intake + assert value.startswith("/tmp/ynh_filequestion_") + assert os.path.exists(value) + with open(value) as f: + assert f.read() == expected_output + + FileOption.clean_upload_dirs() + + assert not os.path.exists(value) + + +file_content1 = "helloworld" +file_content2 = """ +{ + "testy": true, + "test": ["one"] +} +""" + + +class TestFile(BaseTest): + raw_option = {"type": "file", "id": "file_id"} + # Prefill data is generated in `cls.test_options_prompted_with_ask_help` + # fmt: off + scenarios = [ + *nones(None, "", output=""), + *unchanged(file_content1, file_content2), + # other type checks are done in `test_wrong_intake` + ] + # fmt: on + # TODO test readonly + # TODO test accept + + @pytest.mark.usefixtures("patch_no_tty") + def test_basic_attrs(self): + raw_option, option, value = self._test_basic_attrs() + + accept = raw_option.get("accept", "") # accept default + assert option.accept == accept + + def test_options_prompted_with_ask_help(self): + with patch_file_cli(file_content1) as default_filename: + super().test_options_prompted_with_ask_help( + prefill_data={ + "raw_option": { + "default": default_filename, + }, + "prefill": default_filename, + } + ) + + @pytest.mark.usefixtures("file_clean") + def test_scenarios(self, intake, expected_output, raw_option, data): + if intake in (None, ""): + with patch_prompt(intake): + _test_intake_may_fail(raw_option, None, expected_output) + with patch_isatty(False): + _test_intake_may_fail(raw_option, intake, expected_output) + else: + with patch_file_cli(intake) as filename: + with patch_prompt(filename): + _test_file_intake_may_fail(raw_option, None, expected_output) + with patch_file_api(intake) as b64content: + with patch_isatty(False): + _test_file_intake_may_fail(raw_option, b64content, expected_output) + + @pytest.mark.parametrize( + "path", + [ + "/tmp/inexistant_file.txt", + "/tmp", + "/tmp/", + ], + ) + def test_wrong_cli_filename(self, path): + with patch_prompt(path): + with pytest.raises(YunohostValidationError): + _fill_or_prompt_one_option(self.raw_option, None) + + @pytest.mark.parametrize( + "intake", + [ + # fmt: off + False, True, 0, 1, -1, 1337, 13.37, [], ["one"], {}, + "none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", ",", "['one']", "one,two", r"{}", "value", 
"value\n" + # fmt: on + ], + ) + def test_wrong_intake(self, intake): + with pytest.raises(YunohostValidationError): + with patch_prompt(intake): + _fill_or_prompt_one_option(self.raw_option, None) + with patch_isatty(False): + _fill_or_prompt_one_option(self.raw_option, intake) + + +# ╭───────────────────────────────────────────────────────╮ +# │ SELECT │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestSelect(BaseTest): + raw_option = {"type": "select", "id": "select_id"} + prefill = { + "raw_option": {"default": "one", "choices": ["one", "two"]}, + "prefill": "one", + } + # fmt: off + scenarios = [ + { + # ["one", "two"] + "raw_options": [ + {"choices": ["one", "two"]}, + {"choices": {"one": "verbose one", "two": "verbose two"}}, + ], + "scenarios": [ + *nones(None, "", output=""), + *unchanged("one", "two"), + ("three", FAIL), + ] + }, + # custom bash style list as choices (only strings for now) + ("one", "one", {"choices": "one,two"}), + { + # [-1, 0, 1] + "raw_options": [ + {"choices": [-1, 0, 1, 10]}, + {"choices": {-1: "verbose -one", 0: "verbose zero", 1: "verbose one", 10: "verbose ten"}}, + ], + "scenarios": [ + *nones(None, "", output=""), + *unchanged(-1, 0, 1, 10), + *xfail(scenarios=[ + ("-1", -1), + ("0", 0), + ("1", 1), + ("10", 10), + ], reason="str -> int not handled"), + *all_fails("100", 100), + ] + }, + # [True, False, None] + *unchanged(True, False, raw_option={"choices": [True, False, None]}), # FIXME we should probably forbid None in choices + (None, FAIL, {"choices": [True, False, None]}), + { + # mixed types + "raw_options": [{"choices": ["one", 2, True]}], + "scenarios": [ + *xpass(scenarios=[ + ("one", "one"), + (2, 2), + (True, True), + ], reason="mixed choices, should fail"), + *all_fails("2", "True", "y"), + ] + }, + { + "raw_options": [{"choices": ""}, {"choices": []}], + "scenarios": [ + # FIXME those should fail at option level (wrong default, dev error) + *all_fails(None, ""), + *xpass(scenarios=[ + ("", "", {"optional": True}), + (None, "", {"optional": True}), + ], reason="empty choices, should fail at option instantiation"), + ] + }, + # readonly + *xfail(scenarios=[ + ("one", "two", {"readonly": True, "choices": ["one", "two"], "default": "two"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ TAGS │ +# ╰───────────────────────────────────────────────────────╯ + + +class TestTags(BaseTest): + raw_option = {"type": "tags", "id": "tags_id"} + prefill = { + "raw_option": {"default": ["one", "two"]}, + "prefill": "one,two", + } + # fmt: off + scenarios = [ + *nones(None, [], "", output=""), + # FIXME `","` could be considered a none value which kinda already is since it fail when required + (",", FAIL), + *xpass(scenarios=[ + (",", ",", {"optional": True}) + ], reason="Should output as `''`? 
ie: None"), + { + "raw_options": [ + {}, + {"choices": ["one", "two"]} + ], + "scenarios": [ + *unchanged("one", "one,two"), + (["one"], "one"), + (["one", "two"], "one,two"), + ] + }, + ("three", FAIL, {"choices": ["one", "two"]}), + *unchanged("none", "_none", "False", "True", "0", "1", "-1", "1337", "13.37", "[]", "['one']", "one,two", r"{}", "value"), + (" value\n", "value"), + ([False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}], "False,True,-1,0,1,1337,13.37,[],['one'],{}"), + *(([t], str(t)) for t in (False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {})), + # basic types (not in a list) should fail + *all_fails(True, False, -1, 0, 1, 1337, 13.37, {}), + # Mixed choices should fail + ([False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}], FAIL, {"choices": [False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}]}), + ("False,True,-1,0,1,1337,13.37,[],['one'],{}", FAIL, {"choices": [False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}]}), + *all_fails(*([t] for t in [False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}]), raw_option={"choices": [False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}]}), + *all_fails(*([str(t)] for t in [False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}]), raw_option={"choices": [False, True, -1, 0, 1, 1337, 13.37, [], ["one"], {}]}), + # readonly + *xfail(scenarios=[ + ("one", "one,two", {"readonly": True, "choices": ["one", "two"], "default": "one,two"}), + ], reason="Should not be overwritten"), + ] + # fmt: on + + +# ╭───────────────────────────────────────────────────────╮ +# │ DOMAIN │ +# ╰───────────────────────────────────────────────────────╯ + +main_domain = "ynh.local" +domains1 = ["ynh.local"] +domains2 = ["another.org", "ynh.local", "yet.another.org"] + + +@contextmanager +def patch_domains(*, domains, main_domain): + """ + Data mocking for DomainOption: + - yunohost.domain.domain_list + """ + with patch.object( + domain, + "domain_list", + return_value={"domains": domains, "main": main_domain}, + ), patch.object(domain, "_get_maindomain", return_value=main_domain): + yield + + +class TestDomain(BaseTest): + raw_option = {"type": "domain", "id": "domain_id"} + prefill = { + "raw_option": { + "default": None, + }, + "prefill": main_domain, + } + # fmt: off + scenarios = [ + # Probably not needed to test common types since those are not available as choices + # Also no scenarios with no domains since it should not be possible + { + "data": [{"main_domain": domains1[0], "domains": domains1}], + "scenarios": [ + *nones(None, "", output=domains1[0], fail_if_required=False), + (domains1[0], domains1[0], {}), + ("doesnt_exist.pouet", FAIL, {}), + ("fake.com", FAIL, {"choices": ["fake.com"]}), + # readonly + *xpass(scenarios=[ + (domains1[0], domains1[0], {"readonly": True}), + ], reason="Should fail since readonly is forbidden"), + ] + }, + { + "data": [{"main_domain": domains2[1], "domains": domains2}], + "scenarios": [ + *nones(None, "", output=domains2[1], fail_if_required=False), + (domains2[1], domains2[1], {}), + (domains2[0], domains2[0], {}), + ("doesnt_exist.pouet", FAIL, {}), + ("fake.com", FAIL, {"choices": ["fake.com"]}), + ] + }, + + ] + # fmt: on + + def test_options_prompted_with_ask_help(self, prefill_data=None): + with patch_domains(domains=[main_domain], main_domain=main_domain): + super().test_options_prompted_with_ask_help(prefill_data=prefill_data) + + def test_scenarios(self, intake, expected_output, raw_option, data): + with patch_domains(**data): + super().test_scenarios(intake, expected_output, raw_option, data) + + +# 
╭───────────────────────────────────────────────────────╮ +# │ APP │ +# ╰───────────────────────────────────────────────────────╯ + +installed_webapp = { + "is_webapp": True, + "is_default": True, + "label": "My webapp", + "id": "my_webapp", + "domain_path": "/ynh-dev", +} +installed_non_webapp = { + "is_webapp": False, + "is_default": False, + "label": "My non webapp", + "id": "my_non_webapp", +} + + +@contextmanager +def patch_apps(*, apps): + """ + Data mocking for AppOption: + - yunohost.app.app_list + """ + with patch.object(app, "app_list", return_value={"apps": apps}): + yield + + +class TestApp(BaseTest): + raw_option = {"type": "app", "id": "app_id"} + # fmt: off + scenarios = [ + # Probably not needed to test common types since those are not available as choices + { + "data": [ + {"apps": []}, + {"apps": [installed_webapp]}, + {"apps": [installed_webapp, installed_non_webapp]}, + ], + "scenarios": [ + # FIXME there are currently 3 different nones (`None`, `""` and `_none`), choose one? + *nones(None, output=None), # FIXME Should return chosen none? + *nones("", output=""), # FIXME Should return chosen none? + *xpass(scenarios=[ + ("_none", "_none"), + ("_none", "_none", {"default": "_none"}), + ], reason="should fail; is required"), + *xpass(scenarios=[ + ("_none", "_none", {"optional": True}), + ("_none", "_none", {"optional": True, "default": "_none"}) + ], reason="Should output chosen none value"), + ("fake_app", FAIL), + ("fake_app", FAIL, {"choices": ["fake_app"]}), + ] + }, + { + "data": [ + {"apps": [installed_webapp]}, + {"apps": [installed_webapp, installed_non_webapp]}, + ], + "scenarios": [ + (installed_webapp["id"], installed_webapp["id"]), + (installed_webapp["id"], installed_webapp["id"], {"filter": "is_webapp"}), + (installed_webapp["id"], FAIL, {"filter": "is_webapp == false"}), + (installed_webapp["id"], FAIL, {"filter": "id != 'my_webapp'"}), + (None, None, {"filter": "id == 'fake_app'", "optional": True}), + ] + }, + { + "data": [{"apps": [installed_webapp, installed_non_webapp]}], + "scenarios": [ + (installed_non_webapp["id"], installed_non_webapp["id"]), + (installed_non_webapp["id"], FAIL, {"filter": "is_webapp"}), + # readonly + *xpass(scenarios=[ + (installed_non_webapp["id"], installed_non_webapp["id"], {"readonly": True}), + ], reason="Should fail since readonly is forbidden"), + ] + }, + ] + # fmt: on + + @pytest.mark.usefixtures("patch_no_tty") + def test_basic_attrs(self): + with patch_apps(apps=[]): + raw_option, option, value = self._test_basic_attrs() + + assert option.choices == {"_none": "---"} + assert option.filter is None + + with patch_apps(apps=[installed_webapp, installed_non_webapp]): + raw_option, option, value = self._test_basic_attrs() + + assert option.choices == { + "_none": "---", + "my_webapp": "My webapp (/ynh-dev)", + "my_non_webapp": "My non webapp (my_non_webapp)", + } + assert option.filter is None + + def test_options_prompted_with_ask_help(self, prefill_data=None): + with patch_apps(apps=[installed_webapp, installed_non_webapp]): + super().test_options_prompted_with_ask_help( + prefill_data={ + "raw_option": {"default": installed_webapp["id"]}, + "prefill": installed_webapp["id"], + } + ) + super().test_options_prompted_with_ask_help( + prefill_data={"raw_option": {"optional": True}, "prefill": ""} + ) + + def test_scenarios(self, intake, expected_output, raw_option, data): + with patch_apps(**data): + super().test_scenarios(intake, expected_output, raw_option, data) + + +# 
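Illustrative sketch (not taken from the patch itself) of the "filter" key exercised in the TestApp scenarios above, assuming the module-level helpers and fixtures (patch_apps, installed_webapp, installed_non_webapp, ask_questions_and_parse_answers); the test name is made up. A filter expression such as "is_webapp" restricts the accepted choices to the matching installed apps.

def test_app_filter_sketch():
    raw_options = {"app_id": {"type": "app", "filter": "is_webapp"}}
    # Mock the installed apps; only the webapp matches the filter expression.
    with patch_apps(apps=[installed_webapp, installed_non_webapp]):
        out = ask_questions_and_parse_answers(raw_options, {"app_id": installed_webapp["id"]})[0]
        # The webapp id is accepted; per the scenarios above, a non-matching app would FAIL.
        assert out.value == installed_webapp["id"]

+# 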
╭───────────────────────────────────────────────────────╮ +# │ USER │ +# ╰───────────────────────────────────────────────────────╯ + +admin_username = "admin_user" +admin_user = { + "ssh_allowed": False, + "username": admin_username, + "mailbox-quota": "0", + "mail": "a@ynh.local", + "mail-aliases": [f"root@{main_domain}"], # Faking "admin" + "fullname": "john doe", + "group": [], +} +regular_username = "normal_user" +regular_user = { + "ssh_allowed": False, + "username": regular_username, + "mailbox-quota": "0", + "mail": "z@ynh.local", + "fullname": "john doe", + "group": [], +} + + +@contextmanager +def patch_users( + *, + users, + admin_username, + main_domain, +): + """ + Data mocking for UserOption: + - yunohost.user.user_list + - yunohost.user.user_info + - yunohost.domain._get_maindomain + """ + admin_info = next( + (user for user in users.values() if user["username"] == admin_username), + {"mail-aliases": []}, + ) + with patch.object(user, "user_list", return_value={"users": users}), patch.object( + user, + "user_info", + return_value=admin_info, # Faking admin user + ), patch.object(domain, "_get_maindomain", return_value=main_domain): + yield + + +class TestUser(BaseTest): + raw_option = {"type": "user", "id": "user_id"} + # fmt: off + scenarios = [ + # No tests for empty users since it should not happens + { + "data": [ + {"users": {admin_username: admin_user}, "admin_username": admin_username, "main_domain": main_domain}, + {"users": {admin_username: admin_user, regular_username: regular_user}, "admin_username": admin_username, "main_domain": main_domain}, + ], + "scenarios": [ + # FIXME User option is not really nullable, even if optional + *nones(None, "", output=admin_username, fail_if_required=False), + ("fake_user", FAIL), + ("fake_user", FAIL, {"choices": ["fake_user"]}), + ] + }, + { + "data": [ + {"users": {admin_username: admin_user, regular_username: regular_user}, "admin_username": admin_username, "main_domain": main_domain}, + ], + "scenarios": [ + *xpass(scenarios=[ + ("", regular_username, {"default": regular_username}) + ], reason="Should throw 'no default allowed'"), + # readonly + *xpass(scenarios=[ + (admin_username, admin_username, {"readonly": True}), + ], reason="Should fail since readonly is forbidden"), + ] + }, + ] + # fmt: on + + @pytest.mark.usefixtures("patch_no_tty") + def test_basic_attrs(self): + with patch_users( + users={admin_username: admin_user}, + admin_username=admin_username, + main_domain=main_domain, + ): + self._test_basic_attrs() + + def test_options_prompted_with_ask_help(self, prefill_data=None): + with patch_users( + users={admin_username: admin_user, regular_username: regular_user}, + admin_username=admin_username, + main_domain=main_domain, + ): + super().test_options_prompted_with_ask_help( + prefill_data={"raw_option": {}, "prefill": admin_username} + ) + # FIXME This should fail, not allowed to set a default + super().test_options_prompted_with_ask_help( + prefill_data={ + "raw_option": {"default": regular_username}, + "prefill": regular_username, + } + ) + + def test_scenarios(self, intake, expected_output, raw_option, data): + with patch_users(**data): + super().test_scenarios(intake, expected_output, raw_option, data) + + +# ╭───────────────────────────────────────────────────────╮ +# │ GROUP │ +# ╰───────────────────────────────────────────────────────╯ + +groups1 = ["all_users", "visitors", "admins"] +groups2 = ["all_users", "visitors", "admins", "custom_group"] + + +@contextmanager +def patch_groups(*, groups): + """ + 
Data mocking for GroupOption: + - yunohost.user.user_group_list + """ + with patch.object(user, "user_group_list", return_value={"groups": groups}): + yield + + +class TestGroup(BaseTest): + raw_option = {"type": "group", "id": "group_id"} + # fmt: off + scenarios = [ + # No tests for empty groups since it should not happens + { + "data": [ + {"groups": groups1}, + {"groups": groups2}, + ], + "scenarios": [ + # FIXME Group option is not really nullable, even if optional + *nones(None, "", output="all_users", fail_if_required=False), + ("admins", "admins"), + ("fake_group", FAIL), + ("fake_group", FAIL, {"choices": ["fake_group"]}), + ] + }, + { + "data": [ + {"groups": groups2}, + ], + "scenarios": [ + ("custom_group", "custom_group"), + *all_as("", None, output="visitors", raw_option={"default": "visitors"}), + *xpass(scenarios=[ + ("", "custom_group", {"default": "custom_group"}), + ], reason="Should throw 'default must be in (None, 'all_users', 'visitors', 'admins')"), + # readonly + *xpass(scenarios=[ + ("admins", "admins", {"readonly": True}), + ], reason="Should fail since readonly is forbidden"), + ] + }, + ] + # fmt: on + + def test_options_prompted_with_ask_help(self, prefill_data=None): + with patch_groups(groups=groups2): + super().test_options_prompted_with_ask_help( + prefill_data={"raw_option": {}, "prefill": "all_users"} + ) + super().test_options_prompted_with_ask_help( + prefill_data={ + "raw_option": {"default": "admins"}, + "prefill": "admins", + } + ) + # FIXME This should fail, not allowed to set a default which is not a default group + super().test_options_prompted_with_ask_help( + prefill_data={ + "raw_option": {"default": "custom_group"}, + "prefill": "custom_group", + } + ) + + def test_scenarios(self, intake, expected_output, raw_option, data): + with patch_groups(**data): + super().test_scenarios(intake, expected_output, raw_option, data) + + +# ╭───────────────────────────────────────────────────────╮ +# │ MULTIPLE │ +# ╰───────────────────────────────────────────────────────╯ + + +@pytest.fixture +def patch_entities(): + with patch_domains(domains=domains2, main_domain=main_domain), patch_apps( + apps=[installed_webapp, installed_non_webapp] + ), patch_users( + users={admin_username: admin_user, regular_username: regular_user}, + admin_username=admin_username, + main_domain=main_domain, + ), patch_groups( + groups=groups2 + ): + yield + + +def test_options_empty(): ask_questions_and_parse_answers({}, {}) == [] -def test_question_string(): - questions = { - "some_string": { - "type": "string", - } +@pytest.mark.usefixtures("patch_entities", "file_clean") +def test_options_query_string(): + raw_options = { + "string_id": {"type": "string"}, + "text_id": {"type": "text"}, + "password_id": {"type": "password"}, + "color_id": {"type": "color"}, + "number_id": {"type": "number"}, + "boolean_id": {"type": "boolean"}, + "date_id": {"type": "date"}, + "time_id": {"type": "time"}, + "email_id": {"type": "email"}, + "path_id": {"type": "path"}, + "url_id": {"type": "url"}, + "file_id": {"type": "file"}, + "select_id": {"type": "select", "choices": ["one", "two"]}, + "tags_id": {"type": "tags", "choices": ["one", "two"]}, + "domain_id": {"type": "domain"}, + "app_id": {"type": "app"}, + "user_id": {"type": "user"}, + "group_id": {"type": "group"}, } - answers = {"some_string": "some_value"} - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" - - -def 
test_question_string_from_query_string(): - questions = { - "some_string": { - "type": "string", - } + results = { + "string_id": "string", + "text_id": "text\ntext", + "password_id": "sUpRSCRT", + "color_id": "#ffff00", + "number_id": 10, + "boolean_id": 1, + "date_id": "2030-03-06", + "time_id": "20:55", + "email_id": "coucou@ynh.local", + "path_id": "/ynh-dev", + "url_id": "https://yunohost.org", + "file_id": file_content1, + "select_id": "one", + "tags_id": "one,two", + "domain_id": main_domain, + "app_id": installed_webapp["id"], + "user_id": regular_username, + "group_id": "admins", } - answers = "foo=bar&some_string=some_value&lorem=ipsum" - out = ask_questions_and_parse_answers(questions, answers)[0] + @contextmanager + def patch_query_string(file_repr): + yield ( + "string_id= string" + "&text_id=text\ntext" + "&password_id=sUpRSCRT" + "&color_id=#ffff00" + "&number_id=10" + "&boolean_id=y" + "&date_id=2030-03-06" + "&time_id=20:55" + "&email_id=coucou@ynh.local" + "&path_id=ynh-dev/" + "&url_id=https://yunohost.org" + f"&file_id={file_repr}" + "&select_id=one" + "&tags_id=one,two" + # FIXME We can't test with parse.qs for now, next syntax is available only with config panels + # "&tags_id=one" + # "&tags_id=two" + f"&domain_id={main_domain}" + f"&app_id={installed_webapp['id']}" + f"&user_id={regular_username}" + "&group_id=admins" + # not defined extra values are silently ignored + "&fake_id=fake_value" + ) - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" + def _assert_correct_values(options, raw_options): + form = {option.name: option.value for option in options} + + for k, v in results.items(): + if k == "file_id": + assert os.path.exists(form["file_id"]) and os.path.isfile( + form["file_id"] + ) + with open(form["file_id"], "r") as f: + assert f.read() == file_content1 + else: + assert form[k] == results[k] + + assert len(options) == len(raw_options.keys()) + assert "fake_id" not in form + + with patch_interface("api"), patch_file_api(file_content1) as b64content: + with patch_query_string(b64content.decode("utf-8")) as query_string: + options = ask_questions_and_parse_answers(raw_options, query_string) + _assert_correct_values(options, raw_options) + + with patch_interface("cli"), patch_file_cli(file_content1) as filepath: + with patch_query_string(filepath) as query_string: + options = ask_questions_and_parse_answers(raw_options, query_string) + _assert_correct_values(options, raw_options) def test_question_string_default_type(): @@ -89,179 +1998,6 @@ def test_question_string_default_type(): assert out.value == "some_value" -def test_question_string_no_input(): - questions = {"some_string": {}} - answers = {} - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_string_input(): - questions = { - "some_string": { - "ask": "some question", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" - - -def test_question_string_input_no_ask(): - questions = {"some_string": {}} - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, 
answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" - - -def test_question_string_no_input_optional(): - questions = {"some_string": {"optional": True}} - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "" - - -def test_question_string_optional_with_input(): - questions = { - "some_string": { - "ask": "some question", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" - - -def test_question_string_optional_with_empty_input(): - questions = { - "some_string": { - "ask": "some question", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value=""), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "" - - -def test_question_string_optional_with_input_without_ask(): - questions = { - "some_string": { - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" - - -def test_question_string_no_input_default(): - questions = { - "some_string": { - "ask": "some question", - "default": "some_value", - } - } - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_string" - assert out.type == "string" - assert out.value == "some_value" - - -def test_question_string_input_test_ask(): - ask_text = "some question" - questions = { - "some_string": { - "ask": ask_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=False, - confirm=False, - prefill="", - is_multiline=False, - autocomplete=[], - help=None, - ) - - -def test_question_string_input_test_ask_with_default(): - ask_text = "some question" - default_text = "some example" - questions = { - "some_string": { - "ask": ask_text, - "default": default_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=False, - confirm=False, - prefill=default_text, - is_multiline=False, - autocomplete=[], - help=None, - ) - - @pytest.mark.skip # we should do something with this example def test_question_string_input_test_ask_with_example(): ask_text = "some question" @@ -282,26 +2018,6 @@ def test_question_string_input_test_ask_with_example(): assert example_text in prompt.call_args[1]["message"] -@pytest.mark.skip # we should do something with this 
help -def test_question_string_input_test_ask_with_help(): - ask_text = "some question" - help_text = "some_help" - questions = { - "some_string": { - "ask": ask_text, - "help": help_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - assert ask_text in prompt.call_args[1]["message"] - assert help_text in prompt.call_args[1]["message"] - - def test_question_string_with_choice(): questions = {"some_string": {"type": "string", "choices": ["fr", "en"]}} answers = {"some_string": "fr"} @@ -371,210 +2087,6 @@ def test_question_string_with_choice_default(): assert out.value == "en" -def test_question_password(): - questions = { - "some_password": { - "type": "password", - } - } - answers = {"some_password": "some_value"} - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "some_value" - - -def test_question_password_no_input(): - questions = { - "some_password": { - "type": "password", - } - } - answers = {} - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_password_input(): - questions = { - "some_password": { - "type": "password", - "ask": "some question", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "some_value" - - -def test_question_password_input_no_ask(): - questions = { - "some_password": { - "type": "password", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "some_value" - - -def test_question_password_no_input_optional(): - questions = { - "some_password": { - "type": "password", - "optional": True, - } - } - answers = {} - - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "" - - questions = {"some_password": {"type": "password", "optional": True, "default": ""}} - - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "" - - -def test_question_password_optional_with_input(): - questions = { - "some_password": { - "ask": "some question", - "type": "password", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "some_value" - - -def test_question_password_optional_with_empty_input(): - questions = { - "some_password": { - "ask": "some question", - "type": "password", - "optional": True, - } - } - answers = {} - - with 
patch.object(Moulinette, "prompt", return_value=""), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "" - - -def test_question_password_optional_with_input_without_ask(): - questions = { - "some_password": { - "type": "password", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_password" - assert out.type == "password" - assert out.value == "some_value" - - -def test_question_password_no_input_default(): - questions = { - "some_password": { - "type": "password", - "ask": "some question", - "default": "some_value", - } - } - answers = {} - - # no default for password! - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -@pytest.mark.skip # this should raises -def test_question_password_no_input_example(): - questions = { - "some_password": { - "type": "password", - "ask": "some question", - "example": "some_value", - } - } - answers = {"some_password": "some_value"} - - # no example for password! - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_password_input_test_ask(): - ask_text = "some question" - questions = { - "some_password": { - "type": "password", - "ask": ask_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=True, - confirm=False, - prefill="", - is_multiline=False, - autocomplete=[], - help=None, - ) - - @pytest.mark.skip # we should do something with this example def test_question_password_input_test_ask_with_example(): ask_text = "some question" @@ -596,284 +2108,6 @@ def test_question_password_input_test_ask_with_example(): assert example_text in prompt.call_args[1]["message"] -@pytest.mark.skip # we should do something with this help -def test_question_password_input_test_ask_with_help(): - ask_text = "some question" - help_text = "some_help" - questions = { - "some_password": { - "type": "password", - "ask": ask_text, - "help": help_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - assert ask_text in prompt.call_args[1]["message"] - assert help_text in prompt.call_args[1]["message"] - - -def test_question_password_bad_chars(): - questions = { - "some_password": { - "type": "password", - "ask": "some question", - "example": "some_value", - } - } - - for i in PasswordQuestion.forbidden_chars: - with pytest.raises(YunohostError), patch.object( - os, "isatty", return_value=False - ): - ask_questions_and_parse_answers(questions, {"some_password": i * 8}) - - -def test_question_password_strong_enough(): - questions = { - "some_password": { - "type": "password", - "ask": "some question", - "example": "some_value", - } - } - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - # too short - 
ask_questions_and_parse_answers(questions, {"some_password": "a"}) - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, {"some_password": "password"}) - - -def test_question_password_optional_strong_enough(): - questions = { - "some_password": { - "ask": "some question", - "type": "password", - "optional": True, - } - } - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - # too short - ask_questions_and_parse_answers(questions, {"some_password": "a"}) - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, {"some_password": "password"}) - - -def test_question_path(): - questions = { - "some_path": { - "type": "path", - } - } - answers = {"some_path": "/some_value"} - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "/some_value" - - -def test_question_path_no_input(): - questions = { - "some_path": { - "type": "path", - } - } - answers = {} - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_path_input(): - questions = { - "some_path": { - "type": "path", - "ask": "some question", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="/some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "/some_value" - - -def test_question_path_input_no_ask(): - questions = { - "some_path": { - "type": "path", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="/some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "/some_value" - - -def test_question_path_no_input_optional(): - questions = { - "some_path": { - "type": "path", - "optional": True, - } - } - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "" - - -def test_question_path_optional_with_input(): - questions = { - "some_path": { - "ask": "some question", - "type": "path", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="/some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "/some_value" - - -def test_question_path_optional_with_empty_input(): - questions = { - "some_path": { - "ask": "some question", - "type": "path", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value=""), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "" - - -def test_question_path_optional_with_input_without_ask(): - questions = { - "some_path": { - "type": "path", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, 
"prompt", return_value="/some_value"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "/some_value" - - -def test_question_path_no_input_default(): - questions = { - "some_path": { - "ask": "some question", - "type": "path", - "default": "some_value", - } - } - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_path" - assert out.type == "path" - assert out.value == "/some_value" - - -def test_question_path_input_test_ask(): - ask_text = "some question" - questions = { - "some_path": { - "type": "path", - "ask": ask_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=False, - confirm=False, - prefill="", - is_multiline=False, - autocomplete=[], - help=None, - ) - - -def test_question_path_input_test_ask_with_default(): - ask_text = "some question" - default_text = "someexample" - questions = { - "some_path": { - "type": "path", - "ask": ask_text, - "default": default_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=False, - confirm=False, - prefill=default_text, - is_multiline=False, - autocomplete=[], - help=None, - ) - - @pytest.mark.skip # we should do something with this example def test_question_path_input_test_ask_with_example(): ask_text = "some question" @@ -895,897 +2129,6 @@ def test_question_path_input_test_ask_with_example(): assert example_text in prompt.call_args[1]["message"] -@pytest.mark.skip # we should do something with this help -def test_question_path_input_test_ask_with_help(): - ask_text = "some question" - help_text = "some_help" - questions = { - "some_path": { - "type": "path", - "ask": ask_text, - "help": help_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="some_value" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - assert ask_text in prompt.call_args[1]["message"] - assert help_text in prompt.call_args[1]["message"] - - -def test_question_boolean(): - questions = { - "some_boolean": { - "type": "boolean", - } - } - answers = {"some_boolean": "y"} - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_boolean" - assert out.type == "boolean" - assert out.value == 1 - - -def test_question_boolean_all_yes(): - questions = { - "some_boolean": { - "type": "boolean", - } - } - - for value in ["Y", "yes", "Yes", "YES", "1", 1, True, "True", "TRUE", "true"]: - out = ask_questions_and_parse_answers(questions, {"some_boolean": value})[0] - assert out.name == "some_boolean" - assert out.type == "boolean" - assert out.value == 1 - - -def test_question_boolean_all_no(): - questions = { - "some_boolean": { - "type": "boolean", - } - } - - for value in ["n", "N", "no", "No", "No", "0", 0, False, "False", "FALSE", "false"]: - out = ask_questions_and_parse_answers(questions, {"some_boolean": value})[0] - assert out.name == 
"some_boolean" - assert out.type == "boolean" - assert out.value == 0 - - -# XXX apparently boolean are always False (0) by default, I'm not sure what to think about that -def test_question_boolean_no_input(): - questions = { - "some_boolean": { - "type": "boolean", - } - } - answers = {} - - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.value == 0 - - -def test_question_boolean_bad_input(): - questions = { - "some_boolean": { - "type": "boolean", - } - } - answers = {"some_boolean": "stuff"} - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_boolean_input(): - questions = { - "some_boolean": { - "type": "boolean", - "ask": "some question", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="y"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - assert out.value == 1 - - with patch.object(Moulinette, "prompt", return_value="n"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - assert out.value == 0 - - -def test_question_boolean_input_no_ask(): - questions = { - "some_boolean": { - "type": "boolean", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="y"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - assert out.value == 1 - - -def test_question_boolean_no_input_optional(): - questions = { - "some_boolean": { - "type": "boolean", - "optional": True, - } - } - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - assert out.value == 0 - - -def test_question_boolean_optional_with_input(): - questions = { - "some_boolean": { - "ask": "some question", - "type": "boolean", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="y"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - assert out.value == 1 - - -def test_question_boolean_optional_with_empty_input(): - questions = { - "some_boolean": { - "ask": "some question", - "type": "boolean", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value=""), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.value == 0 - - -def test_question_boolean_optional_with_input_without_ask(): - questions = { - "some_boolean": { - "type": "boolean", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="n"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.value == 0 - - -def test_question_boolean_no_input_default(): - questions = { - "some_boolean": { - "ask": "some question", - "type": "boolean", - "default": 0, - } - } - answers = {} - - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.value == 0 - - -def test_question_boolean_bad_default(): - questions = { - "some_boolean": { - "ask": "some question", - "type": "boolean", - "default": "bad default", - } - } - answers = {} - with 
pytest.raises(YunohostError): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_boolean_input_test_ask(): - ask_text = "some question" - questions = { - "some_boolean": { - "type": "boolean", - "ask": ask_text, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value=0) as prompt, patch.object( - os, "isatty", return_value=True - ): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text + " [yes | no]", - is_password=False, - confirm=False, - prefill="no", - is_multiline=False, - autocomplete=[], - help=None, - ) - - -def test_question_boolean_input_test_ask_with_default(): - ask_text = "some question" - default_text = 1 - questions = { - "some_boolean": { - "type": "boolean", - "ask": ask_text, - "default": default_text, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value=1) as prompt, patch.object( - os, "isatty", return_value=True - ): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text + " [yes | no]", - is_password=False, - confirm=False, - prefill="yes", - is_multiline=False, - autocomplete=[], - help=None, - ) - - -def test_question_domain_empty(): - questions = { - "some_domain": { - "type": "domain", - } - } - main_domain = "my_main_domain.com" - answers = {} - - with patch.object( - domain, "_get_maindomain", return_value="my_main_domain.com" - ), patch.object( - domain, "domain_list", return_value={"domains": [main_domain]} - ), patch.object( - os, "isatty", return_value=False - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == main_domain - - -def test_question_domain(): - main_domain = "my_main_domain.com" - domains = [main_domain] - questions = { - "some_domain": { - "type": "domain", - } - } - - answers = {"some_domain": main_domain} - - with patch.object( - domain, "_get_maindomain", return_value=main_domain - ), patch.object(domain, "domain_list", return_value={"domains": domains}): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == main_domain - - -def test_question_domain_two_domains(): - main_domain = "my_main_domain.com" - other_domain = "some_other_domain.tld" - domains = [main_domain, other_domain] - - questions = { - "some_domain": { - "type": "domain", - } - } - answers = {"some_domain": other_domain} - - with patch.object( - domain, "_get_maindomain", return_value=main_domain - ), patch.object(domain, "domain_list", return_value={"domains": domains}): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == other_domain - - answers = {"some_domain": main_domain} - - with patch.object( - domain, "_get_maindomain", return_value=main_domain - ), patch.object(domain, "domain_list", return_value={"domains": domains}): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == main_domain - - -def test_question_domain_two_domains_wrong_answer(): - main_domain = "my_main_domain.com" - other_domain = "some_other_domain.tld" - domains = [main_domain, other_domain] - - questions = { - "some_domain": { - "type": "domain", - } - } - answers = {"some_domain": "doesnt_exist.pouet"} - - with patch.object( - domain, 
"_get_maindomain", return_value=main_domain - ), patch.object(domain, "domain_list", return_value={"domains": domains}): - with pytest.raises(YunohostError), patch.object( - os, "isatty", return_value=False - ): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_domain_two_domains_default_no_ask(): - main_domain = "my_main_domain.com" - other_domain = "some_other_domain.tld" - domains = [main_domain, other_domain] - - questions = { - "some_domain": { - "type": "domain", - } - } - answers = {} - - with patch.object( - domain, "_get_maindomain", return_value=main_domain - ), patch.object( - domain, "domain_list", return_value={"domains": domains} - ), patch.object( - os, "isatty", return_value=False - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == main_domain - - -def test_question_domain_two_domains_default(): - main_domain = "my_main_domain.com" - other_domain = "some_other_domain.tld" - domains = [main_domain, other_domain] - - questions = {"some_domain": {"type": "domain", "ask": "choose a domain"}} - answers = {} - - with patch.object( - domain, "_get_maindomain", return_value=main_domain - ), patch.object( - domain, "domain_list", return_value={"domains": domains} - ), patch.object( - os, "isatty", return_value=False - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == main_domain - - -def test_question_domain_two_domains_default_input(): - main_domain = "my_main_domain.com" - other_domain = "some_other_domain.tld" - domains = [main_domain, other_domain] - - questions = {"some_domain": {"type": "domain", "ask": "choose a domain"}} - answers = {} - - with patch.object( - domain, "_get_maindomain", return_value=main_domain - ), patch.object( - domain, "domain_list", return_value={"domains": domains} - ), patch.object( - os, "isatty", return_value=True - ): - with patch.object(Moulinette, "prompt", return_value=main_domain): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == main_domain - - with patch.object(Moulinette, "prompt", return_value=other_domain): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_domain" - assert out.type == "domain" - assert out.value == other_domain - - -def test_question_user_empty(): - users = { - "some_user": { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "p@ynh.local", - "fullname": "the first name the last name", - } - } - - questions = { - "some_user": { - "type": "user", - } - } - answers = {} - - with patch.object(user, "user_list", return_value={"users": users}): - with pytest.raises(YunohostError), patch.object( - os, "isatty", return_value=False - ): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_user(): - username = "some_user" - users = { - username: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "p@ynh.local", - "fullname": "the first name the last name", - } - } - - questions = { - "some_user": { - "type": "user", - } - } - answers = {"some_user": username} - - with patch.object(user, "user_list", return_value={"users": users}), patch.object( - user, "user_info", return_value={} - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert 
out.name == "some_user" - assert out.type == "user" - assert out.value == username - - -def test_question_user_two_users(): - username = "some_user" - other_user = "some_other_user" - users = { - username: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "p@ynh.local", - "fullname": "the first name the last name", - }, - other_user: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "z@ynh.local", - "fullname": "john doe", - }, - } - - questions = { - "some_user": { - "type": "user", - } - } - answers = {"some_user": other_user} - - with patch.object(user, "user_list", return_value={"users": users}), patch.object( - user, "user_info", return_value={} - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_user" - assert out.type == "user" - assert out.value == other_user - - answers = {"some_user": username} - - with patch.object(user, "user_list", return_value={"users": users}), patch.object( - user, "user_info", return_value={} - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_user" - assert out.type == "user" - assert out.value == username - - -def test_question_user_two_users_wrong_answer(): - username = "my_username.com" - other_user = "some_other_user" - users = { - username: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "p@ynh.local", - "fullname": "the first name the last name", - }, - other_user: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "z@ynh.local", - "fullname": "john doe", - }, - } - - questions = { - "some_user": { - "type": "user", - } - } - answers = {"some_user": "doesnt_exist.pouet"} - - with patch.object(user, "user_list", return_value={"users": users}): - with pytest.raises(YunohostError), patch.object( - os, "isatty", return_value=False - ): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_user_two_users_no_default(): - username = "my_username.com" - other_user = "some_other_user.tld" - users = { - username: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "p@ynh.local", - "fullname": "the first name the last name", - }, - other_user: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "z@ynh.local", - "fullname": "john doe", - }, - } - - questions = {"some_user": {"type": "user", "ask": "choose a user"}} - answers = {} - - with patch.object(user, "user_list", return_value={"users": users}): - with pytest.raises(YunohostError), patch.object( - os, "isatty", return_value=False - ): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_user_two_users_default_input(): - username = "my_username.com" - other_user = "some_other_user.tld" - users = { - username: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "p@ynh.local", - "fullname": "the first name the last name", - }, - other_user: { - "ssh_allowed": False, - "username": "some_user", - "mailbox-quota": "0", - "mail": "z@ynh.local", - "fullname": "john doe", - }, - } - - questions = {"some_user": {"type": "user", "ask": "choose a user"}} - answers = {} - - with patch.object(user, "user_list", return_value={"users": users}), patch.object( - os, "isatty", return_value=True - ): - with patch.object(user, "user_info", return_value={}): - with patch.object(Moulinette, "prompt", return_value=username): - out = 
ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_user" - assert out.type == "user" - assert out.value == username - - with patch.object(Moulinette, "prompt", return_value=other_user): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_user" - assert out.type == "user" - assert out.value == other_user - - -def test_question_number(): - questions = { - "some_number": { - "type": "number", - } - } - answers = {"some_number": 1337} - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 1337 - - -def test_question_number_no_input(): - questions = { - "some_number": { - "type": "number", - } - } - answers = {} - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_number_bad_input(): - questions = { - "some_number": { - "type": "number", - } - } - answers = {"some_number": "stuff"} - - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - answers = {"some_number": 1.5} - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_number_input(): - questions = { - "some_number": { - "type": "number", - "ask": "some question", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="1337"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 1337 - - with patch.object(Moulinette, "prompt", return_value=1337), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 1337 - - with patch.object(Moulinette, "prompt", return_value="0"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 0 - - -def test_question_number_input_no_ask(): - questions = { - "some_number": { - "type": "number", - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="1337"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 1337 - - -def test_question_number_no_input_optional(): - questions = { - "some_number": { - "type": "number", - "optional": True, - } - } - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value is None - - -def test_question_number_optional_with_input(): - questions = { - "some_number": { - "ask": "some question", - "type": "number", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="1337"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value 
== 1337 - - -def test_question_number_optional_with_input_without_ask(): - questions = { - "some_number": { - "type": "number", - "optional": True, - } - } - answers = {} - - with patch.object(Moulinette, "prompt", return_value="0"), patch.object( - os, "isatty", return_value=True - ): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 0 - - -def test_question_number_no_input_default(): - questions = { - "some_number": { - "ask": "some question", - "type": "number", - "default": 1337, - } - } - answers = {} - with patch.object(os, "isatty", return_value=False): - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_number" - assert out.type == "number" - assert out.value == 1337 - - -def test_question_number_bad_default(): - questions = { - "some_number": { - "ask": "some question", - "type": "number", - "default": "bad default", - } - } - answers = {} - with pytest.raises(YunohostError), patch.object(os, "isatty", return_value=False): - ask_questions_and_parse_answers(questions, answers) - - -def test_question_number_input_test_ask(): - ask_text = "some question" - questions = { - "some_number": { - "type": "number", - "ask": ask_text, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="1111" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=False, - confirm=False, - prefill="", - is_multiline=False, - autocomplete=[], - help=None, - ) - - -def test_question_number_input_test_ask_with_default(): - ask_text = "some question" - default_value = 1337 - questions = { - "some_number": { - "type": "number", - "ask": ask_text, - "default": default_value, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="1111" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - prompt.assert_called_with( - message=ask_text, - is_password=False, - confirm=False, - prefill=str(default_value), - is_multiline=False, - autocomplete=[], - help=None, - ) - - @pytest.mark.skip # we should do something with this example def test_question_number_input_test_ask_with_example(): ask_text = "some question" @@ -1807,183 +2150,89 @@ def test_question_number_input_test_ask_with_example(): assert example_value in prompt.call_args[1]["message"] -@pytest.mark.skip # we should do something with this help -def test_question_number_input_test_ask_with_help(): - ask_text = "some question" - help_value = 1337 - questions = { - "some_number": { - "type": "number", - "ask": ask_text, - "help": help_value, - } - } - answers = {} - - with patch.object( - Moulinette, "prompt", return_value="1111" - ) as prompt, patch.object(os, "isatty", return_value=True): - ask_questions_and_parse_answers(questions, answers) - assert ask_text in prompt.call_args[1]["message"] - assert help_value in prompt.call_args[1]["message"] - - -def test_question_display_text(): - questions = {"some_app": {"type": "display_text", "ask": "foobar"}} - answers = {} - - with patch.object(sys, "stdout", new_callable=StringIO) as stdout, patch.object( - os, "isatty", return_value=True - ): - ask_questions_and_parse_answers(questions, answers) - assert "foobar" in stdout.getvalue() - - -def test_question_file_from_cli(): - FileQuestion.clean_upload_dirs() - - filename = 
"/tmp/ynh_test_question_file" - os.system(f"rm -f {filename}") - os.system(f"echo helloworld > {filename}") - - questions = { - "some_file": { - "type": "file", - } - } - answers = {"some_file": filename} - - out = ask_questions_and_parse_answers(questions, answers)[0] - - assert out.name == "some_file" - assert out.type == "file" - - # The file is supposed to be copied somewhere else - assert out.value != filename - assert out.value.startswith("/tmp/") - assert os.path.exists(out.value) - assert "helloworld" in open(out.value).read().strip() - - FileQuestion.clean_upload_dirs() - - assert not os.path.exists(out.value) - - -def test_question_file_from_api(): - FileQuestion.clean_upload_dirs() - - from base64 import b64encode - - b64content = b64encode(b"helloworld") - questions = { - "some_file": { - "type": "file", - } - } - answers = {"some_file": b64content} - - interface_type_bkp = Moulinette.interface.type - try: - Moulinette.interface.type = "api" - out = ask_questions_and_parse_answers(questions, answers)[0] - finally: - Moulinette.interface.type = interface_type_bkp - - assert out.name == "some_file" - assert out.type == "file" - - assert out.value.startswith("/tmp/") - assert os.path.exists(out.value) - assert "helloworld" in open(out.value).read().strip() - - FileQuestion.clean_upload_dirs() - - assert not os.path.exists(out.value) - - def test_normalize_boolean_nominal(): - assert BooleanQuestion.normalize("yes") == 1 - assert BooleanQuestion.normalize("Yes") == 1 - assert BooleanQuestion.normalize(" yes ") == 1 - assert BooleanQuestion.normalize("y") == 1 - assert BooleanQuestion.normalize("true") == 1 - assert BooleanQuestion.normalize("True") == 1 - assert BooleanQuestion.normalize("on") == 1 - assert BooleanQuestion.normalize("1") == 1 - assert BooleanQuestion.normalize(1) == 1 + assert BooleanOption.normalize("yes") == 1 + assert BooleanOption.normalize("Yes") == 1 + assert BooleanOption.normalize(" yes ") == 1 + assert BooleanOption.normalize("y") == 1 + assert BooleanOption.normalize("true") == 1 + assert BooleanOption.normalize("True") == 1 + assert BooleanOption.normalize("on") == 1 + assert BooleanOption.normalize("1") == 1 + assert BooleanOption.normalize(1) == 1 - assert BooleanQuestion.normalize("no") == 0 - assert BooleanQuestion.normalize("No") == 0 - assert BooleanQuestion.normalize(" no ") == 0 - assert BooleanQuestion.normalize("n") == 0 - assert BooleanQuestion.normalize("false") == 0 - assert BooleanQuestion.normalize("False") == 0 - assert BooleanQuestion.normalize("off") == 0 - assert BooleanQuestion.normalize("0") == 0 - assert BooleanQuestion.normalize(0) == 0 + assert BooleanOption.normalize("no") == 0 + assert BooleanOption.normalize("No") == 0 + assert BooleanOption.normalize(" no ") == 0 + assert BooleanOption.normalize("n") == 0 + assert BooleanOption.normalize("false") == 0 + assert BooleanOption.normalize("False") == 0 + assert BooleanOption.normalize("off") == 0 + assert BooleanOption.normalize("0") == 0 + assert BooleanOption.normalize(0) == 0 - assert BooleanQuestion.normalize("") is None - assert BooleanQuestion.normalize(" ") is None - assert BooleanQuestion.normalize(" none ") is None - assert BooleanQuestion.normalize("None") is None - assert BooleanQuestion.normalize("noNe") is None - assert BooleanQuestion.normalize(None) is None + assert BooleanOption.normalize("") is None + assert BooleanOption.normalize(" ") is None + assert BooleanOption.normalize(" none ") is None + assert BooleanOption.normalize("None") is None + assert 
BooleanOption.normalize("noNe") is None + assert BooleanOption.normalize(None) is None def test_normalize_boolean_humanize(): - assert BooleanQuestion.humanize("yes") == "yes" - assert BooleanQuestion.humanize("true") == "yes" - assert BooleanQuestion.humanize("on") == "yes" + assert BooleanOption.humanize("yes") == "yes" + assert BooleanOption.humanize("true") == "yes" + assert BooleanOption.humanize("on") == "yes" - assert BooleanQuestion.humanize("no") == "no" - assert BooleanQuestion.humanize("false") == "no" - assert BooleanQuestion.humanize("off") == "no" + assert BooleanOption.humanize("no") == "no" + assert BooleanOption.humanize("false") == "no" + assert BooleanOption.humanize("off") == "no" def test_normalize_boolean_invalid(): with pytest.raises(YunohostValidationError): - BooleanQuestion.normalize("yesno") + BooleanOption.normalize("yesno") with pytest.raises(YunohostValidationError): - BooleanQuestion.normalize("foobar") + BooleanOption.normalize("foobar") with pytest.raises(YunohostValidationError): - BooleanQuestion.normalize("enabled") + BooleanOption.normalize("enabled") def test_normalize_boolean_special_yesno(): customyesno = {"yes": "enabled", "no": "disabled"} - assert BooleanQuestion.normalize("yes", customyesno) == "enabled" - assert BooleanQuestion.normalize("true", customyesno) == "enabled" - assert BooleanQuestion.normalize("enabled", customyesno) == "enabled" - assert BooleanQuestion.humanize("yes", customyesno) == "yes" - assert BooleanQuestion.humanize("true", customyesno) == "yes" - assert BooleanQuestion.humanize("enabled", customyesno) == "yes" + assert BooleanOption.normalize("yes", customyesno) == "enabled" + assert BooleanOption.normalize("true", customyesno) == "enabled" + assert BooleanOption.normalize("enabled", customyesno) == "enabled" + assert BooleanOption.humanize("yes", customyesno) == "yes" + assert BooleanOption.humanize("true", customyesno) == "yes" + assert BooleanOption.humanize("enabled", customyesno) == "yes" - assert BooleanQuestion.normalize("no", customyesno) == "disabled" - assert BooleanQuestion.normalize("false", customyesno) == "disabled" - assert BooleanQuestion.normalize("disabled", customyesno) == "disabled" - assert BooleanQuestion.humanize("no", customyesno) == "no" - assert BooleanQuestion.humanize("false", customyesno) == "no" - assert BooleanQuestion.humanize("disabled", customyesno) == "no" + assert BooleanOption.normalize("no", customyesno) == "disabled" + assert BooleanOption.normalize("false", customyesno) == "disabled" + assert BooleanOption.normalize("disabled", customyesno) == "disabled" + assert BooleanOption.humanize("no", customyesno) == "no" + assert BooleanOption.humanize("false", customyesno) == "no" + assert BooleanOption.humanize("disabled", customyesno) == "no" def test_normalize_domain(): - assert DomainQuestion.normalize("https://yolo.swag/") == "yolo.swag" - assert DomainQuestion.normalize("http://yolo.swag") == "yolo.swag" - assert DomainQuestion.normalize("yolo.swag/") == "yolo.swag" + assert DomainOption.normalize("https://yolo.swag/") == "yolo.swag" + assert DomainOption.normalize("http://yolo.swag") == "yolo.swag" + assert DomainOption.normalize("yolo.swag/") == "yolo.swag" def test_normalize_path(): - assert PathQuestion.normalize("") == "/" - assert PathQuestion.normalize("") == "/" - assert PathQuestion.normalize("macnuggets") == "/macnuggets" - assert PathQuestion.normalize("/macnuggets") == "/macnuggets" - assert PathQuestion.normalize(" /macnuggets ") == "/macnuggets" - assert 
PathQuestion.normalize("/macnuggets") == "/macnuggets" - assert PathQuestion.normalize("mac/nuggets") == "/mac/nuggets" - assert PathQuestion.normalize("/macnuggets/") == "/macnuggets" - assert PathQuestion.normalize("macnuggets/") == "/macnuggets" - assert PathQuestion.normalize("////macnuggets///") == "/macnuggets" + assert WebPathOption.normalize("") == "/" + assert WebPathOption.normalize("") == "/" + assert WebPathOption.normalize("macnuggets") == "/macnuggets" + assert WebPathOption.normalize("/macnuggets") == "/macnuggets" + assert WebPathOption.normalize(" /macnuggets ") == "/macnuggets" + assert WebPathOption.normalize("/macnuggets") == "/macnuggets" + assert WebPathOption.normalize("mac/nuggets") == "/mac/nuggets" + assert WebPathOption.normalize("/macnuggets/") == "/macnuggets" + assert WebPathOption.normalize("macnuggets/") == "/macnuggets" + assert WebPathOption.normalize("////macnuggets///") == "/macnuggets" def test_simple_evaluate(): diff --git a/src/tests/test_regenconf.py b/src/tests/test_regenconf.py index 8dda1a7f2..3966ef291 100644 --- a/src/tests/test_regenconf.py +++ b/src/tests/test_regenconf.py @@ -87,7 +87,7 @@ def test_ssh_conf_unmanaged(): assert SSHD_CONFIG in _get_conf_hashes("ssh") -def test_ssh_conf_unmanaged_and_manually_modified(mocker): +def test_ssh_conf_unmanaged_and_manually_modified(): _force_clear_hashes([SSHD_CONFIG]) os.system("echo ' ' >> %s" % SSHD_CONFIG) @@ -98,7 +98,7 @@ def test_ssh_conf_unmanaged_and_manually_modified(mocker): assert SSHD_CONFIG in _get_conf_hashes("ssh") assert SSHD_CONFIG in manually_modified_files() - with message(mocker, "regenconf_need_to_explicitly_specify_ssh"): + with message("regenconf_need_to_explicitly_specify_ssh"): regen_conf(force=True) assert SSHD_CONFIG in _get_conf_hashes("ssh") diff --git a/src/tests/test_user-group.py b/src/tests/test_user-group.py index eececb827..57f9ffa3f 100644 --- a/src/tests/test_user-group.py +++ b/src/tests/test_user-group.py @@ -91,8 +91,8 @@ def test_list_groups(): # -def test_create_user(mocker): - with message(mocker, "user_created"): +def test_create_user(): + with message("user_created"): user_create("albert", maindomain, "test123Ynh", fullname="Albert Good") group_res = user_group_list()["groups"] @@ -102,8 +102,8 @@ def test_create_user(mocker): assert "albert" in group_res["all_users"]["members"] -def test_del_user(mocker): - with message(mocker, "user_deleted"): +def test_del_user(): + with message("user_deleted"): user_delete("alice") group_res = user_group_list()["groups"] @@ -112,7 +112,7 @@ def test_del_user(mocker): assert "alice" not in group_res["all_users"]["members"] -def test_import_user(mocker): +def test_import_user(): import csv from io import StringIO @@ -157,7 +157,7 @@ def test_import_user(mocker): } ) csv_io.seek(0) - with message(mocker, "user_import_success"): + with message("user_import_success"): user_import(csv_io, update=True, delete=True) group_res = user_group_list()["groups"] @@ -171,7 +171,7 @@ def test_import_user(mocker): assert "alice" not in group_res["dev"]["members"] -def test_export_user(mocker): +def test_export_user(): result = user_export() should_be = ( "username;firstname;lastname;password;mail;mail-alias;mail-forward;mailbox-quota;groups\r\n" @@ -182,8 +182,8 @@ def test_export_user(mocker): assert result == should_be -def test_create_group(mocker): - with message(mocker, "group_created", group="adminsys"): +def test_create_group(): + with message("group_created", group="adminsys"): user_group_create("adminsys") group_res = 
user_group_list()["groups"] @@ -192,8 +192,8 @@ def test_create_group(mocker): assert group_res["adminsys"]["members"] == [] -def test_del_group(mocker): - with message(mocker, "group_deleted", group="dev"): +def test_del_group(): + with message("group_deleted", group="dev"): user_group_delete("dev") group_res = user_group_list()["groups"] @@ -262,46 +262,46 @@ def test_del_group_that_does_not_exist(mocker): # -def test_update_user(mocker): - with message(mocker, "user_updated"): +def test_update_user(): + with message("user_updated"): user_update("alice", firstname="NewName", lastname="NewLast") info = user_info("alice") assert info["fullname"] == "NewName NewLast" - with message(mocker, "user_updated"): + with message("user_updated"): user_update("alice", fullname="New2Name New2Last") info = user_info("alice") assert info["fullname"] == "New2Name New2Last" -def test_update_group_add_user(mocker): - with message(mocker, "group_updated", group="dev"): +def test_update_group_add_user(): + with message("group_updated", group="dev"): user_group_update("dev", add=["bob"]) group_res = user_group_list()["groups"] assert set(group_res["dev"]["members"]) == {"alice", "bob"} -def test_update_group_add_user_already_in(mocker): - with message(mocker, "group_user_already_in_group", user="bob", group="apps"): +def test_update_group_add_user_already_in(): + with message("group_user_already_in_group", user="bob", group="apps"): user_group_update("apps", add=["bob"]) group_res = user_group_list()["groups"] assert group_res["apps"]["members"] == ["bob"] -def test_update_group_remove_user(mocker): - with message(mocker, "group_updated", group="apps"): +def test_update_group_remove_user(): + with message("group_updated", group="apps"): user_group_update("apps", remove=["bob"]) group_res = user_group_list()["groups"] assert group_res["apps"]["members"] == [] -def test_update_group_remove_user_not_already_in(mocker): - with message(mocker, "group_user_not_in_group", user="jack", group="apps"): +def test_update_group_remove_user_not_already_in(): + with message("group_user_not_in_group", user="jack", group="apps"): user_group_update("apps", remove=["jack"]) group_res = user_group_list()["groups"] diff --git a/src/user.py b/src/user.py index 12f13f75c..00876854e 100644 --- a/src/user.py +++ b/src/user.py @@ -631,7 +631,7 @@ def user_info(username): has_value = re.search(r"Value=(\d+)", cmd_result) if has_value: - storage_use = int(has_value.group(1)) + storage_use = int(has_value.group(1)) * 1000 storage_use = binary_to_human(storage_use) if is_limited: @@ -1189,6 +1189,7 @@ def user_group_update( ) else: operation_logger.related_to.append(("user", user)) + logger.info(m18n.n("group_user_add", group=groupname, user=user)) new_group_members += users_to_add @@ -1202,6 +1203,7 @@ def user_group_update( ) else: operation_logger.related_to.append(("user", user)) + logger.info(m18n.n("group_user_remove", group=groupname, user=user)) # Remove users_to_remove from new_group_members # Kinda like a new_group_members -= users_to_remove @@ -1237,6 +1239,7 @@ def user_group_update( "mail_domain_unknown", domain=mail[mail.find("@") + 1 :] ) new_group_mail.append(mail) + logger.info(m18n.n("group_mailalias_add", group=groupname, mail=mail)) if remove_mailalias: from yunohost.domain import _get_maindomain @@ -1256,6 +1259,9 @@ def user_group_update( ) if mail in new_group_mail: new_group_mail.remove(mail) + logger.info( + m18n.n("group_mailalias_remove", group=groupname, mail=mail) + ) else: raise 
YunohostValidationError("mail_alias_remove_failed", mail=mail) diff --git a/src/utils/configpanel.py b/src/utils/configpanel.py new file mode 100644 index 000000000..2c56eb754 --- /dev/null +++ b/src/utils/configpanel.py @@ -0,0 +1,686 @@ +# +# Copyright (c) 2023 YunoHost Contributors +# +# This file is part of YunoHost (see https://yunohost.org) +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +import glob +import os +import re +import urllib.parse +from collections import OrderedDict +from typing import Union + +from moulinette import Moulinette, m18n +from moulinette.interfaces.cli import colorize +from moulinette.utils.filesystem import mkdir, read_toml, read_yaml, write_to_yaml +from moulinette.utils.log import getActionLogger +from yunohost.utils.error import YunohostError, YunohostValidationError +from yunohost.utils.form import ( + OPTIONS, + BaseOption, + FileOption, + ask_questions_and_parse_answers, + evaluate_simple_js_expression, +) +from yunohost.utils.i18n import _value_for_locale + +logger = getActionLogger("yunohost.configpanel") +CONFIG_PANEL_VERSION_SUPPORTED = 1.0 + + +class ConfigPanel: + entity_type = "config" + save_path_tpl: Union[str, None] = None + config_path_tpl = "/usr/share/yunohost/config_{entity_type}.toml" + save_mode = "full" + + @classmethod + def list(cls): + """ + List available config panel + """ + try: + entities = [ + re.match( + "^" + cls.save_path_tpl.format(entity="(?p)") + "$", f + ).group("entity") + for f in glob.glob(cls.save_path_tpl.format(entity="*")) + if os.path.isfile(f) + ] + except FileNotFoundError: + entities = [] + return entities + + def __init__(self, entity, config_path=None, save_path=None, creation=False): + self.entity = entity + self.config_path = config_path + if not config_path: + self.config_path = self.config_path_tpl.format( + entity=entity, entity_type=self.entity_type + ) + self.save_path = save_path + if not save_path and self.save_path_tpl: + self.save_path = self.save_path_tpl.format(entity=entity) + self.config = {} + self.values = {} + self.new_values = {} + + if ( + self.save_path + and self.save_mode != "diff" + and not creation + and not os.path.exists(self.save_path) + ): + raise YunohostValidationError( + f"{self.entity_type}_unknown", **{self.entity_type: entity} + ) + if self.save_path and creation and os.path.exists(self.save_path): + raise YunohostValidationError( + f"{self.entity_type}_exists", **{self.entity_type: entity} + ) + + # Search for hooks in the config panel + self.hooks = { + func: getattr(self, func) + for func in dir(self) + if callable(getattr(self, func)) + and re.match("^(validate|post_ask)__", func) + } + + def get(self, key="", mode="classic"): + self.filter_key = key or "" + + # Read config panel toml + self._get_config_panel() + + if not self.config: + raise YunohostValidationError("config_no_panel") + + # Read or get values and hydrate the config + self._get_raw_settings() + self._hydrate() + + # In 
'classic' mode, we display the current value if key refer to an option + if self.filter_key.count(".") == 2 and mode == "classic": + option = self.filter_key.split(".")[-1] + value = self.values.get(option, None) + + option_type = None + for _, _, option_ in self._iterate(): + if option_["id"] == option: + option_type = OPTIONS[option_["type"]] + break + + return option_type.normalize(value) if option_type else value + + # Format result in 'classic' or 'export' mode + logger.debug(f"Formating result in '{mode}' mode") + result = {} + for panel, section, option in self._iterate(): + if section["is_action_section"] and mode != "full": + continue + + key = f"{panel['id']}.{section['id']}.{option['id']}" + if mode == "export": + result[option["id"]] = option.get("current_value") + continue + + ask = None + if "ask" in option: + ask = _value_for_locale(option["ask"]) + elif "i18n" in self.config: + ask = m18n.n(self.config["i18n"] + "_" + option["id"]) + + if mode == "full": + option["ask"] = ask + question_class = OPTIONS[option.get("type", "string")] + # FIXME : maybe other properties should be taken from the question, not just choices ?. + option["choices"] = question_class(option).choices + option["default"] = question_class(option).default + option["pattern"] = question_class(option).pattern + else: + result[key] = {"ask": ask} + if "current_value" in option: + question_class = OPTIONS[option.get("type", "string")] + result[key]["value"] = question_class.humanize( + option["current_value"], option + ) + # FIXME: semantics, technically here this is not about a prompt... + if question_class.hide_user_input_in_prompt: + result[key][ + "value" + ] = "**************" # Prevent displaying password in `config get` + + if mode == "full": + return self.config + else: + return result + + def set( + self, key=None, value=None, args=None, args_file=None, operation_logger=None + ): + self.filter_key = key or "" + + # Read config panel toml + self._get_config_panel() + + if not self.config: + raise YunohostValidationError("config_no_panel") + + if (args is not None or args_file is not None) and value is not None: + raise YunohostValidationError( + "You should either provide a value, or a serie of args/args_file, but not both at the same time", + raw_msg=True, + ) + + if self.filter_key.count(".") != 2 and value is not None: + raise YunohostValidationError("config_cant_set_value_on_section") + + # Import and parse pre-answered options + logger.debug("Import and parse pre-answered options") + self._parse_pre_answered(args, value, args_file) + + # Read or get values and hydrate the config + self._get_raw_settings() + self._hydrate() + BaseOption.operation_logger = operation_logger + self._ask() + + if operation_logger: + operation_logger.start() + + try: + self._apply() + except YunohostError: + raise + # Script got manually interrupted ... + # N.B. : KeyboardInterrupt does not inherit from Exception + except (KeyboardInterrupt, EOFError): + error = m18n.n("operation_interrupted") + logger.error(m18n.n("config_apply_failed", error=error)) + raise + # Something wrong happened in Yunohost's code (most probably hook_exec) + except Exception: + import traceback + + error = m18n.n("unexpected_error", error="\n" + traceback.format_exc()) + logger.error(m18n.n("config_apply_failed", error=error)) + raise + finally: + # Delete files uploaded from API + # FIXME : this is currently done in the context of config panels, + # but could also happen in the context of app install ... 
(or anywhere else + # where we may parse args etc...) + FileOption.clean_upload_dirs() + + self._reload_services() + + logger.success("Config updated as expected") + operation_logger.success() + + def list_actions(self): + actions = {} + + # FIXME : meh, loading the entire config panel is again going to cause + # stupid issues for domain (e.g loading registrar stuff when willing to just list available actions ...) + self.filter_key = "" + self._get_config_panel() + for panel, section, option in self._iterate(): + if option["type"] == "button": + key = f"{panel['id']}.{section['id']}.{option['id']}" + actions[key] = _value_for_locale(option["ask"]) + + return actions + + def run_action(self, action=None, args=None, args_file=None, operation_logger=None): + # + # FIXME : this stuff looks a lot like set() ... + # + + self.filter_key = ".".join(action.split(".")[:2]) + action_id = action.split(".")[2] + + # Read config panel toml + self._get_config_panel() + + # FIXME: should also check that there's indeed a key called action + if not self.config: + raise YunohostValidationError(f"No action named {action}", raw_msg=True) + + # Import and parse pre-answered options + logger.debug("Import and parse pre-answered options") + self._parse_pre_answered(args, None, args_file) + + # Read or get values and hydrate the config + self._get_raw_settings() + self._hydrate() + BaseOption.operation_logger = operation_logger + self._ask(action=action_id) + + # FIXME: here, we could want to check constrains on + # the action's visibility / requirements wrt to the answer to questions ... + + if operation_logger: + operation_logger.start() + + try: + self._run_action(action_id) + except YunohostError: + raise + # Script got manually interrupted ... + # N.B. : KeyboardInterrupt does not inherit from Exception + except (KeyboardInterrupt, EOFError): + error = m18n.n("operation_interrupted") + logger.error(m18n.n("config_action_failed", action=action, error=error)) + raise + # Something wrong happened in Yunohost's code (most probably hook_exec) + except Exception: + import traceback + + error = m18n.n("unexpected_error", error="\n" + traceback.format_exc()) + logger.error(m18n.n("config_action_failed", action=action, error=error)) + raise + finally: + # Delete files uploaded from API + # FIXME : this is currently done in the context of config panels, + # but could also happen in the context of app install ... (or anywhere else + # where we may parse args etc...) 
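As an illustration of the action-addressing scheme used by list_actions() and run_action() above, the following minimal sketch shows how a three-part key is decomposed; the example key "main.dns.push_config" is invented, not taken from the patch.

def split_action_key(action: str) -> tuple[str, str]:
    # Loosely mirrors ConfigPanel.run_action(): "panel.section.option" is split
    # into a filter key (which panel/section to load) and the button's option id.
    parts = action.split(".")
    if len(parts) != 3:
        raise ValueError("expected a 'panel.section.option' key")
    return ".".join(parts[:2]), parts[2]

print(split_action_key("main.dns.push_config"))  # ('main.dns', 'push_config')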
+ FileOption.clean_upload_dirs() + + # FIXME: i18n + logger.success(f"Action {action_id} successful") + operation_logger.success() + + def _get_raw_config(self): + return read_toml(self.config_path) + + def _get_config_panel(self): + # Split filter_key + filter_key = self.filter_key.split(".") if self.filter_key != "" else [] + if len(filter_key) > 3: + raise YunohostError( + f"The filter key {filter_key} has too many sub-levels, the max is 3.", + raw_msg=True, + ) + + if not os.path.exists(self.config_path): + logger.debug(f"Config panel {self.config_path} doesn't exists") + return None + + toml_config_panel = self._get_raw_config() + + # Check TOML config panel is in a supported version + if float(toml_config_panel["version"]) < CONFIG_PANEL_VERSION_SUPPORTED: + logger.error( + f"Config panels version {toml_config_panel['version']} are not supported" + ) + return None + + # Transform toml format into internal format + format_description = { + "root": { + "properties": ["version", "i18n"], + "defaults": {"version": 1.0}, + }, + "panels": { + "properties": ["name", "services", "actions", "help"], + "defaults": { + "services": [], + "actions": {"apply": {"en": "Apply"}}, + }, + }, + "sections": { + "properties": ["name", "services", "optional", "help", "visible"], + "defaults": { + "name": "", + "services": [], + "optional": True, + "is_action_section": False, + }, + }, + "options": { + "properties": [ + "ask", + "type", + "bind", + "help", + "example", + "default", + "style", + "icon", + "placeholder", + "visible", + "optional", + "choices", + "yes", + "no", + "pattern", + "limit", + "min", + "max", + "step", + "accept", + "redact", + "filter", + "readonly", + "enabled", + # "confirm", # TODO: to ask confirmation before running an action + ], + "defaults": {}, + }, + } + + def _build_internal_config_panel(raw_infos, level): + """Convert TOML in internal format ('full' mode used by webadmin) + Here are some properties of 1.0 config panel in toml: + - node properties and node children are mixed, + - text are in english only + - some properties have default values + This function detects all children nodes and put them in a list + """ + + defaults = format_description[level]["defaults"] + properties = format_description[level]["properties"] + + # Start building the ouput (merging the raw infos + defaults) + out = {key: raw_infos.get(key, value) for key, value in defaults.items()} + + # Now fill the sublevels (+ apply filter_key) + i = list(format_description).index(level) + sublevel = list(format_description)[i + 1] if level != "options" else None + search_key = filter_key[i] if len(filter_key) > i else False + + for key, value in raw_infos.items(): + # Key/value are a child node + if ( + isinstance(value, OrderedDict) + and key not in properties + and sublevel + ): + # We exclude all nodes not referenced by the filter_key + if search_key and key != search_key: + continue + subnode = _build_internal_config_panel(value, sublevel) + subnode["id"] = key + if level == "root": + subnode.setdefault("name", {"en": key.capitalize()}) + elif level == "sections": + subnode["name"] = key # legacy + subnode.setdefault("optional", raw_infos.get("optional", True)) + # If this section contains at least one button, it becomes an "action" section + if subnode.get("type") == "button": + out["is_action_section"] = True + out.setdefault(sublevel, []).append(subnode) + # Key/value are a property + else: + if key not in properties: + logger.warning(f"Unknown key '{key}' found in config panel") + # Todo search all 
i18n keys + out[key] = ( + value + if key not in ["ask", "help", "name"] or isinstance(value, dict) + else {"en": value} + ) + return out + + self.config = _build_internal_config_panel(toml_config_panel, "root") + + try: + self.config["panels"][0]["sections"][0]["options"][0] + except (KeyError, IndexError): + raise YunohostValidationError( + "config_unknown_filter_key", filter_key=self.filter_key + ) + + # List forbidden keywords from helpers and sections toml (to avoid conflict) + forbidden_keywords = [ + "old", + "app", + "changed", + "file_hash", + "binds", + "types", + "formats", + "getter", + "setter", + "short_setting", + "type", + "bind", + "nothing_changed", + "changes_validated", + "result", + "max_progression", + ] + forbidden_keywords += format_description["sections"] + forbidden_readonly_types = ["password", "app", "domain", "user", "file"] + + for _, _, option in self._iterate(): + if option["id"] in forbidden_keywords: + raise YunohostError("config_forbidden_keyword", keyword=option["id"]) + if ( + option.get("readonly", False) + and option.get("type", "string") in forbidden_readonly_types + ): + raise YunohostError( + "config_forbidden_readonly_type", + type=option["type"], + id=option["id"], + ) + + return self.config + + def _get_default_values(self): + return { + option["id"]: option["default"] + for _, _, option in self._iterate() + if "default" in option + } + + def _get_raw_settings(self): + """ + Retrieve entries in YAML file + And set default values if needed + """ + + # Inject defaults if needed (using the magic .update() ;)) + self.values = self._get_default_values() + + # Retrieve entries in the YAML + if os.path.exists(self.save_path) and os.path.isfile(self.save_path): + self.values.update(read_yaml(self.save_path) or {}) + + def _hydrate(self): + # Hydrating config panel with current value + for _, section, option in self._iterate(): + if option["id"] not in self.values: + allowed_empty_types = [ + "alert", + "display_text", + "markdown", + "file", + "button", + ] + + if section["is_action_section"] and option.get("default") is not None: + self.values[option["id"]] = option["default"] + elif ( + option["type"] in allowed_empty_types + or option.get("bind") == "null" + ): + continue + else: + raise YunohostError( + f"Config panel question '{option['id']}' should be initialized with a value during install or upgrade.", + raw_msg=True, + ) + value = self.values[option["name"]] + + # Allow to use value instead of current_value in app config script. + # e.g. apps may write `echo 'value: "foobar"'` in the config file (which is more intuitive that `echo 'current_value: "foobar"'` + # For example hotspot used it... + # See https://github.com/YunoHost/yunohost/pull/1546 + if ( + isinstance(value, dict) + and "value" in value + and "current_value" not in value + ): + value["current_value"] = value["value"] + + # In general, the value is just a simple value. 
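The comments around this point describe the shapes a raw setting can take before _hydrate() folds it into the option. A self-contained sketch of that normalization, with made-up values:

def normalize_raw_setting(raw):
    # Legacy app scripts may output `value:` instead of `current_value:`
    if isinstance(raw, dict) and "value" in raw and "current_value" not in raw:
        raw["current_value"] = raw["value"]
    # A bare scalar gets wrapped so it can be merged into the option dict
    return raw if isinstance(raw, dict) else {"current_value": raw}

assert normalize_raw_setting("foobar") == {"current_value": "foobar"}
assert normalize_raw_setting({"value": "foobar"})["current_value"] == "foobar"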
+ # Sometimes it could be a dict used to overwrite the option itself + value = value if isinstance(value, dict) else {"current_value": value} + option.update(value) + + self.values[option["id"]] = value.get("current_value") + + return self.values + + def _ask(self, action=None): + logger.debug("Ask unanswered question and prevalidate data") + + if "i18n" in self.config: + for panel, section, option in self._iterate(): + if "ask" not in option: + option["ask"] = m18n.n(self.config["i18n"] + "_" + option["id"]) + # auto add i18n help text if present in locales + if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + "_help"): + option["help"] = m18n.n( + self.config["i18n"] + "_" + option["id"] + "_help" + ) + + def display_header(message): + """CLI panel/section header display""" + if Moulinette.interface.type == "cli" and self.filter_key.count(".") < 2: + Moulinette.display(colorize(message, "purple")) + + for panel, section, obj in self._iterate(["panel", "section"]): + if ( + section + and section.get("visible") + and not evaluate_simple_js_expression( + section["visible"], context=self.future_values + ) + ): + continue + + # Ugly hack to skip action section ... except when when explicitly running actions + if not action: + if section and section["is_action_section"]: + continue + + if panel == obj: + name = _value_for_locale(panel["name"]) + display_header(f"\n{'='*40}\n>>>> {name}\n{'='*40}") + else: + name = _value_for_locale(section["name"]) + if name: + display_header(f"\n# {name}") + elif section: + # filter action section options in case of multiple buttons + section["options"] = [ + option + for option in section["options"] + if option.get("type", "string") != "button" + or option["id"] == action + ] + + if panel == obj: + continue + + # Check and ask unanswered questions + prefilled_answers = self.args.copy() + prefilled_answers.update(self.new_values) + + questions = ask_questions_and_parse_answers( + {question["name"]: question for question in section["options"]}, + prefilled_answers=prefilled_answers, + current_values=self.values, + hooks=self.hooks, + ) + self.new_values.update( + { + question.name: question.value + for question in questions + if question.value is not None + } + ) + + @property + def future_values(self): + return {**self.values, **self.new_values} + + def __getattr__(self, name): + if "new_values" in self.__dict__ and name in self.new_values: + return self.new_values[name] + + if "values" in self.__dict__ and name in self.values: + return self.values[name] + + return self.__dict__[name] + + def _parse_pre_answered(self, args, value, args_file): + args = urllib.parse.parse_qs(args or "", keep_blank_values=True) + self.args = {key: ",".join(value_) for key, value_ in args.items()} + + if args_file: + # Import YAML / JSON file but keep --args values + self.args = {**read_yaml(args_file), **self.args} + + if value is not None: + self.args = {self.filter_key.split(".")[-1]: value} + + def _apply(self): + logger.info("Saving the new configuration...") + dir_path = os.path.dirname(os.path.realpath(self.save_path)) + if not os.path.exists(dir_path): + mkdir(dir_path, mode=0o700) + + values_to_save = self.future_values + if self.save_mode == "diff": + defaults = self._get_default_values() + values_to_save = { + k: v for k, v in values_to_save.items() if defaults.get(k) != v + } + + # Save the settings to the .yaml file + write_to_yaml(self.save_path, values_to_save) + + def _reload_services(self): + from yunohost.service import service_reload_or_restart + + 
services_to_reload = set() + for panel, section, obj in self._iterate(["panel", "section", "option"]): + services_to_reload |= set(obj.get("services", [])) + + services_to_reload = list(services_to_reload) + services_to_reload.sort(key="nginx".__eq__) + if services_to_reload: + logger.info("Reloading services...") + for service in services_to_reload: + if hasattr(self, "entity"): + service = service.replace("__APP__", self.entity) + service_reload_or_restart(service) + + def _iterate(self, trigger=["option"]): + for panel in self.config.get("panels", []): + if "panel" in trigger: + yield (panel, None, panel) + for section in panel.get("sections", []): + if "section" in trigger: + yield (panel, section, section) + if "option" in trigger: + for option in section.get("options", []): + yield (panel, section, option) diff --git a/src/utils/config.py b/src/utils/form.py similarity index 51% rename from src/utils/config.py rename to src/utils/form.py index 6f06ed1fb..12c3249c3 100644 --- a/src/utils/config.py +++ b/src/utils/form.py @@ -16,35 +16,31 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # -import glob -import os -import re -import urllib.parse -import tempfile -import shutil import ast import operator as op -from collections import OrderedDict -from typing import Optional, Dict, List, Union, Any, Mapping, Callable +import os +import re +import shutil +import tempfile +import urllib.parse +from typing import Any, Callable, Dict, List, Mapping, Optional, Union -from moulinette.interfaces.cli import colorize from moulinette import Moulinette, m18n +from moulinette.interfaces.cli import colorize +from moulinette.utils.filesystem import read_file, write_to_file from moulinette.utils.log import getActionLogger -from moulinette.utils.filesystem import ( - read_file, - write_to_file, - read_toml, - read_yaml, - write_to_yaml, - mkdir, -) - -from yunohost.utils.i18n import _value_for_locale -from yunohost.utils.error import YunohostError, YunohostValidationError from yunohost.log import OperationLogger +from yunohost.utils.error import YunohostError, YunohostValidationError +from yunohost.utils.i18n import _value_for_locale -logger = getActionLogger("yunohost.config") -CONFIG_PANEL_VERSION_SUPPORTED = 1.0 +logger = getActionLogger("yunohost.form") + + +# ╭───────────────────────────────────────────────────────╮ +# │ ┌─╴╷ ╷╭─┐╷ │ +# │ ├─╴│╭╯├─┤│ │ +# │ ╰─╴╰╯ ╵ ╵╰─╴ │ +# ╰───────────────────────────────────────────────────────╯ # Those js-like evaluate functions are used to eval safely visible attributes @@ -190,652 +186,14 @@ def evaluate_simple_js_expression(expr, context={}): return evaluate_simple_ast(node, context) -class ConfigPanel: - entity_type = "config" - save_path_tpl: Union[str, None] = None - config_path_tpl = "/usr/share/yunohost/config_{entity_type}.toml" - save_mode = "full" +# ╭───────────────────────────────────────────────────────╮ +# │ ╭─╮┌─╮╶┬╴╶┬╴╭─╮╭╮╷╭─╴ │ +# │ │ │├─╯ │ │ │ ││││╰─╮ │ +# │ ╰─╯╵ ╵ ╶┴╴╰─╯╵╰╯╶─╯ │ +# ╰───────────────────────────────────────────────────────╯ - @classmethod - def list(cls): - """ - List available config panel - """ - try: - entities = [ - re.match( - "^" + cls.save_path_tpl.format(entity="(?p)") + "$", f - ).group("entity") - for f in glob.glob(cls.save_path_tpl.format(entity="*")) - if os.path.isfile(f) - ] - except FileNotFoundError: - entities = [] - return entities - def __init__(self, entity, config_path=None, save_path=None, creation=False): - self.entity = entity - 
self.config_path = config_path - if not config_path: - self.config_path = self.config_path_tpl.format( - entity=entity, entity_type=self.entity_type - ) - self.save_path = save_path - if not save_path and self.save_path_tpl: - self.save_path = self.save_path_tpl.format(entity=entity) - self.config = {} - self.values = {} - self.new_values = {} - - if ( - self.save_path - and self.save_mode != "diff" - and not creation - and not os.path.exists(self.save_path) - ): - raise YunohostValidationError( - f"{self.entity_type}_unknown", **{self.entity_type: entity} - ) - if self.save_path and creation and os.path.exists(self.save_path): - raise YunohostValidationError( - f"{self.entity_type}_exists", **{self.entity_type: entity} - ) - - # Search for hooks in the config panel - self.hooks = { - func: getattr(self, func) - for func in dir(self) - if callable(getattr(self, func)) - and re.match("^(validate|post_ask)__", func) - } - - def get(self, key="", mode="classic"): - self.filter_key = key or "" - - # Read config panel toml - self._get_config_panel() - - if not self.config: - raise YunohostValidationError("config_no_panel") - - # Read or get values and hydrate the config - self._load_current_values() - self._hydrate() - - # In 'classic' mode, we display the current value if key refer to an option - if self.filter_key.count(".") == 2 and mode == "classic": - option = self.filter_key.split(".")[-1] - value = self.values.get(option, None) - - option_type = None - for _, _, option_ in self._iterate(): - if option_["id"] == option: - option_type = ARGUMENTS_TYPE_PARSERS[option_["type"]] - break - - return option_type.normalize(value) if option_type else value - - # Format result in 'classic' or 'export' mode - logger.debug(f"Formating result in '{mode}' mode") - result = {} - for panel, section, option in self._iterate(): - if section["is_action_section"] and mode != "full": - continue - - key = f"{panel['id']}.{section['id']}.{option['id']}" - if mode == "export": - result[option["id"]] = option.get("current_value") - continue - - ask = None - if "ask" in option: - ask = _value_for_locale(option["ask"]) - elif "i18n" in self.config: - ask = m18n.n(self.config["i18n"] + "_" + option["id"]) - - if mode == "full": - option["ask"] = ask - question_class = ARGUMENTS_TYPE_PARSERS[option.get("type", "string")] - # FIXME : maybe other properties should be taken from the question, not just choices ?. - option["choices"] = question_class(option).choices - option["default"] = question_class(option).default - option["pattern"] = question_class(option).pattern - else: - result[key] = {"ask": ask} - if "current_value" in option: - question_class = ARGUMENTS_TYPE_PARSERS[ - option.get("type", "string") - ] - result[key]["value"] = question_class.humanize( - option["current_value"], option - ) - # FIXME: semantics, technically here this is not about a prompt... - if question_class.hide_user_input_in_prompt: - result[key][ - "value" - ] = "**************" # Prevent displaying password in `config get` - - if mode == "full": - return self.config - else: - return result - - def list_actions(self): - actions = {} - - # FIXME : meh, loading the entire config panel is again going to cause - # stupid issues for domain (e.g loading registrar stuff when willing to just list available actions ...) 
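For context on how this class is meant to be consumed: get() accepts a dotted filter key with zero to three segments, plus a mode. The usage sketch below assumes a system where this patch is installed; DummyConfigPanel, its paths and the option names are hypothetical.

from yunohost.utils.configpanel import ConfigPanel  # new home of this class after this patch

class DummyConfigPanel(ConfigPanel):          # hypothetical subclass
    entity_type = "dummy"
    save_path_tpl = "/etc/yunohost/dummy/{entity}/settings.yml"
    config_path_tpl = "/usr/share/yunohost/config_dummy.toml"

panel = DummyConfigPanel("example")
panel.get()                         # full tree, 'classic' mode
panel.get("main")                   # only the 'main' panel
panel.get("main.options.title")     # single option -> normalized current value
panel.get(mode="export")            # flat {option_id: current_value} mapping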
- self.filter_key = "" - self._get_config_panel() - for panel, section, option in self._iterate(): - if option["type"] == "button": - key = f"{panel['id']}.{section['id']}.{option['id']}" - actions[key] = _value_for_locale(option["ask"]) - - return actions - - def run_action(self, action=None, args=None, args_file=None, operation_logger=None): - # - # FIXME : this stuff looks a lot like set() ... - # - - self.filter_key = ".".join(action.split(".")[:2]) - action_id = action.split(".")[2] - - # Read config panel toml - self._get_config_panel() - - # FIXME: should also check that there's indeed a key called action - if not self.config: - raise YunohostValidationError(f"No action named {action}", raw_msg=True) - - # Import and parse pre-answered options - logger.debug("Import and parse pre-answered options") - self._parse_pre_answered(args, None, args_file) - - # Read or get values and hydrate the config - self._load_current_values() - self._hydrate() - Question.operation_logger = operation_logger - self._ask(action=action_id) - - # FIXME: here, we could want to check constrains on - # the action's visibility / requirements wrt to the answer to questions ... - - if operation_logger: - operation_logger.start() - - try: - self._run_action(action_id) - except YunohostError: - raise - # Script got manually interrupted ... - # N.B. : KeyboardInterrupt does not inherit from Exception - except (KeyboardInterrupt, EOFError): - error = m18n.n("operation_interrupted") - logger.error(m18n.n("config_action_failed", action=action, error=error)) - raise - # Something wrong happened in Yunohost's code (most probably hook_exec) - except Exception: - import traceback - - error = m18n.n("unexpected_error", error="\n" + traceback.format_exc()) - logger.error(m18n.n("config_action_failed", action=action, error=error)) - raise - finally: - # Delete files uploaded from API - # FIXME : this is currently done in the context of config panels, - # but could also happen in the context of app install ... (or anywhere else - # where we may parse args etc...) - FileQuestion.clean_upload_dirs() - - # FIXME: i18n - logger.success(f"Action {action_id} successful") - operation_logger.success() - - def set( - self, key=None, value=None, args=None, args_file=None, operation_logger=None - ): - self.filter_key = key or "" - - # Read config panel toml - self._get_config_panel() - - if not self.config: - raise YunohostValidationError("config_no_panel") - - if (args is not None or args_file is not None) and value is not None: - raise YunohostValidationError( - "You should either provide a value, or a serie of args/args_file, but not both at the same time", - raw_msg=True, - ) - - if self.filter_key.count(".") != 2 and value is not None: - raise YunohostValidationError("config_cant_set_value_on_section") - - # Import and parse pre-answered options - logger.debug("Import and parse pre-answered options") - self._parse_pre_answered(args, value, args_file) - - # Read or get values and hydrate the config - self._load_current_values() - self._hydrate() - Question.operation_logger = operation_logger - self._ask() - - if operation_logger: - operation_logger.start() - - try: - self._apply() - except YunohostError: - raise - # Script got manually interrupted ... - # N.B. 
: KeyboardInterrupt does not inherit from Exception - except (KeyboardInterrupt, EOFError): - error = m18n.n("operation_interrupted") - logger.error(m18n.n("config_apply_failed", error=error)) - raise - # Something wrong happened in Yunohost's code (most probably hook_exec) - except Exception: - import traceback - - error = m18n.n("unexpected_error", error="\n" + traceback.format_exc()) - logger.error(m18n.n("config_apply_failed", error=error)) - raise - finally: - # Delete files uploaded from API - # FIXME : this is currently done in the context of config panels, - # but could also happen in the context of app install ... (or anywhere else - # where we may parse args etc...) - FileQuestion.clean_upload_dirs() - - self._reload_services() - - logger.success("Config updated as expected") - operation_logger.success() - - def _get_toml(self): - return read_toml(self.config_path) - - def _get_config_panel(self): - # Split filter_key - filter_key = self.filter_key.split(".") if self.filter_key != "" else [] - if len(filter_key) > 3: - raise YunohostError( - f"The filter key {filter_key} has too many sub-levels, the max is 3.", - raw_msg=True, - ) - - if not os.path.exists(self.config_path): - logger.debug(f"Config panel {self.config_path} doesn't exists") - return None - - toml_config_panel = self._get_toml() - - # Check TOML config panel is in a supported version - if float(toml_config_panel["version"]) < CONFIG_PANEL_VERSION_SUPPORTED: - logger.error( - f"Config panels version {toml_config_panel['version']} are not supported" - ) - return None - - # Transform toml format into internal format - format_description = { - "root": { - "properties": ["version", "i18n"], - "defaults": {"version": 1.0}, - }, - "panels": { - "properties": ["name", "services", "actions", "help"], - "defaults": { - "services": [], - "actions": {"apply": {"en": "Apply"}}, - }, - }, - "sections": { - "properties": ["name", "services", "optional", "help", "visible"], - "defaults": { - "name": "", - "services": [], - "optional": True, - "is_action_section": False, - }, - }, - "options": { - "properties": [ - "ask", - "type", - "bind", - "help", - "example", - "default", - "style", - "icon", - "placeholder", - "visible", - "optional", - "choices", - "yes", - "no", - "pattern", - "limit", - "min", - "max", - "step", - "accept", - "redact", - "filter", - "readonly", - "enabled", - # "confirm", # TODO: to ask confirmation before running an action - ], - "defaults": {}, - }, - } - - def _build_internal_config_panel(raw_infos, level): - """Convert TOML in internal format ('full' mode used by webadmin) - Here are some properties of 1.0 config panel in toml: - - node properties and node children are mixed, - - text are in english only - - some properties have default values - This function detects all children nodes and put them in a list - """ - - defaults = format_description[level]["defaults"] - properties = format_description[level]["properties"] - - # Start building the ouput (merging the raw infos + defaults) - out = {key: raw_infos.get(key, value) for key, value in defaults.items()} - - # Now fill the sublevels (+ apply filter_key) - i = list(format_description).index(level) - sublevel = list(format_description)[i + 1] if level != "options" else None - search_key = filter_key[i] if len(filter_key) > i else False - - for key, value in raw_infos.items(): - # Key/value are a child node - if ( - isinstance(value, OrderedDict) - and key not in properties - and sublevel - ): - # We exclude all nodes not referenced by the filter_key 
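To make the TOML-to-internal conversion above more concrete, here is a minimal v1.0 config panel in TOML, parsed with only the standard library (Python 3.11+); the panel, section and option names are invented.

import tomllib

toml_src = """
version = "1.0"

[main]
name = "Main"

    [main.options]
    name = "Options"

        [main.options.title]
        ask = "Site title"
        type = "string"
        default = "My site"
"""

raw = tomllib.loads(toml_src)
# _build_internal_config_panel() turns this nesting into lists of dicts, roughly:
# {"panels": [{"id": "main", "sections": [{"id": "options",
#              "options": [{"id": "title", "type": "string", ...}]}]}]}
print(raw["main"]["options"]["title"]["default"])  # My site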
- if search_key and key != search_key: - continue - subnode = _build_internal_config_panel(value, sublevel) - subnode["id"] = key - if level == "root": - subnode.setdefault("name", {"en": key.capitalize()}) - elif level == "sections": - subnode["name"] = key # legacy - subnode.setdefault("optional", raw_infos.get("optional", True)) - # If this section contains at least one button, it becomes an "action" section - if subnode.get("type") == "button": - out["is_action_section"] = True - out.setdefault(sublevel, []).append(subnode) - # Key/value are a property - else: - if key not in properties: - logger.warning(f"Unknown key '{key}' found in config panel") - # Todo search all i18n keys - out[key] = ( - value if key not in ["ask", "help", "name"] else {"en": value} - ) - return out - - self.config = _build_internal_config_panel(toml_config_panel, "root") - - try: - self.config["panels"][0]["sections"][0]["options"][0] - except (KeyError, IndexError): - raise YunohostValidationError( - "config_unknown_filter_key", filter_key=self.filter_key - ) - - # List forbidden keywords from helpers and sections toml (to avoid conflict) - forbidden_keywords = [ - "old", - "app", - "changed", - "file_hash", - "binds", - "types", - "formats", - "getter", - "setter", - "short_setting", - "type", - "bind", - "nothing_changed", - "changes_validated", - "result", - "max_progression", - ] - forbidden_keywords += format_description["sections"] - forbidden_readonly_types = ["password", "app", "domain", "user", "file"] - - for _, _, option in self._iterate(): - if option["id"] in forbidden_keywords: - raise YunohostError("config_forbidden_keyword", keyword=option["id"]) - if ( - option.get("readonly", False) - and option.get("type", "string") in forbidden_readonly_types - ): - raise YunohostError( - "config_forbidden_readonly_type", - type=option["type"], - id=option["id"], - ) - - return self.config - - def _hydrate(self): - # Hydrating config panel with current value - for _, section, option in self._iterate(): - if option["id"] not in self.values: - allowed_empty_types = [ - "alert", - "display_text", - "markdown", - "file", - "button", - ] - - if section["is_action_section"] and option.get("default") is not None: - self.values[option["id"]] = option["default"] - elif ( - option["type"] in allowed_empty_types - or option.get("bind") == "null" - ): - continue - else: - raise YunohostError( - f"Config panel question '{option['id']}' should be initialized with a value during install or upgrade.", - raw_msg=True, - ) - value = self.values[option["name"]] - - # Allow to use value instead of current_value in app config script. - # e.g. apps may write `echo 'value: "foobar"'` in the config file (which is more intuitive that `echo 'current_value: "foobar"'` - # For example hotspot used it... - # See https://github.com/YunoHost/yunohost/pull/1546 - if ( - isinstance(value, dict) - and "value" in value - and "current_value" not in value - ): - value["current_value"] = value["value"] - - # In general, the value is just a simple value. 
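The "diff" save mode handled in _apply() only persists values that differ from the declared defaults. A tiny self-contained example of that filtering, with made-up settings:

defaults = {"title": "My site", "theme": "light", "max_items": 10}
future_values = {"title": "My site", "theme": "dark", "max_items": 10}

# Same comprehension as in ConfigPanel._apply() when save_mode == "diff"
values_to_save = {k: v for k, v in future_values.items() if defaults.get(k) != v}
print(values_to_save)  # {'theme': 'dark'}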
- # Sometimes it could be a dict used to overwrite the option itself - value = value if isinstance(value, dict) else {"current_value": value} - option.update(value) - - self.values[option["id"]] = value.get("current_value") - - return self.values - - def _ask(self, action=None): - logger.debug("Ask unanswered question and prevalidate data") - - if "i18n" in self.config: - for panel, section, option in self._iterate(): - if "ask" not in option: - option["ask"] = m18n.n(self.config["i18n"] + "_" + option["id"]) - # auto add i18n help text if present in locales - if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + "_help"): - option["help"] = m18n.n( - self.config["i18n"] + "_" + option["id"] + "_help" - ) - - def display_header(message): - """CLI panel/section header display""" - if Moulinette.interface.type == "cli" and self.filter_key.count(".") < 2: - Moulinette.display(colorize(message, "purple")) - - for panel, section, obj in self._iterate(["panel", "section"]): - if ( - section - and section.get("visible") - and not evaluate_simple_js_expression( - section["visible"], context=self.future_values - ) - ): - continue - - # Ugly hack to skip action section ... except when when explicitly running actions - if not action: - if section and section["is_action_section"]: - continue - - if panel == obj: - name = _value_for_locale(panel["name"]) - display_header(f"\n{'='*40}\n>>>> {name}\n{'='*40}") - else: - name = _value_for_locale(section["name"]) - if name: - display_header(f"\n# {name}") - elif section: - # filter action section options in case of multiple buttons - section["options"] = [ - option - for option in section["options"] - if option.get("type", "string") != "button" - or option["id"] == action - ] - - if panel == obj: - continue - - # Check and ask unanswered questions - prefilled_answers = self.args.copy() - prefilled_answers.update(self.new_values) - - questions = ask_questions_and_parse_answers( - {question["name"]: question for question in section["options"]}, - prefilled_answers=prefilled_answers, - current_values=self.values, - hooks=self.hooks, - ) - self.new_values.update( - { - question.name: question.value - for question in questions - if question.value is not None - } - ) - - def _get_default_values(self): - return { - option["id"]: option["default"] - for _, _, option in self._iterate() - if "default" in option - } - - @property - def future_values(self): - return {**self.values, **self.new_values} - - def __getattr__(self, name): - if "new_values" in self.__dict__ and name in self.new_values: - return self.new_values[name] - - if "values" in self.__dict__ and name in self.values: - return self.values[name] - - return self.__dict__[name] - - def _load_current_values(self): - """ - Retrieve entries in YAML file - And set default values if needed - """ - - # Inject defaults if needed (using the magic .update() ;)) - self.values = self._get_default_values() - - # Retrieve entries in the YAML - if os.path.exists(self.save_path) and os.path.isfile(self.save_path): - self.values.update(read_yaml(self.save_path) or {}) - - def _parse_pre_answered(self, args, value, args_file): - args = urllib.parse.parse_qs(args or "", keep_blank_values=True) - self.args = {key: ",".join(value_) for key, value_ in args.items()} - - if args_file: - # Import YAML / JSON file but keep --args values - self.args = {**read_yaml(args_file), **self.args} - - if value is not None: - self.args = {self.filter_key.split(".")[-1]: value} - - def _apply(self): - logger.info("Saving the new 
configuration...") - dir_path = os.path.dirname(os.path.realpath(self.save_path)) - if not os.path.exists(dir_path): - mkdir(dir_path, mode=0o700) - - values_to_save = self.future_values - if self.save_mode == "diff": - defaults = self._get_default_values() - values_to_save = { - k: v for k, v in values_to_save.items() if defaults.get(k) != v - } - - # Save the settings to the .yaml file - write_to_yaml(self.save_path, values_to_save) - - def _reload_services(self): - from yunohost.service import service_reload_or_restart - - services_to_reload = set() - for panel, section, obj in self._iterate(["panel", "section", "option"]): - services_to_reload |= set(obj.get("services", [])) - - services_to_reload = list(services_to_reload) - services_to_reload.sort(key="nginx".__eq__) - if services_to_reload: - logger.info("Reloading services...") - for service in services_to_reload: - if hasattr(self, "entity"): - service = service.replace("__APP__", self.entity) - service_reload_or_restart(service) - - def _iterate(self, trigger=["option"]): - for panel in self.config.get("panels", []): - if "panel" in trigger: - yield (panel, None, panel) - for section in panel.get("sections", []): - if "section" in trigger: - yield (panel, section, section) - if "option" in trigger: - for option in section.get("options", []): - yield (panel, section, option) - - -class Question: +class BaseOption: hide_user_input_in_prompt = False pattern: Optional[Dict] = None @@ -856,7 +214,9 @@ class Question: # Don't restrict choices if there's none specified self.choices = question.get("choices", None) self.pattern = question.get("pattern", self.pattern) - self.ask = question.get("ask", {"en": self.name}) + self.ask = question.get("ask", self.name) + if not isinstance(self.ask, dict): + self.ask = {"en": self.ask} self.help = question.get("help") self.redact = question.get("redact", False) self.filter = question.get("filter", None) @@ -881,22 +241,6 @@ class Question: value = value.strip() return value - def _prompt(self, text): - prefill = "" - if self.current_value is not None: - prefill = self.humanize(self.current_value, self) - elif self.default is not None: - prefill = self.humanize(self.default, self) - self.value = Moulinette.prompt( - message=text, - is_password=self.hide_user_input_in_prompt, - confirm=False, - prefill=prefill, - is_multiline=(self.type == "text"), - autocomplete=self.choices or [], - help=_value_for_locale(self.help), - ) - def ask_if_needed(self): if self.visible and not evaluate_simple_js_expression( self.visible, context=self.context @@ -929,7 +273,7 @@ class Question: try: # Normalize and validate self.value = self.normalize(self.value, self) - self._prevalidate() + self._value_pre_validator() except YunohostValidationError as e: # If in interactive cli, re-ask the current question if i < 4 and Moulinette.interface.type == "cli" and os.isatty(1): @@ -942,7 +286,7 @@ class Question: break - self.value = self.values[self.name] = self._post_parse_value() + self.value = self.values[self.name] = self._value_post_validator() # Search for post actions in hooks post_hook = f"post_ask__{self.name}" @@ -951,25 +295,21 @@ class Question: return self.values - def _prevalidate(self): - if self.value in [None, ""] and not self.optional: - raise YunohostValidationError("app_argument_required", name=self.name) - - # we have an answer, do some post checks - if self.value not in [None, ""]: - if self.choices and self.value not in self.choices: - raise YunohostValidationError( - "app_argument_choice_invalid", - 
name=self.name, - value=self.value, - choices=", ".join(self.choices), - ) - if self.pattern and not re.match(self.pattern["regexp"], str(self.value)): - raise YunohostValidationError( - self.pattern["error"], - name=self.name, - value=self.value, - ) + def _prompt(self, text): + prefill = "" + if self.current_value is not None: + prefill = self.humanize(self.current_value, self) + elif self.default is not None: + prefill = self.humanize(self.default, self) + self.value = Moulinette.prompt( + message=text, + is_password=self.hide_user_input_in_prompt, + confirm=False, + prefill=prefill, + is_multiline=(self.type == "text"), + autocomplete=self.choices or [], + help=_value_for_locale(self.help), + ) def _format_text_for_user_input_in_cli(self): text_for_user_input_in_cli = _value_for_locale(self.ask) @@ -1003,7 +343,27 @@ class Question: return text_for_user_input_in_cli - def _post_parse_value(self): + def _value_pre_validator(self): + if self.value in [None, ""] and not self.optional: + raise YunohostValidationError("app_argument_required", name=self.name) + + # we have an answer, do some post checks + if self.value not in [None, ""]: + if self.choices and self.value not in self.choices: + raise YunohostValidationError( + "app_argument_choice_invalid", + name=self.name, + value=self.value, + choices=", ".join(str(choice) for choice in self.choices), + ) + if self.pattern and not re.match(self.pattern["regexp"], str(self.value)): + raise YunohostValidationError( + self.pattern["error"], + name=self.name, + value=self.value, + ) + + def _value_post_validator(self): if not self.redact: return self.value @@ -1027,93 +387,66 @@ class Question: return self.value -class StringQuestion(Question): +# ╭───────────────────────────────────────────────────────╮ +# │ DISPLAY OPTIONS │ +# ╰───────────────────────────────────────────────────────╯ + + +class DisplayTextOption(BaseOption): + argument_type = "display_text" + + def __init__( + self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} + ): + super().__init__(question, context, hooks) + + self.optional = True + self.readonly = True + self.style = question.get( + "style", "info" if question["type"] == "alert" else "" + ) + + def _format_text_for_user_input_in_cli(self): + text = _value_for_locale(self.ask) + + if self.style in ["success", "info", "warning", "danger"]: + color = { + "success": "green", + "info": "cyan", + "warning": "yellow", + "danger": "red", + } + prompt = m18n.g(self.style) if self.style != "danger" else m18n.n("danger") + return colorize(prompt, color[self.style]) + f" {text}" + else: + return text + + +class ButtonOption(BaseOption): + argument_type = "button" + enabled = None + + def __init__( + self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} + ): + super().__init__(question, context, hooks) + self.enabled = question.get("enabled", None) + + +# ╭───────────────────────────────────────────────────────╮ +# │ INPUT OPTIONS │ +# ╰───────────────────────────────────────────────────────╯ + + +# ─ STRINGS ─────────────────────────────────────────────── + + +class StringOption(BaseOption): argument_type = "string" default_value = "" -class EmailQuestion(StringQuestion): - pattern = { - "regexp": r"^.+@.+", - "error": "config_validate_email", # i18n: config_validate_email - } - - -class URLQuestion(StringQuestion): - pattern = { - "regexp": r"^https?://.*$", - "error": "config_validate_url", # i18n: config_validate_url - } - - -class DateQuestion(StringQuestion): - pattern = 
{ - "regexp": r"^\d{4}-\d\d-\d\d$", - "error": "config_validate_date", # i18n: config_validate_date - } - - def _prevalidate(self): - from datetime import datetime - - super()._prevalidate() - - if self.value not in [None, ""]: - try: - datetime.strptime(self.value, "%Y-%m-%d") - except ValueError: - raise YunohostValidationError("config_validate_date") - - -class TimeQuestion(StringQuestion): - pattern = { - "regexp": r"^(?:\d|[01]\d|2[0-3]):[0-5]\d$", - "error": "config_validate_time", # i18n: config_validate_time - } - - -class ColorQuestion(StringQuestion): - pattern = { - "regexp": r"^#[ABCDEFabcdef\d]{3,6}$", - "error": "config_validate_color", # i18n: config_validate_color - } - - -class TagsQuestion(Question): - argument_type = "tags" - default_value = "" - - @staticmethod - def humanize(value, option={}): - if isinstance(value, list): - return ",".join(value) - return value - - @staticmethod - def normalize(value, option={}): - if isinstance(value, list): - return ",".join(value) - if isinstance(value, str): - value = value.strip() - return value - - def _prevalidate(self): - values = self.value - if isinstance(values, str): - values = values.split(",") - elif values is None: - values = [] - for value in values: - self.value = value - super()._prevalidate() - self.value = values - - def _post_parse_value(self): - if isinstance(self.value, list): - self.value = ",".join(self.value) - return super()._post_parse_value() - - -class PasswordQuestion(Question): +class PasswordOption(BaseOption): hide_user_input_in_prompt = True argument_type = "password" default_value = "" @@ -1129,8 +462,8 @@ class PasswordQuestion(Question): "app_argument_password_no_default", name=self.name ) - def _prevalidate(self): - super()._prevalidate() + def _value_pre_validator(self): + super()._value_pre_validator() if self.value not in [None, ""]: if any(char in self.value for char in self.forbidden_chars): @@ -1144,44 +477,95 @@ class PasswordQuestion(Question): assert_password_is_strong_enough("user", self.value) -class PathQuestion(Question): - argument_type = "path" - default_value = "" +class ColorOption(StringOption): + pattern = { + "regexp": r"^#[ABCDEFabcdef\d]{3,6}$", + "error": "config_validate_color", # i18n: config_validate_color + } + + +# ─ NUMERIC ─────────────────────────────────────────────── + + +class NumberOption(BaseOption): + argument_type = "number" + default_value = None + + def __init__( + self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} + ): + super().__init__(question, context, hooks) + self.min = question.get("min", None) + self.max = question.get("max", None) + self.step = question.get("step", None) @staticmethod def normalize(value, option={}): - option = option.__dict__ if isinstance(option, Question) else option + if isinstance(value, int): + return value - if not value.strip(): - if option.get("optional"): - return "" - # Hmpf here we could just have a "else" case - # but we also want PathQuestion.normalize("") to return "/" - # (i.e. 
if no option is provided, hence .get("optional") is None - elif option.get("optional") is False: - raise YunohostValidationError( - "app_argument_invalid", - name=option.get("name"), - error="Question is mandatory", - ) + if isinstance(value, str): + value = value.strip() - return "/" + value.strip().strip(" /") + if isinstance(value, str) and value.isdigit(): + return int(value) + + if value in [None, ""]: + return None + + option = option.__dict__ if isinstance(option, BaseOption) else option + raise YunohostValidationError( + "app_argument_invalid", + name=option.get("name"), + error=m18n.n("invalid_number"), + ) + + def _value_pre_validator(self): + super()._value_pre_validator() + if self.value in [None, ""]: + return + + if self.min is not None and int(self.value) < self.min: + raise YunohostValidationError( + "app_argument_invalid", + name=self.name, + error=m18n.n("invalid_number_min", min=self.min), + ) + + if self.max is not None and int(self.value) > self.max: + raise YunohostValidationError( + "app_argument_invalid", + name=self.name, + error=m18n.n("invalid_number_max", max=self.max), + ) -class BooleanQuestion(Question): +# ─ BOOLEAN ─────────────────────────────────────────────── + + +class BooleanOption(BaseOption): argument_type = "boolean" default_value = 0 yes_answers = ["1", "yes", "y", "true", "t", "on"] no_answers = ["0", "no", "n", "false", "f", "off"] + def __init__( + self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} + ): + super().__init__(question, context, hooks) + self.yes = question.get("yes", 1) + self.no = question.get("no", 0) + if self.default is None: + self.default = self.no + @staticmethod def humanize(value, option={}): - option = option.__dict__ if isinstance(option, Question) else option + option = option.__dict__ if isinstance(option, BaseOption) else option yes = option.get("yes", 1) no = option.get("no", 0) - value = BooleanQuestion.normalize(value, option) + value = BooleanOption.normalize(value, option) if value == yes: return "yes" @@ -1199,7 +583,7 @@ class BooleanQuestion(Question): @staticmethod def normalize(value, option={}): - option = option.__dict__ if isinstance(option, Question) else option + option = option.__dict__ if isinstance(option, BaseOption) else option if isinstance(value, str): value = value.strip() @@ -1207,8 +591,8 @@ class BooleanQuestion(Question): technical_yes = option.get("yes", 1) technical_no = option.get("no", 0) - no_answers = BooleanQuestion.no_answers - yes_answers = BooleanQuestion.yes_answers + no_answers = BooleanOption.no_answers + yes_answers = BooleanOption.yes_answers assert ( str(technical_yes).lower() not in no_answers @@ -1237,14 +621,8 @@ class BooleanQuestion(Question): choices="yes/no", ) - def __init__( - self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} - ): - super().__init__(question, context, hooks) - self.yes = question.get("yes", 1) - self.no = question.get("no", 0) - if self.default is None: - self.default = self.no + def get(self, key, default=None): + return getattr(self, key, default) def _format_text_for_user_input_in_cli(self): text_for_user_input_in_cli = super()._format_text_for_user_input_in_cli() @@ -1254,11 +632,205 @@ class BooleanQuestion(Question): return text_for_user_input_in_cli - def get(self, key, default=None): - return getattr(self, key, default) + +# ─ TIME ────────────────────────────────────────────────── -class DomainQuestion(Question): +class DateOption(StringOption): + pattern = { + "regexp": 
r"^\d{4}-\d\d-\d\d$", + "error": "config_validate_date", # i18n: config_validate_date + } + + def _value_pre_validator(self): + from datetime import datetime + + super()._value_pre_validator() + + if self.value not in [None, ""]: + try: + datetime.strptime(self.value, "%Y-%m-%d") + except ValueError: + raise YunohostValidationError("config_validate_date") + + +class TimeOption(StringOption): + pattern = { + "regexp": r"^(?:\d|[01]\d|2[0-3]):[0-5]\d$", + "error": "config_validate_time", # i18n: config_validate_time + } + + +# ─ LOCATIONS ───────────────────────────────────────────── + + +class EmailOption(StringOption): + pattern = { + "regexp": r"^.+@.+", + "error": "config_validate_email", # i18n: config_validate_email + } + + +class WebPathOption(BaseOption): + argument_type = "path" + default_value = "" + + @staticmethod + def normalize(value, option={}): + option = option.__dict__ if isinstance(option, BaseOption) else option + + if not isinstance(value, str): + raise YunohostValidationError( + "app_argument_invalid", + name=option.get("name"), + error="Argument for path should be a string.", + ) + + if not value.strip(): + if option.get("optional"): + return "" + # Hmpf here we could just have a "else" case + # but we also want WebPathOption.normalize("") to return "/" + # (i.e. if no option is provided, hence .get("optional") is None + elif option.get("optional") is False: + raise YunohostValidationError( + "app_argument_invalid", + name=option.get("name"), + error="Option is mandatory", + ) + + return "/" + value.strip().strip(" /") + + +class URLOption(StringOption): + pattern = { + "regexp": r"^https?://.*$", + "error": "config_validate_url", # i18n: config_validate_url + } + + +# ─ FILE ────────────────────────────────────────────────── + + +class FileOption(BaseOption): + argument_type = "file" + upload_dirs: List[str] = [] + + def __init__( + self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} + ): + super().__init__(question, context, hooks) + self.accept = question.get("accept", "") + + @classmethod + def clean_upload_dirs(cls): + # Delete files uploaded from API + for upload_dir in cls.upload_dirs: + if os.path.exists(upload_dir): + shutil.rmtree(upload_dir) + + def _value_pre_validator(self): + if self.value is None: + self.value = self.current_value + + super()._value_pre_validator() + + # Validation should have already failed if required + if self.value in [None, ""]: + return self.value + + if Moulinette.interface.type != "api": + if not os.path.exists(str(self.value)) or not os.path.isfile( + str(self.value) + ): + raise YunohostValidationError( + "app_argument_invalid", + name=self.name, + error=m18n.n("file_does_not_exist", path=str(self.value)), + ) + + def _value_post_validator(self): + from base64 import b64decode + + if not self.value: + return "" + + upload_dir = tempfile.mkdtemp(prefix="ynh_filequestion_") + _, file_path = tempfile.mkstemp(dir=upload_dir) + + FileOption.upload_dirs += [upload_dir] + + logger.debug(f"Saving file {self.name} for file question into {file_path}") + + def is_file_path(s): + return isinstance(s, str) and s.startswith("/") and os.path.exists(s) + + if Moulinette.interface.type != "api" or is_file_path(self.value): + content = read_file(str(self.value), file_mode="rb") + else: + content = b64decode(self.value) + + write_to_file(file_path, content, file_mode="wb") + + self.value = file_path + + return self.value + + +# ─ CHOICES ─────────────────────────────────────────────── + + +class 
TagsOption(BaseOption): + argument_type = "tags" + default_value = "" + + @staticmethod + def humanize(value, option={}): + if isinstance(value, list): + return ",".join(str(v) for v in value) + return value + + @staticmethod + def normalize(value, option={}): + if isinstance(value, list): + return ",".join(str(v) for v in value) + if isinstance(value, str): + value = value.strip() + return value + + def _value_pre_validator(self): + values = self.value + if isinstance(values, str): + values = values.split(",") + elif values is None: + values = [] + + if not isinstance(values, list): + if self.choices: + raise YunohostValidationError( + "app_argument_choice_invalid", + name=self.name, + value=self.value, + choices=", ".join(str(choice) for choice in self.choices), + ) + raise YunohostValidationError( + "app_argument_invalid", + name=self.name, + error=f"'{str(self.value)}' is not a list", + ) + + for value in values: + self.value = value + super()._value_pre_validator() + self.value = values + + def _value_post_validator(self): + if isinstance(self.value, list): + self.value = ",".join(self.value) + return super()._value_post_validator() + + +class DomainOption(BaseOption): argument_type = "domain" def __init__( @@ -1289,7 +861,7 @@ class DomainQuestion(Question): return value -class AppQuestion(Question): +class AppOption(BaseOption): argument_type = "app" def __init__( @@ -1316,7 +888,7 @@ class AppQuestion(Question): self.choices.update({app["id"]: _app_display(app) for app in apps}) -class UserQuestion(Question): +class UserOption(BaseOption): argument_type = "user" def __init__( @@ -1349,7 +921,7 @@ class UserQuestion(Question): break -class GroupQuestion(Question): +class GroupOption(BaseOption): argument_type = "group" def __init__( @@ -1375,192 +947,46 @@ class GroupQuestion(Question): self.default = "all_users" -class NumberQuestion(Question): - argument_type = "number" - default_value = None - - def __init__( - self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} - ): - super().__init__(question, context, hooks) - self.min = question.get("min", None) - self.max = question.get("max", None) - self.step = question.get("step", None) - - @staticmethod - def normalize(value, option={}): - if isinstance(value, int): - return value - - if isinstance(value, str): - value = value.strip() - - if isinstance(value, str) and value.isdigit(): - return int(value) - - if value in [None, ""]: - return value - - option = option.__dict__ if isinstance(option, Question) else option - raise YunohostValidationError( - "app_argument_invalid", - name=option.get("name"), - error=m18n.n("invalid_number"), - ) - - def _prevalidate(self): - super()._prevalidate() - if self.value in [None, ""]: - return - - if self.min is not None and int(self.value) < self.min: - raise YunohostValidationError( - "app_argument_invalid", - name=self.name, - error=m18n.n("invalid_number_min", min=self.min), - ) - - if self.max is not None and int(self.value) > self.max: - raise YunohostValidationError( - "app_argument_invalid", - name=self.name, - error=m18n.n("invalid_number_max", max=self.max), - ) - - -class DisplayTextQuestion(Question): - argument_type = "display_text" - - def __init__( - self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} - ): - super().__init__(question, context, hooks) - - self.optional = True - self.readonly = True - self.style = question.get( - "style", "info" if question["type"] == "alert" else "" - ) - - def 
_format_text_for_user_input_in_cli(self): - text = _value_for_locale(self.ask) - - if self.style in ["success", "info", "warning", "danger"]: - color = { - "success": "green", - "info": "cyan", - "warning": "yellow", - "danger": "red", - } - prompt = m18n.g(self.style) if self.style != "danger" else m18n.n("danger") - return colorize(prompt, color[self.style]) + f" {text}" - else: - return text - - -class FileQuestion(Question): - argument_type = "file" - upload_dirs: List[str] = [] - - @classmethod - def clean_upload_dirs(cls): - # Delete files uploaded from API - for upload_dir in cls.upload_dirs: - if os.path.exists(upload_dir): - shutil.rmtree(upload_dir) - - def __init__( - self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} - ): - super().__init__(question, context, hooks) - self.accept = question.get("accept", "") - - def _prevalidate(self): - if self.value is None: - self.value = self.current_value - - super()._prevalidate() - - if Moulinette.interface.type != "api": - if not self.value or not os.path.exists(str(self.value)): - raise YunohostValidationError( - "app_argument_invalid", - name=self.name, - error=m18n.n("file_does_not_exist", path=str(self.value)), - ) - - def _post_parse_value(self): - from base64 import b64decode - - if not self.value: - return self.value - - upload_dir = tempfile.mkdtemp(prefix="ynh_filequestion_") - _, file_path = tempfile.mkstemp(dir=upload_dir) - - FileQuestion.upload_dirs += [upload_dir] - - logger.debug(f"Saving file {self.name} for file question into {file_path}") - - def is_file_path(s): - return isinstance(s, str) and s.startswith("/") and os.path.exists(s) - - if Moulinette.interface.type != "api" or is_file_path(self.value): - content = read_file(str(self.value), file_mode="rb") - else: - content = b64decode(self.value) - - write_to_file(file_path, content, file_mode="wb") - - self.value = file_path - - return self.value - - -class ButtonQuestion(Question): - argument_type = "button" - enabled = None - - def __init__( - self, question, context: Mapping[str, Any] = {}, hooks: Dict[str, Callable] = {} - ): - super().__init__(question, context, hooks) - self.enabled = question.get("enabled", None) - - -ARGUMENTS_TYPE_PARSERS = { - "string": StringQuestion, - "text": StringQuestion, - "select": StringQuestion, - "tags": TagsQuestion, - "email": EmailQuestion, - "url": URLQuestion, - "date": DateQuestion, - "time": TimeQuestion, - "color": ColorQuestion, - "password": PasswordQuestion, - "path": PathQuestion, - "boolean": BooleanQuestion, - "domain": DomainQuestion, - "user": UserQuestion, - "group": GroupQuestion, - "number": NumberQuestion, - "range": NumberQuestion, - "display_text": DisplayTextQuestion, - "alert": DisplayTextQuestion, - "markdown": DisplayTextQuestion, - "file": FileQuestion, - "app": AppQuestion, - "button": ButtonQuestion, +OPTIONS = { + "display_text": DisplayTextOption, + "markdown": DisplayTextOption, + "alert": DisplayTextOption, + "button": ButtonOption, + "string": StringOption, + "text": StringOption, + "password": PasswordOption, + "color": ColorOption, + "number": NumberOption, + "range": NumberOption, + "boolean": BooleanOption, + "date": DateOption, + "time": TimeOption, + "email": EmailOption, + "path": WebPathOption, + "url": URLOption, + "file": FileOption, + "select": StringOption, + "tags": TagsOption, + "domain": DomainOption, + "app": AppOption, + "user": UserOption, + "group": GroupOption, } +# ╭───────────────────────────────────────────────────────╮ +# │ ╷ ╷╶┬╴╶┬╴╷ ╭─╴ │ 
+# │ │ │ │ │ │ ╰─╮ │ +# │ ╰─╯ ╵ ╶┴╴╰─╴╶─╯ │ +# ╰───────────────────────────────────────────────────────╯ + + def ask_questions_and_parse_answers( raw_questions: Dict, prefilled_answers: Union[str, Mapping[str, Any]] = {}, current_values: Mapping[str, Any] = {}, hooks: Dict[str, Callable[[], None]] = {}, -) -> List[Question]: +) -> List[BaseOption]: """Parse arguments store in either manifest.json or actions.json or from a config panel against the user answers when they are present. @@ -1591,7 +1017,7 @@ def ask_questions_and_parse_answers( for name, raw_question in raw_questions.items(): raw_question["name"] = name - question_class = ARGUMENTS_TYPE_PARSERS[raw_question.get("type", "string")] + question_class = OPTIONS[raw_question.get("type", "string")] raw_question["value"] = answers.get(name) question = question_class(raw_question, context=context, hooks=hooks) if question.type == "button": @@ -1618,9 +1044,7 @@ def hydrate_questions_with_choices(raw_questions: List) -> List: out = [] for raw_question in raw_questions: - question = ARGUMENTS_TYPE_PARSERS[raw_question.get("type", "string")]( - raw_question - ) + question = OPTIONS[raw_question.get("type", "string")](raw_question) if question.choices: raw_question["choices"] = question.choices raw_question["default"] = question.default diff --git a/src/utils/legacy.py b/src/utils/legacy.py index 82507d64d..226a8c929 100644 --- a/src/utils/legacy.py +++ b/src/utils/legacy.py @@ -163,32 +163,45 @@ def translate_legacy_default_app_in_ssowant_conf_json_persistent(): LEGACY_PHP_VERSION_REPLACEMENTS = [ - ("/etc/php5", "/etc/php/7.4"), - ("/etc/php/7.0", "/etc/php/7.4"), - ("/etc/php/7.3", "/etc/php/7.4"), - ("/var/run/php5-fpm", "/var/run/php/php7.4-fpm"), - ("/var/run/php/php7.0-fpm", "/var/run/php/php7.4-fpm"), - ("/var/run/php/php7.3-fpm", "/var/run/php/php7.4-fpm"), - ("php5", "php7.4"), - ("php7.0", "php7.4"), - ("php7.3", "php7.4"), - ('YNH_PHP_VERSION="7.3"', 'YNH_PHP_VERSION="7.4"'), + ("/etc/php5", "/etc/php/8.2"), + ("/etc/php/7.0", "/etc/php/8.2"), + ("/etc/php/7.3", "/etc/php/8.2"), + ("/etc/php/7.4", "/etc/php/8.2"), + ("/var/run/php5-fpm", "/var/run/php/php8.2-fpm"), + ("/var/run/php/php7.0-fpm", "/var/run/php/php8.2-fpm"), + ("/var/run/php/php7.3-fpm", "/var/run/php/php8.2-fpm"), + ("/var/run/php/php7.4-fpm", "/var/run/php/php8.2-fpm"), + ("php5", "php8.2"), + ("php7.0", "php8.2"), + ("php7.3", "php8.2"), + ("php7.4", "php8.2"), + ('YNH_PHP_VERSION="7.3"', 'YNH_PHP_VERSION="8.2"'), + ('YNH_PHP_VERSION="7.4"', 'YNH_PHP_VERSION="8.2"'), ( 'phpversion="${phpversion:-7.0}"', - 'phpversion="${phpversion:-7.4}"', + 'phpversion="${phpversion:-8.2}"', ), # Many helpers like the composer ones use 7.0 by default ... ( 'phpversion="${phpversion:-7.3}"', + 'phpversion="${phpversion:-8.2}"', + ), # Many helpers like the composer ones use 7.0 by default ... + ( 'phpversion="${phpversion:-7.4}"', + 'phpversion="${phpversion:-8.2}"', ), # Many helpers like the composer ones use 7.0 by default ... 
( '"$phpversion" == "7.0"', - '$(bc <<< "$phpversion >= 7.4") -eq 1', + '$(bc <<< "$phpversion >= 8.2") -eq 1', ), # patch ynh_install_php to refuse installing/removing php <= 7.3 ( '"$phpversion" == "7.3"', - '$(bc <<< "$phpversion >= 7.4") -eq 1', + '$(bc <<< "$phpversion >= 8.2") -eq 1', ), # patch ynh_install_php to refuse installing/removing php <= 7.3 + ( + '"$phpversion" == "7.4"', + '$(bc <<< "$phpversion >= 8.2") -eq 1', + ), # patch ynh_install_php to refuse installing/removing php <= 7.3 + ] @@ -217,15 +230,16 @@ def _patch_legacy_php_versions(app_folder): def _patch_legacy_php_versions_in_settings(app_folder): settings = read_yaml(os.path.join(app_folder, "settings.yml")) - if settings.get("fpm_config_dir") in ["/etc/php/7.0/fpm", "/etc/php/7.3/fpm"]: - settings["fpm_config_dir"] = "/etc/php/7.4/fpm" - if settings.get("fpm_service") in ["php7.0-fpm", "php7.3-fpm"]: - settings["fpm_service"] = "php7.4-fpm" - if settings.get("phpversion") in ["7.0", "7.3"]: - settings["phpversion"] = "7.4" + if settings.get("fpm_config_dir") in ["/etc/php/7.0/fpm", "/etc/php/7.3/fpm", "/etc/php/7.4/fpm"]: + settings["fpm_config_dir"] = "/etc/php/8.2/fpm" + if settings.get("fpm_service") in ["php7.0-fpm", "php7.3-fpm", "php7.4-fpm"]: + settings["fpm_service"] = "php8.2-fpm" + if settings.get("phpversion") in ["7.0", "7.3", "7.4"]: + settings["phpversion"] = "8.2" # We delete these checksums otherwise the file will appear as manually modified list_to_remove = [ + "checksum__etc_php_7.4_fpm_pool", "checksum__etc_php_7.3_fpm_pool", "checksum__etc_php_7.0_fpm_pool", "checksum__etc_nginx_conf.d", diff --git a/src/utils/resources.py b/src/utils/resources.py index cff6c6b19..9891fe9c6 100644 --- a/src/utils/resources.py +++ b/src/utils/resources.py @@ -21,6 +21,7 @@ import copy import shutil import random import tempfile +import subprocess from typing import Dict, Any, List from moulinette import m18n @@ -30,7 +31,7 @@ from moulinette.utils.filesystem import mkdir, chown, chmod, write_to_file from moulinette.utils.filesystem import ( rm, ) - +from yunohost.utils.system import system_arch from yunohost.utils.error import YunohostError, YunohostValidationError logger = getActionLogger("yunohost.app_resources") @@ -258,6 +259,201 @@ ynh_abort_if_errors # print(ret) +class SourcesResource(AppResource): + """ + Declare what are the sources / assets used by this app. Typically, this corresponds to some tarball published by the upstream project, that needs to be downloaded and extracted in the install dir using the ynh_setup_source helper. + + This resource is intended both to declare the assets, which will be parsed by ynh_setup_source during the app script runtime, AND to prefetch and validate the sha256sum of those asset before actually running the script, to be able to report an error early when the asset turns out to not be available for some reason. 
+ + Various options are available to accomodate the behavior according to the asset structure + + ##### Example + + ```toml + [resources.sources] + + [resources.sources.main] + url = "https://github.com/foo/bar/archive/refs/tags/v1.2.3.tar.gz" + sha256 = "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b" + + autoupdate.strategy = "latest_github_tag" + ``` + + Or more complex examples with several element, including one with asset that depends on the arch + + ```toml + [resources.sources] + + [resources.sources.main] + in_subdir = false + amd64.url = "https://github.com/foo/bar/archive/refs/tags/v1.2.3.amd64.tar.gz" + amd64.sha256 = "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b" + i386.url = "https://github.com/foo/bar/archive/refs/tags/v1.2.3.386.tar.gz" + i386.sha256 = "53c234e5e8472b6ac51c1ae1cab3fe06fad053beb8ebfd8977b010655bfdd3c3" + armhf.url = "https://github.com/foo/bar/archive/refs/tags/v1.2.3.arm.tar.gz" + armhf.sha256 = "4355a46b19d348dc2f57c046f8ef63d4538ebb936000f3c9ee954a27460dd865" + + autoupdate.strategy = "latest_github_release" + autoupdate.asset.amd64 = ".*\\.amd64.tar.gz" + autoupdate.asset.i386 = ".*\\.386.tar.gz" + autoupdate.asset.armhf = ".*\\.arm.tar.gz" + + [resources.sources.zblerg] + url = "https://zblerg.com/download/zblerg" + sha256 = "1121cfccd5913f0a63fec40a6ffd44ea64f9dc135c66634ba001d10bcf4302a2" + format = "script" + rename = "zblerg.sh" + + ``` + + ##### Properties (for each source) + + - `prefetch` : `true` (default) or `false`, wether or not to pre-fetch this asset during the provisioning phase of the resource. If several arch-dependent url are provided, YunoHost will only prefetch the one for the current system architecture. + - `url` : the asset's URL + - If the asset's URL depend on the architecture, you may instead provide `amd64.url`, `i386.url`, `armhf.url` and `arm64.url` (depending on what architectures are supported), using the same `dpkg --print-architecture` nomenclature as for the supported architecture key in the manifest + - `sha256` : the asset's sha256sum. This is used both as an integrity check, and as a layer of security to protect against malicious actors which could have injected malicious code inside the asset... + - Same as `url` : if the asset's URL depend on the architecture, you may instead provide `amd64.sha256`, `i386.sha256`, ... + - `format` : The "format" of the asset. It is typically automatically guessed from the extension of the URL (or the mention of "tarball", "zipball" in the URL), but can be set explicitly: + - `tar.gz`, `tar.xz`, `tar.bz2` : will use `tar` to extract the archive + - `zip` : will use `unzip` to extract the archive + - `docker` : useful to extract files from an already-built docker image (instead of rebuilding them locally). Will use `docker-image-extract` + - `whatever`: whatever arbitrary value, not really meaningful except to imply that the file won't be extracted (eg because it's a .deb to be manually installed with dpkg/apt, or a script, or ...) + - `in_subdir`: `true` (default) or `false`, depending on if there's an intermediate subdir in the archive before accessing the actual files. Can also be `N` (an integer) to handle special cases where there's `N` level of subdir to get rid of to actually access the files + - `extract` : `true` or `false`. Defaults to `true` for archives such as `zip`, `tar.gz`, `tar.bz2`, ... Or defaults to `false` when `format` is not something that should be extracted. 
When `extract = false`, the file will only be `mv`ed to the location, possibly renamed using the `rename` value + - `rename`: some string like `whatever_your_want`, to be used for convenience when `extract` is `false` and the default name of the file is not practical + - `platform`: for example `linux/amd64` (defaults to `linux/$YNH_ARCH`) to be used in conjonction with `format = "docker"` to specify which architecture to extract for + + ###### Regarding `autoupdate` + + Strictly speaking, this has nothing to do with the actual app install. `autoupdate` is expected to contain metadata for automatic maintenance / update of the app sources info in the manifest. It is meant to be a simpler replacement for "autoupdate" Github workflow mechanism. + + The infos are used by this script : https://github.com/YunoHost/apps/blob/master/tools/autoupdate_app_sources/autoupdate_app_sources.py which is ran by the YunoHost infrastructure periodically and will create the corresponding pull request automatically. + + The script will rely on the code repo specified in the upstream section of the manifest. + + `autoupdate.strategy` is expected to be one of : + - `latest_github_tag` : look for the latest tag (by sorting tags and finding the "largest" version). Then using the corresponding tar.gz url. Tags containing `rc`, `beta`, `alpha`, `start` are ignored, and actually any tag which doesn't look like `x.y.z` or `vx.y.z` + - `latest_github_release` : similar to `latest_github_tags`, but starting from the list of releases. Pre- or draft releases are ignored. Releases may have assets attached to them, in which case you can define: + - `autoupdate.asset = "some regex"` (when there's only one asset to use). The regex is used to find the appropriate asset among the list of all assets + - or several `autoupdate.asset.$arch = "some_regex"` (when the asset is arch-specific). The regex is used to find the appropriate asset for the specific arch among the list of assets + - `latest_github_commit` : will use the latest commit on github, and the corresponding tarball. If this is used for the 'main' source, it will also assume that the version is YYYY.MM.DD corresponding to the date of the commit. + + It is also possible to define `autoupdate.upstream` to use a different Git(hub) repository instead of the code repository from the upstream section of the manifest. This can be useful when, for example, the app uses other assets such as plugin from a different repository. + + ##### Provision/Update + - For elements with `prefetch = true`, will download the asset (for the appropriate architecture) and store them in `/var/cache/yunohost/download/$app/$source_id`, to be later picked up by `ynh_setup_source`. 
(NB: this only happens during install and upgrade, not restore) + + ##### Deprovision + - Nothing (just cleanup the cache) + """ + + type = "sources" + priority = 10 + + default_sources_properties: Dict[str, Any] = { + "prefetch": True, + "url": None, + "sha256": None, + } + + sources: Dict[str, Dict[str, Any]] = {} + + def __init__(self, properties: Dict[str, Any], *args, **kwargs): + for source_id, infos in properties.items(): + properties[source_id] = copy.copy(self.default_sources_properties) + properties[source_id].update(infos) + + super().__init__({"sources": properties}, *args, **kwargs) + + def deprovision(self, context: Dict = {}): + if os.path.isdir(f"/var/cache/yunohost/download/{self.app}/"): + rm(f"/var/cache/yunohost/download/{self.app}/", recursive=True) + + def provision_or_update(self, context: Dict = {}): + # Don't prefetch stuff during restore + if context.get("action") == "restore": + return + + for source_id, infos in self.sources.items(): + if not infos["prefetch"]: + continue + + if infos["url"] is None: + arch = system_arch() + if ( + arch in infos + and isinstance(infos[arch], dict) + and isinstance(infos[arch].get("url"), str) + and isinstance(infos[arch].get("sha256"), str) + ): + self.prefetch(source_id, infos[arch]["url"], infos[arch]["sha256"]) + else: + raise YunohostError( + f"In resources.sources: it looks like you forgot to define url/sha256 or {arch}.url/{arch}.sha256", + raw_msg=True, + ) + else: + if infos["sha256"] is None: + raise YunohostError( + f"In resources.sources: it looks like the sha256 is missing for {source_id}", + raw_msg=True, + ) + self.prefetch(source_id, infos["url"], infos["sha256"]) + + def prefetch(self, source_id, url, expected_sha256): + logger.debug(f"Prefetching asset {source_id}: {url} ...") + + if not os.path.isdir(f"/var/cache/yunohost/download/{self.app}/"): + mkdir(f"/var/cache/yunohost/download/{self.app}/", parents=True) + filename = f"/var/cache/yunohost/download/{self.app}/{source_id}" + + # NB: we use wget and not requests.get() because we want to output to a file (ie avoid ending up with the full archive in RAM) + # AND the nice --tries, --no-dns-cache, --timeout options ... + p = subprocess.Popen( + [ + "/usr/bin/wget", + "--tries=3", + "--no-dns-cache", + "--timeout=900", + "--no-verbose", + "--output-document=" + filename, + url, + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + out, _ = p.communicate() + returncode = p.returncode + if returncode != 0: + if os.path.exists(filename): + rm(filename) + raise YunohostError( + "app_failed_to_download_asset", + source_id=source_id, + url=url, + app=self.app, + out=out.decode(), + ) + + assert os.path.exists( + filename + ), f"For some reason, wget worked but {filename} doesnt exists?" + + computed_sha256 = check_output(f"sha256sum {filename}").split()[0] + if computed_sha256 != expected_sha256: + size = check_output(f"du -hs {filename}").split()[0] + rm(filename) + raise YunohostError( + "app_corrupt_source", + source_id=source_id, + url=url, + app=self.app, + expected_sha256=expected_sha256, + computed_sha256=computed_sha256, + size=size, + ) + + class PermissionsResource(AppResource): """ Configure the SSO permissions/tiles. Typically, webapps are expected to have a 'main' permission mapped to '/', meaning that a tile pointing to the `$domain/$path` will be available in the SSO for users allowed to access that app. 
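A minimal sketch of the asset-selection behaviour described in the `autoupdate` section of the docstring above: the `autoupdate.asset.$arch` regex is matched against the names of the release assets. The asset names and the use of `re.match` are assumptions made for illustration:

```python
import re

# Hypothetical asset names of an upstream GitHub release
assets = ["bar-1.2.4.amd64.tar.gz", "bar-1.2.4.386.tar.gz", "bar-1.2.4.arm.tar.gz"]

# Value a packager would put in autoupdate.asset.amd64
pattern = r".*\.amd64.tar.gz"

selected = [name for name in assets if re.match(pattern, name)]
assert selected == ["bar-1.2.4.amd64.tar.gz"]
```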
@@ -266,7 +462,7 @@ class PermissionsResource(AppResource): The list of allowed user/groups may be initialized using the content of the `init_{perm}_permission` question from the manifest, hence `init_main_permission` replaces the `is_public` question and shall contain a group name (typically, `all_users` or `visitors`). - ##### Example: + ##### Example ```toml [resources.permissions] main.url = "/" @@ -277,7 +473,7 @@ class PermissionsResource(AppResource): admin.allowed = "admins" # Assuming the "admins" group exists (cf future developments ;)) ``` - ##### Properties (for each perm name): + ##### Properties (for each perm name) - `url`: The relative URI corresponding to this permission. Typically `/` or `/something`. This property may be omitted for non-web permissions. - `show_tile`: (default: `true` if `url` is defined) Wether or not a tile should be displayed for that permission in the user portal - `allowed`: (default: nobody) The group initially allowed to access this perm, if `init_{perm}_permission` is not defined in the manifest questions. Note that the admin may tweak who is allowed/unallowed on that permission later on, this is only meant to **initialize** the permission. @@ -285,14 +481,14 @@ class PermissionsResource(AppResource): - `protected`: (default: `false`) Define if this permission is protected. If it is protected the administrator won't be able to add or remove the visitors group of this permission. Defaults to 'false'. - `additional_urls`: (default: none) List of additional URL for which access will be allowed/forbidden - ##### Provision/Update: + ##### Provision/Update - Delete any permissions that may exist and be related to this app yet is not declared anymore - Loop over the declared permissions and create them if needed or update them with the new values - ##### Deprovision: + ##### Deprovision - Delete all permission related to this app - ##### Legacy management: + ##### Legacy management - Legacy `is_public` setting will be deleted if it exists """ @@ -320,32 +516,65 @@ class PermissionsResource(AppResource): def __init__(self, properties: Dict[str, Any], *args, **kwargs): # FIXME : if url != None, we should check that there's indeed a domain/path defined ? 
ie that app is a webapp + # Validate packager-provided infos + for perm, infos in properties.items(): + if "auth_header" in infos and not isinstance( + infos.get("auth_header"), bool + ): + raise YunohostError( + f"In manifest, for permission '{perm}', 'auth_header' should be a boolean", + raw_msg=True, + ) + if "show_tile" in infos and not isinstance(infos.get("show_tile"), bool): + raise YunohostError( + f"In manifest, for permission '{perm}', 'show_tile' should be a boolean", + raw_msg=True, + ) + if "protected" in infos and not isinstance(infos.get("protected"), bool): + raise YunohostError( + f"In manifest, for permission '{perm}', 'protected' should be a boolean", + raw_msg=True, + ) + if "additional_urls" in infos and not isinstance( + infos.get("additional_urls"), list + ): + raise YunohostError( + f"In manifest, for permission '{perm}', 'additional_urls' should be a list", + raw_msg=True, + ) + + if "main" not in properties: + properties["main"] = copy.copy(self.default_perm_properties) + for perm, infos in properties.items(): properties[perm] = copy.copy(self.default_perm_properties) properties[perm].update(infos) if properties[perm]["show_tile"] is None: properties[perm]["show_tile"] = bool(properties[perm]["url"]) - if ( + if properties["main"]["url"] is not None and ( not isinstance(properties["main"].get("url"), str) or properties["main"]["url"] != "/" ): raise YunohostError( - "URL for the 'main' permission should be '/' for webapps (or undefined/None for non-webapps). Note that / refers to the install url of the app, i.e $domain.tld/$path/", + "URL for the 'main' permission should be '/' for webapps (or left undefined for non-webapps). Note that / refers to the install url of the app, i.e $domain.tld/$path/", raw_msg=True, ) super().__init__({"permissions": properties}, *args, **kwargs) + from yunohost.app import _get_app_settings, _hydrate_app_template + + settings = _get_app_settings(self.app) for perm, infos in self.permissions.items(): - if infos.get("url") and "__DOMAIN__" in infos.get("url", ""): - infos["url"] = infos["url"].replace( - "__DOMAIN__", self.get_setting("domain") - ) - infos["additional_urls"] = [ - u.replace("__DOMAIN__", self.get_setting("domain")) - for u in infos.get("additional_urls", []) - ] + if infos.get("url") and "__" in infos.get("url"): + infos["url"] = _hydrate_app_template(infos["url"], settings) + + if infos.get("additional_urls"): + infos["additional_urls"] = [ + _hydrate_app_template(url, settings) + for url in infos["additional_urls"] + ] def provision_or_update(self, context: Dict = {}): from yunohost.permission import ( @@ -441,22 +670,22 @@ class SystemuserAppResource(AppResource): """ Provision a system user to be used by the app. The username is exactly equal to the app id - ##### Example: + ##### Example ```toml [resources.system_user] # (empty - defaults are usually okay) ``` - ##### Properties: + ##### Properties - `allow_ssh`: (default: False) Adds the user to the ssh.app group, allowing SSH connection via this user - `allow_sftp`: (default: False) Adds the user to the sftp.app group, allowing SFTP connection via this user - `home`: (default: `/var/www/__APP__`) Defines the home property for this user. 
NB: unfortunately you can't simply use `__INSTALL_DIR__` or `__DATA_DIR__` for now - ##### Provision/Update: + ##### Provision/Update - will create the system user if it doesn't exists yet - will add/remove the ssh/sftp.app groups - ##### Deprovision: + ##### Deprovision - deletes the user and group """ @@ -549,28 +778,28 @@ class InstalldirAppResource(AppResource): """ Creates a directory to be used by the app as the installation directory, typically where the app sources and assets are located. The corresponding path is stored in the settings as `install_dir` - ##### Example: + ##### Example ```toml [resources.install_dir] # (empty - defaults are usually okay) ``` - ##### Properties: + ##### Properties - `dir`: (default: `/var/www/__APP__`) The full path of the install dir - `owner`: (default: `__APP__:rwx`) The owner (and owner permissions) for the install dir - `group`: (default: `__APP__:rx`) The group (and group permissions) for the install dir - ##### Provision/Update: + ##### Provision/Update - during install, the folder will be deleted if it already exists (FIXME: is this what we want?) - if the dir path changed and a folder exists at the old location, the folder will be `mv`'ed to the new location - otherwise, creates the directory if it doesn't exists yet - (re-)apply permissions (only on the folder itself, not recursively) - save the value of `dir` as `install_dir` in the app's settings, which can be then used by the app scripts (`$install_dir`) and conf templates (`__INSTALL_DIR__`) - ##### Deprovision: + ##### Deprovision - recursively deletes the directory if it exists - ##### Legacy management: + ##### Legacy management - In the past, the setting was called `final_path`. The code will automatically rename it as `install_dir`. - As explained in the 'Provision/Update' section, the folder will also be moved if the location changed @@ -664,28 +893,30 @@ class DatadirAppResource(AppResource): """ Creates a directory to be used by the app as the data store directory, typically where the app multimedia or large assets added by users are located. The corresponding path is stored in the settings as `data_dir`. This resource behaves very similarly to install_dir. - ##### Example: + ##### Example ```toml [resources.data_dir] # (empty - defaults are usually okay) ``` - ##### Properties: + ##### Properties - `dir`: (default: `/home/yunohost.app/__APP__`) The full path of the data dir + - `subdirs`: (default: empty list) A list of subdirs to initialize inside the data dir. 
For example, `['foo', 'bar']` - `owner`: (default: `__APP__:rwx`) The owner (and owner permissions) for the data dir - `group`: (default: `__APP__:rx`) The group (and group permissions) for the data dir - ##### Provision/Update: + ##### Provision/Update - if the dir path changed and a folder exists at the old location, the folder will be `mv`'ed to the new location - otherwise, creates the directory if it doesn't exists yet - - (re-)apply permissions (only on the folder itself, not recursively) + - create each subdir declared and which do not exist already + - (re-)apply permissions (only on the folder itself and declared subdirs, not recursively) - save the value of `dir` as `data_dir` in the app's settings, which can be then used by the app scripts (`$data_dir`) and conf templates (`__DATA_DIR__`) - ##### Deprovision: + ##### Deprovision - (only if the purge option is chosen by the user) recursively deletes the directory if it exists - also delete the corresponding setting - ##### Legacy management: + ##### Legacy management - In the past, the setting may have been called `datadir`. The code will automatically rename it as `data_dir`. - As explained in the 'Provision/Update' section, the folder will also be moved if the location changed @@ -701,11 +932,13 @@ class DatadirAppResource(AppResource): default_properties: Dict[str, Any] = { "dir": "/home/yunohost.app/__APP__", + "subdirs": [], "owner": "__APP__:rwx", "group": "__APP__:rx", } dir: str = "" + subdirs: list = [] owner: str = "" group: str = "" @@ -727,7 +960,12 @@ class DatadirAppResource(AppResource): ) shutil.move(current_data_dir, self.dir) else: - mkdir(self.dir) + mkdir(self.dir, parents=True) + + for subdir in self.subdirs: + full_path = os.path.join(self.dir, subdir) + if not os.path.isdir(full_path): + mkdir(full_path, parents=True) owner, owner_perm = self.owner.split(":") group, group_perm = self.group.split(":") @@ -747,6 +985,10 @@ class DatadirAppResource(AppResource): # in which case we want to apply the perm to the pointed dir, not to the symlink chmod(os.path.realpath(self.dir), perm_octal) chown(os.path.realpath(self.dir), owner, group) + for subdir in self.subdirs: + full_path = os.path.join(self.dir, subdir) + chmod(os.path.realpath(full_path), perm_octal) + chown(os.path.realpath(full_path), owner, group) self.set_setting("data_dir", self.dir) self.delete_setting("datadir") # Legacy @@ -766,7 +1008,7 @@ class AptDependenciesAppResource(AppResource): """ Create a virtual package in apt, depending on the list of specified packages that the app needs. The virtual packages is called `$app-ynh-deps` (with `_` being replaced by `-` in the app name, see `ynh_install_app_dependencies`) - ##### Example: + ##### Example ```toml [resources.apt] packages = "nyancat, lolcat, sl" @@ -777,16 +1019,16 @@ class AptDependenciesAppResource(AppResource): extras.yarn.packages = "yarn" ``` - ##### Properties: + ##### Properties - `packages`: Comma-separated list of packages to be installed via `apt` - `packages_from_raw_bash`: A multi-line bash snippet (using triple quotes as open/close) which should echo additional packages to be installed. Meant to be used for packages to be conditionally installed depending on architecture, debian version, install questions, or other logic. 
- `extras`: A dict of (repo, key, packages) corresponding to "extra" repositories to fetch dependencies from - ##### Provision/Update: + ##### Provision/Update - The code literally calls the bash helpers `ynh_install_app_dependencies` and `ynh_install_extra_app_dependencies`, similar to what happens in v1. - Note that when `packages` contains some phpX.Y-foobar dependencies, this will automagically define a `phpversion` setting equal to `X.Y` which can therefore be used in app scripts ($phpversion) or templates (`__PHPVERSION__`) - ##### Deprovision: + ##### Deprovision - The code literally calls the bash helper `ynh_remove_app_dependencies` """ @@ -845,7 +1087,7 @@ class PortsResource(AppResource): Note that because multiple ports can be booked, each properties is prefixed by the name of the port. `main` is a special name and will correspond to the setting `$port`, whereas for example `xmpp_client` will correspond to the setting `$port_xmpp_client`. - ##### Example: + ##### Example ```toml [resources.ports] # (empty should be fine for most apps... though you can customize stuff if absolutely needed) @@ -857,21 +1099,21 @@ class PortsResource(AppResource): xmpp_client.exposed = "TCP" # here, we're telling that the port needs to be publicly exposed on TCP on the firewall ``` - ##### Properties (for every port name): + ##### Properties (for every port name) - `default`: The prefered value for the port. If this port is already being used by another process right now, or is booked in another app's setting, the code will increment the value until it finds a free port and store that value as the setting. If no value is specified, a random value between 10000 and 60000 is used. - `exposed`: (default: `false`) Wether this port should be opened on the firewall and be publicly reachable. This should be kept to `false` for the majority of apps than only need a port for internal reverse-proxying! Possible values: `false`, `true`(=`Both`), `Both`, `TCP`, `UDP`. This will result in the port being opened on the firewall, and the diagnosis checking that a program answers on that port. - `fixed`: (default: `false`) Tells that the app absolutely needs the specific value provided in `default`, typically because it's needed for a specific protocol - ##### Provision/Update (for every port name): + ##### Provision/Update (for every port name) - If not already booked, look for a free port, starting with the `default` value (or a random value between 10000 and 60000 if no `default` set) - If `exposed` is not `false`, open the port in the firewall accordingly - otherwise make sure it's closed. - The value of the port is stored in the `$port` setting for the `main` port, or `$port_NAME` for other `NAME`s - ##### Deprovision: + ##### Deprovision - Close the ports on the firewall if relevant - Deletes all the port settings - ##### Legacy management: + ##### Legacy management - In the past, some settings may have been named `NAME_port` instead of `port_NAME`, in which case the code will automatically rename the old setting. 
""" @@ -913,8 +1155,8 @@ class PortsResource(AppResource): % port ) # This second command is mean to cover (most) case where an app is using a port yet ain't currently using it for some reason (typically service ain't up) - cmd2 = f"grep --quiet \"port: '{port}'\" /etc/yunohost/apps/*/settings.yml" - return os.system(cmd1) == 0 and os.system(cmd2) == 0 + cmd2 = f"grep --quiet --extended-regexp \"port: '?{port}'?\" /etc/yunohost/apps/*/settings.yml" + return os.system(cmd1) == 0 or os.system(cmd2) == 0 def provision_or_update(self, context: Dict = {}): from yunohost.firewall import firewall_allow, firewall_disallow @@ -974,25 +1216,25 @@ class DatabaseAppResource(AppResource): NB2: no automagic migration will happen in an suddenly change `type` from `mysql` to `postgresql` or viceversa in its life - ##### Example: + ##### Example ```toml [resources.database] type = "mysql" # or : "postgresql". Only these two values are supported ``` - ##### Properties: + ##### Properties - `type`: The database type, either `mysql` or `postgresql` - ##### Provision/Update: + ##### Provision/Update - (Re)set the `$db_name` and `$db_user` settings with the sanitized app name (replacing `-` and `.` with `_`) - If `$db_pwd` doesn't already exists, pick a random database password and store it in that setting - If the database doesn't exists yet, create the SQL user and DB using `ynh_mysql_create_db` or `ynh_psql_create_db`. - ##### Deprovision: + ##### Deprovision - Drop the DB using `ynh_mysql_remove_db` or `ynh_psql_remove_db` - Deletes the `db_name`, `db_user` and `db_pwd` settings - ##### Legacy management: + ##### Legacy management - In the past, the sql passwords may have been named `mysqlpwd` or `psqlpwd`, in which case it will automatically be renamed as `db_pwd` """ diff --git a/tox.ini b/tox.ini index 49c78959d..58c443f25 100644 --- a/tox.ini +++ b/tox.ini @@ -1,15 +1,15 @@ [tox] -envlist = py39-{lint,invalidcode},py39-black-{run,check} +envlist = py311-{lint,invalidcode},py311-black-{run,check} [testenv] skip_install=True deps = - py39-{lint,invalidcode}: flake8 - py39-black-{run,check}: black - py39-mypy: mypy >= 0.900 + py311-{lint,invalidcode}: flake8 + py311-black-{run,check}: black + py311-mypy: mypy >= 0.900 commands = - py39-lint: flake8 src doc maintenance tests --ignore E402,E501,E203,W503,E741 --exclude src/vendor - py39-invalidcode: flake8 src bin maintenance --exclude src/tests,src/vendor --select F,E722,W605 - py39-black-check: black --check --diff bin src doc maintenance tests - py39-black-run: black bin src doc maintenance tests - py39-mypy: mypy --ignore-missing-import --install-types --non-interactive --follow-imports silent src/ --exclude (acme_tiny|migrations) + py311-lint: flake8 src doc maintenance tests --ignore E402,E501,E203,W503,E741 --exclude src/vendor + py311-invalidcode: flake8 src bin maintenance --exclude src/tests,src/vendor --select F,E722,W605 + py311-black-check: black --check --diff bin src doc maintenance tests + py311-black-run: black bin src doc maintenance tests + py311-mypy: mypy --ignore-missing-import --install-types --non-interactive --follow-imports silent src/ --exclude (acme_tiny|migrations)