Merge pull request #1395 from YunoHost/pyupgrade

Pyupgrade
This commit is contained in:
Alexandre Aubin 2021-12-29 00:36:31 +01:00 committed by GitHub
commit f8004570ab
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
35 changed files with 145 additions and 183 deletions

View file

@ -42,10 +42,8 @@ class BaseSystemDiagnoser(Diagnoser):
elif os.path.exists("/sys/devices/virtual/dmi/id/sys_vendor"):
model = read_file("/sys/devices/virtual/dmi/id/sys_vendor").strip()
if os.path.exists("/sys/devices/virtual/dmi/id/product_name"):
model = "%s %s" % (
model,
read_file("/sys/devices/virtual/dmi/id/product_name").strip(),
)
product_name = read_file("/sys/devices/virtual/dmi/id/product_name").strip()
model = f"{model} {product_name}"
hardware["data"]["model"] = model
hardware["details"] = ["diagnosis_basesystem_hardware_model"]
@ -116,7 +114,7 @@ class BaseSystemDiagnoser(Diagnoser):
bad_sury_packages = list(self.bad_sury_packages())
if bad_sury_packages:
cmd_to_fix = "apt install --allow-downgrades " + " ".join(
["%s=%s" % (package, version) for package, version in bad_sury_packages]
[f"{package}={version}" for package, version in bad_sury_packages]
)
yield dict(
meta={"test": "packages_from_sury"},

View file

@ -167,10 +167,7 @@ class IPDiagnoser(Diagnoser):
assert (
resolvers != []
), "Uhoh, need at least one IPv%s DNS resolver in %s ..." % (
protocol,
resolver_file,
)
), f"Uhoh, need at least one IPv{protocol} DNS resolver in {resolver_file} ..."
# So let's try to ping the first 4~5 resolvers (shuffled)
# If we successfully ping any of them, we conclude that we are indeed connected
@ -220,9 +217,9 @@ class IPDiagnoser(Diagnoser):
try:
return download_text(url, timeout=30).strip()
except Exception as e:
self.logger_debug(
"Could not get public IPv%s : %s" % (str(protocol), str(e))
)
protocol = str(protocol)
e = str(e)
self.logger_debug(f"Could not get public IPv{protocol} : {e}")
return None

View file

@ -132,7 +132,7 @@ class SystemResourcesDiagnoser(Diagnoser):
d for d in disk_partitions if d.mountpoint in ["/", "/var"]
]
main_space = sum(
[psutil.disk_usage(d.mountpoint).total for d in main_disk_partitions]
psutil.disk_usage(d.mountpoint).total for d in main_disk_partitions
)
if main_space < 10 * GB:
yield dict(
@ -156,7 +156,7 @@ class SystemResourcesDiagnoser(Diagnoser):
kills_count = self.recent_kills_by_oom_reaper()
if kills_count:
kills_summary = "\n".join(
["%s (x%s)" % (proc, count) for proc, count in kills_count]
[f"{proc} (x{count})" for proc, count in kills_count]
)
yield dict(
@ -202,9 +202,11 @@ def human_size(bytes_):
# Adapted from https://stackoverflow.com/a/1094933
for unit in ["", "ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
if abs(bytes_) < 1024.0:
return "%s %sB" % (round_(bytes_), unit)
bytes_ = round_(bytes_)
return f"{bytes_} {unit}B"
bytes_ /= 1024.0
return "%s %sB" % (round_(bytes_), "Yi")
bytes_ = round_(bytes_)
return f"{bytes_} YiB"
def round_(n):

View file

@ -107,7 +107,7 @@ class Parser:
else:
# We're getting out of a comment bloc, we should find
# the name of the function
assert len(line.split()) >= 1, "Malformed line %s in %s" % (
assert len(line.split()) >= 1, "Malformed line {} in {}".format(
i,
self.filename,
)

View file

@ -122,7 +122,7 @@ def app_list(full=False, installed=False, filter=None):
try:
app_info_dict = app_info(app_id, full=full)
except Exception as e:
logger.error("Failed to read info for %s : %s" % (app_id, e))
logger.error(f"Failed to read info for {app_id} : {e}")
continue
app_info_dict["id"] = app_id
out.append(app_info_dict)
@ -1219,7 +1219,8 @@ def app_setting(app, key, value=None, delete=False):
)
permissions = user_permission_list(full=True, apps=[app])["permissions"]
permission_name = "%s.legacy_%s_uris" % (app, key.split("_")[0])
key_ = key.split("_")[0]
permission_name = f"{app}.legacy_{key_}_uris"
permission = permissions.get(permission_name)
# GET
@ -1562,11 +1563,7 @@ def app_action_run(operation_logger, app, action, args=None):
shutil.rmtree(tmp_workdir_for_app)
if retcode not in action_declaration.get("accepted_return_codes", [0]):
msg = "Error while executing action '%s' of app '%s': return code %s" % (
action,
app,
retcode,
)
msg = f"Error while executing action '{action}' of app '{app}': return code {retcode}"
operation_logger.error(msg)
raise YunohostError(msg, raw_msg=True)
@ -1989,7 +1986,8 @@ def _set_default_ask_questions(arguments):
for question in questions_with_default
):
# The key is for example "app_manifest_install_ask_domain"
key = "app_manifest_%s_ask_%s" % (script_name, arg["name"])
arg_name = arg["name"]
key = f"app_manifest_{script_name}_ask_{arg_name}"
arg["ask"] = m18n.n(key)
# Also it in fact doesn't make sense for any of those questions to have an example value nor a default value...
@ -2397,7 +2395,8 @@ def _make_environment_for_app_script(
env_dict["YNH_APP_BASEDIR"] = workdir
for arg_name, arg_value in args.items():
env_dict["YNH_%s%s" % (args_prefix, arg_name.upper())] = str(arg_value)
arg_name_upper = arg_name.upper()
env_dict[f"YNH_{args_prefix}{arg_name_upper}"] = str(arg_value)
return env_dict

View file

@ -217,7 +217,7 @@ def _load_apps_catalog():
)
except Exception as e:
raise YunohostError(
"Unable to read cache for apps_catalog %s : %s" % (cache_file, e),
f"Unable to read cache for apps_catalog {cache_file} : {e}",
raw_msg=True,
)

View file

@ -80,7 +80,7 @@ MB_ALLOWED_TO_ORGANIZE = 10
logger = getActionLogger("yunohost.backup")
class BackupRestoreTargetsManager(object):
class BackupRestoreTargetsManager:
"""
BackupRestoreTargetsManager manage the targets
@ -1570,7 +1570,7 @@ class RestoreManager:
#
# Backup methods #
#
class BackupMethod(object):
class BackupMethod:
"""
BackupMethod is an abstract class that represents a way to backup and
@ -2380,7 +2380,7 @@ def backup_list(with_info=False, human_readable=False):
# Get local archives sorted according to last modification time
# (we do a realpath() to resolve symlinks)
archives = glob("%s/*.tar.gz" % ARCHIVES_PATH) + glob("%s/*.tar" % ARCHIVES_PATH)
archives = set([os.path.realpath(archive) for archive in archives])
archives = {os.path.realpath(archive) for archive in archives}
archives = sorted(archives, key=lambda x: os.path.getctime(x))
# Extract only filename without the extension
@ -2420,7 +2420,7 @@ def backup_download(name):
)
return
archive_file = "%s/%s.tar" % (ARCHIVES_PATH, name)
archive_file = f"{ARCHIVES_PATH}/{name}.tar"
# Check file exist (even if it's a broken symlink)
if not os.path.lexists(archive_file):
@ -2462,7 +2462,7 @@ def backup_info(name, with_details=False, human_readable=False):
elif name.endswith(".tar"):
name = name[: -len(".tar")]
archive_file = "%s/%s.tar" % (ARCHIVES_PATH, name)
archive_file = f"{ARCHIVES_PATH}/{name}.tar"
# Check file exist (even if it's a broken symlink)
if not os.path.lexists(archive_file):
@ -2480,7 +2480,7 @@ def backup_info(name, with_details=False, human_readable=False):
"backup_archive_broken_link", path=archive_file
)
info_file = "%s/%s.info.json" % (ARCHIVES_PATH, name)
info_file = f"{ARCHIVES_PATH}/{name}.info.json"
if not os.path.exists(info_file):
tar = tarfile.open(
@ -2591,10 +2591,10 @@ def backup_delete(name):
hook_callback("pre_backup_delete", args=[name])
archive_file = "%s/%s.tar" % (ARCHIVES_PATH, name)
archive_file = f"{ARCHIVES_PATH}/{name}.tar"
if not os.path.exists(archive_file) and os.path.exists(archive_file + ".gz"):
archive_file += ".gz"
info_file = "%s/%s.info.json" % (ARCHIVES_PATH, name)
info_file = f"{ARCHIVES_PATH}/{name}.info.json"
files_to_delete = [archive_file, info_file]
@ -2693,5 +2693,5 @@ def binary_to_human(n, customary=False):
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
return "%.1f%s" % (value, s)
return "{:.1f}{}".format(value, s)
return "%s" % n

View file

@ -143,11 +143,7 @@ def _certificate_install_selfsigned(domain_list, force=False):
# Paths of files and folder we'll need
date_tag = datetime.utcnow().strftime("%Y%m%d.%H%M%S")
new_cert_folder = "%s/%s-history/%s-selfsigned" % (
CERT_FOLDER,
domain,
date_tag,
)
new_cert_folder = f"{CERT_FOLDER}/{domain}-history/{date_tag}-selfsigned"
conf_template = os.path.join(SSL_DIR, "openssl.cnf")
@ -300,10 +296,7 @@ def _certificate_install_letsencrypt(
try:
_fetch_and_enable_new_certificate(domain, staging, no_checks=no_checks)
except Exception as e:
msg = "Certificate installation for %s failed !\nException: %s" % (
domain,
e,
)
msg = f"Certificate installation for {domain} failed !\nException: {e}"
logger.error(msg)
operation_logger.error(msg)
if no_checks:
@ -456,39 +449,25 @@ def _email_renewing_failed(domain, exception_message, stack=""):
subject_ = "Certificate renewing attempt for %s failed!" % domain
logs = _tail(50, "/var/log/yunohost/yunohost-cli.log")
text = """
An attempt for renewing the certificate for domain %s failed with the following
message = f"""\
From: {from_}
To: {to_}
Subject: {subject_}
An attempt for renewing the certificate for domain {domain} failed with the following
error :
%s
%s
{exception_message}
{stack}
Here's the tail of /var/log/yunohost/yunohost-cli.log, which might help to
investigate :
%s
{logs}
-- Certificate Manager
""" % (
domain,
exception_message,
stack,
logs,
)
message = """\
From: %s
To: %s
Subject: %s
%s
""" % (
from_,
to_,
subject_,
text,
)
"""
import smtplib
@ -532,7 +511,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False, no_checks=False):
# Prepare certificate signing request
logger.debug("Prepare key and certificate signing request (CSR) for %s...", domain)
domain_key_file = "%s/%s.pem" % (TMP_FOLDER, domain)
domain_key_file = f"{TMP_FOLDER}/{domain}.pem"
_generate_key(domain_key_file)
_set_permissions(domain_key_file, "root", "ssl-cert", 0o640)
@ -541,7 +520,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False, no_checks=False):
# Sign the certificate
logger.debug("Now using ACME Tiny to sign the certificate...")
domain_csr_file = "%s/%s.csr" % (TMP_FOLDER, domain)
domain_csr_file = f"{TMP_FOLDER}/{domain}.csr"
if staging:
certification_authority = STAGING_CERTIFICATION_AUTHORITY
@ -580,12 +559,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False, no_checks=False):
else:
folder_flag = "letsencrypt"
new_cert_folder = "%s/%s-history/%s-%s" % (
CERT_FOLDER,
domain,
date_tag,
folder_flag,
)
new_cert_folder = f"{CERT_FOLDER}/{domain}-history/{date_tag}-{folder_flag}"
os.makedirs(new_cert_folder)
@ -642,7 +616,7 @@ def _prepare_certificate_signing_request(domain, key_file, output_folder):
csr.add_extensions(
[
crypto.X509Extension(
"subjectAltName".encode("utf8"),
b"subjectAltName",
False,
("DNS:" + subdomain).encode("utf8"),
)
@ -844,7 +818,7 @@ def _backup_current_cert(domain):
cert_folder_domain = os.path.join(CERT_FOLDER, domain)
date_tag = datetime.utcnow().strftime("%Y%m%d.%H%M%S")
backup_folder = "%s-backups/%s" % (cert_folder_domain, date_tag)
backup_folder = f"{cert_folder_domain}-backups/{date_tag}"
shutil.copytree(cert_folder_domain, backup_folder)

View file

@ -269,14 +269,14 @@ class MyMigration(Migration):
% default_crt
)
os.system("mv %s %s.old" % (default_crt, default_crt))
os.system("mv %s %s.old" % (default_key, default_key))
os.system("mv {} {}.old".format(default_crt, default_crt))
os.system("mv {} {}.old".format(default_key, default_key))
ret = os.system("/usr/share/yunohost/hooks/conf_regen/02-ssl init")
if ret != 0 or not os.path.exists(default_crt):
logger.error("Upgrading the certificate failed ... reverting")
os.system("mv %s.old %s" % (default_crt, default_crt))
os.system("mv %s.old %s" % (default_key, default_key))
os.system("mv {}.old {}".format(default_crt, default_crt))
os.system("mv {}.old {}".format(default_key, default_key))
signatures = {cert: check_output(cmd % cert) for cert in active_certs}

View file

@ -640,7 +640,7 @@ class Diagnoser:
elif ipversion == 6:
socket.getaddrinfo = getaddrinfo_ipv6_only
url = "https://%s/%s" % (DIAGNOSIS_SERVER, uri)
url = "https://{}/{}".format(DIAGNOSIS_SERVER, uri)
try:
r = requests.post(url, json=data, timeout=timeout)
finally:
@ -679,7 +679,7 @@ def _email_diagnosis_issues():
from yunohost.domain import _get_maindomain
maindomain = _get_maindomain()
from_ = "diagnosis@%s (Automatic diagnosis on %s)" % (maindomain, maindomain)
from_ = "diagnosis@{} (Automatic diagnosis on {})".format(maindomain, maindomain)
to_ = "root"
subject_ = "Issues found by automatic diagnosis on %s" % maindomain
@ -692,16 +692,16 @@ def _email_diagnosis_issues():
content = _dump_human_readable_reports(issues)
message = """\
From: %s
To: %s
Subject: %s
From: {}
To: {}
Subject: {}
%s
{}
---
%s
""" % (
{}
""".format(
from_,
to_,
subject_,

View file

@ -762,7 +762,7 @@ def domain_dns_push(operation_logger, domain, dry_run=False, force=False, purge=
changes = {"delete": [], "update": [], "create": [], "unchanged": []}
type_and_names = sorted(
set([(r["type"], r["name"]) for r in current_records + wanted_records])
{(r["type"], r["name"]) for r in current_records + wanted_records}
)
comparison = {
type_and_name: {"current": [], "wanted": []} for type_and_name in type_and_names

View file

@ -151,7 +151,7 @@ def dyndns_subscribe(operation_logger, domain=None, key=None):
try:
error = json.loads(r.text)["error"]
except Exception:
error = 'Server error, code: %s. (Message: "%s")' % (r.status_code, r.text)
error = f'Server error, code: {r.status_code}. (Message: "{r.text}")'
raise YunohostError("dyndns_registration_failed", error=error)
# Yunohost regen conf will add the dyndns cron job if a private key exists
@ -196,7 +196,7 @@ def dyndns_update(
# If key is not given, pick the first file we find with the domain given
elif key is None:
keys = glob.glob("/etc/yunohost/dyndns/K{0}.+*.private".format(domain))
keys = glob.glob(f"/etc/yunohost/dyndns/K{domain}.+*.private")
if not keys:
raise YunohostValidationError("dyndns_key_not_found")
@ -263,14 +263,14 @@ def dyndns_update(
return None
raise YunohostError(
"Failed to resolve %s for %s" % (rdtype, domain), raw_msg=True
f"Failed to resolve {rdtype} for {domain}", raw_msg=True
)
old_ipv4 = resolve_domain(domain, "A")
old_ipv6 = resolve_domain(domain, "AAAA")
logger.debug("Old IPv4/v6 are (%s, %s)" % (old_ipv4, old_ipv6))
logger.debug("Requested IPv4/v6 are (%s, %s)" % (ipv4, ipv6))
logger.debug(f"Old IPv4/v6 are ({old_ipv4}, {old_ipv6})")
logger.debug(f"Requested IPv4/v6 are ({ipv4}, {ipv6})")
# no need to update
if (not force and not dry_run) and (old_ipv4 == ipv4 and old_ipv6 == ipv6):

View file

@ -156,7 +156,7 @@ def hook_list(action, list_by="name", show_info=False):
try:
d[priority].add(name)
except KeyError:
d[priority] = set([name])
d[priority] = {name}
elif list_by == "name" or list_by == "folder":
if show_info:
@ -197,7 +197,7 @@ def hook_list(action, list_by="name", show_info=False):
or (f.startswith("__") and f.endswith("__"))
):
continue
path = "%s%s/%s" % (folder, action, f)
path = f"{folder}{action}/{f}"
priority, name = _extract_filename_parts(f)
_append_hook(d, priority, name, path)
@ -407,7 +407,7 @@ def _hook_exec_bash(path, args, chdir, env, user, return_format, loggers):
if not chdir:
# use the script directory as current one
chdir, cmd_script = os.path.split(path)
cmd_script = "./{0}".format(cmd_script)
cmd_script = f"./{cmd_script}"
else:
cmd_script = path

View file

@ -469,7 +469,7 @@ class RedactingFormatter(Formatter):
)
class OperationLogger(object):
class OperationLogger:
"""
Instances of this class represents unit operation done on the ynh instance.
@ -544,7 +544,7 @@ class OperationLogger(object):
# We use proc.open_files() to list files opened / actively used by this proc
# We only keep files matching a recent yunohost operation log
active_logs = sorted(
[f.path for f in proc.open_files() if f.path in recent_operation_logs],
(f.path for f in proc.open_files() if f.path in recent_operation_logs),
key=os.path.getctime,
reverse=True,
)

View file

@ -139,7 +139,8 @@ def user_permission_list(
continue
main_perm_label = permissions[main_perm_name]["label"]
infos["sublabel"] = infos["label"]
infos["label"] = "%s (%s)" % (main_perm_label, infos["label"])
label_ = infos["label"]
infos["label"] = f"{main_perm_label} ({label_})"
if short:
permissions = list(permissions.keys())
@ -664,13 +665,11 @@ def permission_sync_to_user():
currently_allowed_users = set(permission_infos["corresponding_users"])
# These are the users that should be allowed because they are member of a group that is allowed for this permission ...
should_be_allowed_users = set(
[
should_be_allowed_users = {
user
for group in permission_infos["allowed"]
for user in groups[group]["members"]
]
)
}
# Note that an LDAP operation writing the same value that is already in LDAP crashes SLAP.
# So we need to check before each ldap operation that we really change something in LDAP

View file

@ -638,12 +638,9 @@ def _process_regen_conf(system_conf, new_conf=None, save=True):
"""
if save:
backup_path = os.path.join(
BACKUP_CONF_DIR,
"{0}-{1}".format(
system_conf.lstrip("/"), datetime.utcnow().strftime("%Y%m%d.%H%M%S")
),
)
system_conf_ = system_conf.lstrip("/")
now_ = datetime.utcnow().strftime("%Y%m%d.%H%M%S")
backup_path = os.path.join(BACKUP_CONF_DIR, f"{system_conf_}-{now_}")
backup_dir = os.path.dirname(backup_path)
if not os.path.isdir(backup_dir):

View file

@ -625,7 +625,7 @@ def _run_service_command(action, service):
% (action, ", ".join(possible_actions))
)
cmd = "systemctl %s %s" % (action, service)
cmd = f"systemctl {action} {service}"
need_lock = services[service].get("need_lock", False) and action in [
"start",
@ -673,7 +673,7 @@ def _give_lock(action, service, p):
else:
systemctl_PID_name = "ControlPID"
cmd_get_son_PID = "systemctl show %s -p %s" % (service, systemctl_PID_name)
cmd_get_son_PID = f"systemctl show {service} -p {systemctl_PID_name}"
son_PID = 0
# As long as we did not found the PID and that the command is still running
while son_PID == 0 and p.poll() is None:
@ -686,9 +686,7 @@ def _give_lock(action, service, p):
# If we found a PID
if son_PID != 0:
# Append the PID to the lock file
logger.debug(
"Giving a lock to PID %s for service %s !" % (str(son_PID), service)
)
logger.debug(f"Giving a lock to PID {son_PID} for service {service} !")
append_to_file(MOULINETTE_LOCK, "\n%s" % str(son_PID))
return son_PID
@ -865,9 +863,7 @@ def _get_journalctl_logs(service, number="all"):
systemd_service = services.get(service, {}).get("actual_systemd_service", service)
try:
return check_output(
"journalctl --no-hostname --no-pager -u {0} -n{1}".format(
systemd_service, number
)
f"journalctl --no-hostname --no-pager -u {systemd_service} -n{number}"
)
except Exception:
import traceback

View file

@ -224,7 +224,7 @@ def settings_set(key, value):
try:
trigger_post_change_hook(key, old_value, value)
except Exception as e:
logger.error("Post-change hook for setting %s failed : %s" % (key, e))
logger.error(f"Post-change hook for setting {key} failed : {e}")
raise

View file

@ -132,7 +132,7 @@ def test_apps_catalog_update_nominal(mocker):
catalog = app_catalog(with_categories=True)
assert "apps" in catalog
assert set(catalog["apps"].keys()) == set(["foo", "bar"])
assert set(catalog["apps"].keys()) == {"foo", "bar"}
assert "categories" in catalog
assert [c["id"] for c in catalog["categories"]] == ["yolo", "swag"]

View file

@ -70,7 +70,7 @@ def legacy_app(request):
app_install(
os.path.join(get_test_apps_dir(), "legacy_app_ynh"),
args="domain=%s&path=%s&is_public=%s" % (main_domain, "/", 1),
args="domain={}&path={}&is_public={}".format(main_domain, "/", 1),
force=True,
)

View file

@ -111,7 +111,7 @@ def secondary_domain(request):
def app_expected_files(domain, app):
yield "/etc/nginx/conf.d/%s.d/%s.conf" % (domain, app)
yield "/etc/nginx/conf.d/{}.d/{}.conf".format(domain, app)
if app.startswith("legacy_app"):
yield "/var/www/%s/index.html" % app
yield "/etc/yunohost/apps/%s/settings.yml" % app
@ -152,7 +152,7 @@ def install_legacy_app(domain, path, public=True):
app_install(
os.path.join(get_test_apps_dir(), "legacy_app_ynh"),
args="domain=%s&path=%s&is_public=%s" % (domain, path, 1 if public else 0),
args="domain={}&path={}&is_public={}".format(domain, path, 1 if public else 0),
force=True,
)
@ -170,7 +170,7 @@ def install_break_yo_system(domain, breakwhat):
app_install(
os.path.join(get_test_apps_dir(), "break_yo_system_ynh"),
args="domain=%s&breakwhat=%s" % (domain, breakwhat),
args="domain={}&breakwhat={}".format(domain, breakwhat),
force=True,
)
@ -338,7 +338,7 @@ def test_legacy_app_failed_remove(mocker, secondary_domain):
# The remove script runs with set -eu and attempt to remove this
# file without -f, so will fail if it's not there ;)
os.remove("/etc/nginx/conf.d/%s.d/%s.conf" % (secondary_domain, "legacy_app"))
os.remove("/etc/nginx/conf.d/{}.d/{}.conf".format(secondary_domain, "legacy_app"))
# TODO / FIXME : can't easily validate that 'app_not_properly_removed'
# is triggered for weird reasons ...

View file

@ -99,7 +99,7 @@ def test_registerurl():
app_install(
os.path.join(get_test_apps_dir(), "register_url_app_ynh"),
args="domain=%s&path=%s" % (maindomain, "/urlregisterapp"),
args="domain={}&path={}".format(maindomain, "/urlregisterapp"),
force=True,
)
@ -109,7 +109,7 @@ def test_registerurl():
with pytest.raises(YunohostError):
app_install(
os.path.join(get_test_apps_dir(), "register_url_app_ynh"),
args="domain=%s&path=%s" % (maindomain, "/urlregisterapp"),
args="domain={}&path={}".format(maindomain, "/urlregisterapp"),
force=True,
)
@ -119,7 +119,7 @@ def test_registerurl_baddomain():
with pytest.raises(YunohostError):
app_install(
os.path.join(get_test_apps_dir(), "register_url_app_ynh"),
args="domain=%s&path=%s" % ("yolo.swag", "/urlregisterapp"),
args="domain={}&path={}".format("yolo.swag", "/urlregisterapp"),
force=True,
)
@ -234,7 +234,7 @@ def test_normalize_permission_path_with_unknown_domain():
def test_normalize_permission_path_conflicting_path():
app_install(
os.path.join(get_test_apps_dir(), "register_url_app_ynh"),
args="domain=%s&path=%s" % (maindomain, "/url/registerapp"),
args="domain={}&path={}".format(maindomain, "/url/registerapp"),
force=True,
)

View file

@ -139,7 +139,7 @@ def app_is_installed(app):
# These are files we know should be installed by the app
app_files = []
app_files.append("/etc/nginx/conf.d/%s.d/%s.conf" % (maindomain, app))
app_files.append("/etc/nginx/conf.d/{}.d/{}.conf".format(maindomain, app))
app_files.append("/var/www/%s/index.html" % app)
app_files.append("/etc/importantfile")
@ -214,7 +214,7 @@ def install_app(app, path, additionnal_args=""):
app_install(
os.path.join(get_test_apps_dir(), app),
args="domain=%s&path=%s%s" % (maindomain, path, additionnal_args),
args="domain={}&path={}{}".format(maindomain, path, additionnal_args),
force=True,
)

View file

@ -26,7 +26,7 @@ def teardown_function(function):
def install_changeurl_app(path):
app_install(
os.path.join(get_test_apps_dir(), "change_url_app_ynh"),
args="domain=%s&path=%s" % (maindomain, path),
args="domain={}&path={}".format(maindomain, path),
force=True,
)

View file

@ -347,7 +347,7 @@ def check_permission_for_apps():
# {"bar", "foo"}
# and compare this to the list of installed apps ...
app_perms_prefix = set(p.split(".")[0] for p in app_perms)
app_perms_prefix = {p.split(".")[0] for p in app_perms}
assert set(_installed_apps()) == app_perms_prefix
@ -398,7 +398,7 @@ def test_permission_list():
assert res["wiki.main"]["allowed"] == ["all_users"]
assert res["blog.main"]["allowed"] == ["alice"]
assert res["blog.api"]["allowed"] == ["visitors"]
assert set(res["wiki.main"]["corresponding_users"]) == set(["alice", "bob"])
assert set(res["wiki.main"]["corresponding_users"]) == {"alice", "bob"}
assert res["blog.main"]["corresponding_users"] == ["alice"]
assert res["blog.api"]["corresponding_users"] == []
assert res["wiki.main"]["url"] == "/"
@ -442,7 +442,7 @@ def test_permission_create_main(mocker):
res = user_permission_list(full=True)["permissions"]
assert "site.main" in res
assert res["site.main"]["allowed"] == ["all_users"]
assert set(res["site.main"]["corresponding_users"]) == set(["alice", "bob"])
assert set(res["site.main"]["corresponding_users"]) == {"alice", "bob"}
assert res["site.main"]["protected"] is False
@ -630,8 +630,8 @@ def test_permission_add_group(mocker):
user_permission_update("wiki.main", add="alice")
res = user_permission_list(full=True)["permissions"]
assert set(res["wiki.main"]["allowed"]) == set(["all_users", "alice"])
assert set(res["wiki.main"]["corresponding_users"]) == set(["alice", "bob"])
assert set(res["wiki.main"]["allowed"]) == {"all_users", "alice"}
assert set(res["wiki.main"]["corresponding_users"]) == {"alice", "bob"}
def test_permission_remove_group(mocker):
@ -680,7 +680,7 @@ def test_permission_reset(mocker):
res = user_permission_list(full=True)["permissions"]
assert res["blog.main"]["allowed"] == ["all_users"]
assert set(res["blog.main"]["corresponding_users"]) == set(["alice", "bob"])
assert set(res["blog.main"]["corresponding_users"]) == {"alice", "bob"}
def test_permission_reset_idempotency():
@ -690,7 +690,7 @@ def test_permission_reset_idempotency():
res = user_permission_list(full=True)["permissions"]
assert res["blog.main"]["allowed"] == ["all_users"]
assert set(res["blog.main"]["corresponding_users"]) == set(["alice", "bob"])
assert set(res["blog.main"]["corresponding_users"]) == {"alice", "bob"}
def test_permission_change_label(mocker):
@ -1013,9 +1013,9 @@ def test_permission_app_install():
assert res["permissions_app.dev"]["url"] == "/dev"
assert res["permissions_app.main"]["allowed"] == ["all_users"]
assert set(res["permissions_app.main"]["corresponding_users"]) == set(
["alice", "bob"]
)
assert set(res["permissions_app.main"]["corresponding_users"]) == {
"alice", "bob"
}
assert res["permissions_app.admin"]["allowed"] == ["alice"]
assert res["permissions_app.admin"]["corresponding_users"] == ["alice"]

View file

@ -1977,7 +1977,7 @@ def test_question_file_from_api():
from base64 import b64encode
b64content = b64encode("helloworld".encode())
b64content = b64encode(b"helloworld")
questions = [
{
"name": "some_file",

View file

@ -281,7 +281,7 @@ def test_update_group_add_user(mocker):
user_group_update("dev", add=["bob"])
group_res = user_group_list()["groups"]
assert set(group_res["dev"]["members"]) == set(["alice", "bob"])
assert set(group_res["dev"]["members"]) == {"alice", "bob"}
def test_update_group_add_user_already_in(mocker):

View file

@ -224,7 +224,7 @@ def tools_postinstall(
disk_partitions = sorted(psutil.disk_partitions(), key=lambda k: k.mountpoint)
main_disk_partitions = [d for d in disk_partitions if d.mountpoint in ["/", "/var"]]
main_space = sum(
[psutil.disk_usage(d.mountpoint).total for d in main_disk_partitions]
psutil.disk_usage(d.mountpoint).total for d in main_disk_partitions
)
GB = 1024 ** 3
if not force_diskspace and main_space < 10 * GB:
@ -1107,7 +1107,7 @@ def _tools_migrations_run_before_app_restore(backup_version, app_id):
raise
class Migration(object):
class Migration:
# Those are to be implemented by daughter classes

View file

@ -97,7 +97,7 @@ def user_list(fields=None):
and values[0].strip() == "/bin/false",
}
attrs = set(["uid"])
attrs = {"uid"}
users = {}
if not fields:
@ -166,7 +166,7 @@ def user_create(
# On affiche les differents domaines possibles
Moulinette.display(m18n.n("domains_available"))
for domain in domain_list()["domains"]:
Moulinette.display("- {}".format(domain))
Moulinette.display(f"- {domain}")
maindomain = _get_maindomain()
domain = Moulinette.prompt(
@ -215,7 +215,7 @@ def user_create(
uid_guid_found = uid not in all_uid and uid not in all_gid
# Adapt values for LDAP
fullname = "%s %s" % (firstname, lastname)
fullname = f"{firstname} {lastname}"
attr_dict = {
"objectClass": [
@ -333,8 +333,8 @@ def user_delete(operation_logger, username, purge=False, from_import=False):
subprocess.call(["nscd", "-i", "passwd"])
if purge:
subprocess.call(["rm", "-rf", "/home/{0}".format(username)])
subprocess.call(["rm", "-rf", "/var/mail/{0}".format(username)])
subprocess.call(["rm", "-rf", f"/home/{username}"])
subprocess.call(["rm", "-rf", f"/var/mail/{username}"])
hook_callback("post_user_delete", args=[username, purge])
@ -1334,9 +1334,9 @@ def user_ssh_remove_key(username, key):
def _convertSize(num, suffix=""):
for unit in ["K", "M", "G", "T", "P", "E", "Z"]:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
return "{:3.1f}{}{}".format(num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, "Yi", suffix)
return "{:.1f}{}{}".format(num, "Yi", suffix)
def _hash_user_password(password):

View file

@ -678,7 +678,7 @@ class ConfigPanel:
yield (panel, section, option)
class Question(object):
class Question:
hide_user_input_in_prompt = False
pattern: Optional[Dict] = None

View file

@ -44,7 +44,7 @@ def get_public_ip(protocol=4):
):
ip = read_file(cache_file).strip()
ip = ip if ip else None # Empty file (empty string) means there's no IP
logger.debug("Reusing IPv%s from cache: %s" % (protocol, ip))
logger.debug(f"Reusing IPv{protocol} from cache: {ip}")
else:
ip = get_public_ip_from_remote_server(protocol)
logger.debug("IP fetched: %s" % ip)
@ -87,7 +87,7 @@ def get_public_ip_from_remote_server(protocol=4):
try:
return download_text(url, timeout=30).strip()
except Exception as e:
logger.debug("Could not get public IPv%s : %s" % (str(protocol), str(e)))
logger.debug(f"Could not get public IPv{protocol} : {e}")
return None

View file

@ -51,7 +51,7 @@ def assert_password_is_strong_enough(profile, password):
PasswordValidator(profile).validate(password)
class PasswordValidator(object):
class PasswordValidator:
def __init__(self, profile):
"""
Initialize a password validator.

View file

@ -49,7 +49,7 @@ def yunopaste(data):
raw_msg=True,
)
return "%s/raw/%s" % (paste_server, url)
return "{}/raw/{}".format(paste_server, url)
def anonymize(data):

View file

@ -38,7 +38,7 @@ def get_crt(
)
out, err = proc.communicate(cmd_input)
if proc.returncode != 0:
raise IOError("{0}\n{1}".format(err_msg, err))
raise IOError("{}\n{}".format(err_msg, err))
return out
# helper function - make request and automatically parse json response
@ -74,7 +74,7 @@ def get_crt(
raise IndexError(resp_data) # allow 100 retrys for bad nonces
if code not in [200, 201, 204]:
raise ValueError(
"{0}:\nUrl: {1}\nData: {2}\nResponse Code: {3}\nResponse: {4}".format(
"{}:\nUrl: {}\nData: {}\nResponse Code: {}\nResponse: {}".format(
err_msg, url, data, code, resp_data
)
)
@ -89,7 +89,7 @@ def get_crt(
{"jwk": jwk} if acct_headers is None else {"kid": acct_headers["Location"]}
)
protected64 = _b64(json.dumps(protected).encode("utf8"))
protected_input = "{0}.{1}".format(protected64, payload64).encode("utf8")
protected_input = "{}.{}".format(protected64, payload64).encode("utf8")
out = _cmd(
["openssl", "dgst", "-sha256", "-sign", account_key],
stdin=subprocess.PIPE,
@ -125,8 +125,8 @@ def get_crt(
pub_hex, pub_exp = re.search(
pub_pattern, out.decode("utf8"), re.MULTILINE | re.DOTALL
).groups()
pub_exp = "{0:x}".format(int(pub_exp))
pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
pub_exp = "{:x}".format(int(pub_exp))
pub_exp = "0{}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
alg = "RS256"
jwk = {
"e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))),
@ -140,9 +140,9 @@ def get_crt(
log.info("Parsing CSR...")
out = _cmd(
["openssl", "req", "-in", csr, "-noout", "-text"],
err_msg="Error loading {0}".format(csr),
err_msg="Error loading {}".format(csr),
)
domains = set([])
domains = set()
common_name = re.search(r"Subject:.*? CN\s?=\s?([^\s,;/]+)", out.decode("utf8"))
if common_name is not None:
domains.add(common_name.group(1))
@ -155,7 +155,7 @@ def get_crt(
for san in subject_alt_names.group(1).split(", "):
if san.startswith("DNS:"):
domains.add(san[4:])
log.info("Found domains: {0}".format(", ".join(domains)))
log.info("Found domains: {}".format(", ".join(domains)))
# get the ACME directory of urls
log.info("Getting directory...")
@ -178,7 +178,7 @@ def get_crt(
{"contact": contact},
"Error updating contact details",
)
log.info("Updated contact details:\n{0}".format("\n".join(account["contact"])))
log.info("Updated contact details:\n{}".format("\n".join(account["contact"])))
# create a new order
log.info("Creating new order...")
@ -194,46 +194,46 @@ def get_crt(
auth_url, None, "Error getting challenges"
)
domain = authorization["identifier"]["value"]
log.info("Verifying {0}...".format(domain))
log.info("Verifying {}...".format(domain))
# find the http-01 challenge and write the challenge file
challenge = [c for c in authorization["challenges"] if c["type"] == "http-01"][
0
]
token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge["token"])
keyauthorization = "{0}.{1}".format(token, thumbprint)
keyauthorization = "{}.{}".format(token, thumbprint)
wellknown_path = os.path.join(acme_dir, token)
with open(wellknown_path, "w") as wellknown_file:
wellknown_file.write(keyauthorization)
# check that the file is in place
try:
wellknown_url = "http://{0}/.well-known/acme-challenge/{1}".format(
wellknown_url = "http://{}/.well-known/acme-challenge/{}".format(
domain, token
)
assert disable_check or _do_request(wellknown_url)[0] == keyauthorization
except (AssertionError, ValueError) as e:
raise ValueError(
"Wrote file to {0}, but couldn't download {1}: {2}".format(
"Wrote file to {}, but couldn't download {}: {}".format(
wellknown_path, wellknown_url, e
)
)
# say the challenge is done
_send_signed_request(
challenge["url"], {}, "Error submitting challenges: {0}".format(domain)
challenge["url"], {}, "Error submitting challenges: {}".format(domain)
)
authorization = _poll_until_not(
auth_url,
["pending"],
"Error checking challenge status for {0}".format(domain),
"Error checking challenge status for {}".format(domain),
)
if authorization["status"] != "valid":
raise ValueError(
"Challenge did not pass for {0}: {1}".format(domain, authorization)
"Challenge did not pass for {}: {}".format(domain, authorization)
)
os.remove(wellknown_path)
log.info("{0} verified!".format(domain))
log.info("{} verified!".format(domain))
# finalize the order with the csr
log.info("Signing certificate...")
@ -251,7 +251,7 @@ def get_crt(
"Error checking order status",
)
if order["status"] != "valid":
raise ValueError("Order failed: {0}".format(order))
raise ValueError("Order failed: {}".format(order))
# download the certificate
certificate_pem, _, _ = _send_signed_request(

View file

@ -26,10 +26,10 @@ def find_inconsistencies(locale_file):
# Then we check that every "{stuff}" (for python's .format())
# should also be in the translated string, otherwise the .format
# will trigger an exception!
subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
subkeys_in_this_locale = set(
subkeys_in_ref = {k[0] for k in re.findall(r"{(\w+)(:\w)?}", string)}
subkeys_in_this_locale = {
k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key])
)
}
if any(k not in subkeys_in_ref for k in subkeys_in_this_locale):
yield """\n