Mirror of https://github.com/YunoHost/yunohost.git
Reflag some messages as info or debug
commit 91483f3896 (parent a4d75a2e40)

11 changed files with 67 additions and 67 deletions
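The change is mechanical throughout: user-facing progress messages drop from warning to info, and internal bookkeeping drops from info to debug. A minimal sketch using Python's stdlib logging (not moulinette's actual logger setup, which differs) illustrates why the level matters: handlers filter by level, so a demoted message leaves the admin-facing console but still reaches a verbose log file.

# Minimal sketch, stdlib logging only; moulinette's handler setup differs.
import logging
import sys

logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)

console = logging.StreamHandler(sys.stdout)  # what the admin sees
console.setLevel(logging.WARNING)
logfile = logging.FileHandler("demo.log")    # full operation log
logfile.setLevel(logging.DEBUG)
logger.addHandler(console)
logger.addHandler(logfile)

logger.warning("shown on the console and in the log file")
logger.info("log file only: high-level progress")
logger.debug("log file only: internal detail")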
@@ -566,7 +566,7 @@ def app_upgrade(auth, app=[], url=None, file=None):
     logger.info("Upgrading apps %s", ", ".join(app))
 
     for app_instance_name in apps:
-        logger.warning(m18n.n('app_upgrade_app_name', app=app_instance_name))
+        logger.info(m18n.n('app_upgrade_app_name', app=app_instance_name))
         installed = _is_installed(app_instance_name)
         if not installed:
             raise MoulinetteError(errno.ENOPKG,
@@ -1098,7 +1098,7 @@ def app_setting(app, key, value=None, delete=False):
         try:
             return app_settings[key]
         except:
-            logger.info("cannot get app setting '%s' for '%s'", key, app)
+            logger.debug("cannot get app setting '%s' for '%s'", key, app)
             return None
     else:
         if delete and key in app_settings:
@@ -1449,7 +1449,7 @@ def _extract_app_from_file(path, remove=False):
         Dict manifest
 
     """
-    logger.info(m18n.n('extracting'))
+    logger.debug(m18n.n('extracting'))
 
     if os.path.exists(APP_TMP_FOLDER):
         shutil.rmtree(APP_TMP_FOLDER)
@@ -1490,7 +1490,7 @@ def _extract_app_from_file(path, remove=False):
         raise MoulinetteError(errno.EINVAL,
                               m18n.n('app_manifest_invalid', error=e.strerror))
 
-    logger.info(m18n.n('done'))
+    logger.debug(m18n.n('done'))
 
     manifest['remote'] = {'type': 'file', 'path': path}
     return manifest, extracted_app_folder
@@ -1535,7 +1535,7 @@ def _fetch_app_from_git(app):
     if os.path.exists(app_tmp_archive):
         os.remove(app_tmp_archive)
 
-    logger.info(m18n.n('downloading'))
+    logger.debug(m18n.n('downloading'))
 
     if ('@' in app) or ('http://' in app) or ('https://' in app):
         url = app
@@ -1586,7 +1586,7 @@ def _fetch_app_from_git(app):
             raise MoulinetteError(errno.EIO,
                                   m18n.n('app_manifest_invalid', error=e.strerror))
        else:
-            logger.info(m18n.n('done'))
+            logger.debug(m18n.n('done'))
 
        # Store remote repository info into the returned manifest
        manifest['remote'] = {'type': 'git', 'url': url, 'branch': branch}
@@ -1643,7 +1643,7 @@ def _fetch_app_from_git(app):
             raise MoulinetteError(errno.EIO,
                                   m18n.n('app_manifest_invalid', error=e.strerror))
        else:
-            logger.info(m18n.n('done'))
+            logger.debug(m18n.n('done'))
 
        # Store remote repository info into the returned manifest
        manifest['remote'] = {
@@ -1766,7 +1766,7 @@ def _check_manifest_requirements(manifest, app_instance_name):
     elif not requirements:
         return
 
-    logger.info(m18n.n('app_requirements_checking', app=app_instance_name))
+    logger.debug(m18n.n('app_requirements_checking', app=app_instance_name))
 
     # Retrieve versions of each required package
     try:
@@ -1996,7 +1996,7 @@ def _migrate_appslist_system():
 
     for cron_path in legacy_crons:
         appslist_name = os.path.basename(cron_path).replace("yunohost-applist-", "")
-        logger.info(m18n.n('appslist_migrating', appslist=appslist_name))
+        logger.debug(m18n.n('appslist_migrating', appslist=appslist_name))
 
         # Parse appslist url in cron
         cron_file_content = open(cron_path).read().strip()
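Nearly every reflagged call wraps its message in m18n.n(...). A toy stand-in (the real lookup lives in moulinette; the catalog and strings below are invented for illustration) shows the key-plus-named-parameters pattern these log lines rely on:

# Toy stand-in for moulinette's m18n.n(); keys and strings are invented.
TRANSLATIONS = {
    'app_upgrade_app_name': "Now upgrading {app}...",
    'downloading': "Downloading...",
}

def n(key, **kwargs):
    """Return the localized string for key, with named parameters filled in."""
    return TRANSLATIONS[key].format(**kwargs)

print(n('app_upgrade_app_name', app='myapp'))  # Now upgrading myapp...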
@@ -577,7 +577,7 @@ class BackupManager():
         if system_targets == []:
             return
 
-        logger.info(m18n.n('backup_running_hooks'))
+        logger.debug(m18n.n('backup_running_hooks'))
 
         # Prepare environnement
         env_dict = self._get_env_var()
@@ -665,7 +665,7 @@ class BackupManager():
         tmp_app_bkp_dir = env_dict["YNH_APP_BACKUP_DIR"]
         settings_dir = os.path.join(self.work_dir, 'apps', app, 'settings')
 
-        logger.info(m18n.n('backup_running_app_script', app=app))
+        logger.debug(m18n.n('backup_running_app_script', app=app))
         try:
             # Prepare backup directory for the app
             filesystem.mkdir(tmp_app_bkp_dir, 0750, True, uid='admin')
@@ -722,9 +722,9 @@ class BackupManager():
         """Apply backup methods"""
 
         for method in self.methods:
-            logger.info(m18n.n('backup_applying_method_' + method.method_name))
+            logger.debug(m18n.n('backup_applying_method_' + method.method_name))
             method.mount_and_backup(self)
-            logger.info(m18n.n('backup_method_' + method.method_name + '_finished'))
+            logger.debug(m18n.n('backup_method_' + method.method_name + '_finished'))
 
     def _compute_backup_size(self):
         """
@@ -1125,7 +1125,7 @@ class RestoreManager():
         if system_targets == []:
             return
 
-        logger.info(m18n.n('restore_running_hooks'))
+        logger.debug(m18n.n('restore_running_hooks'))
 
         env_dict = self._get_env_var()
         ret = hook_callback('restore',
@@ -1210,7 +1210,7 @@ class RestoreManager():
             self.targets.set_result("apps", app_instance_name, "Warning")
             return
 
-        logger.info(m18n.n('restore_running_app_script', app=app_instance_name))
+        logger.debug(m18n.n('restore_running_app_script', app=app_instance_name))
         try:
             # Restore app settings
             app_settings_new_path = os.path.join('/etc/yunohost/apps/',
@@ -1582,7 +1582,7 @@ class BackupMethod(object):
                                   m18n.n('backup_unable_to_organize_files'))
 
         # Copy unbinded path
-        logger.info(m18n.n('backup_copying_to_organize_the_archive',
+        logger.debug(m18n.n('backup_copying_to_organize_the_archive',
                            size=str(size)))
         for path in paths_needed_to_be_copied:
             dest = os.path.join(self.work_dir, path['dest'])
@@ -1786,7 +1786,7 @@ class TarBackupMethod(BackupMethod):
         if ret != 0:
             logger.warning(m18n.n('backup_archive_mount_failed'))
 
-        logger.info(m18n.n("restore_extracting"))
+        logger.debug(m18n.n("restore_extracting"))
         tar = tarfile.open(self._archive_file, "r:gz")
         tar.extract('info.json', path=self.work_dir)
 
@@ -210,7 +210,7 @@ def _certificate_install_selfsigned(domain_list, force=False):
             raise MoulinetteError(
                 errno.EIO, m18n.n('domain_cert_gen_failed'))
         else:
-            logger.info(out)
+            logger.debug(out)
 
         # Link the CA cert (not sure it's actually needed in practice though,
         # since we append it at the end of crt.pem. For instance for Let's
@@ -485,11 +485,11 @@ location ^~ '/.well-known/acme-challenge'
 
     # Write the conf
     if os.path.exists(nginx_conf_file):
-        logger.info(
+        logger.debug(
            "Nginx configuration file for ACME challenge already exists for domain, skipping.")
        return
 
-    logger.info(
+    logger.debug(
        "Adding Nginx configuration file for Acme challenge for domain %s.", domain)
 
    with open(nginx_conf_file, "w") as f:
@@ -531,7 +531,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False):
     _regen_dnsmasq_if_needed()
 
     # Prepare certificate signing request
-    logger.info(
+    logger.debug(
         "Prepare key and certificate signing request (CSR) for %s...", domain)
 
     domain_key_file = "%s/%s.pem" % (TMP_FOLDER, domain)
@@ -541,7 +541,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False):
     _prepare_certificate_signing_request(domain, domain_key_file, TMP_FOLDER)
 
     # Sign the certificate
-    logger.info("Now using ACME Tiny to sign the certificate...")
+    logger.debug("Now using ACME Tiny to sign the certificate...")
 
     domain_csr_file = "%s/%s.csr" % (TMP_FOLDER, domain)
 
@@ -579,7 +579,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False):
         raise MoulinetteError(errno.EINVAL, m18n.n('certmanager_couldnt_fetch_intermediate_cert'))
 
     # Now save the key and signed certificate
-    logger.info("Saving the key and signed certificate...")
+    logger.debug("Saving the key and signed certificate...")
 
     # Create corresponding directory
     date_tag = datetime.now().strftime("%Y%m%d.%H%M%S")
@@ -642,7 +642,7 @@ def _prepare_certificate_signing_request(domain, key_file, output_folder):
 
     # Save the request in tmp folder
     csr_file = output_folder + domain + ".csr"
-    logger.info("Saving to %s.", csr_file)
+    logger.debug("Saving to %s.", csr_file)
 
     with open(csr_file, "w") as f:
         f.write(crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr))
@@ -753,7 +753,7 @@ def _get_status(domain):
 
 
 def _generate_account_key():
-    logger.info("Generating account key ...")
+    logger.debug("Generating account key ...")
     _generate_key(ACCOUNT_KEY_FILE)
     _set_permissions(ACCOUNT_KEY_FILE, "root", "root", 0400)
 
@@ -776,7 +776,7 @@ def _set_permissions(path, user, group, permissions):
 
 
 def _enable_certificate(domain, new_cert_folder):
-    logger.info("Enabling the certificate for domain %s ...", domain)
+    logger.debug("Enabling the certificate for domain %s ...", domain)
 
     live_link = os.path.join(CERT_FOLDER, domain)
 
@@ -793,7 +793,7 @@ def _enable_certificate(domain, new_cert_folder):
 
     os.symlink(new_cert_folder, live_link)
 
-    logger.info("Restarting services...")
+    logger.debug("Restarting services...")
 
     for service in ("postfix", "dovecot", "metronome"):
         _run_service_command("restart", service)
@@ -802,7 +802,7 @@ def _enable_certificate(domain, new_cert_folder):
 
 
 def _backup_current_cert(domain):
-    logger.info("Backuping existing certificate for domain %s", domain)
+    logger.debug("Backuping existing certificate for domain %s", domain)
 
     cert_folder_domain = os.path.join(CERT_FOLDER, domain)
 
@@ -30,10 +30,10 @@ class MyMigration(Migration):
             (domain, private_key_path) = _guess_current_dyndns_domain(dyn_host)
             assert "+157" in private_key_path
         except (MoulinetteError, AssertionError):
-            logger.warning(m18n.n("migrate_tsig_not_needed"))
+            logger.info(m18n.n("migrate_tsig_not_needed"))
             return
 
-        logger.warning(m18n.n('migrate_tsig_start', domain=domain))
+        logger.info(m18n.n('migrate_tsig_start', domain=domain))
         public_key_path = private_key_path.rsplit(".private", 1)[0] + ".key"
         public_key_md5 = open(public_key_path).read().strip().split(' ')[-1]
 
@@ -77,15 +77,15 @@ class MyMigration(Migration):
         os.system("mv /etc/yunohost/dyndns/*+157* /tmp")
 
         # sleep to wait for dyndns cache invalidation
-        logger.warning(m18n.n('migrate_tsig_wait'))
+        logger.info(m18n.n('migrate_tsig_wait'))
         time.sleep(60)
-        logger.warning(m18n.n('migrate_tsig_wait_2'))
+        logger.info(m18n.n('migrate_tsig_wait_2'))
         time.sleep(60)
-        logger.warning(m18n.n('migrate_tsig_wait_3'))
+        logger.info(m18n.n('migrate_tsig_wait_3'))
         time.sleep(30)
-        logger.warning(m18n.n('migrate_tsig_wait_4'))
+        logger.info(m18n.n('migrate_tsig_wait_4'))
         time.sleep(30)
 
-        logger.warning(m18n.n('migrate_tsig_end'))
+        logger.info(m18n.n('migrate_tsig_end'))
         return
 
@@ -36,12 +36,12 @@ class MyMigration(Migration):
 
         self.check_assertions()
 
-        logger.warning(m18n.n("migration_0003_start", logfile=self.logfile))
+        logger.info(m18n.n("migration_0003_start", logfile=self.logfile))
 
         # Preparing the upgrade
         self.restore_original_nginx_conf_if_needed()
 
-        logger.warning(m18n.n("migration_0003_patching_sources_list"))
+        logger.info(m18n.n("migration_0003_patching_sources_list"))
         self.patch_apt_sources_list()
         self.backup_files_to_keep()
         self.apt_update()
@@ -50,7 +50,7 @@ class MyMigration(Migration):
         self.hold(YUNOHOST_PACKAGES + apps_packages + ["fail2ban"])
 
         # Main dist-upgrade
-        logger.warning(m18n.n("migration_0003_main_upgrade"))
+        logger.info(m18n.n("migration_0003_main_upgrade"))
         _run_service_command("stop", "mysql")
         self.apt_dist_upgrade(conf_flags=["old", "miss", "def"])
         _run_service_command("start", "mysql")
@@ -58,7 +58,7 @@ class MyMigration(Migration):
             raise MoulinetteError(m18n.n("migration_0003_still_on_jessie_after_main_upgrade", log=self.logfile))
 
         # Specific upgrade for fail2ban...
-        logger.warning(m18n.n("migration_0003_fail2ban_upgrade"))
+        logger.info(m18n.n("migration_0003_fail2ban_upgrade"))
         self.unhold(["fail2ban"])
         # Don't move this if folder already exists. If it does, we probably are
         # running this script a 2nd, 3rd, ... time but /etc/fail2ban will
@@ -73,7 +73,7 @@ class MyMigration(Migration):
         os.system("apt clean --assume-yes")
 
         # Upgrade yunohost packages
-        logger.warning(m18n.n("migration_0003_yunohost_upgrade"))
+        logger.info(m18n.n("migration_0003_yunohost_upgrade"))
         self.restore_files_to_keep()
         self.unhold(YUNOHOST_PACKAGES + apps_packages)
         self.upgrade_yunohost_packages()
 
@@ -202,7 +202,7 @@ def domain_dns_conf(domain, ttl=None):
 
     is_cli = True if msettings.get('interface') == 'cli' else False
     if is_cli:
-        logger.warning(m18n.n("domain_dns_conf_is_just_a_recommendation"))
+        logger.info(m18n.n("domain_dns_conf_is_just_a_recommendation"))
 
     return result
 
@@ -141,7 +141,7 @@ def dyndns_subscribe(subscribe_host="dyndns.yunohost.org", domain=None, key=None
     if not os.path.exists('/etc/yunohost/dyndns'):
         os.makedirs('/etc/yunohost/dyndns')
 
-    logger.info(m18n.n('dyndns_key_generating'))
+    logger.debug(m18n.n('dyndns_key_generating'))
 
     os.system('cd /etc/yunohost/dyndns && '
               'dnssec-keygen -a hmac-sha512 -b 512 -r /dev/urandom -n USER %s' % domain)
@@ -288,7 +288,7 @@ def dyndns_update(dyn_host="dyndns.yunohost.org", domain=None, key=None,
     # to nsupdate as argument
     write_to_file(DYNDNS_ZONE, '\n'.join(lines))
 
-    logger.info("Now pushing new conf to DynDNS host...")
+    logger.debug("Now pushing new conf to DynDNS host...")
 
     try:
         command = ["/usr/bin/nsupdate", "-k", key, DYNDNS_ZONE]
 
@@ -305,7 +305,7 @@ def firewall_upnp(action='status', no_refresh=False):
 
     # Compatibility with previous version
     if action == 'reload':
-        logger.info("'reload' action is deprecated and will be removed")
+        logger.debug("'reload' action is deprecated and will be removed")
         try:
             # Remove old cron job
             os.remove('/etc/cron.d/yunohost-firewall')
@@ -357,7 +357,7 @@ def firewall_upnp(action='status', no_refresh=False):
             # Select UPnP device
             upnpc.selectigd()
         except:
-            logger.info('unable to select UPnP device', exc_info=1)
+            logger.debug('unable to select UPnP device', exc_info=1)
             enabled = False
         else:
             # Iterate over ports
@@ -376,7 +376,7 @@ def firewall_upnp(action='status', no_refresh=False):
                 upnpc.addportmapping(port, protocol, upnpc.lanaddr,
                                      port, 'yunohost firewall: port %d' % port, '')
             except:
-                logger.info('unable to add port %d using UPnP',
+                logger.debug('unable to add port %d using UPnP',
                             port, exc_info=1)
                 enabled = False
 
@@ -459,6 +459,6 @@ def _update_firewall_file(rules):
 def _on_rule_command_error(returncode, cmd, output):
     """Callback for rules commands error"""
     # Log error and continue commands execution
-    logger.info('"%s" returned non-zero exit status %d:\n%s',
-                cmd, returncode, prependlines(output.rstrip(), '> '))
+    logger.debug('"%s" returned non-zero exit status %d:\n%s',
+                 cmd, returncode, prependlines(output.rstrip(), '> '))
     return True
 
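The last firewall hunk above touches _on_rule_command_error, which logs a failed rule command and returns True so the remaining rules still run; with this commit the failure is recorded at debug level. A rough sketch of that callback contract (the runner and names here are illustrative, not yunohost's actual API):

import logging
import subprocess

logger = logging.getLogger("demo.firewall")

def on_rule_command_error(returncode, cmd, output):
    """Log the failure at debug level and keep executing the other rules."""
    logger.debug('"%s" returned non-zero exit status %d:\n%s',
                 cmd, returncode, output.rstrip())
    return True  # True means "continue"; False would abort the batch

def run_rules(commands, on_error):
    for cmd in commands:
        proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if proc.returncode != 0 and not on_error(proc.returncode, cmd,
                                                 proc.stdout + proc.stderr):
            break

run_rules(["true", "false", "echo still running"], on_rule_command_error)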
@@ -355,13 +355,13 @@ def hook_exec(path, args=None, raise_on_error=False, no_trace=False,
     command.append(cmd.format(script=cmd_script, args=cmd_args))
 
     if logger.isEnabledFor(log.DEBUG):
-        logger.info(m18n.n('executing_command', command=' '.join(command)))
+        logger.debug(m18n.n('executing_command', command=' '.join(command)))
     else:
-        logger.info(m18n.n('executing_script', script=path))
+        logger.debug(m18n.n('executing_script', script=path))
 
     # Define output callbacks and call command
     callbacks = (
-        lambda l: logger.info(l.rstrip()),
+        lambda l: logger.debug(l.rstrip()),
         lambda l: logger.warning(l.rstrip()),
     )
     returncode = call_async_output(
 
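In hook_exec above, both the "executing" announcement and the first output callback drop a level: a hook script's stdout is now forwarded line by line to logger.debug, while its stderr stays at logger.warning. A rough sketch of that callback split (run_with_callbacks is a stand-in, not moulinette's call_async_output):

import logging
import subprocess

logger = logging.getLogger("demo.hook")

def run_with_callbacks(command, on_stdout, on_stderr):
    """Run command and feed each output line to the matching callback."""
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, text=True)
    out, err = proc.communicate()
    for line in out.splitlines():
        on_stdout(line)
    for line in err.splitlines():
        on_stderr(line)
    return proc.returncode

returncode = run_with_callbacks(
    ["/bin/sh", "-c", "echo step done; echo oops >&2"],
    lambda l: logger.debug(l.rstrip()),    # stdout: now debug, as in the diff
    lambda l: logger.warning(l.rstrip()),  # stderr: still a warning
)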
@@ -126,7 +126,7 @@ def service_start(names):
                                   m18n.n('service_start_failed',
                                          service=name,
                                          logs=_get_journalctl_logs(name)))
-        logger.info(m18n.n('service_already_started', service=name))
+        logger.debug(m18n.n('service_already_started', service=name))
 
 
 def service_stop(names):
@@ -148,7 +148,7 @@ def service_stop(names):
                                   m18n.n('service_stop_failed',
                                          service=name,
                                          logs=_get_journalctl_logs(name)))
-        logger.info(m18n.n('service_already_stopped', service=name))
+        logger.debug(m18n.n('service_already_stopped', service=name))
 
 
 def service_enable(names):
@@ -416,7 +416,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
 
     # Iterate over services and process pending conf
     for service, conf_files in _get_pending_conf(names).items():
-        logger.info(m18n.n(
+        logger.debug(m18n.n(
             'service_regenconf_pending_applying' if not dry_run else
             'service_regenconf_dry_pending_applying',
             service=service))
@@ -459,7 +459,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
                     regenerated = _regen(
                         system_path, pending_path, save=False)
                 else:
-                    logger.warning(m18n.n(
+                    logger.info(m18n.n(
                         'service_conf_file_manually_removed',
                         conf=system_path))
                     conf_status = 'removed'
@@ -476,16 +476,16 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
                 # we assume that it is safe to regen it, since the file is backuped
                 # anyway (by default in _regen), as long as we warn the user
                 # appropriately.
-                logger.warning(m18n.n('service_conf_new_managed_file',
-                                      conf=system_path, service=service))
+                logger.info(m18n.n('service_conf_new_managed_file',
+                                   conf=system_path, service=service))
                 regenerated = _regen(system_path, pending_path)
                 conf_status = 'new'
             elif force:
                 regenerated = _regen(system_path)
                 conf_status = 'force-removed'
             else:
-                logger.warning(m18n.n('service_conf_file_kept_back',
-                                      conf=system_path, service=service))
+                logger.info(m18n.n('service_conf_file_kept_back',
+                                   conf=system_path, service=service))
                 conf_status = 'unmanaged'
 
         # -> system conf has not been manually modified
@@ -530,7 +530,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
 
         # Check for service conf changes
         if not succeed_regen and not failed_regen:
-            logger.info(m18n.n('service_conf_up_to_date', service=service))
+            logger.debug(m18n.n('service_conf_up_to_date', service=service))
             continue
         elif not failed_regen:
             logger.success(m18n.n(
@@ -865,13 +865,13 @@ def _process_regen_conf(system_conf, new_conf=None, save=True):
         filesystem.mkdir(backup_dir, 0755, True)
 
         shutil.copy2(system_conf, backup_path)
-        logger.info(m18n.n('service_conf_file_backed_up',
+        logger.debug(m18n.n('service_conf_file_backed_up',
                            conf=system_conf, backup=backup_path))
 
     try:
         if not new_conf:
             os.remove(system_conf)
-            logger.info(m18n.n('service_conf_file_removed',
+            logger.debug(m18n.n('service_conf_file_removed',
                                conf=system_conf))
         else:
             system_dir = os.path.dirname(system_conf)
@@ -880,8 +880,8 @@ def _process_regen_conf(system_conf, new_conf=None, save=True):
             filesystem.mkdir(system_dir, 0755, True)
 
             shutil.copyfile(new_conf, system_conf)
-            logger.info(m18n.n('service_conf_file_updated',
-                               conf=system_conf))
+            logger.debug(m18n.n('service_conf_file_updated',
+                                conf=system_conf))
     except Exception as e:
         logger.warning("Exception while trying to regenerate conf '%s': %s", system_conf, e, exc_info=1)
         if not new_conf and os.path.exists(system_conf):
 
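The _process_regen_conf hunks above demote the per-file bookkeeping messages; the function's backup-then-replace flow makes that safe, since every conf file is copied aside before being removed or overwritten. A condensed sketch of that flow (paths and the helper name are illustrative):

import logging
import os
import shutil

logger = logging.getLogger("demo.regen_conf")

def process_regen_conf(system_conf, new_conf=None, backup_dir="/tmp/conf-backups"):
    """Back up the current conf file, then remove or replace it."""
    os.makedirs(backup_dir, exist_ok=True)
    backup_path = os.path.join(backup_dir, os.path.basename(system_conf))

    # Keep a copy before touching anything, so quiet debug messages suffice.
    shutil.copy2(system_conf, backup_path)
    logger.debug("conf file '%s' backed up to '%s'", system_conf, backup_path)

    if not new_conf:
        os.remove(system_conf)
        logger.debug("conf file '%s' removed", system_conf)
    else:
        shutil.copyfile(new_conf, system_conf)
        logger.debug("conf file '%s' updated", system_conf)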
@@ -224,7 +224,7 @@ def _set_hostname(hostname, pretty_hostname=None):
         logger.warning(out)
         raise MoulinetteError(errno.EIO, m18n.n('domain_hostname_failed'))
     else:
-        logger.info(out)
+        logger.debug(out)
 
 
 def _is_inside_container():
@@ -424,7 +424,7 @@ def tools_update(ignore_apps=False, ignore_packages=False):
         cache = apt.Cache()
 
         # Update APT cache
-        logger.info(m18n.n('updating_apt_cache'))
+        logger.debug(m18n.n('updating_apt_cache'))
         if not cache.update():
             raise MoulinetteError(errno.EPERM, m18n.n('update_cache_failed'))
 
@@ -438,7 +438,7 @@ def tools_update(ignore_apps=False, ignore_packages=False):
                 'fullname': pkg.fullname,
                 'changelog': pkg.get_changelog()
             })
-        logger.info(m18n.n('done'))
+        logger.debug(m18n.n('done'))
 
         # "apps" will list upgradable packages
         apps = []