Merge pull request #487 from YunoHost/info-by-default

[enh] Verbose by default
commit 07b4ec49aa
Bram, 2018-06-13 20:07:28 +02:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
12 changed files with 73 additions and 79 deletions
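
In short: what previously required --verbose is now the default. The --verbose flag is dropped, the tty log level moves from SUCCESS to INFO (and the loggers' level from INFO to DEBUG), and log calls across the codebase are demoted one step to match: most logger.info() calls become logger.debug(), and informational logger.warning() calls become logger.info().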

View file

@@ -9,8 +9,8 @@ import argparse
 IN_DEVEL = False

 # Level for which loggers will log
-LOGGERS_LEVEL = 'INFO'
-TTY_LOG_LEVEL = 'SUCCESS'
+LOGGERS_LEVEL = 'DEBUG'
+TTY_LOG_LEVEL = 'INFO'

 # Handlers that will be used by loggers
 #   - file: log to the file LOG_DIR/LOG_FILE
@@ -58,10 +58,6 @@ def _parse_cli_args():
                         action='store_true', default=False,
                         help="Log and print debug messages",
     )
-    parser.add_argument('--verbose',
-                        action='store_true', default=False,
-                        help="Be more verbose in the output",
-    )
     parser.add_argument('--quiet',
                         action='store_true', default=False,
                         help="Don't produce any output",
@@ -92,13 +88,13 @@ def _parse_cli_args():
     return (parser, opts, args)


-def _init_moulinette(debug=False, verbose=False, quiet=False):
+def _init_moulinette(debug=False, quiet=False):
     """Configure logging and initialize the moulinette"""
     # Define loggers handlers
     handlers = set(LOGGERS_HANDLERS)
     if quiet and 'tty' in handlers:
         handlers.remove('tty')
-    elif verbose and 'tty' not in handlers:
+    elif 'tty' not in handlers:
         handlers.append('tty')

     root_handlers = set(handlers)
@@ -108,10 +104,8 @@ def _init_moulinette(debug=False, verbose=False, quiet=False):
     # Define loggers level
     level = LOGGERS_LEVEL
     tty_level = TTY_LOG_LEVEL
-    if verbose:
-        tty_level = 'INFO'
     if debug:
-        tty_level = level = 'DEBUG'
+        tty_level = 'DEBUG'

     # Custom logging configuration
     logging = {
@@ -196,7 +190,7 @@ if __name__ == '__main__':
        sys.exit(1)

    parser, opts, args = _parse_cli_args()
-    _init_moulinette(opts.debug, opts.verbose, opts.quiet)
+    _init_moulinette(opts.debug, opts.quiet)

    # Check that YunoHost is installed
    if not os.path.isfile('/etc/yunohost/installed') and \
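
For illustration only (not part of the diff): under the new defaults a plain run records DEBUG-level messages in the log file while the tty shows INFO and above; --debug raises the tty handler to DEBUG, and --quiet drops the tty handler entirely. A minimal sketch of that behavior with the standard logging module — moulinette's real config also defines a custom SUCCESS level and named handlers that are left out here:

```python
import logging
import sys


def init_logging(debug=False, quiet=False):
    # Rough equivalent of the new _init_moulinette() defaults.
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)  # LOGGERS_LEVEL is now 'DEBUG'

    # File handler: always records everything down to DEBUG.
    file_handler = logging.FileHandler('/tmp/example.log')
    file_handler.setLevel(logging.DEBUG)
    root.addHandler(file_handler)

    # TTY handler: INFO by default (was SUCCESS, a custom moulinette
    # level), DEBUG with --debug, removed entirely with --quiet.
    if not quiet:
        tty_handler = logging.StreamHandler(sys.stdout)
        tty_handler.setLevel(logging.DEBUG if debug else logging.INFO)
        root.addHandler(tty_handler)


init_logging()
logging.getLogger(__name__).info("shown on the tty by default now")
logging.getLogger(__name__).debug("log file only, unless --debug")
```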

View file

@@ -566,7 +566,7 @@ def app_upgrade(auth, app=[], url=None, file=None):
    logger.info("Upgrading apps %s", ", ".join(app))

    for app_instance_name in apps:
-        logger.warning(m18n.n('app_upgrade_app_name', app=app_instance_name))
+        logger.info(m18n.n('app_upgrade_app_name', app=app_instance_name))
        installed = _is_installed(app_instance_name)
        if not installed:
            raise MoulinetteError(errno.ENOPKG,
@@ -1098,7 +1098,7 @@ def app_setting(app, key, value=None, delete=False):
        try:
            return app_settings[key]
        except:
-            logger.info("cannot get app setting '%s' for '%s'", key, app)
+            logger.debug("cannot get app setting '%s' for '%s'", key, app)
            return None
    else:
        if delete and key in app_settings:
@@ -1449,7 +1449,7 @@ def _extract_app_from_file(path, remove=False):
        Dict manifest

    """
-    logger.info(m18n.n('extracting'))
+    logger.debug(m18n.n('extracting'))

    if os.path.exists(APP_TMP_FOLDER):
        shutil.rmtree(APP_TMP_FOLDER)
@@ -1490,7 +1490,7 @@ def _extract_app_from_file(path, remove=False):
        raise MoulinetteError(errno.EINVAL,
                              m18n.n('app_manifest_invalid', error=e.strerror))

-    logger.info(m18n.n('done'))
+    logger.debug(m18n.n('done'))

    manifest['remote'] = {'type': 'file', 'path': path}
    return manifest, extracted_app_folder
@@ -1535,7 +1535,7 @@ def _fetch_app_from_git(app):
    if os.path.exists(app_tmp_archive):
        os.remove(app_tmp_archive)

-    logger.info(m18n.n('downloading'))
+    logger.debug(m18n.n('downloading'))

    if ('@' in app) or ('http://' in app) or ('https://' in app):
        url = app
@@ -1586,7 +1586,7 @@ def _fetch_app_from_git(app):
        raise MoulinetteError(errno.EIO,
                              m18n.n('app_manifest_invalid', error=e.strerror))
    else:
-        logger.info(m18n.n('done'))
+        logger.debug(m18n.n('done'))

    # Store remote repository info into the returned manifest
    manifest['remote'] = {'type': 'git', 'url': url, 'branch': branch}
@@ -1643,7 +1643,7 @@ def _fetch_app_from_git(app):
        raise MoulinetteError(errno.EIO,
                              m18n.n('app_manifest_invalid', error=e.strerror))
    else:
-        logger.info(m18n.n('done'))
+        logger.debug(m18n.n('done'))

    # Store remote repository info into the returned manifest
    manifest['remote'] = {
@@ -1766,7 +1766,7 @@ def _check_manifest_requirements(manifest, app_instance_name):
    elif not requirements:
        return

-    logger.info(m18n.n('app_requirements_checking', app=app_instance_name))
+    logger.debug(m18n.n('app_requirements_checking', app=app_instance_name))

    # Retrieve versions of each required package
    try:
@@ -1996,7 +1996,7 @@ def _migrate_appslist_system():
    for cron_path in legacy_crons:
        appslist_name = os.path.basename(cron_path).replace("yunohost-applist-", "")

-        logger.info(m18n.n('appslist_migrating', appslist=appslist_name))
+        logger.debug(m18n.n('appslist_migrating', appslist=appslist_name))

        # Parse appslist url in cron
        cron_file_content = open(cron_path).read().strip()

View file

@@ -577,7 +577,7 @@ class BackupManager():
        if system_targets == []:
            return

-        logger.info(m18n.n('backup_running_hooks'))
+        logger.debug(m18n.n('backup_running_hooks'))

        # Prepare environnement
        env_dict = self._get_env_var()
@@ -665,7 +665,7 @@ class BackupManager():
        tmp_app_bkp_dir = env_dict["YNH_APP_BACKUP_DIR"]
        settings_dir = os.path.join(self.work_dir, 'apps', app, 'settings')

-        logger.info(m18n.n('backup_running_app_script', app=app))
+        logger.debug(m18n.n('backup_running_app_script', app=app))
        try:
            # Prepare backup directory for the app
            filesystem.mkdir(tmp_app_bkp_dir, 0750, True, uid='admin')
@@ -722,9 +722,9 @@ class BackupManager():
        """Apply backup methods"""

        for method in self.methods:
-            logger.info(m18n.n('backup_applying_method_' + method.method_name))
+            logger.debug(m18n.n('backup_applying_method_' + method.method_name))
            method.mount_and_backup(self)
-            logger.info(m18n.n('backup_method_' + method.method_name + '_finished'))
+            logger.debug(m18n.n('backup_method_' + method.method_name + '_finished'))

    def _compute_backup_size(self):
        """
@@ -1125,7 +1125,7 @@ class RestoreManager():
        if system_targets == []:
            return

-        logger.info(m18n.n('restore_running_hooks'))
+        logger.debug(m18n.n('restore_running_hooks'))

        env_dict = self._get_env_var()
        ret = hook_callback('restore',
@@ -1210,7 +1210,7 @@ class RestoreManager():
            self.targets.set_result("apps", app_instance_name, "Warning")
            return

-        logger.info(m18n.n('restore_running_app_script', app=app_instance_name))
+        logger.debug(m18n.n('restore_running_app_script', app=app_instance_name))
        try:
            # Restore app settings
            app_settings_new_path = os.path.join('/etc/yunohost/apps/',
@@ -1582,7 +1582,7 @@ class BackupMethod(object):
                                  m18n.n('backup_unable_to_organize_files'))

        # Copy unbinded path
-        logger.info(m18n.n('backup_copying_to_organize_the_archive',
+        logger.debug(m18n.n('backup_copying_to_organize_the_archive',
                           size=str(size)))
        for path in paths_needed_to_be_copied:
            dest = os.path.join(self.work_dir, path['dest'])
@@ -1786,7 +1786,7 @@ class TarBackupMethod(BackupMethod):

        if ret != 0:
            logger.warning(m18n.n('backup_archive_mount_failed'))

-            logger.info(m18n.n("restore_extracting"))
+            logger.debug(m18n.n("restore_extracting"))
            tar = tarfile.open(self._archive_file, "r:gz")
            tar.extract('info.json', path=self.work_dir)

View file

@@ -210,7 +210,7 @@ def _certificate_install_selfsigned(domain_list, force=False):
            raise MoulinetteError(
                errno.EIO, m18n.n('domain_cert_gen_failed'))
        else:
-            logger.info(out)
+            logger.debug(out)

        # Link the CA cert (not sure it's actually needed in practice though,
        # since we append it at the end of crt.pem. For instance for Let's
@@ -485,11 +485,11 @@ location ^~ '/.well-known/acme-challenge'

    # Write the conf
    if os.path.exists(nginx_conf_file):
-        logger.info(
+        logger.debug(
            "Nginx configuration file for ACME challenge already exists for domain, skipping.")
        return

-    logger.info(
+    logger.debug(
        "Adding Nginx configuration file for Acme challenge for domain %s.", domain)

    with open(nginx_conf_file, "w") as f:
@@ -531,7 +531,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False):
    _regen_dnsmasq_if_needed()

    # Prepare certificate signing request
-    logger.info(
+    logger.debug(
        "Prepare key and certificate signing request (CSR) for %s...", domain)

    domain_key_file = "%s/%s.pem" % (TMP_FOLDER, domain)
@@ -541,7 +541,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False):
    _prepare_certificate_signing_request(domain, domain_key_file, TMP_FOLDER)

    # Sign the certificate
-    logger.info("Now using ACME Tiny to sign the certificate...")
+    logger.debug("Now using ACME Tiny to sign the certificate...")

    domain_csr_file = "%s/%s.csr" % (TMP_FOLDER, domain)
@@ -579,7 +579,7 @@ def _fetch_and_enable_new_certificate(domain, staging=False):
        raise MoulinetteError(errno.EINVAL, m18n.n('certmanager_couldnt_fetch_intermediate_cert'))

    # Now save the key and signed certificate
-    logger.info("Saving the key and signed certificate...")
+    logger.debug("Saving the key and signed certificate...")

    # Create corresponding directory
    date_tag = datetime.now().strftime("%Y%m%d.%H%M%S")
@@ -642,7 +642,7 @@ def _prepare_certificate_signing_request(domain, key_file, output_folder):
    # Save the request in tmp folder
    csr_file = output_folder + domain + ".csr"

-    logger.info("Saving to %s.", csr_file)
+    logger.debug("Saving to %s.", csr_file)

    with open(csr_file, "w") as f:
        f.write(crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr))
@@ -753,7 +753,7 @@ def _get_status(domain):

def _generate_account_key():
-    logger.info("Generating account key ...")
+    logger.debug("Generating account key ...")
    _generate_key(ACCOUNT_KEY_FILE)
    _set_permissions(ACCOUNT_KEY_FILE, "root", "root", 0400)
@@ -776,7 +776,7 @@ def _set_permissions(path, user, group, permissions):

def _enable_certificate(domain, new_cert_folder):
-    logger.info("Enabling the certificate for domain %s ...", domain)
+    logger.debug("Enabling the certificate for domain %s ...", domain)

    live_link = os.path.join(CERT_FOLDER, domain)
@@ -793,7 +793,7 @@ def _enable_certificate(domain, new_cert_folder):
    os.symlink(new_cert_folder, live_link)

-    logger.info("Restarting services...")
+    logger.debug("Restarting services...")

    for service in ("postfix", "dovecot", "metronome"):
        _run_service_command("restart", service)
@@ -802,7 +802,7 @@ def _enable_certificate(domain, new_cert_folder):

def _backup_current_cert(domain):
-    logger.info("Backuping existing certificate for domain %s", domain)
+    logger.debug("Backuping existing certificate for domain %s", domain)

    cert_folder_domain = os.path.join(CERT_FOLDER, domain)

View file

@@ -30,10 +30,10 @@ class MyMigration(Migration):
            (domain, private_key_path) = _guess_current_dyndns_domain(dyn_host)
            assert "+157" in private_key_path
        except (MoulinetteError, AssertionError):
-            logger.warning(m18n.n("migrate_tsig_not_needed"))
+            logger.info(m18n.n("migrate_tsig_not_needed"))
            return

-        logger.warning(m18n.n('migrate_tsig_start', domain=domain))
+        logger.info(m18n.n('migrate_tsig_start', domain=domain))
        public_key_path = private_key_path.rsplit(".private", 1)[0] + ".key"
        public_key_md5 = open(public_key_path).read().strip().split(' ')[-1]
@@ -77,15 +77,15 @@ class MyMigration(Migration):
        os.system("mv /etc/yunohost/dyndns/*+157* /tmp")

        # sleep to wait for dyndns cache invalidation
-        logger.warning(m18n.n('migrate_tsig_wait'))
+        logger.info(m18n.n('migrate_tsig_wait'))
        time.sleep(60)
-        logger.warning(m18n.n('migrate_tsig_wait_2'))
+        logger.info(m18n.n('migrate_tsig_wait_2'))
        time.sleep(60)
-        logger.warning(m18n.n('migrate_tsig_wait_3'))
+        logger.info(m18n.n('migrate_tsig_wait_3'))
        time.sleep(30)
-        logger.warning(m18n.n('migrate_tsig_wait_4'))
+        logger.info(m18n.n('migrate_tsig_wait_4'))
        time.sleep(30)

-        logger.warning(m18n.n('migrate_tsig_end'))
+        logger.info(m18n.n('migrate_tsig_end'))
        return

View file

@@ -37,12 +37,12 @@ class MyMigration(Migration):

        self.check_assertions()

-        logger.warning(m18n.n("migration_0003_start", logfile=self.logfile))
+        logger.info(m18n.n("migration_0003_start", logfile=self.logfile))

        # Preparing the upgrade
        self.restore_original_nginx_conf_if_needed()

-        logger.warning(m18n.n("migration_0003_patching_sources_list"))
+        logger.info(m18n.n("migration_0003_patching_sources_list"))
        self.patch_apt_sources_list()
        self.backup_files_to_keep()
        self.apt_update()
@@ -51,7 +51,7 @@ class MyMigration(Migration):
        self.hold(YUNOHOST_PACKAGES + apps_packages + ["fail2ban"])

        # Main dist-upgrade
-        logger.warning(m18n.n("migration_0003_main_upgrade"))
+        logger.info(m18n.n("migration_0003_main_upgrade"))
        _run_service_command("stop", "mysql")
        self.apt_dist_upgrade(conf_flags=["old", "miss", "def"])
        _run_service_command("start", "mysql")
@@ -59,7 +59,7 @@ class MyMigration(Migration):
            raise MoulinetteError(m18n.n("migration_0003_still_on_jessie_after_main_upgrade", log=self.logfile))

        # Specific upgrade for fail2ban...
-        logger.warning(m18n.n("migration_0003_fail2ban_upgrade"))
+        logger.info(m18n.n("migration_0003_fail2ban_upgrade"))
        self.unhold(["fail2ban"])

        # Don't move this if folder already exists. If it does, we probably are
        # running this script a 2nd, 3rd, ... time but /etc/fail2ban will
@@ -79,7 +79,7 @@ class MyMigration(Migration):
        firewall_disallow("Both", 465)

        # Upgrade yunohost packages
-        logger.warning(m18n.n("migration_0003_yunohost_upgrade"))
+        logger.info(m18n.n("migration_0003_yunohost_upgrade"))
        self.restore_files_to_keep()
        self.unhold(YUNOHOST_PACKAGES + apps_packages)
        self.upgrade_yunohost_packages()

View file

@@ -202,7 +202,7 @@ def domain_dns_conf(domain, ttl=None):

    is_cli = True if msettings.get('interface') == 'cli' else False
    if is_cli:
-        logger.warning(m18n.n("domain_dns_conf_is_just_a_recommendation"))
+        logger.info(m18n.n("domain_dns_conf_is_just_a_recommendation"))

    return result

View file

@@ -141,7 +141,7 @@ def dyndns_subscribe(subscribe_host="dyndns.yunohost.org", domain=None, key=None
        if not os.path.exists('/etc/yunohost/dyndns'):
            os.makedirs('/etc/yunohost/dyndns')

-        logger.info(m18n.n('dyndns_key_generating'))
+        logger.debug(m18n.n('dyndns_key_generating'))

        os.system('cd /etc/yunohost/dyndns && '
                  'dnssec-keygen -a hmac-sha512 -b 512 -r /dev/urandom -n USER %s' % domain)
@@ -288,7 +288,7 @@ def dyndns_update(dyn_host="dyndns.yunohost.org", domain=None, key=None,
    # to nsupdate as argument
    write_to_file(DYNDNS_ZONE, '\n'.join(lines))

-    logger.info("Now pushing new conf to DynDNS host...")
+    logger.debug("Now pushing new conf to DynDNS host...")

    try:
        command = ["/usr/bin/nsupdate", "-k", key, DYNDNS_ZONE]

View file

@@ -305,7 +305,7 @@ def firewall_upnp(action='status', no_refresh=False):

    # Compatibility with previous version
    if action == 'reload':
-        logger.info("'reload' action is deprecated and will be removed")
+        logger.debug("'reload' action is deprecated and will be removed")
        try:
            # Remove old cron job
            os.remove('/etc/cron.d/yunohost-firewall')
@@ -357,7 +357,7 @@ def firewall_upnp(action='status', no_refresh=False):
            # Select UPnP device
            upnpc.selectigd()
        except:
-            logger.info('unable to select UPnP device', exc_info=1)
+            logger.debug('unable to select UPnP device', exc_info=1)
            enabled = False
        else:
            # Iterate over ports
@@ -376,7 +376,7 @@ def firewall_upnp(action='status', no_refresh=False):
                    upnpc.addportmapping(port, protocol, upnpc.lanaddr,
                                         port, 'yunohost firewall: port %d' % port, '')
                except:
-                    logger.info('unable to add port %d using UPnP',
+                    logger.debug('unable to add port %d using UPnP',
                                 port, exc_info=1)
                    enabled = False
@@ -459,6 +459,6 @@ def _update_firewall_file(rules):
def _on_rule_command_error(returncode, cmd, output):
    """Callback for rules commands error"""
    # Log error and continue commands execution
-    logger.info('"%s" returned non-zero exit status %d:\n%s',
+    logger.debug('"%s" returned non-zero exit status %d:\n%s',
                 cmd, returncode, prependlines(output.rstrip(), '> '))
    return True

View file

@@ -355,13 +355,13 @@ def hook_exec(path, args=None, raise_on_error=False, no_trace=False,
    command.append(cmd.format(script=cmd_script, args=cmd_args))

    if logger.isEnabledFor(log.DEBUG):
-        logger.info(m18n.n('executing_command', command=' '.join(command)))
+        logger.debug(m18n.n('executing_command', command=' '.join(command)))
    else:
-        logger.info(m18n.n('executing_script', script=path))
+        logger.debug(m18n.n('executing_script', script=path))

    # Define output callbacks and call command
    callbacks = (
-        lambda l: logger.info(l.rstrip()),
+        lambda l: logger.debug(l.rstrip()),
        lambda l: logger.warning(l.rstrip()),
    )
    returncode = call_async_output(
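
Aside, for context (not part of the diff): hook_exec feeds each line of a hook's output streams to a per-stream callback, so demoting the stdout callback from info to debug keeps script chatter out of the tty while stderr lines still surface as warnings. A self-contained sketch of the same pattern — run_with_callbacks is a hypothetical stand-in for moulinette's call_async_output:

```python
import logging
import subprocess

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('hook')


def run_with_callbacks(command, on_stdout, on_stderr):
    # Hypothetical stand-in for moulinette's call_async_output():
    # feed each output line of `command` to a per-stream callback.
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, universal_newlines=True)
    out, err = proc.communicate()
    for line in out.splitlines():
        on_stdout(line)
    for line in err.splitlines():
        on_stderr(line)
    return proc.returncode


# As in the patched hook_exec(): stdout -> debug (log file only by
# default), stderr -> warning (still shown on the tty).
returncode = run_with_callbacks(
    ['sh', '-c', 'echo doing stuff; echo oops >&2'],
    lambda l: logger.debug(l.rstrip()),
    lambda l: logger.warning(l.rstrip()),
)
```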

View file

@@ -126,7 +126,7 @@ def service_start(names):
                m18n.n('service_start_failed',
                       service=name,
                       logs=_get_journalctl_logs(name)))
-        logger.info(m18n.n('service_already_started', service=name))
+        logger.debug(m18n.n('service_already_started', service=name))


def service_stop(names):
@@ -148,7 +148,7 @@ def service_stop(names):
                m18n.n('service_stop_failed',
                       service=name,
                       logs=_get_journalctl_logs(name)))
-        logger.info(m18n.n('service_already_stopped', service=name))
+        logger.debug(m18n.n('service_already_stopped', service=name))


def service_enable(names):
@@ -416,7 +416,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,

    # Iterate over services and process pending conf
    for service, conf_files in _get_pending_conf(names).items():
-        logger.info(m18n.n(
+        logger.debug(m18n.n(
            'service_regenconf_pending_applying' if not dry_run else
            'service_regenconf_dry_pending_applying',
            service=service))
@@ -459,7 +459,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
                    regenerated = _regen(
                        system_path, pending_path, save=False)
                else:
-                    logger.warning(m18n.n(
+                    logger.info(m18n.n(
                        'service_conf_file_manually_removed',
                        conf=system_path))
                    conf_status = 'removed'
@@ -476,7 +476,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
                # we assume that it is safe to regen it, since the file is backuped
                # anyway (by default in _regen), as long as we warn the user
                # appropriately.
-                logger.warning(m18n.n('service_conf_new_managed_file',
+                logger.info(m18n.n('service_conf_new_managed_file',
                                      conf=system_path, service=service))
                regenerated = _regen(system_path, pending_path)
                conf_status = 'new'
@@ -484,7 +484,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,
                regenerated = _regen(system_path)
                conf_status = 'force-removed'
            else:
-                logger.warning(m18n.n('service_conf_file_kept_back',
+                logger.info(m18n.n('service_conf_file_kept_back',
                                      conf=system_path, service=service))
                conf_status = 'unmanaged'
@@ -530,7 +530,7 @@ def service_regen_conf(names=[], with_diff=False, force=False, dry_run=False,

        # Check for service conf changes
        if not succeed_regen and not failed_regen:
-            logger.info(m18n.n('service_conf_up_to_date', service=service))
+            logger.debug(m18n.n('service_conf_up_to_date', service=service))
            continue
        elif not failed_regen:
            logger.success(m18n.n(
@@ -865,13 +865,13 @@ def _process_regen_conf(system_conf, new_conf=None, save=True):
            filesystem.mkdir(backup_dir, 0755, True)

        shutil.copy2(system_conf, backup_path)
-        logger.info(m18n.n('service_conf_file_backed_up',
+        logger.debug(m18n.n('service_conf_file_backed_up',
                           conf=system_conf, backup=backup_path))

    try:
        if not new_conf:
            os.remove(system_conf)
-            logger.info(m18n.n('service_conf_file_removed',
+            logger.debug(m18n.n('service_conf_file_removed',
                               conf=system_conf))
        else:
            system_dir = os.path.dirname(system_conf)
@@ -880,7 +880,7 @@ def _process_regen_conf(system_conf, new_conf=None, save=True):
                filesystem.mkdir(system_dir, 0755, True)

            shutil.copyfile(new_conf, system_conf)
-            logger.info(m18n.n('service_conf_file_updated',
+            logger.debug(m18n.n('service_conf_file_updated',
                               conf=system_conf))
    except Exception as e:
        logger.warning("Exception while trying to regenerate conf '%s': %s", system_conf, e, exc_info=1)

View file

@@ -224,7 +224,7 @@ def _set_hostname(hostname, pretty_hostname=None):
        logger.warning(out)
        raise MoulinetteError(errno.EIO, m18n.n('domain_hostname_failed'))
    else:
-        logger.info(out)
+        logger.debug(out)


def _is_inside_container():
@@ -424,7 +424,7 @@ def tools_update(ignore_apps=False, ignore_packages=False):
    cache = apt.Cache()

    # Update APT cache
-    logger.info(m18n.n('updating_apt_cache'))
+    logger.debug(m18n.n('updating_apt_cache'))
    if not cache.update():
        raise MoulinetteError(errno.EPERM, m18n.n('update_cache_failed'))
@@ -438,7 +438,7 @@ def tools_update(ignore_apps=False, ignore_packages=False):
                'fullname': pkg.fullname,
                'changelog': pkg.get_changelog()
            })
-    logger.info(m18n.n('done'))
+    logger.debug(m18n.n('done'))

    # "apps" will list upgradable packages
    apps = []