Mirror of https://github.com/YunoHost/yunohost.git, synced 2024-09-03 20:06:10 +02:00
Commit message:

    autopep8 --in-place -a -a -a --ignore E402,E501,E722 -r src/yunohost/{__init__.py,certificate.py,diagnosis.py,domain.py,dyndns.py,firewall.py,hook.py,log.py,regenconf.py,service.py,settings.py,ssh.py,tools.py}

Commit: 49f6394233
Parent: ecbd63636b
9 changed files with 40 additions and 35 deletions
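The commit is a pure autopep8 pass over the listed modules, with E402 (imports not at top of file), E501 (long lines) and E722 (bare except) deliberately left alone. For reference, the same kind of cleanup can be reproduced from Python; a minimal sketch, assuming autopep8 is installed and that its documented fix_code() helper accepts option keys mirroring the CLI flags (the sample source string is made up):

    import autopep8

    messy = "x=[ 1,2 ]\nif not x in ([1],[2]):\n    pass\n"  # hypothetical sample source
    cleaned = autopep8.fix_code(
        messy,
        options={"aggressive": 3, "ignore": ["E402", "E501", "E722"]},
    )
    print(cleaned)  # expected roughly: x = [1, 2] / if x not in ([1], [2]): / pass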
@@ -91,7 +91,7 @@ def init_logging(interface="cli",
     logfile = os.path.join(logdir, "yunohost-%s.log" % interface)

     if not os.path.isdir(logdir):
-        os.makedirs(logdir, 0750)
+        os.makedirs(logdir, 0o750)

     # ####################################################################### #
     # Logging configuration for CLI (or any other interface than api...) #
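The one substantive-looking change in this hunk is the octal literal: 0750 is the old Python 2 spelling and a SyntaxError under Python 3, while 0o750 is accepted by Python 2.6+ and 3. A small self-contained check (the directory path is hypothetical):

    import os
    import stat

    mode = 0o750  # rwxr-x---; the legacy "0750" spelling is rejected by Python 3's parser
    assert mode == 488
    assert stat.filemode(mode | stat.S_IFDIR) == "drwxr-x---"

    logdir = "/tmp/example-logdir"  # hypothetical path standing in for the real logdir
    if not os.path.isdir(logdir):
        os.makedirs(logdir, 0o750)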
@@ -176,7 +176,7 @@ def diagnosis_run(categories=[], force=False, except_if_never_ran_yet=False, ema
             code, report = hook_exec(path, args={"force": force}, env=None)
         except Exception:
             import traceback
-            logger.error(m18n.n("diagnosis_failed_for_category", category=category, error='\n'+traceback.format_exc()))
+            logger.error(m18n.n("diagnosis_failed_for_category", category=category, error='\n' + traceback.format_exc()))
         else:
             diagnosed_categories.append(category)
             if report != {}:
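Most of the remaining hunks are pure whitespace normalization around operators and after commas (pycodestyle's E225/E226/E231 family); the resulting strings and behaviour are identical. A tiny illustration of the change above:

    import traceback

    try:
        raise RuntimeError("boom")  # hypothetical error, only to obtain a traceback
    except Exception:
        # before: error='\n'+traceback.format_exc()   (no spaces around '+')
        # after:  error='\n' + traceback.format_exc()  (same string either way)
        msg = '\n' + traceback.format_exc()

    assert msg.splitlines()[1] == "Traceback (most recent call last):"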
@@ -407,7 +407,7 @@ class Diagnoser():
         warnings = [item for item in new_report["items"] if item["status"] == "WARNING" and not item["ignored"]]
         errors_ignored = [item for item in new_report["items"] if item["status"] == "ERROR" and item["ignored"]]
         warning_ignored = [item for item in new_report["items"] if item["status"] == "WARNING" and item["ignored"]]
-        ignored_msg = " " + m18n.n("diagnosis_ignored_issues", nb_ignored=len(errors_ignored+warning_ignored)) if errors_ignored or warning_ignored else ""
+        ignored_msg = " " + m18n.n("diagnosis_ignored_issues", nb_ignored=len(errors_ignored + warning_ignored)) if errors_ignored or warning_ignored else ""

         if errors and warnings:
             logger.error(m18n.n("diagnosis_found_errors_and_warnings", errors=len(errors), warnings=len(warnings), category=new_report["description"]) + ignored_msg)
@@ -477,6 +477,7 @@ class Diagnoser():
         meta_data.update(item.get("data", {}))

         html_tags = re.compile(r'<[^>]+>')
+
         def m18n_(info):
             if not isinstance(info, tuple) and not isinstance(info, list):
                 info = (info, {})
@@ -485,7 +486,7 @@ class Diagnoser():
             # In cli, we remove the html tags
             if msettings.get("interface") != "api" or force_remove_html_tags:
                 s = s.replace("<cmd>", "'").replace("</cmd>", "'")
-                s = html_tags.sub('', s.replace("<br>","\n"))
+                s = html_tags.sub('', s.replace("<br>", "\n"))
             else:
                 s = s.replace("<cmd>", "<code class='cmd'>").replace("</cmd>", "</code>")
                 # Make it so that links open in new tabs
@@ -615,15 +615,15 @@ def _get_DKIM(domain):
         dkim = re.match((
             r'^(?P<host>[a-z_\-\.]+)[\s]+([0-9]+[\s]+)?IN[\s]+TXT[\s]+'
             '[^"]*"v=(?P<v>[^";]+);'
-            '[\s"]*k=(?P<k>[^";]+);'
+            r'[\s"]*k=(?P<k>[^";]+);'
             '[\s"]*p=(?P<p>[^";]+)'), dkim_content, re.M | re.S
         )
     else:
         dkim = re.match((
             r'^(?P<host>[a-z_\-\.]+)[\s]+([0-9]+[\s]+)?IN[\s]+TXT[\s]+'
             '[^"]*"v=(?P<v>[^";]+);'
-            '[\s"]*h=(?P<h>[^";]+);'
-            '[\s"]*k=(?P<k>[^";]+);'
+            r'[\s"]*h=(?P<h>[^";]+);'
+            r'[\s"]*k=(?P<k>[^";]+);'
             '[\s"]*p=(?P<p>[^";]+)'), dkim_content, re.M | re.S
         )

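The r prefixes added in this hunk are about escape sequences: \s is not one of Python's recognised string escapes, so a plain literal keeps the backslash but triggers an "invalid escape sequence" warning (W605, a DeprecationWarning on recent Python versions). The compiled regex is the same either way; the raw string just states the intent. A short check against a made-up DKIM-style record:

    import re

    record = 'mail._domainkey IN TXT "v=DKIM1; k=rsa; p=MIGfMA0"'  # hypothetical record
    plain = re.search('[\s"]*k=(?P<k>[^";]+);', record)   # works, but warns on newer Pythons
    raw = re.search(r'[\s"]*k=(?P<k>[^";]+);', record)    # same pattern, no warning
    assert plain.group("k") == raw.group("k") == "rsa"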
@@ -270,9 +270,9 @@ def hook_callback(action, hooks=[], args=None, no_trace=False, chdir=None,

     # Validate callbacks
     if not callable(pre_callback):
-        pre_callback = lambda name, priority, path, args: args
+        def pre_callback(name, priority, path, args): return args
     if not callable(post_callback):
-        post_callback = lambda name, priority, path, succeed: None
+        def post_callback(name, priority, path, succeed): return None

     # Iterate over hooks and execute them
     for priority in sorted(hooks_dict):
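Binding a lambda to a name is flagged as E731 ("do not assign a lambda expression, use a def"); with the aggressive flags autopep8 rewrites it as a one-line def, which behaves the same but gets a real __name__ and a clearer traceback. A minimal sketch with hypothetical callback values:

    pre_callback = None   # hypothetical stand-ins for the hook_callback arguments
    post_callback = None

    if not callable(pre_callback):
        # was: pre_callback = lambda name, priority, path, args: args
        def pre_callback(name, priority, path, args): return args

    if not callable(post_callback):
        # was: post_callback = lambda name, priority, path, succeed: None
        def post_callback(name, priority, path, succeed): return None

    assert pre_callback("conf_nginx", 10, "/tmp/hook", ["arg"]) == ["arg"]
    assert pre_callback.__name__ == "pre_callback"  # a lambda would report "<lambda>"
    assert post_callback("conf_nginx", 10, "/tmp/hook", succeed=True) is None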
@@ -293,9 +293,9 @@ def hook_callback(action, hooks=[], args=None, no_trace=False, chdir=None,
             else:
                 post_callback(name=name, priority=priority, path=path,
                               succeed=True)
-            if not name in result:
+            if name not in result:
                 result[name] = {}
-            result[name][path] = {'state' : state, 'stdreturn' : hook_return }
+            result[name][path] = {'state': state, 'stdreturn': hook_return}
     return result


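`not name in result` and `name not in result` compile to the same membership test (the `not` negates the whole `in` expression), but pycodestyle prefers the second form (E713) because it reads as a single operator. For instance:

    result = {"conf_ssh": {}}
    name = "conf_nginx"

    # identical truth value; only the spelling differs
    assert (not name in result) == (name not in result)

    if name not in result:
        result[name] = {}
    result[name]["/hooks/conf_regen/12-nginx"] = {  # hypothetical hook path
        "state": "succeed",
        "stdreturn": {},
    }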
@@ -446,7 +446,7 @@ def _hook_exec_python(path, args, env, loggers):
     dir_ = os.path.dirname(path)
     name = os.path.splitext(os.path.basename(path))[0]

-    if not dir_ in sys.path:
+    if dir_ not in sys.path:
         sys.path = [dir_] + sys.path
     module = import_module(name)

@@ -454,8 +454,8 @@ def _hook_exec_python(path, args, env, loggers):
     # # Assert that the return is a (int, dict) tuple
     assert isinstance(ret, tuple) \
         and len(ret) == 2 \
-        and isinstance(ret[0],int) \
-        and isinstance(ret[1],dict), \
+        and isinstance(ret[0], int) \
+        and isinstance(ret[1], dict), \
         "Module %s did not return a (int, dict) tuple !" % module
     return ret

@@ -270,7 +270,7 @@ def log_display(path, number=None, share=False, filter_irrelevant=False, with_su
     if os.path.exists(log_path):
         from yunohost.service import _tail
         if number and filters:
-            logs = _tail(log_path, int(number*4))
+            logs = _tail(log_path, int(number * 4))
         elif number:
             logs = _tail(log_path, int(number))
         else:
@@ -654,7 +654,6 @@ def _tail(file, n):
     avg_line_length = 74
     to_read = n

-
     try:
         if file.endswith(".gz"):
             import gzip
@@ -15,6 +15,7 @@ logger = getActionLogger('yunohost.settings')
 SETTINGS_PATH = "/etc/yunohost/settings.json"
 SETTINGS_PATH_OTHER_LOCATION = "/etc/yunohost/settings-%s.json"

+
 def is_boolean(value):
     """
     Ensure a string value is intended as a boolean
@@ -321,17 +322,20 @@ def reconfigure_nginx(setting_name, old_value, new_value):
     if old_value != new_value:
         service_regen_conf(names=['nginx'])

+
 @post_change_hook("security.ssh.compatibility")
 def reconfigure_ssh(setting_name, old_value, new_value):
     if old_value != new_value:
         service_regen_conf(names=['ssh'])

+
 @post_change_hook("smtp.allow_ipv6")
 @post_change_hook("security.postfix.compatibility")
 def reconfigure_postfix(setting_name, old_value, new_value):
     if old_value != new_value:
         service_regen_conf(names=['postfix'])

+
 @post_change_hook("pop3.enabled")
 def reconfigure_dovecot(setting_name, old_value, new_value):
     dovecot_package = 'dovecot-pop3d'
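The hunks here that only add or remove blank lines (the two above and the tools hunks below) bring vertical spacing in line with pycodestyle: two blank lines around top-level definitions (E302/E305) and no extra runs of blank lines (E303). A rough, self-contained sketch of the convention, with hypothetical stand-ins for the real decorator and for service_regen_conf:

    def post_change_hook(name):  # hypothetical stand-in for the real decorator
        def decorator(func):
            return func
        return decorator


    @post_change_hook("security.ssh.compatibility")
    def reconfigure_ssh(setting_name, old_value, new_value):
        if old_value != new_value:
            print("would regen the ssh conf")  # stands in for service_regen_conf(names=['ssh'])


    reconfigure_ssh("security.ssh.compatibility", "intermediate", "modern")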
@@ -51,9 +51,11 @@ MIGRATIONS_STATE_PATH = "/etc/yunohost/migrations.yaml"

 logger = getActionLogger('yunohost.tools')

+
 def tools_versions():
     return ynh_packages_version()

+
 def tools_ldapinit():
     """
     YunoHost LDAP initialization
@@ -146,7 +148,7 @@ def tools_adminpw(new_password, check_strength=True):
     ldap = _get_ldap_interface()

     try:
-        ldap.update("cn=admin", {"userPassword": [ new_hash ], })
+        ldap.update("cn=admin", {"userPassword": [new_hash], })
     except:
         logger.exception('unable to change admin password')
         raise YunohostError('admin_password_change_failed')
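The bare `except:` in this hunk is deliberately untouched: E722 is on the commit's --ignore list, so only the spacing inside the brackets changes. For comparison, a hedged sketch of a narrower handler, using local stand-ins rather than the real LDAP interface so it runs on its own:

    import logging

    logger = logging.getLogger("example")


    class YunohostError(Exception):  # local stand-in for yunohost's exception class
        pass


    def change_admin_password(ldap_update, new_hash):
        try:
            ldap_update("cn=admin", {"userPassword": [new_hash]})
        except Exception:  # unlike a bare except:, this lets SystemExit/KeyboardInterrupt through
            logger.exception('unable to change admin password')
            raise YunohostError('admin_password_change_failed')


    def broken_ldap_update(dn, attrs):  # hypothetical stand-in for ldap.update
        raise RuntimeError("LDAP unavailable")


    try:
        change_admin_password(broken_ldap_update, "{CRYPT}...")
    except YunohostError as e:
        print("caught:", e)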
@@ -599,7 +601,6 @@ def tools_upgrade(operation_logger, apps=None, system=False, allow_yunohost_upgr

         logger.debug("Running apt command :\n{}".format(dist_upgrade))

-
         def is_relevant(l):
             irrelevants = [
                 "service sudo-ldap already provided",
@@ -936,7 +937,7 @@ def _migrate_legacy_migration_json():
     # Extract the list of migration ids
     from . import data_migrations
     migrations_path = data_migrations.__path__[0]
-    migration_files = filter(lambda x: re.match("^\d+_[a-zA-Z0-9_]+\.py$", x), os.listdir(migrations_path))
+    migration_files = filter(lambda x: re.match(r"^\d+_[a-zA-Z0-9_]+\.py$", x), os.listdir(migrations_path))
     # (here we remove the .py extension and make sure the ids are sorted)
     migration_ids = sorted([f.rsplit(".", 1)[0] for f in migration_files])

@@ -985,7 +986,7 @@ def _get_migrations_list():
     # (in particular, pending migrations / not already ran are not listed
     states = tools_migrations_state()["migrations"]

-    for migration_file in filter(lambda x: re.match("^\d+_[a-zA-Z0-9_]+\.py$", x), os.listdir(migrations_path)):
+    for migration_file in filter(lambda x: re.match(r"^\d+_[a-zA-Z0-9_]+\.py$", x), os.listdir(migrations_path)):
         m = _load_migration(migration_file)
         m.state = states.get(m.id, "pending")
         migrations.append(m)
@@ -1004,7 +1005,7 @@ def _get_migration_by_name(migration_name):
         raise AssertionError("Unable to find migration with name %s" % migration_name)

     migrations_path = data_migrations.__path__[0]
-    migrations_found = filter(lambda x: re.match("^\d+_%s\.py$" % migration_name, x), os.listdir(migrations_path))
+    migrations_found = filter(lambda x: re.match(r"^\d+_%s\.py$" % migration_name, x), os.listdir(migrations_path))

     assert len(migrations_found) == 1, "Unable to find migration with name %s" % migration_name
