Mirror of https://github.com/YunoHost/yunohost.git (synced 2024-09-03 20:06:10 +02:00)

Commit bfd7257a8c ("fix linter, remove allow_failure for corresponding tests")
Parent: 97f26015c6
16 changed files with 31 additions and 34 deletions
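The hunks below fall into a handful of flake8 fix categories: E741 (ambiguous variable name `l`), E722 (bare `except:`), E713 (`not x in y`), W605 (invalid escape sequences in non-raw strings), and duplicate/unused imports, plus dropping `allow_failure: true` from the two lint CI jobs and an unused function parameter.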
@@ -7,7 +7,6 @@ lint37:
   stage: lint
   image: "before-install"
   needs: []
-  allow_failure: true
   script:
     - tox -e py37-lint

@@ -22,7 +21,6 @@ format-check:
   stage: lint
   image: "before-install"
   needs: []
-  allow_failure: true
   script:
     - tox -e py37-black-check

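With `allow_failure: true` removed from both jobs, a failing `tox -e py37-lint` or `tox -e py37-black-check` now fails the pipeline instead of being reported as a mere warning — which is the point of the commit: the codebase now passes the linter, so regressions should block.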
@@ -193,11 +193,11 @@ class IPDiagnoser(Diagnoser):
         content = read_file("/etc/resolv.conf").strip().split("\n")
         # Ignore comments and empty lines
         content = [
-            l.strip()
-            for l in content
-            if l.strip()
-            and not l.strip().startswith("#")
-            and not l.strip().startswith("search")
+            line.strip()
+            for line in content
+            if line.strip()
+            and not line.strip().startswith("#")
+            and not line.strip().startswith("search")
         ]
         # We should only find a "nameserver 127.0.0.1"
         return len(content) == 1 and content[0].split() == ["nameserver", "127.0.0.1"]
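The `l` → `line` renames here address flake8's E741 (ambiguous variable name): `l` is easily confused with `1` and `I`. The same fix recurs below in the migration's `is_relevant` helper, `log_show`, `tools_upgrade`, and `_list_upgradable_apt_packages`; in `hook_list` the variable is a list, so it becomes `name_list` instead.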
@@ -82,7 +82,7 @@ class WebDiagnoser(Diagnoser):
                     summary="diagnosis_http_hairpinning_issue",
                     details=["diagnosis_http_hairpinning_issue_details"],
                 )
-            except:
+            except Exception:
                 # Well I dunno what to do if that's another exception
                 # type... That'll most probably *not* be an hairpinning
                 # issue but something else super weird ...
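The `except:` → `except Exception:` change (flake8 E722) recurs in several hunks below. A bare `except` also catches `SystemExit` and `KeyboardInterrupt`, which normally should propagate. A minimal, self-contained sketch — the names are illustrative, not from the codebase:

    def risky():
        raise ValueError("ordinary failure")

    try:
        risky()
    except Exception as e:  # ordinary errors only; Ctrl-C / sys.exit() still propagate
        print("handled:", e)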
@@ -124,7 +124,7 @@ class Parser:

             # Then we keep this bloc and start a new one
             # (we ignore helpers containing [internal] ...)
-            if not "[internal]" in current_block["comments"]:
+            if "[internal]" not in current_block["comments"]:
                 self.blocks.append(current_block)
                 current_block = {
                     "name": None,
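`not "[internal]" in x` parses as `not ("[internal]" in x)`, so both forms are equivalent; flake8's E713 simply prefers the dedicated `not in` operator for readability.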
@@ -30,10 +30,9 @@ import shutil
 import yaml
 import time
 import re
-import urllib.parse
 import subprocess
 import glob
-import urllib.request, urllib.parse, urllib.error
+import urllib.parse
 from collections import OrderedDict

 from moulinette import msignals, m18n, msettings
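The module previously imported `urllib.parse` twice — once on its own and once via `import urllib.request, urllib.parse, urllib.error` — with `urllib.request` and `urllib.error` evidently unused. Keeping a single `import urllib.parse` resolves the redefinition and unused-import warnings.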
@@ -912,7 +911,7 @@ def app_install(
     args_odict = _parse_args_from_manifest(manifest, "install", args=args_dict)

     # Validate domain / path availability for webapps
-    _validate_and_normalize_webpath(manifest, args_odict, extracted_app_folder)
+    _validate_and_normalize_webpath(args_odict, extracted_app_folder)

     # Attempt to patch legacy helpers ...
     _patch_legacy_helpers(extracted_app_folder)
@@ -3040,7 +3039,7 @@ def _parse_args_in_yunohost_format(user_answers, argument_questions):
    return parsed_answers_dict


-def _validate_and_normalize_webpath(manifest, args_dict, app_folder):
+def _validate_and_normalize_webpath(args_dict, app_folder):

    # If there's only one "domain" and "path", validate that domain/path
    # is an available url and normalize the path.
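The `manifest` parameter was evidently unused inside `_validate_and_normalize_webpath`, so it is dropped from the signature, and the call site in `app_install` (previous hunk) is updated to match.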
@@ -229,8 +229,8 @@ class MyMigration(Migration):
             os.system("apt-mark unhold {}".format(package))

     def apt_install(self, cmd):
-        def is_relevant(l):
-            return "Reading database ..." not in l.rstrip()
+        def is_relevant(line):
+            return "Reading database ..." not in line.rstrip()

         callbacks = (
             lambda l: logger.info("+ " + l.rstrip() + "\r")
@@ -607,9 +607,9 @@ def _get_DKIM(domain):
         dkim = re.match(
             (
                 r"^(?P<host>[a-z_\-\.]+)[\s]+([0-9]+[\s]+)?IN[\s]+TXT[\s]+"
-                '[^"]*"v=(?P<v>[^";]+);'
+                r'[^"]*"v=(?P<v>[^";]+);'
                 r'[\s"]*k=(?P<k>[^";]+);'
-                '[\s"]*p=(?P<p>[^";]+)'
+                r'[\s"]*p=(?P<p>[^";]+)'
             ),
             dkim_content,
             re.M | re.S,
@@ -618,10 +618,10 @@ def _get_DKIM(domain):
         dkim = re.match(
             (
                 r"^(?P<host>[a-z_\-\.]+)[\s]+([0-9]+[\s]+)?IN[\s]+TXT[\s]+"
-                '[^"]*"v=(?P<v>[^";]+);'
+                r'[^"]*"v=(?P<v>[^";]+);'
                 r'[\s"]*h=(?P<h>[^";]+);'
                 r'[\s"]*k=(?P<k>[^";]+);'
-                '[\s"]*p=(?P<p>[^";]+)'
+                r'[\s"]*p=(?P<p>[^";]+)'
             ),
             dkim_content,
             re.M | re.S,
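The added `r` prefixes fix flake8's W605 (invalid escape sequence): in a plain string literal, `\s` is not a recognized escape, so Python 3 emits a DeprecationWarning and may reject it in a future version. A raw string hands the backslash to `re` untouched. A minimal sketch — the sample TXT fragment is made up:

    import re

    # Without the r prefix, '[\s"]*...' would trigger W605.
    pattern = r'[\s"]*p=(?P<p>[^";]+)'
    assert re.search(pattern, ' "p=abc123"').group("p") == "abc123"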
@@ -165,8 +165,8 @@ def hook_list(action, list_by="name", show_info=False):
         def _append_hook(d, priority, name, path):
             # Use the name as key and a list of hooks info - the
             # executed ones with this name - as value
-            l = d.get(name, list())
-            for h in l:
+            name_list = d.get(name, list())
+            for h in name_list:
                 # Only one priority for the hook is accepted
                 if h["priority"] == priority:
                     # Custom hooks overwrite system ones and they
@@ -174,8 +174,8 @@ def hook_list(action, list_by="name", show_info=False):
                     if h["path"] != path:
                         h["path"] = path
                     return
-            l.append({"priority": priority, "path": path})
-            d[name] = l
+            name_list.append({"priority": priority, "path": path})
+            d[name] = name_list

     else:
         if list_by == "name":
@@ -168,7 +168,7 @@ def log_show(
    def _filter_lines(lines, filters=[]):

        filters = [re.compile(f) for f in filters]
-        return [l for l in lines if not any(f.search(l.strip()) for f in filters)]
+        return [line for line in lines if not any(f.search(line.strip()) for f in filters)]

    # Normalize log/metadata paths and filenames
    abs_path = path
@@ -839,7 +839,7 @@ def _validate_and_sanitize_permission_url(url, app_base_path, app):
        re:domain.tld/app/api/[A-Z]*$ -> domain.tld/app/api/[A-Z]*$

    We can also have less-trivial regexes like:
-        re:^\/api\/.*|\/scripts\/api.js$
+        re:^/api/.*|/scripts/api.js$
    """

    from yunohost.domain import domain_list
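Same W605 issue, but inside a docstring: `\/` is not a valid Python escape (and `/` never needs escaping in a regex anyway), so the example is rewritten without backslashes rather than turning the whole docstring into a raw string.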
@@ -7,7 +7,7 @@ from collections import OrderedDict

 from moulinette import msignals

-from yunohost import domain, user, app
+from yunohost import domain, user
 from yunohost.app import _parse_args_in_yunohost_format, PasswordArgumentParser
 from yunohost.utils.error import YunohostError

@@ -16,7 +16,7 @@ def setup_function(function):

     try:
         app_remove("register_url_app")
-    except:
+    except Exception:
         pass


@@ -24,7 +24,7 @@ def teardown_function(function):

     try:
         app_remove("register_url_app")
-    except:
+    except Exception:
         pass


@@ -199,11 +199,11 @@ def teardown_function(function):

     try:
         app_remove("permissions_app")
-    except:
+    except Exception:
         pass
     try:
         app_remove("legacy_app")
-    except:
+    except Exception:
         pass


@@ -625,12 +625,12 @@ def tools_upgrade(

         logger.debug("Running apt command :\n{}".format(dist_upgrade))

-        def is_relevant(l):
+        def is_relevant(line):
             irrelevants = [
                 "service sudo-ldap already provided",
                 "Reading database ...",
             ]
-            return all(i not in l.rstrip() for i in irrelevants)
+            return all(i not in line.rstrip() for i in irrelevants)

         callbacks = (
             lambda l: logger.info("+ " + l.rstrip() + "\r")
@@ -120,7 +120,7 @@ def _list_upgradable_apt_packages():
    upgradable_raw = check_output("LC_ALL=C apt list --upgradable")

    # Dirty parsing of the output
-    upgradable_raw = [l.strip() for l in upgradable_raw.split("\n") if l.strip()]
+    upgradable_raw = [line.strip() for line in upgradable_raw.split("\n") if line.strip()]
    for line in upgradable_raw:

        # Remove stupid warning and verbose messages >.>
@@ -43,7 +43,7 @@ def yunopaste(data):

    try:
        url = json.loads(r.text)["key"]
-    except:
+    except Exception:
        raise YunohostError(
            "Uhoh, couldn't parse the answer from paste.yunohost.org : %s" % r.text,
            raw_msg=True,