fix linter, remove allow_failure for corresponding tests

Kay0u 2021-02-02 16:16:07 +01:00
parent 97f26015c6
commit bfd7257a8c
16 changed files with 31 additions and 34 deletions

@@ -7,7 +7,6 @@ lint37:
stage: lint
image: "before-install"
needs: []
-allow_failure: true
script:
- tox -e py37-lint
@@ -22,7 +21,6 @@ format-check:
stage: lint
image: "before-install"
needs: []
-allow_failure: true
script:
- tox -e py37-black-check

@@ -193,11 +193,11 @@ class IPDiagnoser(Diagnoser):
content = read_file("/etc/resolv.conf").strip().split("\n")
# Ignore comments and empty lines
content = [
-l.strip()
-for l in content
-if l.strip()
-and not l.strip().startswith("#")
-and not l.strip().startswith("search")
+line.strip()
+for line in content
+if line.strip()
+and not line.strip().startswith("#")
+and not line.strip().startswith("search")
]
# We should only find a "nameserver 127.0.0.1"
return len(content) == 1 and content[0].split() == ["nameserver", "127.0.0.1"]
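
The only change in this hunk is renaming the single-letter variable l (flagged by flake8 as E741, "ambiguous variable name") to line; the logic is untouched. For reference, a standalone sketch of the same check, with an assumed function name:

    def resolvconf_points_to_local_dns(path="/etc/resolv.conf"):
        # Keep only lines that are not empty, not comments and not "search ..." entries
        with open(path) as f:
            content = [
                line.strip()
                for line in f.read().strip().split("\n")
                if line.strip()
                and not line.strip().startswith("#")
                and not line.strip().startswith("search")
            ]
        # We should only find a "nameserver 127.0.0.1"
        return len(content) == 1 and content[0].split() == ["nameserver", "127.0.0.1"]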

@@ -82,7 +82,7 @@ class WebDiagnoser(Diagnoser):
summary="diagnosis_http_hairpinning_issue",
details=["diagnosis_http_hairpinning_issue_details"],
)
-except:
+except Exception:
# Well I dunno what to do if that's another exception
# type... That'll most probably *not* be an hairpinning
# issue but something else super weird ...
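
The bare except: is narrowed to except Exception: (flake8 E722) because a bare except also swallows KeyboardInterrupt and SystemExit, which derive from BaseException but not from Exception. A small illustration:

    def risky():
        raise SystemExit(1)

    try:
        risky()
    except Exception:
        print("not reached: SystemExit is not an Exception")
    except BaseException:
        print("a bare 'except:' would have behaved like this clause")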

@@ -124,7 +124,7 @@ class Parser:
# Then we keep this bloc and start a new one
# (we ignore helpers containing [internal] ...)
-if not "[internal]" in current_block["comments"]:
+if "[internal]" not in current_block["comments"]:
self.blocks.append(current_block)
current_block = {
"name": None,

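This hunk only rewrites the membership test into the idiomatic "not in" form that flake8 asks for (E713); the two spellings are equivalent:

    comments = ["Some helper description", "[internal]"]

    old_style = not "[internal]" in comments   # what E713 flags
    new_style = "[internal]" not in comments   # preferred spelling, same result
    assert old_style == new_style
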
@@ -30,10 +30,9 @@ import shutil
import yaml
import time
import re
-import urllib.parse
import subprocess
import glob
-import urllib.request, urllib.parse, urllib.error
+import urllib.parse
from collections import OrderedDict
from moulinette import msignals, m18n, msettings
@@ -912,7 +911,7 @@ def app_install(
args_odict = _parse_args_from_manifest(manifest, "install", args=args_dict)
# Validate domain / path availability for webapps
-_validate_and_normalize_webpath(manifest, args_odict, extracted_app_folder)
+_validate_and_normalize_webpath(args_odict, extracted_app_folder)
# Attempt to patch legacy helpers ...
_patch_legacy_helpers(extracted_app_folder)
@@ -3040,7 +3039,7 @@ def _parse_args_in_yunohost_format(user_answers, argument_questions):
return parsed_answers_dict
-def _validate_and_normalize_webpath(manifest, args_dict, app_folder):
+def _validate_and_normalize_webpath(args_dict, app_folder):
# If there's only one "domain" and "path", validate that domain/path
# is an available url and normalize the path.
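
Two unrelated cleanups meet in app.py: the 2to3 leftover "import urllib.request, urllib.parse, urllib.error" is reduced to the one submodule the file actually uses, and _validate_and_normalize_webpath() drops a manifest parameter that its body never reads (the call site above is updated to match). A tiny illustrative sketch of the import side; the function below is made up for the example, not the real app.py helper:

    import urllib.parse  # only the submodule that is actually needed

    def normalize_webpath(domain, path):
        # Normalize "app" or "/app/" to "/app" and rebuild a full URL for later checks.
        path = "/" + path.strip("/")
        return urllib.parse.urlunparse(("https", domain, path, "", "", ""))

    print(normalize_webpath("example.tld", "app/"))   # https://example.tld/app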

@@ -229,8 +229,8 @@ class MyMigration(Migration):
os.system("apt-mark unhold {}".format(package))
def apt_install(self, cmd):
-def is_relevant(l):
-return "Reading database ..." not in l.rstrip()
+def is_relevant(line):
+return "Reading database ..." not in line.rstrip()
callbacks = (
lambda l: logger.info("+ " + l.rstrip() + "\r")

@@ -607,9 +607,9 @@ def _get_DKIM(domain):
dkim = re.match(
(
r"^(?P<host>[a-z_\-\.]+)[\s]+([0-9]+[\s]+)?IN[\s]+TXT[\s]+"
-'[^"]*"v=(?P<v>[^";]+);'
+r'[^"]*"v=(?P<v>[^";]+);'
r'[\s"]*k=(?P<k>[^";]+);'
-'[\s"]*p=(?P<p>[^";]+)'
+r'[\s"]*p=(?P<p>[^";]+)'
),
dkim_content,
re.M | re.S,
@@ -618,10 +618,10 @@ def _get_DKIM(domain):
dkim = re.match(
(
r"^(?P<host>[a-z_\-\.]+)[\s]+([0-9]+[\s]+)?IN[\s]+TXT[\s]+"
-'[^"]*"v=(?P<v>[^";]+);'
+r'[^"]*"v=(?P<v>[^";]+);'
r'[\s"]*h=(?P<h>[^";]+);'
r'[\s"]*k=(?P<k>[^";]+);'
-'[\s"]*p=(?P<p>[^";]+)'
+r'[\s"]*p=(?P<p>[^";]+)'
),
dkim_content,
re.M | re.S,
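
The only change in these two hunks is the added r prefix on the regex fragments that contain backslashes: without it, '\s' is an invalid escape sequence (flake8 W605, a DeprecationWarning since Python 3.6 and a SyntaxWarning on recent versions). A quick illustration:

    import re

    pattern_plain = '[\s"]*p=(?P<p>[^";]+)'   # '\s' triggers W605
    pattern_raw = r'[\s"]*p=(?P<p>[^";]+)'    # raw string: the backslash reaches re intact

    # Both still match today, because unknown escapes are currently kept as-is,
    # but only the raw-string form is warning-free.
    assert re.search(pattern_raw, ' p=MIIBIjANBg').group("p") == "MIIBIjANBg"
    assert re.search(pattern_plain, ' p=MIIBIjANBg').group("p") == "MIIBIjANBg"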

@@ -165,8 +165,8 @@ def hook_list(action, list_by="name", show_info=False):
def _append_hook(d, priority, name, path):
# Use the name as key and a list of hooks info - the
# executed ones with this name - as value
-l = d.get(name, list())
-for h in l:
+name_list = d.get(name, list())
+for h in name_list:
# Only one priority for the hook is accepted
if h["priority"] == priority:
# Custom hooks overwrite system ones and they
@@ -174,8 +174,8 @@ def hook_list(action, list_by="name", show_info=False):
if h["path"] != path:
h["path"] = path
return
-l.append({"priority": priority, "path": path})
-d[name] = l
+name_list.append({"priority": priority, "path": path})
+d[name] = name_list
else:
if list_by == "name":
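
Here l becomes name_list (the same E741 rename as above); the dedup-by-priority logic is untouched. A self-contained sketch of that logic, with invented hook names and paths:

    def append_hook(d, priority, name, path):
        # Keep one entry per (name, priority); a later path overwrites an earlier one,
        # which is how custom hooks shadow system ones.
        name_list = d.get(name, list())
        for h in name_list:
            if h["priority"] == priority:
                if h["path"] != path:
                    h["path"] = path
                return
        name_list.append({"priority": priority, "path": path})
        d[name] = name_list

    hooks = {}
    append_hook(hooks, "50", "conf_regen", "/usr/share/yunohost/hooks/conf_regen/50-foo")
    append_hook(hooks, "50", "conf_regen", "/etc/yunohost/hooks.d/conf_regen/50-foo")
    print(hooks)   # the second (custom) path replaced the first for priority "50"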

@@ -168,7 +168,7 @@ def log_show(
def _filter_lines(lines, filters=[]):
filters = [re.compile(f) for f in filters]
-return [l for l in lines if not any(f.search(l.strip()) for f in filters)]
+return [line for line in lines if not any(f.search(line.strip()) for f in filters)]
# Normalize log/metadata paths and filenames
abs_path = path
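
Same rename inside _filter_lines(); its behaviour is easy to check in isolation (the sample lines are invented):

    import re

    def _filter_lines(lines, filters=[]):
        filters = [re.compile(f) for f in filters]
        return [line for line in lines if not any(f.search(line.strip()) for f in filters)]

    print(_filter_lines(["DEBUG: noise", "INFO: kept"], filters=[r"^DEBUG"]))   # ['INFO: kept']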

@@ -839,7 +839,7 @@ def _validate_and_sanitize_permission_url(url, app_base_path, app):
re:domain.tld/app/api/[A-Z]*$ -> domain.tld/app/api/[A-Z]*$
We can also have less-trivial regexes like:
-re:^\/api\/.*|\/scripts\/api.js$
+re:^/api/.*|/scripts/api.js$
"""
from yunohost.domain import domain_list
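
The docstring example loses its needless escapes: a forward slash has no special meaning in a Python regex, and '\/' inside a plain (non-raw) docstring is itself an invalid escape sequence (W605). Both spellings match the same URLs:

    import re

    assert re.match(r"^/api/.*|/scripts/api.js$", "/api/v1/users")
    assert re.match("^\\/api\\/.*|\\/scripts\\/api.js$", "/api/v1/users")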

@@ -7,7 +7,7 @@ from collections import OrderedDict
from moulinette import msignals
-from yunohost import domain, user, app
+from yunohost import domain, user
from yunohost.app import _parse_args_in_yunohost_format, PasswordArgumentParser
from yunohost.utils.error import YunohostError

@@ -16,7 +16,7 @@ def setup_function(function):
try:
app_remove("register_url_app")
-except:
+except Exception:
pass
@@ -24,7 +24,7 @@ def teardown_function(function):
try:
app_remove("register_url_app")
-except:
+except Exception:
pass

@@ -199,11 +199,11 @@ def teardown_function(function):
try:
app_remove("permissions_app")
-except:
+except Exception:
pass
try:
app_remove("legacy_app")
-except:
+except Exception:
pass

@@ -625,12 +625,12 @@ def tools_upgrade(
logger.debug("Running apt command :\n{}".format(dist_upgrade))
-def is_relevant(l):
+def is_relevant(line):
irrelevants = [
"service sudo-ldap already provided",
"Reading database ...",
]
-return all(i not in l.rstrip() for i in irrelevants)
+return all(i not in line.rstrip() for i in irrelevants)
callbacks = (
lambda l: logger.info("+ " + l.rstrip() + "\r")
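
As in the migration hunk above, only the argument of the nested is_relevant() helper is renamed. The surrounding pattern, log each apt output line unless it is known noise, can be sketched on its own; the sample lines below are invented:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("upgrade")

    def is_relevant(line):
        irrelevants = [
            "service sudo-ldap already provided",
            "Reading database ...",
        ]
        return all(i not in line.rstrip() for i in irrelevants)

    def on_stdout(line):
        # Mirrors the logging callback: only relevant lines reach the log.
        if is_relevant(line):
            logger.info("+ " + line.rstrip())

    for output in ["Reading database ... 5%", "Unpacking yunohost (4.1.7) ..."]:
        on_stdout(output)   # only the second line is logged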

@@ -120,7 +120,7 @@ def _list_upgradable_apt_packages():
upgradable_raw = check_output("LC_ALL=C apt list --upgradable")
# Dirty parsing of the output
-upgradable_raw = [l.strip() for l in upgradable_raw.split("\n") if l.strip()]
+upgradable_raw = [line.strip() for line in upgradable_raw.split("\n") if line.strip()]
for line in upgradable_raw:
# Remove stupid warning and verbose messages >.>

@@ -43,7 +43,7 @@ def yunopaste(data):
try:
url = json.loads(r.text)["key"]
-except:
+except Exception:
raise YunohostError(
"Uhoh, couldn't parse the answer from paste.yunohost.org : %s" % r.text,
raw_msg=True,
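
One more bare except: tightened to except Exception:. The realistic failures here, a non-JSON response body or JSON without a "key" field, raise ordinary exceptions, so nothing is lost by narrowing. A minimal sketch, with ValueError standing in for YunohostError:

    import json

    def extract_paste_key(body):
        try:
            return json.loads(body)["key"]
        except Exception:
            # json.JSONDecodeError and KeyError both derive from Exception,
            # so the narrowed clause still catches them.
            raise ValueError("couldn't parse the answer from paste.yunohost.org : %s" % body)

    print(extract_paste_key('{"key": "abc123"}'))   # abc123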