mirror of https://github.com/YunoHost/moulinette.git
[mod] run pyupgrade on source code
parent 845399dba0
commit 8127e7cd1a
12 changed files with 43 additions and 43 deletions
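For context on the mechanical change below: pyupgrade rewrites old-style %-interpolation into equivalent str.format() calls and legacy set(...) constructions into set literals, without changing runtime behaviour. A minimal sketch of the string-formatting equivalence, using invented values rather than anything from the codebase:

    # Hypothetical values, purely to illustrate the rewrite pyupgrade performs.
    DATA_DIR = "/usr/share/moulinette"
    n = "yunohost"

    old_style = "%s/actionsmap/%s.yml" % (DATA_DIR, n)        # before
    new_style = "{}/actionsmap/{}.yml".format(DATA_DIR, n)    # after
    assert old_style == new_style == "/usr/share/moulinette/actionsmap/yunohost.yml"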
@@ -410,7 +410,7 @@ class ActionsMap(object):
 for n in self.get_namespaces():
 logger.debug("loading actions map namespace '%s'", n)

-actionsmap_yml = "%s/actionsmap/%s.yml" % (DATA_DIR, n)
+actionsmap_yml = "{}/actionsmap/{}.yml".format(DATA_DIR, n)
 actionsmap_yml_stat = os.stat(actionsmap_yml)
 actionsmap_pkl = "%s/actionsmap/%s-%d-%d.pkl" % (
 CACHE_DIR,
@@ -428,7 +428,7 @@ class ActionsMap(object):
 actionsmap = read_yaml(actionsmap_yml)

 # Delete old cache files
-for old_cache in glob.glob("%s/actionsmap/%s-*.pkl" % (CACHE_DIR, n)):
+for old_cache in glob.glob("{}/actionsmap/{}-*.pkl".format(CACHE_DIR, n)):
 os.remove(old_cache)

 # at installation, cachedir might not exists
@@ -479,7 +479,7 @@ class ActionsMap(object):
 auth_method = self.default_authentication

 # Load and initialize the authenticator module
-auth_module = "%s.authenticators.%s" % (self.main_namespace, auth_method)
+auth_module = "{}.authenticators.{}".format(self.main_namespace, auth_method)
 logger.debug(f"Loading auth module {auth_module}")
 try:
 mod = import_module(auth_module)
@@ -532,12 +532,12 @@ class ActionsMap(object):
 # Retrieve action information
 if len(tid) == 4:
 namespace, category, subcategory, action = tid
-func_name = "%s_%s_%s" % (
+func_name = "{}_{}_{}".format(
 category,
 subcategory.replace("-", "_"),
 action.replace("-", "_"),
 )
-full_action_name = "%s.%s.%s.%s" % (
+full_action_name = "{}.{}.{}.{}".format(
 namespace,
 category,
 subcategory,
@@ -547,22 +547,22 @@ class ActionsMap(object):
 assert len(tid) == 3
 namespace, category, action = tid
 subcategory = None
-func_name = "%s_%s" % (category, action.replace("-", "_"))
-full_action_name = "%s.%s.%s" % (namespace, category, action)
+func_name = "{}_{}".format(category, action.replace("-", "_"))
+full_action_name = "{}.{}.{}".format(namespace, category, action)

 # Lock the moulinette for the namespace
 with MoulinetteLock(namespace, timeout):
 start = time()
 try:
 mod = __import__(
-"%s.%s" % (namespace, category),
+"{}.{}".format(namespace, category),
 globals=globals(),
 level=0,
 fromlist=[func_name],
 )
 logger.debug(
 "loading python module %s took %.3fs",
-"%s.%s" % (namespace, category),
+"{}.{}".format(namespace, category),
 time() - start,
 )
 func = getattr(mod, func_name)
@@ -570,7 +570,7 @@ class ActionsMap(object):
 import traceback

 traceback.print_exc()
-error_message = "unable to load function %s.%s because: %s" % (
+error_message = "unable to load function {}.{} because: {}".format(
 namespace,
 func_name,
 e,
@@ -619,7 +619,7 @@ class ActionsMap(object):
 # Look for all files that match the given patterns in the actionsmap dir
 for namespace_pattern in NAMESPACE_PATTERNS:
 namespaces.extend(
-glob.glob("%s/actionsmap/%s.yml" % (DATA_DIR, namespace_pattern))
+glob.glob("{}/actionsmap/{}.yml".format(DATA_DIR, namespace_pattern))
 )

 # Keep only the filenames with extension
@@ -230,7 +230,7 @@ class Moulinette18n(object):
 # Create new Translator object
 lib_dir = env["LIB_DIR"]
 translator = Translator(
-"%s/%s/locales" % (lib_dir, namespace), self.default_locale
+"{}/{}/locales".format(lib_dir, namespace), self.default_locale
 )
 translator.set_locale(self.locale)
 self._namespaces[namespace] = translator
@@ -211,7 +211,7 @@ class _CallbackAction(argparse.Action):
 import traceback

 traceback.print_exc()
-raise ValueError("unable to import method {0}".format(self.callback_method))
+raise ValueError("unable to import method {}".format(self.callback_method))
 self._callback = func

 def __call__(self, parser, namespace, values, option_string=None):
@@ -226,7 +226,7 @@ class _CallbackAction(argparse.Action):
 except Exception as e:
 error_message = (
 "cannot get value from callback method "
-"'{0}': {1}".format(self.callback_method, e)
+"'{}': {}".format(self.callback_method, e)
 )
 logger.exception(error_message)
 raise MoulinetteError(error_message, raw_msg=True)
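The `{0}`/`{1}` rewrites in the two hunks above follow the same logic: explicit positional indices in str.format() can be dropped when each argument is consumed once, in order, so auto-numbered fields produce the identical string. A small sketch, with an invented callback name and error:

    callback_method = "some.module.func"   # hypothetical value
    e = ValueError("boom")                 # hypothetical error

    numbered = "'{0}': {1}".format(callback_method, e)
    auto = "'{}': {}".format(callback_method, e)
    assert numbered == auto == "'some.module.func': boom"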
@@ -562,7 +562,7 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
 usage = "\n".join(lines)

 # prefix with 'usage:'
-return "%s%s\n\n" % (prefix, usage)
+return "{}{}\n\n".format(prefix, usage)


 class JSONExtendedEncoder(JSONEncoder):
@@ -38,9 +38,9 @@ logger = log.getLogger("moulinette.interface.api")
 # We define a global variable to manage in a dirty way the upload...
 UPLOAD_DIR = None

-CSRF_TYPES = set(
-["text/plain", "application/x-www-form-urlencoded", "multipart/form-data"]
-)
+CSRF_TYPES = {
+"text/plain", "application/x-www-form-urlencoded", "multipart/form-data"
+}


 def is_csrf():
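The CSRF_TYPES change above swaps set([...]) for a set literal; both build the same set, the literal just skips the throwaway intermediate list. A quick check with the same three MIME types:

    old = set(["text/plain", "application/x-www-form-urlencoded", "multipart/form-data"])
    new = {"text/plain", "application/x-www-form-urlencoded", "multipart/form-data"}
    assert old == new and "multipart/form-data" in new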
@@ -667,7 +667,7 @@ class ActionsMapParser(BaseActionsMapParser):
 # Retrieve the tid for the route
 _, parser = self._parsers[route]
 except KeyError as e:
-error_message = "no argument parser found for route '%s': %s" % (route, e)
+error_message = "no argument parser found for route '{}': {}".format(route, e)
 logger.error(error_message)
 raise MoulinetteValidationError(error_message, raw_msg=True)

@@ -684,7 +684,7 @@ class ActionsMapParser(BaseActionsMapParser):
 # Retrieve the parser for the route
 _, parser = self._parsers[route]
 except KeyError as e:
-error_message = "no argument parser found for route '%s': %s" % (route, e)
+error_message = "no argument parser found for route '{}': {}".format(route, e)
 logger.error(error_message)
 raise MoulinetteValidationError(error_message, raw_msg=True)
 ret = argparse.Namespace()
@@ -251,7 +251,7 @@ class TTYHandler(logging.StreamHandler):
 # add translated level name before message
 level = "%s " % m18n.g(record.levelname.lower())
 color = self.LEVELS_COLOR.get(record.levelno, "white")
-msg = "{0}{1}{2}{3}".format(colors_codes[color], level, END_CLI_COLOR, msg)
+msg = "{}{}{}{}".format(colors_codes[color], level, END_CLI_COLOR, msg)
 if self.formatter:
 # use user-defined formatter
 record.__dict__[self.message_key] = msg
@@ -403,7 +403,7 @@ class ActionsMapParser(BaseActionsMapParser):
 except SystemExit:
 raise
 except Exception as e:
-error_message = "unable to parse arguments '%s' because: %s" % (
+error_message = "unable to parse arguments '{}' because: {}".format(
 " ".join(args),
 e,
 )
@@ -435,7 +435,7 @@ class ActionsMapParser(BaseActionsMapParser):
 except SystemExit:
 raise
 except Exception as e:
-error_message = "unable to parse arguments '%s' because: %s" % (
+error_message = "unable to parse arguments '{}' because: {}".format(
 " ".join(args),
 e,
 )
@@ -24,7 +24,7 @@ def read_file(file_path, file_mode="r"):
 """
 assert isinstance(
 file_path, str
-), "Error: file_path '%s' should be a string but is of type '%s' instead" % (
+), "Error: file_path '{}' should be a string but is of type '{}' instead".format(
 file_path,
 type(file_path),
 )
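In the filesystem helpers the rewritten expressions are assertion messages, so the formatted string is only built if the assert fails, and %-interpolation and .format() yield the same text when it does. A sketch with a hypothetical wrong argument:

    file_path = 42  # hypothetical non-string value, to exercise the message

    old_msg = "Error: file_path '%s' should be a string but is of type '%s' instead" % (
        file_path,
        type(file_path),
    )
    new_msg = "Error: file_path '{}' should be a string but is of type '{}' instead".format(
        file_path,
        type(file_path),
    )
    assert old_msg == new_msg  # both mention '42' and "<class 'int'>"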
@@ -121,7 +121,7 @@ def write_to_file(file_path, data, file_mode="w"):
 """
 assert (
 isinstance(data, str) or isinstance(data, bytes) or isinstance(data, list)
-), "Error: data '%s' should be either a string or a list but is of type '%s'" % (
+), "Error: data '{}' should be either a string or a list but is of type '{}'".format(
 data,
 type(data),
 )
@@ -130,7 +130,7 @@ def write_to_file(file_path, data, file_mode="w"):
 )
 assert os.path.isdir(
 os.path.dirname(file_path)
-), "Error: the path ('%s') base dir ('%s') is not a dir" % (
+), "Error: the path ('{}') base dir ('{}') is not a dir".format(
 file_path,
 os.path.dirname(file_path),
 )
@@ -140,7 +140,7 @@ def write_to_file(file_path, data, file_mode="w"):
 for element in data:
 assert isinstance(
 element, str
-), "Error: element '%s' should be a string but is of type '%s' instead" % (
+), "Error: element '{}' should be a string but is of type '{}' instead".format(
 element,
 type(element),
 )
@@ -179,13 +179,13 @@ def write_to_json(file_path, data, sort_keys=False, indent=None):
 # Assumptions
 assert isinstance(
 file_path, str
-), "Error: file_path '%s' should be a string but is of type '%s' instead" % (
+), "Error: file_path '{}' should be a string but is of type '{}' instead".format(
 file_path,
 type(file_path),
 )
 assert isinstance(data, dict) or isinstance(
 data, list
-), "Error: data '%s' should be a dict or a list but is of type '%s' instead" % (
+), "Error: data '{}' should be a dict or a list but is of type '{}' instead".format(
 data,
 type(data),
 )
@@ -194,7 +194,7 @@ def write_to_json(file_path, data, sort_keys=False, indent=None):
 )
 assert os.path.isdir(
 os.path.dirname(file_path)
-), "Error: the path ('%s') base dir ('%s') is not a dir" % (
+), "Error: the path ('{}') base dir ('{}') is not a dir".format(
 file_path,
 os.path.dirname(file_path),
 )
@@ -59,7 +59,7 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
 def prependlines(text, prepend):
 """Prepend a string to each line of a text"""
 lines = text.splitlines(True)
-return "%s%s" % (prepend, prepend.join(lines))
+return "{}{}".format(prepend, prepend.join(lines))


 # Randomize ------------------------------------------------------------
@@ -40,7 +40,7 @@ def patch_translate(moulinette):

 def patch_logging(moulinette):
 """Configure logging to use the custom logger."""
-handlers = set(["tty", "api"])
+handlers = {"tty", "api"}
 root_handlers = set(handlers)

 level = "INFO"
@@ -97,8 +97,8 @@ def moulinette(tmp_path_factory):
 moulinette.env["DATA_DIR"] = tmp_data
 moulinette.env["LIB_DIR"] = tmp_lib
 shutil.copytree("./test/actionsmap", "%s/actionsmap" % tmp_data)
-shutil.copytree("./test/src", "%s/%s" % (tmp_lib, namespace))
-shutil.copytree("./test/locales", "%s/%s/locales" % (tmp_lib, namespace))
+shutil.copytree("./test/src", "{}/{}".format(tmp_lib, namespace))
+shutil.copytree("./test/locales", "{}/{}/locales".format(tmp_lib, namespace))

 patch_init(moulinette)
 patch_translate(moulinette)
@@ -192,7 +192,7 @@ def test_extra_argument_parser_add_argument_bad_arg(iface):
 with pytest.raises(MoulinetteError) as exception:
 extra_argument_parse.add_argument("_global", "foo", {"ask": 1})

-expected_msg = "unable to validate extra parameter '%s' for argument '%s': %s" % (
+expected_msg = "unable to validate extra parameter '{}' for argument '{}': {}".format(
 "ask",
 "foo",
 "parameter value must be a string, got 1",
@@ -266,7 +266,7 @@ def test_actions_map_import_error(mocker):
 with pytest.raises(MoulinetteError) as exception:
 amap.process({}, timeout=30, route=("GET", "/test-auth/none"))

-expected_msg = "unable to load function % s.%s because: %s" % (
+expected_msg = "unable to load function {: }.{} because: {}".format(
 "moulitest",
 "testauth_none",
 "Yoloswag",
@@ -26,10 +26,10 @@ def find_inconsistencies(locale_file):
 # Then we check that every "{stuff}" (for python's .format())
 # should also be in the translated string, otherwise the .format
 # will trigger an exception!
-subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
-subkeys_in_this_locale = set(
+subkeys_in_ref = {k[0] for k in re.findall(r"{(\w+)(:\w)?}", string)}
+subkeys_in_this_locale = {
 k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key])
-)
+}

 if any(k not in subkeys_in_ref for k in subkeys_in_this_locale):
 yield """\n
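The last change in this hunk turns set(<generator>) into a set comprehension; the result is identical and the comprehension is the more direct spelling. A small sketch reusing the script's regex on an invented translation string:

    import re

    string = "unable to parse {file} because {reason:s}"  # hypothetical locale string

    old = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
    new = {k[0] for k in re.findall(r"{(\w+)(:\w)?}", string)}
    assert old == new == {"file", "reason"}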
@@ -5,7 +5,7 @@ from moulinette.interfaces import JSONExtendedEncoder
 def test_json_extended_encoder(caplog):
 encoder = JSONExtendedEncoder()

-assert encoder.default(set([1, 2, 3])) == [1, 2, 3]
+assert encoder.default({1, 2, 3}) == [1, 2, 3]

 assert encoder.default(dt(1917, 3, 8)) == "1917-03-08T00:00:00+00:00"

@@ -26,10 +26,10 @@ def find_inconsistencies(locale_file):
 # Then we check that every "{stuff}" (for python's .format())
 # should also be in the translated string, otherwise the .format
 # will trigger an exception!
-subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
-subkeys_in_this_locale = set(
+subkeys_in_ref = {k[0] for k in re.findall(r"{(\w+)(:\w)?}", string)}
+subkeys_in_this_locale = {
 k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key])
-)
+}

 if any(k not in subkeys_in_ref for k in subkeys_in_this_locale):
 yield """\n