Mirror of https://github.com/YunoHost/moulinette.git (synced 2024-09-03 20:06:31 +02:00)

commit 8127e7cd1a (parent 845399dba0)
[mod] run pyupgrade on source code
12 changed files with 43 additions and 43 deletions
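For context: pyupgrade mechanically rewrites legacy string-formatting and set-construction idioms into their modern Python 3 equivalents, which is the only kind of change this commit contains. A minimal illustrative sketch of the two rewrites applied throughout the hunks below (the variable values are made up for the example, not taken from the codebase):

    data_dir, namespace = "/usr/share/moulinette", "mytest"  # illustrative values
    # Before: old-style % interpolation and set() wrapping a list literal
    yml_path = "%s/actionsmap/%s.yml" % (data_dir, namespace)
    handlers = set(["tty", "api"])
    # After pyupgrade: equivalent str.format() call and set literal
    yml_path = "{}/actionsmap/{}.yml".format(data_dir, namespace)
    handlers = {"tty", "api"}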
@@ -410,7 +410,7 @@ class ActionsMap(object):
         for n in self.get_namespaces():
             logger.debug("loading actions map namespace '%s'", n)

-            actionsmap_yml = "%s/actionsmap/%s.yml" % (DATA_DIR, n)
+            actionsmap_yml = "{}/actionsmap/{}.yml".format(DATA_DIR, n)
             actionsmap_yml_stat = os.stat(actionsmap_yml)
             actionsmap_pkl = "%s/actionsmap/%s-%d-%d.pkl" % (
                 CACHE_DIR,
@@ -428,7 +428,7 @@ class ActionsMap(object):
             actionsmap = read_yaml(actionsmap_yml)

             # Delete old cache files
-            for old_cache in glob.glob("%s/actionsmap/%s-*.pkl" % (CACHE_DIR, n)):
+            for old_cache in glob.glob("{}/actionsmap/{}-*.pkl".format(CACHE_DIR, n)):
                 os.remove(old_cache)

             # at installation, cachedir might not exists
@@ -479,7 +479,7 @@ class ActionsMap(object):
             auth_method = self.default_authentication

         # Load and initialize the authenticator module
-        auth_module = "%s.authenticators.%s" % (self.main_namespace, auth_method)
+        auth_module = "{}.authenticators.{}".format(self.main_namespace, auth_method)
         logger.debug(f"Loading auth module {auth_module}")
         try:
             mod = import_module(auth_module)
@@ -532,12 +532,12 @@ class ActionsMap(object):
         # Retrieve action information
         if len(tid) == 4:
             namespace, category, subcategory, action = tid
-            func_name = "%s_%s_%s" % (
+            func_name = "{}_{}_{}".format(
                 category,
                 subcategory.replace("-", "_"),
                 action.replace("-", "_"),
             )
-            full_action_name = "%s.%s.%s.%s" % (
+            full_action_name = "{}.{}.{}.{}".format(
                 namespace,
                 category,
                 subcategory,
@@ -547,22 +547,22 @@ class ActionsMap(object):
             assert len(tid) == 3
             namespace, category, action = tid
             subcategory = None
-            func_name = "%s_%s" % (category, action.replace("-", "_"))
-            full_action_name = "%s.%s.%s" % (namespace, category, action)
+            func_name = "{}_{}".format(category, action.replace("-", "_"))
+            full_action_name = "{}.{}.{}".format(namespace, category, action)

         # Lock the moulinette for the namespace
         with MoulinetteLock(namespace, timeout):
             start = time()
             try:
                 mod = __import__(
-                    "%s.%s" % (namespace, category),
+                    "{}.{}".format(namespace, category),
                     globals=globals(),
                     level=0,
                     fromlist=[func_name],
                 )
                 logger.debug(
                     "loading python module %s took %.3fs",
-                    "%s.%s" % (namespace, category),
+                    "{}.{}".format(namespace, category),
                     time() - start,
                 )
                 func = getattr(mod, func_name)
@@ -570,7 +570,7 @@ class ActionsMap(object):
                 import traceback

                 traceback.print_exc()
-                error_message = "unable to load function %s.%s because: %s" % (
+                error_message = "unable to load function {}.{} because: {}".format(
                     namespace,
                     func_name,
                     e,
@@ -619,7 +619,7 @@ class ActionsMap(object):
         # Look for all files that match the given patterns in the actionsmap dir
         for namespace_pattern in NAMESPACE_PATTERNS:
             namespaces.extend(
-                glob.glob("%s/actionsmap/%s.yml" % (DATA_DIR, namespace_pattern))
+                glob.glob("{}/actionsmap/{}.yml".format(DATA_DIR, namespace_pattern))
             )

         # Keep only the filenames with extension

@@ -230,7 +230,7 @@ class Moulinette18n(object):
             # Create new Translator object
             lib_dir = env["LIB_DIR"]
             translator = Translator(
-                "%s/%s/locales" % (lib_dir, namespace), self.default_locale
+                "{}/{}/locales".format(lib_dir, namespace), self.default_locale
             )
             translator.set_locale(self.locale)
             self._namespaces[namespace] = translator

@@ -211,7 +211,7 @@ class _CallbackAction(argparse.Action):
             import traceback

             traceback.print_exc()
-            raise ValueError("unable to import method {0}".format(self.callback_method))
+            raise ValueError("unable to import method {}".format(self.callback_method))
         self._callback = func

     def __call__(self, parser, namespace, values, option_string=None):
@@ -226,7 +226,7 @@ class _CallbackAction(argparse.Action):
         except Exception as e:
             error_message = (
                 "cannot get value from callback method "
-                "'{0}': {1}".format(self.callback_method, e)
+                "'{}': {}".format(self.callback_method, e)
             )
             logger.exception(error_message)
             raise MoulinetteError(error_message, raw_msg=True)
@@ -562,7 +562,7 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
             usage = "\n".join(lines)

         # prefix with 'usage:'
-        return "%s%s\n\n" % (prefix, usage)
+        return "{}{}\n\n".format(prefix, usage)


 class JSONExtendedEncoder(JSONEncoder):

@@ -38,9 +38,9 @@ logger = log.getLogger("moulinette.interface.api")
 # We define a global variable to manage in a dirty way the upload...
 UPLOAD_DIR = None

-CSRF_TYPES = set(
-    ["text/plain", "application/x-www-form-urlencoded", "multipart/form-data"]
-)
+CSRF_TYPES = {
+    "text/plain", "application/x-www-form-urlencoded", "multipart/form-data"
+}


 def is_csrf():
@@ -667,7 +667,7 @@ class ActionsMapParser(BaseActionsMapParser):
             # Retrieve the tid for the route
             _, parser = self._parsers[route]
         except KeyError as e:
-            error_message = "no argument parser found for route '%s': %s" % (route, e)
+            error_message = "no argument parser found for route '{}': {}".format(route, e)
             logger.error(error_message)
             raise MoulinetteValidationError(error_message, raw_msg=True)

@@ -684,7 +684,7 @@ class ActionsMapParser(BaseActionsMapParser):
             # Retrieve the parser for the route
             _, parser = self._parsers[route]
         except KeyError as e:
-            error_message = "no argument parser found for route '%s': %s" % (route, e)
+            error_message = "no argument parser found for route '{}': {}".format(route, e)
             logger.error(error_message)
             raise MoulinetteValidationError(error_message, raw_msg=True)
         ret = argparse.Namespace()

@@ -251,7 +251,7 @@ class TTYHandler(logging.StreamHandler):
             # add translated level name before message
             level = "%s " % m18n.g(record.levelname.lower())
             color = self.LEVELS_COLOR.get(record.levelno, "white")
-            msg = "{0}{1}{2}{3}".format(colors_codes[color], level, END_CLI_COLOR, msg)
+            msg = "{}{}{}{}".format(colors_codes[color], level, END_CLI_COLOR, msg)
         if self.formatter:
             # use user-defined formatter
             record.__dict__[self.message_key] = msg
@@ -403,7 +403,7 @@ class ActionsMapParser(BaseActionsMapParser):
         except SystemExit:
             raise
         except Exception as e:
-            error_message = "unable to parse arguments '%s' because: %s" % (
+            error_message = "unable to parse arguments '{}' because: {}".format(
                 " ".join(args),
                 e,
             )
@@ -435,7 +435,7 @@ class ActionsMapParser(BaseActionsMapParser):
         except SystemExit:
             raise
         except Exception as e:
-            error_message = "unable to parse arguments '%s' because: %s" % (
+            error_message = "unable to parse arguments '{}' because: {}".format(
                 " ".join(args),
                 e,
             )

@@ -24,7 +24,7 @@ def read_file(file_path, file_mode="r"):
     """
     assert isinstance(
         file_path, str
-    ), "Error: file_path '%s' should be a string but is of type '%s' instead" % (
+    ), "Error: file_path '{}' should be a string but is of type '{}' instead".format(
         file_path,
         type(file_path),
     )
@@ -121,7 +121,7 @@ def write_to_file(file_path, data, file_mode="w"):
     """
     assert (
         isinstance(data, str) or isinstance(data, bytes) or isinstance(data, list)
-    ), "Error: data '%s' should be either a string or a list but is of type '%s'" % (
+    ), "Error: data '{}' should be either a string or a list but is of type '{}'".format(
         data,
         type(data),
     )
@@ -130,7 +130,7 @@ def write_to_file(file_path, data, file_mode="w"):
     )
     assert os.path.isdir(
         os.path.dirname(file_path)
-    ), "Error: the path ('%s') base dir ('%s') is not a dir" % (
+    ), "Error: the path ('{}') base dir ('{}') is not a dir".format(
         file_path,
         os.path.dirname(file_path),
     )
@@ -140,7 +140,7 @@ def write_to_file(file_path, data, file_mode="w"):
         for element in data:
             assert isinstance(
                 element, str
-            ), "Error: element '%s' should be a string but is of type '%s' instead" % (
+            ), "Error: element '{}' should be a string but is of type '{}' instead".format(
                 element,
                 type(element),
             )
@@ -179,13 +179,13 @@ def write_to_json(file_path, data, sort_keys=False, indent=None):
     # Assumptions
     assert isinstance(
         file_path, str
-    ), "Error: file_path '%s' should be a string but is of type '%s' instead" % (
+    ), "Error: file_path '{}' should be a string but is of type '{}' instead".format(
         file_path,
         type(file_path),
     )
     assert isinstance(data, dict) or isinstance(
         data, list
-    ), "Error: data '%s' should be a dict or a list but is of type '%s' instead" % (
+    ), "Error: data '{}' should be a dict or a list but is of type '{}' instead".format(
         data,
         type(data),
     )
@@ -194,7 +194,7 @@ def write_to_json(file_path, data, sort_keys=False, indent=None):
     )
     assert os.path.isdir(
         os.path.dirname(file_path)
-    ), "Error: the path ('%s') base dir ('%s') is not a dir" % (
+    ), "Error: the path ('{}') base dir ('{}') is not a dir".format(
         file_path,
         os.path.dirname(file_path),
     )

@@ -59,7 +59,7 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
 def prependlines(text, prepend):
     """Prepend a string to each line of a text"""
     lines = text.splitlines(True)
-    return "%s%s" % (prepend, prepend.join(lines))
+    return "{}{}".format(prepend, prepend.join(lines))


 # Randomize ------------------------------------------------------------

@@ -40,7 +40,7 @@ def patch_translate(moulinette):

 def patch_logging(moulinette):
     """Configure logging to use the custom logger."""
-    handlers = set(["tty", "api"])
+    handlers = {"tty", "api"}
     root_handlers = set(handlers)

     level = "INFO"
@@ -97,8 +97,8 @@ def moulinette(tmp_path_factory):
     moulinette.env["DATA_DIR"] = tmp_data
     moulinette.env["LIB_DIR"] = tmp_lib
     shutil.copytree("./test/actionsmap", "%s/actionsmap" % tmp_data)
-    shutil.copytree("./test/src", "%s/%s" % (tmp_lib, namespace))
-    shutil.copytree("./test/locales", "%s/%s/locales" % (tmp_lib, namespace))
+    shutil.copytree("./test/src", "{}/{}".format(tmp_lib, namespace))
+    shutil.copytree("./test/locales", "{}/{}/locales".format(tmp_lib, namespace))

     patch_init(moulinette)
     patch_translate(moulinette)

@@ -192,7 +192,7 @@ def test_extra_argument_parser_add_argument_bad_arg(iface):
     with pytest.raises(MoulinetteError) as exception:
         extra_argument_parse.add_argument("_global", "foo", {"ask": 1})

-    expected_msg = "unable to validate extra parameter '%s' for argument '%s': %s" % (
+    expected_msg = "unable to validate extra parameter '{}' for argument '{}': {}".format(
         "ask",
         "foo",
         "parameter value must be a string, got 1",
@@ -266,7 +266,7 @@ def test_actions_map_import_error(mocker):
     with pytest.raises(MoulinetteError) as exception:
         amap.process({}, timeout=30, route=("GET", "/test-auth/none"))

-    expected_msg = "unable to load function % s.%s because: %s" % (
+    expected_msg = "unable to load function {: }.{} because: {}".format(
         "moulitest",
         "testauth_none",
         "Yoloswag",

@@ -26,10 +26,10 @@ def find_inconsistencies(locale_file):
         # Then we check that every "{stuff}" (for python's .format())
         # should also be in the translated string, otherwise the .format
         # will trigger an exception!
-        subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
-        subkeys_in_this_locale = set(
+        subkeys_in_ref = {k[0] for k in re.findall(r"{(\w+)(:\w)?}", string)}
+        subkeys_in_this_locale = {
             k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key])
-        )
+        }

         if any(k not in subkeys_in_ref for k in subkeys_in_this_locale):
             yield """\n

@@ -5,7 +5,7 @@ from moulinette.interfaces import JSONExtendedEncoder
 def test_json_extended_encoder(caplog):
     encoder = JSONExtendedEncoder()

-    assert encoder.default(set([1, 2, 3])) == [1, 2, 3]
+    assert encoder.default({1, 2, 3}) == [1, 2, 3]

     assert encoder.default(dt(1917, 3, 8)) == "1917-03-08T00:00:00+00:00"

@@ -26,10 +26,10 @@ def find_inconsistencies(locale_file):
         # Then we check that every "{stuff}" (for python's .format())
         # should also be in the translated string, otherwise the .format
         # will trigger an exception!
-        subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
-        subkeys_in_this_locale = set(
+        subkeys_in_ref = {k[0] for k in re.findall(r"{(\w+)(:\w)?}", string)}
+        subkeys_in_this_locale = {
             k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key])
-        )
+        }

         if any(k not in subkeys_in_ref for k in subkeys_in_this_locale):
             yield """\n