Use black to format source code
parent 6829a3dc7e
commit 7c3517e730
25 changed files with 715 additions and 405 deletions
@@ -6,6 +6,8 @@ matrix:
      env: TOXENV=py27
    - python: 2.7
      env: TOXENV=lint
    - python: 3.6
      env: TOXENV=format-check
    - python: 2.7
      env: TOXENV=docs
@@ -1,5 +1,6 @@
[](https://travis-ci.org/YunoHost/moulinette)
[](https://github.com/YunoHost/moulinette/blob/stretch-unstable/LICENSE)
[](https://github.com/psf/black)

Moulinette
==========
@@ -61,3 +62,10 @@ Testing
$ pip install tox
$ tox
```

A note regarding the use of [Black](https://github.com/psf/black) for source
code formatting: the actual source code of Moulinette is still written in
Python 2. Black can still format this code, but it must run within a Python 3
environment. Therefore, you'll need to manage this environment switch when you
invoke Black through Tox (`tox -e format`). An environment created with your
system Python 3 should suffice (`python3 -m venv .venv` etc.).
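For instance, a minimal sketch of that workflow (the `.venv` path is only an
example) could look like:

```
$ python3 -m venv .venv
$ source .venv/bin/activate
$ pip install tox
$ tox -e format
```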
|
|
|
@ -1,15 +1,16 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from moulinette.core import init_interface, MoulinetteError, MoulinetteSignals, Moulinette18n
|
||||
from moulinette.core import (
|
||||
init_interface,
|
||||
MoulinetteError,
|
||||
MoulinetteSignals,
|
||||
Moulinette18n,
|
||||
)
|
||||
from moulinette.globals import init_moulinette_env
|
||||
|
||||
__title__ = 'moulinette'
|
||||
__version__ = '0.1'
|
||||
__author__ = ['Kload',
|
||||
'jlebleu',
|
||||
'titoko',
|
||||
'beudbeud',
|
||||
'npze']
|
||||
__author__ = ['Kload', 'jlebleu', 'titoko', 'beudbeud', 'npze']
|
||||
__license__ = 'AGPL 3.0'
|
||||
__credits__ = """
|
||||
Copyright (C) 2014 YUNOHOST.ORG
|
||||
|
@ -27,10 +28,7 @@ __credits__ = """
|
|||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program; if not, see http://www.gnu.org/licenses
|
||||
"""
|
||||
__all__ = [
|
||||
'init', 'api', 'cli', 'm18n', 'env',
|
||||
'init_interface', 'MoulinetteError',
|
||||
]
|
||||
__all__ = ['init', 'api', 'cli', 'm18n', 'env', 'init_interface', 'MoulinetteError']
|
||||
|
||||
|
||||
msignals = MoulinetteSignals()
|
||||
|
@ -40,6 +38,7 @@ m18n = Moulinette18n()
|
|||
|
||||
# Package functions
|
||||
|
||||
|
||||
def init(logging_config=None, **kwargs):
|
||||
"""Package initialization
|
||||
|
||||
|
@ -66,8 +65,10 @@ def init(logging_config=None, **kwargs):
|
|||
|
||||
# Easy access to interfaces
|
||||
|
||||
def api(namespaces, host='localhost', port=80, routes={},
|
||||
use_websocket=True, use_cache=True):
|
||||
|
||||
def api(
|
||||
namespaces, host='localhost', port=80, routes={}, use_websocket=True, use_cache=True
|
||||
):
|
||||
"""Web server (API) interface
|
||||
|
||||
Run a HTTP server with the moulinette for an API usage.
|
||||
|
@ -84,29 +85,33 @@ def api(namespaces, host='localhost', port=80, routes={},
|
|||
|
||||
"""
|
||||
try:
|
||||
moulinette = init_interface('api',
|
||||
kwargs={
|
||||
'routes': routes,
|
||||
'use_websocket': use_websocket
|
||||
},
|
||||
actionsmap={
|
||||
'namespaces': namespaces,
|
||||
'use_cache': use_cache
|
||||
}
|
||||
moulinette = init_interface(
|
||||
'api',
|
||||
kwargs={'routes': routes, 'use_websocket': use_websocket},
|
||||
actionsmap={'namespaces': namespaces, 'use_cache': use_cache},
|
||||
)
|
||||
moulinette.run(host, port)
|
||||
except MoulinetteError as e:
|
||||
import logging
|
||||
|
||||
logging.getLogger(namespaces[0]).error(e.strerror)
|
||||
return e.errno if hasattr(e, "errno") else 1
|
||||
except KeyboardInterrupt:
|
||||
import logging
|
||||
|
||||
logging.getLogger(namespaces[0]).info(m18n.g('operation_interrupted'))
|
||||
return 0
|
||||
|
||||
|
||||
def cli(namespaces, args, use_cache=True, output_as=None,
|
||||
password=None, timeout=None, parser_kwargs={}):
|
||||
def cli(
|
||||
namespaces,
|
||||
args,
|
||||
use_cache=True,
|
||||
output_as=None,
|
||||
password=None,
|
||||
timeout=None,
|
||||
parser_kwargs={},
|
||||
):
|
||||
"""Command line interface
|
||||
|
||||
Execute an action with the moulinette from the CLI and print its
|
||||
|
@ -125,7 +130,8 @@ def cli(namespaces, args, use_cache=True, output_as=None,
|
|||
|
||||
"""
|
||||
try:
|
||||
moulinette = init_interface('cli',
|
||||
moulinette = init_interface(
|
||||
'cli',
|
||||
actionsmap={
|
||||
'namespaces': namespaces,
|
||||
'use_cache': use_cache,
|
||||
|
@ -135,6 +141,7 @@ def cli(namespaces, args, use_cache=True, output_as=None,
|
|||
moulinette.run(args, output_as=output_as, password=password, timeout=timeout)
|
||||
except MoulinetteError as e:
|
||||
import logging
|
||||
|
||||
logging.getLogger(namespaces[0]).error(e.strerror)
|
||||
return 1
|
||||
return 0
|
||||
|
|
|
@ -11,10 +11,8 @@ from collections import OrderedDict
|
|||
from moulinette import m18n, msignals
|
||||
from moulinette.cache import open_cachefile
|
||||
from moulinette.globals import init_moulinette_env
|
||||
from moulinette.core import (MoulinetteError, MoulinetteLock)
|
||||
from moulinette.interfaces import (
|
||||
BaseActionsMapParser, GLOBAL_SECTION, TO_RETURN_PROP
|
||||
)
|
||||
from moulinette.core import MoulinetteError, MoulinetteLock
|
||||
from moulinette.interfaces import BaseActionsMapParser, GLOBAL_SECTION, TO_RETURN_PROP
|
||||
from moulinette.utils.log import start_action_logging
|
||||
|
||||
logger = logging.getLogger('moulinette.actionsmap')
|
||||
|
@ -24,6 +22,7 @@ logger = logging.getLogger('moulinette.actionsmap')
|
|||
|
||||
# Extra parameters definition
|
||||
|
||||
|
||||
class _ExtraParameter(object):
|
||||
|
||||
"""
|
||||
|
@ -95,12 +94,15 @@ class CommentParameter(_ExtraParameter):
|
|||
def validate(klass, value, arg_name):
|
||||
# Deprecated boolean or empty string
|
||||
if isinstance(value, bool) or (isinstance(value, str) and not value):
|
||||
logger.warning("expecting a non-empty string for extra parameter '%s' of "
|
||||
"argument '%s'", klass.name, arg_name)
|
||||
logger.warning(
|
||||
"expecting a non-empty string for extra parameter '%s' of "
|
||||
"argument '%s'",
|
||||
klass.name,
|
||||
arg_name,
|
||||
)
|
||||
value = arg_name
|
||||
elif not isinstance(value, str):
|
||||
raise TypeError("parameter value must be a string, got %r"
|
||||
% value)
|
||||
raise TypeError("parameter value must be a string, got %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
|
@ -113,6 +115,7 @@ class AskParameter(_ExtraParameter):
|
|||
when asking the argument value.
|
||||
|
||||
"""
|
||||
|
||||
name = 'ask'
|
||||
skipped_iface = ['api']
|
||||
|
||||
|
@ -130,12 +133,15 @@ class AskParameter(_ExtraParameter):
|
|||
def validate(klass, value, arg_name):
|
||||
# Deprecated boolean or empty string
|
||||
if isinstance(value, bool) or (isinstance(value, str) and not value):
|
||||
logger.warning("expecting a non-empty string for extra parameter '%s' of "
|
||||
"argument '%s'", klass.name, arg_name)
|
||||
logger.warning(
|
||||
"expecting a non-empty string for extra parameter '%s' of "
|
||||
"argument '%s'",
|
||||
klass.name,
|
||||
arg_name,
|
||||
)
|
||||
value = arg_name
|
||||
elif not isinstance(value, str):
|
||||
raise TypeError("parameter value must be a string, got %r"
|
||||
% value)
|
||||
raise TypeError("parameter value must be a string, got %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
|
@ -148,6 +154,7 @@ class PasswordParameter(AskParameter):
|
|||
when asking the password.
|
||||
|
||||
"""
|
||||
|
||||
name = 'password'
|
||||
|
||||
def __call__(self, message, arg_name, arg_value):
|
||||
|
@ -170,6 +177,7 @@ class PatternParameter(_ExtraParameter):
|
|||
the message to display if it doesn't match.
|
||||
|
||||
"""
|
||||
|
||||
name = 'pattern'
|
||||
|
||||
def __call__(self, arguments, arg_name, arg_value):
|
||||
|
@ -182,28 +190,32 @@ class PatternParameter(_ExtraParameter):
|
|||
v = arg_value
|
||||
|
||||
if v and not re.match(pattern, v or '', re.UNICODE):
|
||||
logger.debug("argument value '%s' for '%s' doesn't match pattern '%s'",
|
||||
v, arg_name, pattern)
|
||||
logger.debug(
|
||||
"argument value '%s' for '%s' doesn't match pattern '%s'",
|
||||
v,
|
||||
arg_name,
|
||||
pattern,
|
||||
)
|
||||
|
||||
# Attempt to retrieve message translation
|
||||
msg = m18n.n(message)
|
||||
if msg == message:
|
||||
msg = m18n.g(message)
|
||||
|
||||
raise MoulinetteError('invalid_argument',
|
||||
argument=arg_name, error=msg)
|
||||
raise MoulinetteError('invalid_argument', argument=arg_name, error=msg)
|
||||
return arg_value
|
||||
|
||||
@staticmethod
|
||||
def validate(value, arg_name):
|
||||
# Deprecated string type
|
||||
if isinstance(value, str):
|
||||
logger.warning("expecting a list as extra parameter 'pattern' of "
|
||||
"argument '%s'", arg_name)
|
||||
logger.warning(
|
||||
"expecting a list as extra parameter 'pattern' of " "argument '%s'",
|
||||
arg_name,
|
||||
)
|
||||
value = [value, 'pattern_not_match']
|
||||
elif not isinstance(value, list) or len(value) != 2:
|
||||
raise TypeError("parameter value must be a list, got %r"
|
||||
% value)
|
||||
raise TypeError("parameter value must be a list, got %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
|
@ -215,21 +227,19 @@ class RequiredParameter(_ExtraParameter):
|
|||
The value of this parameter must be a boolean which is set to False by
|
||||
default.
|
||||
"""
|
||||
|
||||
name = 'required'
|
||||
|
||||
def __call__(self, required, arg_name, arg_value):
|
||||
if required and (arg_value is None or arg_value == ''):
|
||||
logger.debug("argument '%s' is required",
|
||||
arg_name)
|
||||
raise MoulinetteError('argument_required',
|
||||
argument=arg_name)
|
||||
logger.debug("argument '%s' is required", arg_name)
|
||||
raise MoulinetteError('argument_required', argument=arg_name)
|
||||
return arg_value
|
||||
|
||||
@staticmethod
|
||||
def validate(value, arg_name):
|
||||
if not isinstance(value, bool):
|
||||
raise TypeError("parameter value must be a list, got %r"
|
||||
% value)
|
||||
raise TypeError("parameter value must be a list, got %r" % value)
|
||||
return value
|
||||
|
||||
|
||||
|
@ -238,8 +248,13 @@ The list of available extra parameters classes. It will keep to this list
|
|||
order on argument parsing.
|
||||
|
||||
"""
|
||||
extraparameters_list = [CommentParameter, AskParameter, PasswordParameter,
|
||||
RequiredParameter, PatternParameter]
|
||||
extraparameters_list = [
|
||||
CommentParameter,
|
||||
AskParameter,
|
||||
PasswordParameter,
|
||||
RequiredParameter,
|
||||
PatternParameter,
|
||||
]
|
||||
|
||||
# Extra parameters argument Parser
|
||||
|
||||
|
@ -286,8 +301,13 @@ class ExtraArgumentParser(object):
|
|||
# Validate parameter value
|
||||
parameters[p] = klass.validate(v, arg_name)
|
||||
except Exception as e:
|
||||
logger.error("unable to validate extra parameter '%s' "
|
||||
"for argument '%s': %s", p, arg_name, e)
|
||||
logger.error(
|
||||
"unable to validate extra parameter '%s' "
|
||||
"for argument '%s': %s",
|
||||
p,
|
||||
arg_name,
|
||||
e,
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
|
||||
return parameters
|
||||
|
@ -353,12 +373,15 @@ class ExtraArgumentParser(object):
|
|||
|
||||
# Main class ----------------------------------------------------------
|
||||
|
||||
|
||||
def ordered_yaml_load(stream):
|
||||
class OrderedLoader(yaml.Loader):
|
||||
pass
|
||||
|
||||
OrderedLoader.add_constructor(
|
||||
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
|
||||
lambda loader, node: OrderedDict(loader.construct_pairs(node)))
|
||||
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
|
||||
)
|
||||
return yaml.load(stream, OrderedLoader)
|
||||
|
||||
|
||||
|
@ -386,8 +409,7 @@ class ActionsMap(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, parser_class, namespaces=[], use_cache=True,
|
||||
parser_kwargs={}):
|
||||
def __init__(self, parser_class, namespaces=[], use_cache=True, parser_kwargs={}):
|
||||
if not issubclass(parser_class, BaseActionsMapParser):
|
||||
raise ValueError("Invalid parser class '%s'" % parser_class.__name__)
|
||||
self.parser_class = parser_class
|
||||
|
@ -411,7 +433,7 @@ class ActionsMap(object):
|
|||
CACHE_DIR,
|
||||
n,
|
||||
actionsmap_yml_stat.st_size,
|
||||
actionsmap_yml_stat.st_mtime
|
||||
actionsmap_yml_stat.st_mtime,
|
||||
)
|
||||
|
||||
if use_cache and os.path.exists(actionsmap_pkl):
|
||||
|
@ -487,8 +509,17 @@ class ActionsMap(object):
|
|||
# Retrieve action information
|
||||
if len(tid) == 4:
|
||||
namespace, category, subcategory, action = tid
|
||||
func_name = '%s_%s_%s' % (category, subcategory.replace('-', '_'), action.replace('-', '_'))
|
||||
full_action_name = "%s.%s.%s.%s" % (namespace, category, subcategory, action)
|
||||
func_name = '%s_%s_%s' % (
|
||||
category,
|
||||
subcategory.replace('-', '_'),
|
||||
action.replace('-', '_'),
|
||||
)
|
||||
full_action_name = "%s.%s.%s.%s" % (
|
||||
namespace,
|
||||
category,
|
||||
subcategory,
|
||||
action,
|
||||
)
|
||||
else:
|
||||
assert len(tid) == 3
|
||||
namespace, category, action = tid
|
||||
|
@ -500,25 +531,33 @@ class ActionsMap(object):
|
|||
with MoulinetteLock(namespace, timeout):
|
||||
start = time()
|
||||
try:
|
||||
mod = __import__('%s.%s' % (namespace, category),
|
||||
globals=globals(), level=0,
|
||||
fromlist=[func_name])
|
||||
logger.debug('loading python module %s took %.3fs',
|
||||
'%s.%s' % (namespace, category), time() - start)
|
||||
mod = __import__(
|
||||
'%s.%s' % (namespace, category),
|
||||
globals=globals(),
|
||||
level=0,
|
||||
fromlist=[func_name],
|
||||
)
|
||||
logger.debug(
|
||||
'loading python module %s took %.3fs',
|
||||
'%s.%s' % (namespace, category),
|
||||
time() - start,
|
||||
)
|
||||
func = getattr(mod, func_name)
|
||||
except (AttributeError, ImportError):
|
||||
logger.exception("unable to load function %s.%s",
|
||||
namespace, func_name)
|
||||
logger.exception("unable to load function %s.%s", namespace, func_name)
|
||||
raise MoulinetteError('error_see_log')
|
||||
else:
|
||||
log_id = start_action_logging()
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
# Log arguments in debug mode only for safety reasons
|
||||
logger.info('processing action [%s]: %s with args=%s',
|
||||
log_id, full_action_name, arguments)
|
||||
logger.info(
|
||||
'processing action [%s]: %s with args=%s',
|
||||
log_id,
|
||||
full_action_name,
|
||||
arguments,
|
||||
)
|
||||
else:
|
||||
logger.info('processing action [%s]: %s',
|
||||
log_id, full_action_name)
|
||||
logger.info('processing action [%s]: %s', log_id, full_action_name)
|
||||
|
||||
# Load translation and process the action
|
||||
m18n.load_namespace(namespace)
|
||||
|
@ -527,8 +566,7 @@ class ActionsMap(object):
|
|||
return func(**arguments)
|
||||
finally:
|
||||
stop = time()
|
||||
logger.debug('action [%s] executed in %.3fs',
|
||||
log_id, stop - start)
|
||||
logger.debug('action [%s] executed in %.3fs', log_id, stop - start)
|
||||
|
||||
@staticmethod
|
||||
def get_namespaces():
|
||||
|
@ -654,8 +692,9 @@ class ActionsMap(object):
|
|||
subcategories = {}
|
||||
|
||||
# Get category parser
|
||||
category_parser = top_parser.add_category_parser(category_name,
|
||||
**category_values)
|
||||
category_parser = top_parser.add_category_parser(
|
||||
category_name, **category_values
|
||||
)
|
||||
|
||||
# action_name is like "list" of "domain list"
|
||||
# action_options are the values
|
||||
|
@ -664,19 +703,21 @@ class ActionsMap(object):
|
|||
tid = (namespace, category_name, action_name)
|
||||
|
||||
# Get action parser
|
||||
action_parser = category_parser.add_action_parser(action_name,
|
||||
tid,
|
||||
**action_options)
|
||||
action_parser = category_parser.add_action_parser(
|
||||
action_name, tid, **action_options
|
||||
)
|
||||
|
||||
if action_parser is None: # No parser for the action
|
||||
continue
|
||||
|
||||
# Store action identifier and add arguments
|
||||
action_parser.set_defaults(_tid=tid)
|
||||
action_parser.add_arguments(arguments,
|
||||
extraparser=self.extraparser,
|
||||
format_arg_names=top_parser.format_arg_names,
|
||||
validate_extra=validate_extra)
|
||||
action_parser.add_arguments(
|
||||
arguments,
|
||||
extraparser=self.extraparser,
|
||||
format_arg_names=top_parser.format_arg_names,
|
||||
validate_extra=validate_extra,
|
||||
)
|
||||
|
||||
if 'configuration' in action_options:
|
||||
category_parser.set_conf(tid, action_options['configuration'])
|
||||
|
@ -688,7 +729,9 @@ class ActionsMap(object):
|
|||
actions = subcategory_values.pop('actions')
|
||||
|
||||
# Get subcategory parser
|
||||
subcategory_parser = category_parser.add_subcategory_parser(subcategory_name, **subcategory_values)
|
||||
subcategory_parser = category_parser.add_subcategory_parser(
|
||||
subcategory_name, **subcategory_values
|
||||
)
|
||||
|
||||
# action_name is like "status" of "domain cert status"
|
||||
# action_options are the values
|
||||
|
@ -698,19 +741,25 @@ class ActionsMap(object):
|
|||
|
||||
try:
|
||||
# Get action parser
|
||||
action_parser = subcategory_parser.add_action_parser(action_name, tid, **action_options)
|
||||
action_parser = subcategory_parser.add_action_parser(
|
||||
action_name, tid, **action_options
|
||||
)
|
||||
except AttributeError:
|
||||
# No parser for the action
|
||||
continue
|
||||
|
||||
# Store action identifier and add arguments
|
||||
action_parser.set_defaults(_tid=tid)
|
||||
action_parser.add_arguments(arguments,
|
||||
extraparser=self.extraparser,
|
||||
format_arg_names=top_parser.format_arg_names,
|
||||
validate_extra=validate_extra)
|
||||
action_parser.add_arguments(
|
||||
arguments,
|
||||
extraparser=self.extraparser,
|
||||
format_arg_names=top_parser.format_arg_names,
|
||||
validate_extra=validate_extra,
|
||||
)
|
||||
|
||||
if 'configuration' in action_options:
|
||||
category_parser.set_conf(tid, action_options['configuration'])
|
||||
category_parser.set_conf(
|
||||
tid, action_options['configuration']
|
||||
)
|
||||
|
||||
return top_parser
|
||||
|
|
|
@ -11,6 +11,7 @@ logger = logging.getLogger('moulinette.authenticator')
|
|||
|
||||
# Base Class -----------------------------------------------------------
|
||||
|
||||
|
||||
class BaseAuthenticator(object):
|
||||
|
||||
"""Authenticator base representation
|
||||
|
@ -46,8 +47,9 @@ class BaseAuthenticator(object):
|
|||
@property
|
||||
def is_authenticated(self):
|
||||
"""Either the instance is authenticated or not"""
|
||||
raise NotImplementedError("derived class '%s' must override this property" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this property" % self.__class__.__name__
|
||||
)
|
||||
|
||||
# Virtual methods
|
||||
# Each authenticator classes must implement these methods.
|
||||
|
@ -62,8 +64,9 @@ class BaseAuthenticator(object):
|
|||
- password -- A clear text password
|
||||
|
||||
"""
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
# Authentication methods
|
||||
|
||||
|
@ -94,7 +97,9 @@ class BaseAuthenticator(object):
|
|||
# Extract id and hash from token
|
||||
s_id, s_hash = token
|
||||
except TypeError as e:
|
||||
logger.error("unable to extract token parts from '%s' because '%s'", token, e)
|
||||
logger.error(
|
||||
"unable to extract token parts from '%s' because '%s'", token, e
|
||||
)
|
||||
if password is None:
|
||||
raise MoulinetteError('error_see_log')
|
||||
|
||||
|
@ -111,8 +116,12 @@ class BaseAuthenticator(object):
|
|||
except MoulinetteError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception("authentication (name: '%s', vendor: '%s') fails because '%s'",
|
||||
self.name, self.vendor, e)
|
||||
logger.exception(
|
||||
"authentication (name: '%s', vendor: '%s') fails because '%s'",
|
||||
self.name,
|
||||
self.vendor,
|
||||
e,
|
||||
)
|
||||
raise MoulinetteError('unable_authenticate')
|
||||
|
||||
# Store session
|
||||
|
@ -121,6 +130,7 @@ class BaseAuthenticator(object):
|
|||
self._store_session(s_id, s_hash, password)
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
logger.exception("unable to store session because %s", e)
|
||||
else:
|
||||
|
@ -132,8 +142,9 @@ class BaseAuthenticator(object):
|
|||
|
||||
def _open_sessionfile(self, session_id, mode='r'):
|
||||
"""Open a session file for this instance in given mode"""
|
||||
return open_cachefile('%s.asc' % session_id, mode,
|
||||
subdir='session/%s' % self.name)
|
||||
return open_cachefile(
|
||||
'%s.asc' % session_id, mode, subdir='session/%s' % self.name
|
||||
)
|
||||
|
||||
def _store_session(self, session_id, session_hash, password):
|
||||
"""Store a session and its associated password"""
|
||||
|
@ -142,7 +153,9 @@ class BaseAuthenticator(object):
|
|||
|
||||
# Encrypt the password using the session hash
|
||||
s = str(gpg.encrypt(password, None, symmetric=True, passphrase=session_hash))
|
||||
assert len(s), "For some reason GPG can't perform encryption, maybe check /root/.gnupg/gpg.conf or re-run with gpg = gnupg.GPG(verbose=True) ?"
|
||||
assert len(
|
||||
s
|
||||
), "For some reason GPG can't perform encryption, maybe check /root/.gnupg/gpg.conf or re-run with gpg = gnupg.GPG(verbose=True) ?"
|
||||
|
||||
with self._open_sessionfile(session_id, 'w') as f:
|
||||
f.write(s)
|
||||
|
@ -161,7 +174,11 @@ class BaseAuthenticator(object):
|
|||
|
||||
decrypted = gpg.decrypt(enc_pwd, passphrase=session_hash)
|
||||
if decrypted.ok is not True:
|
||||
error_message = "unable to decrypt password for the session: %s" % decrypted.status
|
||||
error_message = (
|
||||
"unable to decrypt password for the session: %s" % decrypted.status
|
||||
)
|
||||
logger.error(error_message)
|
||||
raise MoulinetteError('unable_retrieve_session', exception=error_message)
|
||||
raise MoulinetteError(
|
||||
'unable_retrieve_session', exception=error_message
|
||||
)
|
||||
return decrypted.data
|
||||
|
|
|
@ -18,6 +18,7 @@ logger = logging.getLogger('moulinette.authenticator.ldap')
|
|||
|
||||
# LDAP Class Implementation --------------------------------------------
|
||||
|
||||
|
||||
class Authenticator(BaseAuthenticator):
|
||||
|
||||
"""LDAP Authenticator
|
||||
|
@ -34,8 +35,14 @@ class Authenticator(BaseAuthenticator):
|
|||
"""
|
||||
|
||||
def __init__(self, name, uri, base_dn, user_rdn=None):
|
||||
logger.debug("initialize authenticator '%s' with: uri='%s', "
|
||||
"base_dn='%s', user_rdn='%s'", name, uri, base_dn, user_rdn)
|
||||
logger.debug(
|
||||
"initialize authenticator '%s' with: uri='%s', "
|
||||
"base_dn='%s', user_rdn='%s'",
|
||||
name,
|
||||
uri,
|
||||
base_dn,
|
||||
user_rdn,
|
||||
)
|
||||
super(Authenticator, self).__init__(name)
|
||||
|
||||
self.uri = uri
|
||||
|
@ -79,7 +86,9 @@ class Authenticator(BaseAuthenticator):
|
|||
|
||||
def authenticate(self, password):
|
||||
try:
|
||||
con = ldap.ldapobject.ReconnectLDAPObject(self.uri, retry_max=10, retry_delay=0.5)
|
||||
con = ldap.ldapobject.ReconnectLDAPObject(
|
||||
self.uri, retry_max=10, retry_delay=0.5
|
||||
)
|
||||
if self.userdn:
|
||||
if 'cn=external,cn=auth' in self.userdn:
|
||||
con.sasl_non_interactive_bind_s('EXTERNAL')
|
||||
|
@ -99,13 +108,16 @@ class Authenticator(BaseAuthenticator):
|
|||
def _ensure_password_uses_strong_hash(self, password):
|
||||
# XXX this has been copy pasted from YunoHost, should we put that into moulinette?
|
||||
def _hash_user_password(password):
|
||||
char_set = string.ascii_uppercase + string.ascii_lowercase + string.digits + "./"
|
||||
char_set = (
|
||||
string.ascii_uppercase + string.ascii_lowercase + string.digits + "./"
|
||||
)
|
||||
salt = ''.join([random.SystemRandom().choice(char_set) for x in range(16)])
|
||||
salt = '$6$' + salt + '$'
|
||||
return '{CRYPT}' + crypt.crypt(str(password), salt)
|
||||
|
||||
hashed_password = self.search("cn=admin,dc=yunohost,dc=org",
|
||||
attrs=["userPassword"])[0]
|
||||
hashed_password = self.search(
|
||||
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
|
||||
)[0]
|
||||
|
||||
# post-install situation, password is not already set
|
||||
if "userPassword" not in hashed_password or not hashed_password["userPassword"]:
|
||||
|
@ -113,9 +125,7 @@ class Authenticator(BaseAuthenticator):
|
|||
|
||||
# we aren't using sha-512 but something else that is weaker, proceed to upgrade
|
||||
if not hashed_password["userPassword"][0].startswith("{CRYPT}$6$"):
|
||||
self.update("cn=admin", {
|
||||
"userPassword": _hash_user_password(password),
|
||||
})
|
||||
self.update("cn=admin", {"userPassword": _hash_user_password(password)})
|
||||
|
||||
# Additional LDAP methods
|
||||
# TODO: Review these methods
|
||||
|
@ -141,8 +151,14 @@ class Authenticator(BaseAuthenticator):
|
|||
try:
|
||||
result = self.con.search_s(base, ldap.SCOPE_SUBTREE, filter, attrs)
|
||||
except Exception as e:
|
||||
logger.exception("error during LDAP search operation with: base='%s', "
|
||||
"filter='%s', attrs=%s and exception %s", base, filter, attrs, e)
|
||||
logger.exception(
|
||||
"error during LDAP search operation with: base='%s', "
|
||||
"filter='%s', attrs=%s and exception %s",
|
||||
base,
|
||||
filter,
|
||||
attrs,
|
||||
e,
|
||||
)
|
||||
raise MoulinetteError('ldap_operation_error')
|
||||
|
||||
result_list = []
|
||||
|
@ -172,8 +188,13 @@ class Authenticator(BaseAuthenticator):
|
|||
try:
|
||||
self.con.add_s(dn, ldif)
|
||||
except Exception as e:
|
||||
logger.exception("error during LDAP add operation with: rdn='%s', "
|
||||
"attr_dict=%s and exception %s", rdn, attr_dict, e)
|
||||
logger.exception(
|
||||
"error during LDAP add operation with: rdn='%s', "
|
||||
"attr_dict=%s and exception %s",
|
||||
rdn,
|
||||
attr_dict,
|
||||
e,
|
||||
)
|
||||
raise MoulinetteError('ldap_operation_error')
|
||||
else:
|
||||
return True
|
||||
|
@ -193,7 +214,11 @@ class Authenticator(BaseAuthenticator):
|
|||
try:
|
||||
self.con.delete_s(dn)
|
||||
except Exception as e:
|
||||
logger.exception("error during LDAP delete operation with: rdn='%s' and exception %s", rdn, e)
|
||||
logger.exception(
|
||||
"error during LDAP delete operation with: rdn='%s' and exception %s",
|
||||
rdn,
|
||||
e,
|
||||
)
|
||||
raise MoulinetteError('ldap_operation_error')
|
||||
else:
|
||||
return True
|
||||
|
@ -222,9 +247,14 @@ class Authenticator(BaseAuthenticator):
|
|||
|
||||
self.con.modify_ext_s(dn, ldif)
|
||||
except Exception as e:
|
||||
logger.exception("error during LDAP update operation with: rdn='%s', "
|
||||
"attr_dict=%s, new_rdn=%s and exception: %s", rdn, attr_dict,
|
||||
new_rdn, e)
|
||||
logger.exception(
|
||||
"error during LDAP update operation with: rdn='%s', "
|
||||
"attr_dict=%s, new_rdn=%s and exception: %s",
|
||||
rdn,
|
||||
attr_dict,
|
||||
new_rdn,
|
||||
e,
|
||||
)
|
||||
raise MoulinetteError('ldap_operation_error')
|
||||
else:
|
||||
return True
|
||||
|
@ -242,11 +272,16 @@ class Authenticator(BaseAuthenticator):
|
|||
"""
|
||||
attr_found = self.get_conflict(value_dict)
|
||||
if attr_found:
|
||||
logger.info("attribute '%s' with value '%s' is not unique",
|
||||
attr_found[0], attr_found[1])
|
||||
raise MoulinetteError('ldap_attribute_already_exists',
|
||||
attribute=attr_found[0],
|
||||
value=attr_found[1])
|
||||
logger.info(
|
||||
"attribute '%s' with value '%s' is not unique",
|
||||
attr_found[0],
|
||||
attr_found[1],
|
||||
)
|
||||
raise MoulinetteError(
|
||||
'ldap_attribute_already_exists',
|
||||
attribute=attr_found[0],
|
||||
value=attr_found[1],
|
||||
)
|
||||
return True
|
||||
|
||||
def get_conflict(self, value_dict, base_dn=None):
|
||||
|
|
|
@ -40,8 +40,7 @@ def open_cachefile(filename, mode='r', **kwargs):
|
|||
|
||||
"""
|
||||
# Set make_dir if not given
|
||||
kwargs['make_dir'] = kwargs.get('make_dir',
|
||||
True if mode[0] == 'w' else False)
|
||||
kwargs['make_dir'] = kwargs.get('make_dir', True if mode[0] == 'w' else False)
|
||||
cache_dir = get_cachedir(**kwargs)
|
||||
file_path = os.path.join(cache_dir, filename)
|
||||
return open(file_path, mode)
|
||||
|
|
|
@ -21,6 +21,7 @@ def during_unittests_run():
|
|||
|
||||
# Internationalization -------------------------------------------------
|
||||
|
||||
|
||||
class Translator(object):
|
||||
|
||||
"""Internationalization class
|
||||
|
@ -41,8 +42,9 @@ class Translator(object):
|
|||
|
||||
# Attempt to load default translations
|
||||
if not self._load_translations(default_locale):
|
||||
logger.error("unable to load locale '%s' from '%s'",
|
||||
default_locale, locale_dir)
|
||||
logger.error(
|
||||
"unable to load locale '%s' from '%s'", default_locale, locale_dir
|
||||
)
|
||||
self.default_locale = default_locale
|
||||
|
||||
def get_locales(self):
|
||||
|
@ -70,8 +72,11 @@ class Translator(object):
|
|||
"""
|
||||
if locale not in self._translations:
|
||||
if not self._load_translations(locale):
|
||||
logger.debug("unable to load locale '%s' from '%s'",
|
||||
self.default_locale, self.locale_dir)
|
||||
logger.debug(
|
||||
"unable to load locale '%s' from '%s'",
|
||||
self.default_locale,
|
||||
self.locale_dir,
|
||||
)
|
||||
|
||||
# Revert to default locale
|
||||
self.locale = self.default_locale
|
||||
|
@ -94,11 +99,18 @@ class Translator(object):
|
|||
failed_to_format = False
|
||||
if key in self._translations.get(self.locale, {}):
|
||||
try:
|
||||
return self._translations[self.locale][key].encode('utf-8').format(*args, **kwargs)
|
||||
return (
|
||||
self._translations[self.locale][key]
|
||||
.encode('utf-8')
|
||||
.format(*args, **kwargs)
|
||||
)
|
||||
except KeyError as e:
|
||||
unformatted_string = self._translations[self.locale][key].encode('utf-8')
|
||||
error_message = "Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)" % (
|
||||
key, unformatted_string, args, kwargs, e.__class__.__name__, e
|
||||
unformatted_string = self._translations[self.locale][key].encode(
|
||||
'utf-8'
|
||||
)
|
||||
error_message = (
|
||||
"Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)"
|
||||
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
|
||||
)
|
||||
|
||||
if not during_unittests_run():
|
||||
|
@ -108,16 +120,25 @@ class Translator(object):
|
|||
|
||||
failed_to_format = True
|
||||
|
||||
if failed_to_format or (self.default_locale != self.locale and key in self._translations.get(self.default_locale, {})):
|
||||
logger.info("untranslated key '%s' for locale '%s'",
|
||||
key, self.locale)
|
||||
if failed_to_format or (
|
||||
self.default_locale != self.locale
|
||||
and key in self._translations.get(self.default_locale, {})
|
||||
):
|
||||
logger.info("untranslated key '%s' for locale '%s'", key, self.locale)
|
||||
|
||||
try:
|
||||
return self._translations[self.default_locale][key].encode('utf-8').format(*args, **kwargs)
|
||||
return (
|
||||
self._translations[self.default_locale][key]
|
||||
.encode('utf-8')
|
||||
.format(*args, **kwargs)
|
||||
)
|
||||
except KeyError as e:
|
||||
unformatted_string = self._translations[self.default_locale][key].encode('utf-8')
|
||||
error_message = "Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)" % (
|
||||
key, unformatted_string, args, kwargs, e.__class__.__name__, e
|
||||
unformatted_string = self._translations[self.default_locale][
|
||||
key
|
||||
].encode('utf-8')
|
||||
error_message = (
|
||||
"Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)"
|
||||
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
|
||||
)
|
||||
if not during_unittests_run():
|
||||
logger.exception(error_message)
|
||||
|
@ -126,7 +147,10 @@ class Translator(object):
|
|||
|
||||
return self._translations[self.default_locale][key].encode('utf-8')
|
||||
|
||||
error_message = "unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)" % (key, self.default_locale)
|
||||
error_message = (
|
||||
"unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)"
|
||||
% (key, self.default_locale)
|
||||
)
|
||||
|
||||
if not during_unittests_run():
|
||||
logger.exception(error_message)
|
||||
|
@ -202,8 +226,9 @@ class Moulinette18n(object):
|
|||
"""
|
||||
if namespace not in self._namespaces:
|
||||
# Create new Translator object
|
||||
translator = Translator('%s/%s/locales' % (self.lib_dir, namespace),
|
||||
self.default_locale)
|
||||
translator = Translator(
|
||||
'%s/%s/locales' % (self.lib_dir, namespace), self.default_locale
|
||||
)
|
||||
translator.set_locale(self.locale)
|
||||
self._namespaces[namespace] = translator
|
||||
|
||||
|
@ -354,6 +379,7 @@ class MoulinetteSignals(object):
|
|||
|
||||
# Interfaces & Authenticators management -------------------------------
|
||||
|
||||
|
||||
def init_interface(name, kwargs={}, actionsmap={}):
|
||||
"""Return a new interface instance
|
||||
|
||||
|
@ -444,6 +470,7 @@ def clean_session(session_id, profiles=[]):
|
|||
|
||||
# Moulinette core classes ----------------------------------------------
|
||||
|
||||
|
||||
class MoulinetteError(Exception):
|
||||
|
||||
"""Moulinette base exception"""
|
||||
|
@ -473,7 +500,7 @@ class MoulinetteLock(object):
|
|||
|
||||
"""
|
||||
|
||||
def __init__(self, namespace, timeout=None, interval=.5):
|
||||
def __init__(self, namespace, timeout=None, interval=0.5):
|
||||
self.namespace = namespace
|
||||
self.timeout = timeout
|
||||
self.interval = interval
|
||||
|
@ -527,9 +554,13 @@ class MoulinetteLock(object):
|
|||
# warn the user if it's been too much time since they are waiting
|
||||
if (time.time() - start_time) > warning_treshold:
|
||||
if warning_treshold == 15:
|
||||
logger.warning(moulinette.m18n.g('warn_the_user_about_waiting_lock'))
|
||||
logger.warning(
|
||||
moulinette.m18n.g('warn_the_user_about_waiting_lock')
|
||||
)
|
||||
else:
|
||||
logger.warning(moulinette.m18n.g('warn_the_user_about_waiting_lock_again'))
|
||||
logger.warning(
|
||||
moulinette.m18n.g('warn_the_user_about_waiting_lock_again')
|
||||
)
|
||||
warning_treshold *= 4
|
||||
|
||||
# Wait before checking again
|
||||
|
@ -552,7 +583,9 @@ class MoulinetteLock(object):
|
|||
if os.path.exists(self._lockfile):
|
||||
os.unlink(self._lockfile)
|
||||
else:
|
||||
logger.warning("Uhoh, somehow the lock %s did not exist ..." % self._lockfile)
|
||||
logger.warning(
|
||||
"Uhoh, somehow the lock %s did not exist ..." % self._lockfile
|
||||
)
|
||||
logger.debug('lock has been released')
|
||||
self._locked = False
|
||||
|
||||
|
|
|
@ -7,6 +7,8 @@ def init_moulinette_env():
|
|||
return {
|
||||
'DATA_DIR': environ.get('MOULINETTE_DATA_DIR', '/usr/share/moulinette'),
|
||||
'LIB_DIR': environ.get('MOULINETTE_LIB_DIR', '/usr/lib/moulinette'),
|
||||
'LOCALES_DIR': environ.get('MOULINETTE_LOCALES_DIR', '/usr/share/moulinette/locale'),
|
||||
'LOCALES_DIR': environ.get(
|
||||
'MOULINETTE_LOCALES_DIR', '/usr/share/moulinette/locale'
|
||||
),
|
||||
'CACHE_DIR': environ.get('MOULINETTE_CACHE_DIR', '/var/cache/moulinette'),
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ import copy
|
|||
from collections import deque, OrderedDict
|
||||
|
||||
from moulinette import msignals, msettings, m18n
|
||||
from moulinette.core import (init_authenticator, MoulinetteError)
|
||||
from moulinette.core import init_authenticator, MoulinetteError
|
||||
|
||||
logger = logging.getLogger('moulinette.interface')
|
||||
|
||||
|
@ -18,6 +18,7 @@ CALLBACKS_PROP = '_callbacks'
|
|||
|
||||
# Base Class -----------------------------------------------------------
|
||||
|
||||
|
||||
class BaseActionsMapParser(object):
|
||||
|
||||
"""Actions map's base Parser
|
||||
|
@ -37,8 +38,7 @@ class BaseActionsMapParser(object):
|
|||
if parent:
|
||||
self._o = parent
|
||||
else:
|
||||
logger.debug('initializing base actions map parser for %s',
|
||||
self.interface)
|
||||
logger.debug('initializing base actions map parser for %s', self.interface)
|
||||
msettings['interface'] = self.interface
|
||||
|
||||
self._o = self
|
||||
|
@ -70,8 +70,9 @@ class BaseActionsMapParser(object):
|
|||
A list of option strings
|
||||
|
||||
"""
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
def has_global_parser(self):
|
||||
return False
|
||||
|
@ -85,8 +86,9 @@ class BaseActionsMapParser(object):
|
|||
An ArgumentParser based object
|
||||
|
||||
"""
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
def add_category_parser(self, name, **kwargs):
|
||||
"""Add a parser for a category
|
||||
|
@ -100,8 +102,9 @@ class BaseActionsMapParser(object):
|
|||
A BaseParser based object
|
||||
|
||||
"""
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
def add_action_parser(self, name, tid, **kwargs):
|
||||
"""Add a parser for an action
|
||||
|
@ -116,8 +119,9 @@ class BaseActionsMapParser(object):
|
|||
An ArgumentParser based object
|
||||
|
||||
"""
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
def parse_args(self, args, **kwargs):
|
||||
"""Parse arguments
|
||||
|
@ -132,16 +136,18 @@ class BaseActionsMapParser(object):
|
|||
The populated namespace
|
||||
|
||||
"""
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
# Arguments helpers
|
||||
|
||||
def prepare_action_namespace(self, tid, namespace=None):
|
||||
"""Prepare the namespace for a given action"""
|
||||
# Validate tid and namespace
|
||||
if not isinstance(tid, tuple) and \
|
||||
(namespace is None or not hasattr(namespace, TO_RETURN_PROP)):
|
||||
if not isinstance(tid, tuple) and (
|
||||
namespace is None or not hasattr(namespace, TO_RETURN_PROP)
|
||||
):
|
||||
raise MoulinetteError('invalid_usage')
|
||||
elif not tid:
|
||||
tid = GLOBAL_SECTION
|
||||
|
@ -159,8 +165,10 @@ class BaseActionsMapParser(object):
|
|||
auth = msignals.authenticate(cls(), **auth_conf)
|
||||
if not auth.is_authenticated:
|
||||
raise MoulinetteError('authentication_required_long')
|
||||
if self.get_conf(tid, 'argument_auth') and \
|
||||
self.get_conf(tid, 'authenticate') == 'all':
|
||||
if (
|
||||
self.get_conf(tid, 'argument_auth')
|
||||
and self.get_conf(tid, 'authenticate') == 'all'
|
||||
):
|
||||
namespace.auth = auth
|
||||
|
||||
return namespace
|
||||
|
@ -260,8 +268,11 @@ class BaseActionsMapParser(object):
|
|||
# Store only if authentication is needed
|
||||
conf['authenticate'] = True if self.interface in ifaces else False
|
||||
else:
|
||||
logger.error("expecting 'all', 'False' or a list for "
|
||||
"configuration 'authenticate', got %r", ifaces)
|
||||
logger.error(
|
||||
"expecting 'all', 'False' or a list for "
|
||||
"configuration 'authenticate', got %r",
|
||||
ifaces,
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
|
||||
# -- 'authenticator'
|
||||
|
@ -275,13 +286,18 @@ class BaseActionsMapParser(object):
|
|||
# Store needed authenticator profile
|
||||
conf['authenticator'] = self.global_conf['authenticator'][auth]
|
||||
except KeyError:
|
||||
logger.error("requesting profile '%s' which is undefined in "
|
||||
"global configuration of 'authenticator'", auth)
|
||||
logger.error(
|
||||
"requesting profile '%s' which is undefined in "
|
||||
"global configuration of 'authenticator'",
|
||||
auth,
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
elif is_global and isinstance(auth, dict):
|
||||
if len(auth) == 0:
|
||||
logger.warning('no profile defined in global configuration '
|
||||
"for 'authenticator'")
|
||||
logger.warning(
|
||||
'no profile defined in global configuration '
|
||||
"for 'authenticator'"
|
||||
)
|
||||
else:
|
||||
auths = {}
|
||||
for auth_name, auth_conf in auth.items():
|
||||
|
@ -293,13 +309,18 @@ class BaseActionsMapParser(object):
|
|||
# configuration (i.e. 'help')
|
||||
# - parameters: a dict of arguments for the
|
||||
# authenticator profile
|
||||
auths[auth_name] = ((auth_conf.get('vendor'), auth_name),
|
||||
{'help': auth_conf.get('help', None)},
|
||||
auth_conf.get('parameters', {}))
|
||||
auths[auth_name] = (
|
||||
(auth_conf.get('vendor'), auth_name),
|
||||
{'help': auth_conf.get('help', None)},
|
||||
auth_conf.get('parameters', {}),
|
||||
)
|
||||
conf['authenticator'] = auths
|
||||
else:
|
||||
logger.error("expecting a dict of profile(s) or a profile name "
|
||||
"for configuration 'authenticator', got %r", auth)
|
||||
logger.error(
|
||||
"expecting a dict of profile(s) or a profile name "
|
||||
"for configuration 'authenticator', got %r",
|
||||
auth,
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
|
||||
# -- 'argument_auth'
|
||||
|
@ -311,8 +332,10 @@ class BaseActionsMapParser(object):
|
|||
if isinstance(arg_auth, bool):
|
||||
conf['argument_auth'] = arg_auth
|
||||
else:
|
||||
logger.error("expecting a boolean for configuration "
|
||||
"'argument_auth', got %r", arg_auth)
|
||||
logger.error(
|
||||
"expecting a boolean for configuration " "'argument_auth', got %r",
|
||||
arg_auth,
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
|
||||
# -- 'lock'
|
||||
|
@ -324,8 +347,9 @@ class BaseActionsMapParser(object):
|
|||
if isinstance(lock, bool):
|
||||
conf['lock'] = lock
|
||||
else:
|
||||
logger.error("expecting a boolean for configuration 'lock', "
|
||||
"got %r", lock)
|
||||
logger.error(
|
||||
"expecting a boolean for configuration 'lock', " "got %r", lock
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
|
||||
return conf
|
||||
|
@ -346,8 +370,7 @@ class BaseActionsMapParser(object):
|
|||
|
||||
# Return global configuration and an authenticator
|
||||
# instanciator as a 2-tuple
|
||||
return (configuration,
|
||||
lambda: init_authenticator(identifier, parameters))
|
||||
return (configuration, lambda: init_authenticator(identifier, parameters))
|
||||
|
||||
return value
|
||||
|
||||
|
@ -364,38 +387,45 @@ class BaseInterface(object):
|
|||
- actionsmap -- The ActionsMap instance to connect to
|
||||
|
||||
"""
|
||||
|
||||
# TODO: Add common interface methods and try to standardize default ones
|
||||
|
||||
def __init__(self, actionsmap):
|
||||
raise NotImplementedError("derived class '%s' must override this method" %
|
||||
self.__class__.__name__)
|
||||
raise NotImplementedError(
|
||||
"derived class '%s' must override this method" % self.__class__.__name__
|
||||
)
|
||||
|
||||
|
||||
# Argument parser ------------------------------------------------------
|
||||
|
||||
class _CallbackAction(argparse.Action):
|
||||
|
||||
def __init__(self,
|
||||
option_strings,
|
||||
dest,
|
||||
nargs=0,
|
||||
callback={},
|
||||
default=argparse.SUPPRESS,
|
||||
help=None):
|
||||
class _CallbackAction(argparse.Action):
|
||||
def __init__(
|
||||
self,
|
||||
option_strings,
|
||||
dest,
|
||||
nargs=0,
|
||||
callback={},
|
||||
default=argparse.SUPPRESS,
|
||||
help=None,
|
||||
):
|
||||
if not callback or 'method' not in callback:
|
||||
raise ValueError('callback must be provided with at least '
|
||||
'a method key')
|
||||
raise ValueError('callback must be provided with at least ' 'a method key')
|
||||
super(_CallbackAction, self).__init__(
|
||||
option_strings=option_strings,
|
||||
dest=dest,
|
||||
nargs=nargs,
|
||||
default=default,
|
||||
help=help)
|
||||
help=help,
|
||||
)
|
||||
self.callback_method = callback.get('method')
|
||||
self.callback_kwargs = callback.get('kwargs', {})
|
||||
self.callback_return = callback.get('return', False)
|
||||
logger.debug("registering new callback action '{0}' to {1}".format(
|
||||
self.callback_method, option_strings))
|
||||
logger.debug(
|
||||
"registering new callback action '{0}' to {1}".format(
|
||||
self.callback_method, option_strings
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def callback(self):
|
||||
|
@ -407,12 +437,10 @@ class _CallbackAction(argparse.Action):
|
|||
# Attempt to retrieve callback method
|
||||
mod_name, func_name = (self.callback_method).rsplit('.', 1)
|
||||
try:
|
||||
mod = __import__(mod_name, globals=globals(), level=0,
|
||||
fromlist=[func_name])
|
||||
mod = __import__(mod_name, globals=globals(), level=0, fromlist=[func_name])
|
||||
func = getattr(mod, func_name)
|
||||
except (AttributeError, ImportError):
|
||||
raise ValueError('unable to import method {0}'.format(
|
||||
self.callback_method))
|
||||
raise ValueError('unable to import method {0}'.format(self.callback_method))
|
||||
self._callback = func
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
|
@ -425,8 +453,10 @@ class _CallbackAction(argparse.Action):
|
|||
# Execute callback and get returned value
|
||||
value = self.callback(namespace, values, **self.callback_kwargs)
|
||||
except:
|
||||
logger.exception("cannot get value from callback method "
|
||||
"'{0}'".format(self.callback_method))
|
||||
logger.exception(
|
||||
"cannot get value from callback method "
|
||||
"'{0}'".format(self.callback_method)
|
||||
)
|
||||
raise MoulinetteError('error_see_log')
|
||||
else:
|
||||
if value:
|
||||
|
@ -467,8 +497,7 @@ class _ExtendedSubParsersAction(argparse._SubParsersAction):
|
|||
if 'help' in kwargs:
|
||||
del kwargs['help']
|
||||
|
||||
parser = super(_ExtendedSubParsersAction, self).add_parser(
|
||||
name, **kwargs)
|
||||
parser = super(_ExtendedSubParsersAction, self).add_parser(name, **kwargs)
|
||||
|
||||
# Append each deprecated command alias name
|
||||
for command in deprecated_alias:
|
||||
|
@ -490,23 +519,30 @@ class _ExtendedSubParsersAction(argparse._SubParsersAction):
|
|||
else:
|
||||
# Warn the user about deprecated command
|
||||
if correct_name is None:
|
||||
logger.warning(m18n.g('deprecated_command', prog=parser.prog,
|
||||
command=parser_name))
|
||||
logger.warning(
|
||||
m18n.g('deprecated_command', prog=parser.prog, command=parser_name)
|
||||
)
|
||||
else:
|
||||
logger.warning(m18n.g('deprecated_command_alias',
|
||||
old=parser_name, new=correct_name,
|
||||
prog=parser.prog))
|
||||
logger.warning(
|
||||
m18n.g(
|
||||
'deprecated_command_alias',
|
||||
old=parser_name,
|
||||
new=correct_name,
|
||||
prog=parser.prog,
|
||||
)
|
||||
)
|
||||
values[0] = correct_name
|
||||
|
||||
return super(_ExtendedSubParsersAction, self).__call__(
|
||||
parser, namespace, values, option_string)
|
||||
parser, namespace, values, option_string
|
||||
)
|
||||
|
||||
|
||||
class ExtendedArgumentParser(argparse.ArgumentParser):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ExtendedArgumentParser, self).__init__(formatter_class=PositionalsFirstHelpFormatter,
|
||||
*args, **kwargs)
|
||||
super(ExtendedArgumentParser, self).__init__(
|
||||
formatter_class=PositionalsFirstHelpFormatter, *args, **kwargs
|
||||
)
|
||||
|
||||
# Register additional actions
|
||||
self.register('action', 'callback', _CallbackAction)
|
||||
|
@ -538,11 +574,14 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
queue = list()
|
||||
return queue
|
||||
|
||||
def add_arguments(self, arguments, extraparser, format_arg_names=None, validate_extra=True):
|
||||
def add_arguments(
|
||||
self, arguments, extraparser, format_arg_names=None, validate_extra=True
|
||||
):
|
||||
for argument_name, argument_options in arguments.items():
|
||||
# will adapt arguments name for cli or api context
|
||||
names = format_arg_names(str(argument_name),
|
||||
argument_options.pop('full', None))
|
||||
names = format_arg_names(
|
||||
str(argument_name), argument_options.pop('full', None)
|
||||
)
|
||||
|
||||
if "type" in argument_options:
|
||||
argument_options['type'] = eval(argument_options['type'])
|
||||
|
@ -550,8 +589,9 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
if "extra" in argument_options:
|
||||
extra = argument_options.pop('extra')
|
||||
argument_dest = self.add_argument(*names, **argument_options).dest
|
||||
extraparser.add_argument(self.get_default("_tid"),
|
||||
argument_dest, extra, validate_extra)
|
||||
extraparser.add_argument(
|
||||
self.get_default("_tid"), argument_dest, extra, validate_extra
|
||||
)
|
||||
continue
|
||||
|
||||
self.add_argument(*names, **argument_options)
|
||||
|
@ -560,8 +600,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
if action.nargs == argparse.PARSER and not action.required:
|
||||
return '([-AO]*)'
|
||||
else:
|
||||
return super(ExtendedArgumentParser, self)._get_nargs_pattern(
|
||||
action)
|
||||
return super(ExtendedArgumentParser, self)._get_nargs_pattern(action)
|
||||
|
||||
def _get_values(self, action, arg_strings):
|
||||
if action.nargs == argparse.PARSER and not action.required:
|
||||
|
@ -571,8 +610,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
else:
|
||||
value = argparse.SUPPRESS
|
||||
else:
|
||||
value = super(ExtendedArgumentParser, self)._get_values(
|
||||
action, arg_strings)
|
||||
value = super(ExtendedArgumentParser, self)._get_values(action, arg_strings)
|
||||
return value
|
||||
|
||||
# Adapted from :
|
||||
|
@ -581,8 +619,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
formatter = self._get_formatter()
|
||||
|
||||
# usage
|
||||
formatter.add_usage(self.usage, self._actions,
|
||||
self._mutually_exclusive_groups)
|
||||
formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups)
|
||||
|
||||
# description
|
||||
formatter.add_text(self.description)
|
||||
|
@ -600,14 +637,30 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
subcategories_subparser = copy.copy(action_group._group_actions[0])
|
||||
|
||||
# Filter "action"-type and "subcategory"-type commands
|
||||
actions_subparser.choices = OrderedDict([(k, v) for k, v in actions_subparser.choices.items() if v.type == "action"])
|
||||
subcategories_subparser.choices = OrderedDict([(k, v) for k, v in subcategories_subparser.choices.items() if v.type == "subcategory"])
|
||||
actions_subparser.choices = OrderedDict(
|
||||
[
|
||||
(k, v)
|
||||
for k, v in actions_subparser.choices.items()
|
||||
if v.type == "action"
|
||||
]
|
||||
)
|
||||
subcategories_subparser.choices = OrderedDict(
|
||||
[
|
||||
(k, v)
|
||||
for k, v in subcategories_subparser.choices.items()
|
||||
if v.type == "subcategory"
|
||||
]
|
||||
)
|
||||
|
||||
actions_choices = actions_subparser.choices.keys()
|
||||
subcategories_choices = subcategories_subparser.choices.keys()
|
||||
|
||||
actions_subparser._choices_actions = [c for c in choice_actions if c.dest in actions_choices]
|
||||
subcategories_subparser._choices_actions = [c for c in choice_actions if c.dest in subcategories_choices]
|
||||
actions_subparser._choices_actions = [
|
||||
c for c in choice_actions if c.dest in actions_choices
|
||||
]
|
||||
subcategories_subparser._choices_actions = [
|
||||
c for c in choice_actions if c.dest in subcategories_choices
|
||||
]
|
||||
|
||||
# Display each section (actions and subcategories)
|
||||
if actions_choices != []:
|
||||
|
@ -642,7 +695,6 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
|
|||
# and fix is inspired from here :
|
||||
# https://stackoverflow.com/questions/26985650/argparse-do-not-catch-positional-arguments-with-nargs/26986546#26986546
|
||||
class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
|
||||
|
||||
def _format_usage(self, usage, actions, groups, prefix):
|
||||
if prefix is None:
|
||||
# TWEAK : not using gettext here...
|
||||
|
@ -706,7 +758,7 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
|
|||
if line:
|
||||
lines.append(indent + ' '.join(line))
|
||||
if prefix is not None:
|
||||
lines[0] = lines[0][len(indent):]
|
||||
lines[0] = lines[0][len(indent) :]
|
||||
return lines
|
||||
|
||||
# if prog is short, follow it with optionals or positionals
|
||||
|
|
|
@ -16,7 +16,9 @@ from bottle import abort
|
|||
from moulinette import msignals, m18n, env
|
||||
from moulinette.core import MoulinetteError, clean_session
|
||||
from moulinette.interfaces import (
|
||||
BaseActionsMapParser, BaseInterface, ExtendedArgumentParser,
|
||||
BaseActionsMapParser,
|
||||
BaseInterface,
|
||||
ExtendedArgumentParser,
|
||||
)
|
||||
from moulinette.utils import log
|
||||
from moulinette.utils.serialize import JSONExtendedEncoder
|
||||
|
@ -27,9 +29,9 @@ logger = log.getLogger('moulinette.interface.api')
|
|||
|
||||
# API helpers ----------------------------------------------------------
|
||||
|
||||
CSRF_TYPES = set(["text/plain",
|
||||
"application/x-www-form-urlencoded",
|
||||
"multipart/form-data"])
|
||||
CSRF_TYPES = set(
|
||||
["text/plain", "application/x-www-form-urlencoded", "multipart/form-data"]
|
||||
)
|
||||
|
||||
|
||||
def is_csrf():
|
||||
|
@ -53,12 +55,14 @@ def filter_csrf(callback):
|
|||
abort(403, "CSRF protection")
|
||||
else:
|
||||
return callback(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
class LogQueues(dict):
|
||||
|
||||
"""Map of session id to queue."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
@ -99,13 +103,13 @@ class _HTTPArgumentParser(object):
|
|||
|
||||
def __init__(self):
|
||||
# Initialize the ArgumentParser object
|
||||
self._parser = ExtendedArgumentParser(usage='',
|
||||
prefix_chars='@',
|
||||
add_help=False)
|
||||
self._parser = ExtendedArgumentParser(
|
||||
usage='', prefix_chars='@', add_help=False
|
||||
)
|
||||
self._parser.error = self._error
|
||||
|
||||
self._positional = [] # list(arg_name)
|
||||
self._optional = {} # dict({arg_name: option_strings})
|
||||
self._positional = [] # list(arg_name)
|
||||
self._optional = {} # dict({arg_name: option_strings})
|
||||
|
||||
def set_defaults(self, **kwargs):
|
||||
return self._parser.set_defaults(**kwargs)
|
||||
|
@ -113,11 +117,14 @@ class _HTTPArgumentParser(object):
|
|||
def get_default(self, dest):
|
||||
return self._parser.get_default(dest)
|
||||
|
||||
def add_arguments(self, arguments, extraparser, format_arg_names=None, validate_extra=True):
|
||||
def add_arguments(
|
||||
self, arguments, extraparser, format_arg_names=None, validate_extra=True
|
||||
):
|
||||
for argument_name, argument_options in arguments.items():
|
||||
# will adapt arguments name for cli or api context
|
||||
names = format_arg_names(str(argument_name),
|
||||
argument_options.pop('full', None))
|
||||
names = format_arg_names(
|
||||
str(argument_name), argument_options.pop('full', None)
|
||||
)
|
||||
|
||||
if "type" in argument_options:
|
||||
argument_options['type'] = eval(argument_options['type'])
|
||||
|
@ -125,8 +132,9 @@ class _HTTPArgumentParser(object):
|
|||
if "extra" in argument_options:
|
||||
extra = argument_options.pop('extra')
|
||||
argument_dest = self.add_argument(*names, **argument_options).dest
|
||||
extraparser.add_argument(self.get_default("_tid"),
|
||||
argument_dest, extra, validate_extra)
|
||||
extraparser.add_argument(
|
||||
self.get_default("_tid"), argument_dest, extra, validate_extra
|
||||
)
|
||||
continue
|
||||
|
||||
self.add_argument(*names, **argument_options)
|
||||
|
@@ -166,12 +174,19 @@ class _HTTPArgumentParser(object):
if isinstance(v, str):
arg_strings.append(v)
else:
logger.warning("unsupported argument value type %r "
"in %s for option string %s", v, value,
option_string)
logger.warning(
"unsupported argument value type %r "
"in %s for option string %s",
v,
value,
option_string,
)
else:
logger.warning("unsupported argument type %r for option "
"string %s", value, option_string)
logger.warning(
"unsupported argument type %r for option " "string %s",
value,
option_string,
)
return arg_strings

@@ -208,6 +223,7 @@ class _ActionsMapPlugin(object):
to serve messages coming from the 'display' signal
"""
name = 'actionsmap'
api = 2

@@ -245,6 +261,7 @@ class _ActionsMapPlugin(object):
except KeyError:
pass
return callback(**kwargs)
return wrapper
# Logout wrapper
@@ -256,18 +273,35 @@ class _ActionsMapPlugin(object):
except KeyError:
pass
return callback(**kwargs)
return wrapper
# Append authentication routes
app.route('/login', name='login', method='POST',
callback=self.login, skip=['actionsmap'], apply=_login)
app.route('/logout', name='logout', method='GET',
callback=self.logout, skip=['actionsmap'], apply=_logout)
app.route(
'/login',
name='login',
method='POST',
callback=self.login,
skip=['actionsmap'],
apply=_login,
)
app.route(
'/logout',
name='logout',
method='GET',
callback=self.logout,
skip=['actionsmap'],
apply=_logout,
)
# Append messages route
if self.use_websocket:
app.route('/messages', name='messages',
callback=self.messages, skip=['actionsmap'])
app.route(
'/messages',
name='messages',
callback=self.messages,
skip=['actionsmap'],
)
# Append routes from the actions map
for (m, p) in self.actionsmap.parser.routes:

@@ -284,6 +318,7 @@ class _ActionsMapPlugin(object):
context -- An instance of Route
"""
def _format(value):
if isinstance(value, list) and len(value) == 1:
return value[0]

@@ -314,6 +349,7 @@ class _ActionsMapPlugin(object):
# Process the action
return callback((request.method, context.rule), params)
return wrapper
# Routes callbacks
@@ -337,8 +373,7 @@ class _ActionsMapPlugin(object):
except KeyError:
s_hashes = {}
else:
s_hashes = request.get_cookie('session.hashes',
secret=s_secret) or {}
s_hashes = request.get_cookie('session.hashes', secret=s_secret) or {}
s_hash = random_ascii()
try:

@@ -358,8 +393,9 @@ class _ActionsMapPlugin(object):
self.secrets[s_id] = s_secret = random_ascii()
response.set_cookie('session.id', s_id, secure=True)
response.set_cookie('session.hashes', s_hashes, secure=True,
secret=s_secret)
response.set_cookie(
'session.hashes', s_hashes, secure=True, secret=s_secret
)
return m18n.g('logged_in')
def logout(self, profile=None):

@@ -443,10 +479,9 @@ class _ActionsMapPlugin(object):
if isinstance(e, HTTPResponse):
raise e
import traceback
tb = traceback.format_exc()
logs = {"route": _route,
"arguments": arguments,
"traceback": tb}
logs = {"route": _route, "arguments": arguments, "traceback": tb}
return HTTPErrorResponse(json_encode(logs))
else:
return format_for_response(ret)

@@ -470,14 +505,16 @@ class _ActionsMapPlugin(object):
s_id = request.get_cookie('session.id')
try:
s_secret = self.secrets[s_id]
s_hash = request.get_cookie('session.hashes',
secret=s_secret, default={})[authenticator.name]
s_hash = request.get_cookie('session.hashes', secret=s_secret, default={})[
authenticator.name
]
except KeyError:
if authenticator.name == 'default':
msg = m18n.g('authentication_required')
else:
msg = m18n.g('authentication_profile_required',
profile=authenticator.name)
msg = m18n.g(
'authentication_profile_required', profile=authenticator.name
)
raise HTTPUnauthorizedResponse(msg)
else:
return authenticator(token=(s_id, s_hash))
@@ -504,26 +541,23 @@ class _ActionsMapPlugin(object):
# HTTP Responses -------------------------------------------------------
class HTTPOKResponse(HTTPResponse):
class HTTPOKResponse(HTTPResponse):
def __init__(self, output=''):
super(HTTPOKResponse, self).__init__(output, 200)
class HTTPBadRequestResponse(HTTPResponse):
def __init__(self, output=''):
super(HTTPBadRequestResponse, self).__init__(output, 400)
class HTTPUnauthorizedResponse(HTTPResponse):
def __init__(self, output=''):
super(HTTPUnauthorizedResponse, self).__init__(output, 401)
class HTTPErrorResponse(HTTPResponse):
def __init__(self, output=''):
super(HTTPErrorResponse, self).__init__(output, 500)

@@ -548,6 +582,7 @@ def format_for_response(content):
# API Classes Implementation -------------------------------------------
class ActionsMapParser(BaseActionsMapParser):
"""Actions map's Parser for the API

@@ -611,8 +646,9 @@ class ActionsMapParser(BaseActionsMapParser):
try:
keys.append(self._extract_route(r))
except ValueError as e:
logger.warning("cannot add api route '%s' for "
"action %s: %s", r, tid, e)
logger.warning(
"cannot add api route '%s' for " "action %s: %s", r, tid, e
)
continue
if len(keys) == 0:
raise ValueError("no valid api route found")
@@ -653,8 +689,10 @@ class ActionsMapParser(BaseActionsMapParser):
auth = msignals.authenticate(klass(), **auth_conf)
if not auth.is_authenticated:
raise MoulinetteError('authentication_required_long')
if self.get_conf(tid, 'argument_auth') and \
self.get_conf(tid, 'authenticate') == 'all':
if (
self.get_conf(tid, 'argument_auth')
and self.get_conf(tid, 'authenticate') == 'all'
):
ret.auth = auth
# TODO: Catch errors?

@@ -702,8 +740,7 @@ class Interface(BaseInterface):
"""
def __init__(self, actionsmap, routes={}, use_websocket=True,
log_queues=None):
def __init__(self, actionsmap, routes={}, use_websocket=True, log_queues=None):
self.use_websocket = use_websocket
# Attempt to retrieve log queues from an APIQueueHandler

@@ -720,6 +757,7 @@ class Interface(BaseInterface):
def wrapper(*args, **kwargs):
response.set_header('Access-Control-Allow-Origin', '*')
return callback(*args, **kwargs)
return wrapper
# Attempt to retrieve and set locale

@@ -738,8 +776,8 @@ class Interface(BaseInterface):
app.install(_ActionsMapPlugin(actionsmap, use_websocket, log_queues))
# Append default routes
# app.route(['/api', '/api/<category:re:[a-z]+>'], method='GET',
# callback=self.doc, skip=['actionsmap'])
# app.route(['/api', '/api/<category:re:[a-z]+>'], method='GET',
# callback=self.doc, skip=['actionsmap'])
# Append additional routes
# TODO: Add optional authentication to those routes?
@@ -759,22 +797,26 @@ class Interface(BaseInterface):
- port -- Server port to bind to
"""
logger.debug("starting the server instance in %s:%d with websocket=%s",
host, port, self.use_websocket)
logger.debug(
"starting the server instance in %s:%d with websocket=%s",
host,
port,
self.use_websocket,
)
try:
if self.use_websocket:
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
server = WSGIServer((host, port), self._app,
handler_class=WebSocketHandler)
server = WSGIServer(
(host, port), self._app, handler_class=WebSocketHandler
)
server.serve_forever()
else:
run(self._app, host=host, port=port)
except IOError as e:
logger.exception("unable to start the server instance on %s:%d",
host, port)
logger.exception("unable to start the server instance on %s:%d", host, port)
if e.args[0] == errno.EADDRINUSE:
raise MoulinetteError('server_already_running')
raise MoulinetteError('error_see_log')
@@ -15,7 +15,9 @@ import argcomplete
from moulinette import msignals, m18n
from moulinette.core import MoulinetteError
from moulinette.interfaces import (
BaseActionsMapParser, BaseInterface, ExtendedArgumentParser,
BaseActionsMapParser,
BaseInterface,
ExtendedArgumentParser,
)
from moulinette.utils import log

@@ -175,6 +177,7 @@ def get_locale():
# CLI Classes Implementation -------------------------------------------
class TTYHandler(logging.StreamHandler):
"""TTY log handler

@@ -192,6 +195,7 @@ class TTYHandler(logging.StreamHandler):
stderr. Otherwise, they are sent to stdout.
"""
LEVELS_COLOR = {
log.NOTSET: 'white',
log.DEBUG: 'white',

@@ -218,8 +222,7 @@ class TTYHandler(logging.StreamHandler):
# add translated level name before message
level = '%s ' % m18n.g(record.levelname.lower())
color = self.LEVELS_COLOR.get(record.levelno, 'white')
msg = '{0}{1}{2}{3}'.format(
colors_codes[color], level, END_CLI_COLOR, msg)
msg = '{0}{1}{2}{3}'.format(colors_codes[color], level, END_CLI_COLOR, msg)
if self.formatter:
# use user-defined formatter
record.__dict__[self.message_key] = msg
@@ -256,8 +259,9 @@ class ActionsMapParser(BaseActionsMapParser):
"""
def __init__(self, parent=None, parser=None, subparser_kwargs=None,
top_parser=None, **kwargs):
def __init__(
self, parent=None, parser=None, subparser_kwargs=None, top_parser=None, **kwargs
):
super(ActionsMapParser, self).__init__(parent)
if subparser_kwargs is None:

@@ -300,13 +304,10 @@ class ActionsMapParser(BaseActionsMapParser):
A new ActionsMapParser object for the category
"""
parser = self._subparsers.add_parser(name,
description=category_help,
help=category_help,
**kwargs)
return self.__class__(self, parser, {
'title': "subcommands", 'required': True
})
parser = self._subparsers.add_parser(
name, description=category_help, help=category_help, **kwargs
)
return self.__class__(self, parser, {'title': "subcommands", 'required': True})
def add_subcategory_parser(self, name, subcategory_help=None, **kwargs):
"""Add a parser for a subcategory

@@ -318,17 +319,24 @@ class ActionsMapParser(BaseActionsMapParser):
A new ActionsMapParser object for the category
"""
parser = self._subparsers.add_parser(name,
type_="subcategory",
description=subcategory_help,
help=subcategory_help,
**kwargs)
return self.__class__(self, parser, {
'title': "actions", 'required': True
})
parser = self._subparsers.add_parser(
name,
type_="subcategory",
description=subcategory_help,
help=subcategory_help,
**kwargs
)
return self.__class__(self, parser, {'title': "actions", 'required': True})
def add_action_parser(self, name, tid, action_help=None, deprecated=False,
deprecated_alias=[], **kwargs):
def add_action_parser(
self,
name,
tid,
action_help=None,
deprecated=False,
deprecated_alias=[],
**kwargs
):
"""Add a parser for an action
Keyword arguments:
@@ -340,18 +348,21 @@ class ActionsMapParser(BaseActionsMapParser):
A new ExtendedArgumentParser object for the action
"""
return self._subparsers.add_parser(name,
type_="action",
help=action_help,
description=action_help,
deprecated=deprecated,
deprecated_alias=deprecated_alias)
return self._subparsers.add_parser(
name,
type_="action",
help=action_help,
description=action_help,
deprecated=deprecated,
deprecated_alias=deprecated_alias,
)
def add_global_arguments(self, arguments):
for argument_name, argument_options in arguments.items():
# will adapt arguments name for cli or api context
names = self.format_arg_names(str(argument_name),
argument_options.pop('full', None))
names = self.format_arg_names(
str(argument_name), argument_options.pop('full', None)
)
self.global_parser.add_argument(*names, **argument_options)

@@ -417,8 +428,7 @@ class Interface(BaseInterface):
# Set handler for authentication
if password:
msignals.set_handler('authenticate',
lambda a, h: a(password=password))
msignals.set_handler('authenticate', lambda a, h: a(password=password))
try:
ret = self.actionsmap.process(args, timeout=timeout)

@@ -433,6 +443,7 @@ class Interface(BaseInterface):
if output_as == 'json':
import json
from moulinette.utils.serialize import JSONExtendedEncoder
print(json.dumps(ret, cls=JSONExtendedEncoder))
else:
plain_print_dict(ret)

@@ -451,8 +462,7 @@ class Interface(BaseInterface):
"""
# TODO: Allow token authentication?
msg = m18n.n(help) if help else m18n.g('password')
return authenticator(password=self._do_prompt(msg, True, False,
color='yellow'))
return authenticator(password=self._do_prompt(msg, True, False, color='yellow'))
def _do_prompt(self, message, is_password, confirm, color='blue'):
"""Prompt for a value

@@ -464,8 +474,7 @@ class Interface(BaseInterface):
"""
if is_password:
prompt = lambda m: getpass.getpass(colorize(m18n.g('colon', m),
color))
prompt = lambda m: getpass.getpass(colorize(m18n.g('colon', m), color))
else:
prompt = lambda m: raw_input(colorize(m18n.g('colon', m), color))
value = prompt(message)
@@ -22,7 +22,10 @@ def read_file(file_path):
Keyword argument:
file_path -- Path to the text file
"""
assert isinstance(file_path, basestring), "Error: file_path '%s' should be a string but is of type '%s' instead" % (file_path, type(file_path))
assert isinstance(file_path, basestring), (
"Error: file_path '%s' should be a string but is of type '%s' instead"
% (file_path, type(file_path))
)
# Check file exists
if not os.path.isfile(file_path):

@@ -35,8 +38,9 @@ def read_file(file_path):
except IOError as e:
raise MoulinetteError('cannot_open_file', file=file_path, error=str(e))
except Exception:
raise MoulinetteError('unknown_error_reading_file',
file=file_path, error=str(e))
raise MoulinetteError(
'unknown_error_reading_file', file=file_path, error=str(e)
)
return file_content

@@ -96,9 +100,9 @@ def read_toml(file_path):
try:
loaded_toml = toml.loads(file_content, _dict=OrderedDict)
except Exception as e:
raise MoulinetteError(errno.EINVAL,
m18n.g('corrupted_toml',
ressource=file_path, error=str(e)))
raise MoulinetteError(
errno.EINVAL, m18n.g('corrupted_toml', ressource=file_path, error=str(e))
)
return loaded_toml

@@ -131,8 +135,9 @@ def read_ldif(file_path, filtred_entries=[]):
except IOError as e:
raise MoulinetteError('cannot_open_file', file=file_path, error=str(e))
except Exception as e:
raise MoulinetteError('unknown_error_reading_file',
file=file_path, error=str(e))
raise MoulinetteError(
'unknown_error_reading_file', file=file_path, error=str(e)
)
return parser.all_records
@@ -148,14 +153,25 @@ def write_to_file(file_path, data, file_mode="w"):
file_mode -- Mode used when writing the file. Option meant to be used
by append_to_file to avoid duplicating the code of this function.
"""
assert isinstance(data, basestring) or isinstance(data, list), "Error: data '%s' should be either a string or a list but is of type '%s'" % (data, type(data))
assert not os.path.isdir(file_path), "Error: file_path '%s' point to a dir, it should be a file" % file_path
assert os.path.isdir(os.path.dirname(file_path)), "Error: the path ('%s') base dir ('%s') is not a dir" % (file_path, os.path.dirname(file_path))
assert isinstance(data, basestring) or isinstance(data, list), (
"Error: data '%s' should be either a string or a list but is of type '%s'"
% (data, type(data))
)
assert not os.path.isdir(file_path), (
"Error: file_path '%s' point to a dir, it should be a file" % file_path
)
assert os.path.isdir(os.path.dirname(file_path)), (
"Error: the path ('%s') base dir ('%s') is not a dir"
% (file_path, os.path.dirname(file_path))
)
# If data is a list, check elements are strings and build a single string
if not isinstance(data, basestring):
for element in data:
assert isinstance(element, basestring), "Error: element '%s' should be a string but is of type '%s' instead" % (element, type(element))
assert isinstance(element, basestring), (
"Error: element '%s' should be a string but is of type '%s' instead"
% (element, type(element))
)
data = '\n'.join(data)
try:

@@ -189,10 +205,21 @@ def write_to_json(file_path, data):
"""
# Assumptions
assert isinstance(file_path, basestring), "Error: file_path '%s' should be a string but is of type '%s' instead" % (file_path, type(file_path))
assert isinstance(data, dict) or isinstance(data, list), "Error: data '%s' should be a dict or a list but is of type '%s' instead" % (data, type(data))
assert not os.path.isdir(file_path), "Error: file_path '%s' point to a dir, it should be a file" % file_path
assert os.path.isdir(os.path.dirname(file_path)), "Error: the path ('%s') base dir ('%s') is not a dir" % (file_path, os.path.dirname(file_path))
assert isinstance(file_path, basestring), (
"Error: file_path '%s' should be a string but is of type '%s' instead"
% (file_path, type(file_path))
)
assert isinstance(data, dict) or isinstance(data, list), (
"Error: data '%s' should be a dict or a list but is of type '%s' instead"
% (data, type(data))
)
assert not os.path.isdir(file_path), (
"Error: file_path '%s' point to a dir, it should be a file" % file_path
)
assert os.path.isdir(os.path.dirname(file_path)), (
"Error: the path ('%s') base dir ('%s') is not a dir"
% (file_path, os.path.dirname(file_path))
)
# Write dict to file
try:
@@ -310,7 +337,9 @@ def chown(path, uid=None, gid=None, recursive=False):
for f in files:
os.chown(os.path.join(root, f), uid, gid)
except Exception as e:
raise MoulinetteError('error_changing_file_permissions', path=path, error=str(e))
raise MoulinetteError(
'error_changing_file_permissions', path=path, error=str(e)
)
def chmod(path, mode, fmode=None, recursive=False):

@@ -334,7 +363,9 @@ def chmod(path, mode, fmode=None, recursive=False):
for f in files:
os.chmod(os.path.join(root, f), fmode)
except Exception as e:
raise MoulinetteError('error_changing_file_permissions', path=path, error=str(e))
raise MoulinetteError(
'error_changing_file_permissions', path=path, error=str(e)
)
def rm(path, recursive=False, force=False):
@@ -3,8 +3,18 @@ import logging
# import all constants because other modules try to import them from this
# module because SUCCESS is defined in this module
from logging import (addLevelName, setLoggerClass, Logger, getLogger, NOTSET, # noqa
DEBUG, INFO, WARNING, ERROR, CRITICAL)
from logging import ( # noqa
addLevelName,
setLoggerClass,
Logger,
getLogger,
NOTSET,
DEBUG,
INFO,
WARNING,
ERROR,
CRITICAL,
)
# Global configuration and functions -----------------------------------

@@ -15,9 +25,7 @@ DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(asctime)-15s %(levelname)-8s %(name)s - %(message)s'
},
'simple': {'format': '%(asctime)-15s %(levelname)-8s %(name)s - %(message)s'}
},
'handlers': {
'console': {

@@ -25,14 +33,9 @@ DEFAULT_LOGGING = {
'formatter': 'simple',
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
},
},
'loggers': {
'moulinette': {
'level': 'DEBUG',
'handlers': ['console'],
},
}
},
'loggers': {'moulinette': {'level': 'DEBUG', 'handlers': ['console']}},
}

@@ -65,7 +68,7 @@ def getHandlersByClass(classinfo, limit=0):
return o
handlers.append(o)
if limit != 0 and len(handlers) > limit:
return handlers[:limit - 1]
return handlers[: limit - 1]
return handlers

@@ -79,6 +82,7 @@ class MoulinetteLogger(Logger):
LogRecord extra and can be used with the ActionFilter.
"""
action_id = None
def success(self, msg, *args, **kwargs):
@@ -15,6 +15,7 @@ def download_text(url, timeout=30, expected_status_code=200):
None to ignore the status code.
"""
import requests # lazy loading this module for performance reasons
# Assumptions
assert isinstance(url, str)

@@ -32,13 +33,12 @@ def download_text(url, timeout=30, expected_status_code=200):
raise MoulinetteError('download_timeout', url=url)
# Unknown stuff
except Exception as e:
raise MoulinetteError('download_unknown_error',
url=url, error=str(e))
raise MoulinetteError('download_unknown_error', url=url, error=str(e))
# Assume error if status code is not 200 (OK)
if expected_status_code is not None \
and r.status_code != expected_status_code:
raise MoulinetteError('download_bad_status_code',
url=url, code=str(r.status_code))
if expected_status_code is not None and r.status_code != expected_status_code:
raise MoulinetteError(
'download_bad_status_code', url=url, code=str(r.status_code)
)
return r.text
@@ -11,6 +11,7 @@ except ImportError:
from shlex import quote # Python3 >= 3.3
from .stream import async_file_reading
quote # This line is here to avoid W0611 PEP8 error (see comments above)
# Prevent to import subprocess only for common classes

@@ -19,6 +20,7 @@ CalledProcessError = subprocess.CalledProcessError
# Alternative subprocess methods ---------------------------------------
def check_output(args, stderr=subprocess.STDOUT, shell=True, **kwargs):
"""Run command with arguments and return its output as a byte string

@@ -31,6 +33,7 @@ def check_output(args, stderr=subprocess.STDOUT, shell=True, **kwargs):
# Call with stream access ----------------------------------------------
def call_async_output(args, callback, **kwargs):
"""Run command and provide its output asynchronously

@@ -54,8 +57,7 @@ def call_async_output(args, callback, **kwargs):
"""
for a in ['stdout', 'stderr']:
if a in kwargs:
raise ValueError('%s argument not allowed, '
'it will be overridden.' % a)
raise ValueError('%s argument not allowed, ' 'it will be overridden.' % a)
if "stdinfo" in kwargs and kwargs["stdinfo"] is not None:
assert len(callback) == 3

@@ -101,7 +103,7 @@ def call_async_output(args, callback, **kwargs):
stderr_consum.process_next_line()
if stdinfo:
stdinfo_consum.process_next_line()
time.sleep(.1)
time.sleep(0.1)
stderr_reader.join()
# clear the queues
stdout_consum.process_current_queue()

@@ -111,7 +113,7 @@ def call_async_output(args, callback, **kwargs):
else:
while not stdout_reader.eof():
stdout_consum.process_current_queue()
time.sleep(.1)
time.sleep(0.1)
stdout_reader.join()
# clear the queue
stdout_consum.process_current_queue()

@@ -131,15 +133,15 @@ def call_async_output(args, callback, **kwargs):
while time.time() - start < 10:
if p.poll() is not None:
return p.poll()
time.sleep(.1)
time.sleep(0.1)
return p.poll()
# Call multiple commands -----------------------------------------------
def run_commands(cmds, callback=None, separate_stderr=False, shell=True,
**kwargs):
def run_commands(cmds, callback=None, separate_stderr=False, shell=True, **kwargs):
"""Run multiple commands with error management
Run a list of commands and allow to manage how to treat errors either

@@ -178,14 +180,14 @@ def run_commands(cmds, callback=None, separate_stderr=False, shell=True,
# overriden by user input
for a in ['stdout', 'stderr']:
if a in kwargs:
raise ValueError('%s argument not allowed, '
'it will be overridden.' % a)
raise ValueError('%s argument not allowed, ' 'it will be overridden.' % a)
# If no callback specified...
if callback is None:
# Raise CalledProcessError on command failure
def callback(r, c, o):
raise CalledProcessError(r, c, o)
elif not callable(callback):
raise ValueError('callback argument must be callable')

@@ -201,8 +203,9 @@ def run_commands(cmds, callback=None, separate_stderr=False, shell=True,
error = 0
for cmd in cmds:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=_stderr, shell=shell, **kwargs)
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=_stderr, shell=shell, **kwargs
)
output = _get_output(*process.communicate())
retcode = process.poll()
@@ -8,6 +8,7 @@ logger = logging.getLogger('moulinette.utils.serialize')
# JSON utilities -------------------------------------------------------
class JSONExtendedEncoder(JSONEncoder):
"""Extended JSON encoder

@@ -24,8 +25,7 @@ class JSONExtendedEncoder(JSONEncoder):
def default(self, o):
"""Return a serializable object"""
# Convert compatible containers into list
if isinstance(o, set) or (
hasattr(o, '__iter__') and hasattr(o, 'next')):
if isinstance(o, set) or (hasattr(o, '__iter__') and hasattr(o, 'next')):
return list(o)
# Display the date in its iso format ISO-8601 Internet Profile (RFC 3339)

@@ -35,6 +35,9 @@ class JSONExtendedEncoder(JSONEncoder):
return o.isoformat()
# Return the repr for object that json can't encode
logger.warning('cannot properly encode in JSON the object %s, '
'returned repr is: %r', type(o), o)
logger.warning(
'cannot properly encode in JSON the object %s, ' 'returned repr is: %r',
type(o),
o,
)
return repr(o)
@@ -7,6 +7,7 @@ from multiprocessing.queues import SimpleQueue
# Read from a stream ---------------------------------------------------
class AsynchronousFileReader(Process):
"""

@@ -75,7 +76,6 @@ class AsynchronousFileReader(Process):
class Consummer(object):
def __init__(self, queue, callback):
self.queue = queue
self.callback = callback
@@ -6,6 +6,7 @@ import binascii
# Pattern searching ----------------------------------------------------
def search(pattern, text, count=0, flags=0):
"""Search for pattern in a text

@@ -55,6 +56,7 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
# Text formatting ------------------------------------------------------
def prependlines(text, prepend):
"""Prepend a string to each line of a text"""
lines = text.splitlines(True)

@@ -63,6 +65,7 @@ def prependlines(text, prepend):
# Randomize ------------------------------------------------------------
def random_ascii(length=20):
"""Return a random ascii string"""
return binascii.hexlify(os.urandom(length)).decode('ascii')
pyproject.toml (new file)

@@ -0,0 +1,2 @@
[tool.black]
skip-string-normalization = true
@@ -1,2 +1,8 @@
[flake8]
ignore = E501,E128,E731,E722
ignore =
    E501,
    E128,
    E731,
    E722,
    W503 # Black formatter conflict
    E203 # Black formatter conflict
@@ -48,47 +48,31 @@ def patch_logging(moulinette):
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'tty-debug': {
'format': '%(relativeCreated)-4d %(fmessage)s'
},
'tty-debug': {'format': '%(relativeCreated)-4d %(fmessage)s'},
'precise': {
'format': '%(asctime)-15s %(levelname)-8s %(name)s %(funcName)s - %(fmessage)s' # noqa
},
},
'filters': {
'action': {
'()': 'moulinette.utils.log.ActionFilter',
},
},
'filters': {'action': {'()': 'moulinette.utils.log.ActionFilter'}},
'handlers': {
'tty': {
'level': tty_level,
'class': 'moulinette.interfaces.cli.TTYHandler',
'formatter': '',
},
}
},
'loggers': {
'moulinette': {
'level': level,
'handlers': [],
'propagate': True,
},
'moulinette': {'level': level, 'handlers': [], 'propagate': True},
'moulinette.interface': {
'level': level,
'handlers': handlers,
'propagate': False,
},
},
'root': {
'level': level,
'handlers': root_handlers,
},
'root': {'level': level, 'handlers': root_handlers},
}
moulinette.init(
logging_config=logging,
_from_source=False
)
moulinette.init(logging_config=logging, _from_source=False)
@pytest.fixture(scope='session', autouse=True)
@@ -5,7 +5,7 @@ from moulinette.actionsmap import (
AskParameter,
PatternParameter,
RequiredParameter,
ActionsMap
ActionsMap,
)
from moulinette.interfaces import BaseActionsMapParser
from moulinette.core import MoulinetteError

@@ -58,11 +58,9 @@ def test_pattern_parameter_bad_str_value(iface, caplog):
assert any('expecting a list' in message for message in caplog.messages)
@pytest.mark.parametrize('iface', [
[],
['pattern_alone'],
['pattern', 'message', 'extra stuff']
])
@pytest.mark.parametrize(
'iface', [[], ['pattern_alone'], ['pattern', 'message', 'extra stuff']]
)
def test_pattern_parameter_bad_list_len(iface):
pattern = PatternParameter(iface)
with pytest.raises(TypeError):
@@ -4,8 +4,14 @@ import pytest
from moulinette import m18n
from moulinette.core import MoulinetteError
from moulinette.utils.filesystem import (append_to_file, read_file, read_json,
rm, write_to_file, write_to_json)
from moulinette.utils.filesystem import (
append_to_file,
read_file,
read_json,
rm,
write_to_file,
write_to_json,
)
def test_read_file(test_file):
tox.ini

@@ -2,6 +2,7 @@
envlist =
    py27
    lint
    format-check
    docs
skipdist = True

@@ -24,6 +25,20 @@ deps = flake8
skip_install = True
usedevelop = False

[testenv:format]
basepython = python3
commands = black {posargs} moulinette test
deps = black
skip_install = True
usedevelop = False

[testenv:format-check]
basepython = {[testenv:format]basepython}
commands = black {posargs:--check --diff} moulinette test
deps = {[testenv:format]deps}
skip_install = {[testenv:format]skip_install}
usedevelop = {[testenv:format]usedevelop}

[testenv:docs]
usedevelop = True
commands = python -m sphinx -W doc/ doc/_build