Mirror of https://github.com/YunoHost/moulinette.git (synced 2024-09-03 20:06:31 +02:00)

Merge pull request #117 from YunoHost/logging

Provide new logging facilities and other helpers

Commit ed6408ef3a

10 changed files with 335 additions and 137 deletions
debian/control (7 changed lines)
@@ -2,8 +2,8 @@ Source: moulinette
Section: python
Priority: optional
Maintainer: Jérôme Lebleu <jerome.lebleu@mailoo.org>
Build-Depends: debhelper (>= 7.0.50), python (>= 2.7)
Standards-Version: 3.9.2
Build-Depends: debhelper (>= 9), python (>= 2.7)
Standards-Version: 3.9.6
X-Python-Version: >= 2.7
Homepage: https://github.com/YunoHost/moulinette

@@ -14,7 +14,8 @@ Depends: ${misc:Depends}, ${python:Depends},
 python-yaml,
 python-bottle (>= 0.12),
 python-gnupg,
 python-gevent-websocket
 python-gevent-websocket,
 python-argcomplete
Replaces: yunohost-cli
Breaks: yunohost-cli
Description: prototype interfaces with ease in Python
@@ -8,6 +8,8 @@
    "root_required" : "You must be root to perform this action",
    "instance_already_running" : "An instance is already running",
    "error_see_log" : "An error occured. Please see the log for details.",
    "file_not_exist" : "File does not exist",
    "folder_not_exist" : "Folder does not exist",
    "unable_authenticate" : "Unable to authenticate",
    "unable_retrieve_session" : "Unable to retrieve the session",
@@ -87,14 +87,28 @@ def api(namespaces, host='localhost', port=80, routes={},
                      instead of using the cached one

    """
    try:
        moulinette = init_interface('api',
                                    kwargs={ 'routes': routes,
                                             'use_websocket': use_websocket },
                                    actionsmap={ 'namespaces': namespaces,
                                                 'use_cache': use_cache })
                                    kwargs={
                                        'routes': routes,
                                        'use_websocket': use_websocket
                                    },
                                    actionsmap={
                                        'namespaces': namespaces,
                                        'use_cache': use_cache
                                    }
        )
        moulinette.run(host, port)
    except MoulinetteError as e:
        import logging
        logging.getLogger('moulinette').error(e.strerror)
        return e.errno
    except KeyboardInterrupt:
        import logging
        logging.getLogger('moulinette').info(m18n.g('operation_interrupted'))
        return 0


def cli(namespaces, args, print_json=False, print_plain=False, use_cache=True):
def cli(namespaces, args, use_cache=True, output_as=None, parser_kwargs={}):
    """Command line interface

    Execute an action with the moulinette from the CLI and print its

@@ -103,20 +117,25 @@ def cli(namespaces, args, print_json=False, print_plain=False, use_cache=True):
    Keyword arguments:
        - namespaces -- The list of namespaces to use
        - args -- A list of argument strings
        - print_json -- True to print result as a JSON encoded string
        - print_plain -- True to print result as a script-usable string
        - use_cache -- False if it should parse the actions map file
            instead of using the cached one
        - output_as -- Output result in another format, see
            moulinette.interfaces.cli.Interface for possible values
        - parser_kwargs -- A dict of arguments to pass to the parser
            class at construction

    """
    from moulinette.interfaces.cli import colorize

    try:
        moulinette = init_interface('cli',
                                    actionsmap={'namespaces': namespaces,
                                                'use_cache': use_cache})
        moulinette.run(args, print_json, print_plain)
                                    actionsmap={
                                        'namespaces': namespaces,
                                        'use_cache': use_cache,
                                        'parser_kwargs': parser_kwargs,
                                    },
        )
        moulinette.run(args, output_as=output_as)
    except MoulinetteError as e:
        print('%s %s' % (colorize(m18n.g('error'), 'red'), e.strerror))
        import logging
        logging.getLogger('moulinette').error(e.strerror)
        return e.errno
    return 0
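As an illustration (not part of the diff), a minimal sketch of how a console script might drive the reworked cli() entry point. The namespace name, the init() call and the script layout are assumptions; only the use_cache, output_as and parser_kwargs keywords come from the change above.

    #!/usr/bin/env python
    # Hypothetical console script; only the cli() keywords are taken from the diff.
    import sys
    from moulinette import init, cli   # init() assumed to exist elsewhere in the package

    if __name__ == '__main__':
        init()  # assumed package-level setup (i18n, logging)
        # 'mynamespace' stands for an installed actions map namespace
        sys.exit(cli(['mynamespace'], sys.argv[1:],
                     use_cache=True,
                     output_as='json',      # or 'plain', or None for pretty printing
                     parser_kwargs={}))     # forwarded to the CLI parser class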
@@ -342,17 +342,20 @@ class ActionsMap(object):
    all available namespaces.

    Keyword arguments:
        - parser -- The BaseActionsMapParser derived class to use for
            parsing the actions map
        - parser_class -- The BaseActionsMapParser derived class to use
            for parsing the actions map
        - namespaces -- The list of namespaces to use
        - use_cache -- False if it should parse the actions map file
            instead of using the cached one.
            instead of using the cached one
        - parser_kwargs -- A dict of arguments to pass to the parser
            class at construction

    """
    def __init__(self, parser, namespaces=[], use_cache=True):
        if not issubclass(parser, BaseActionsMapParser):
            raise ValueError("Invalid parser class '%s'" % parser.__name__)
        self._parser_class = parser
    def __init__(self, parser_class, namespaces=[], use_cache=True,
                 parser_kwargs={}):
        if not issubclass(parser_class, BaseActionsMapParser):
            raise ValueError("Invalid parser class '%s'" % parser_class.__name__)
        self.parser_class = parser_class
        self.use_cache = use_cache

        if len(namespaces) == 0:

@@ -380,8 +383,8 @@ class ActionsMap(object):
            m18n.load_namespace(n)

        # Generate parsers
        self.extraparser = ExtraArgumentParser(parser.interface)
        self._parser = self._construct_parser(actionsmaps)
        self.extraparser = ExtraArgumentParser(parser_class.interface)
        self._parser = self._construct_parser(actionsmaps, **parser_kwargs)

    @property
    def parser(self):

@@ -515,13 +518,15 @@ class ActionsMap(object):

    ## Private methods

    def _construct_parser(self, actionsmaps):
    def _construct_parser(self, actionsmaps, **kwargs):
        """
        Construct the parser with the actions map

        Keyword arguments:
            - actionsmaps -- A dict of multi-level dictionnary of
                categories/actions/arguments list for each namespaces
            - **kwargs -- Additionnal arguments to pass at the parser
                class instantiation

        Returns:
            An interface relevant's parser object

@@ -551,7 +556,7 @@ class ActionsMap(object):
                parser.add_argument(*names, **argp)

        # Instantiate parser
        top_parser = self._parser_class()
        top_parser = self.parser_class(**kwargs)

        # Iterate over actions map namespaces
        for n, actionsmap in actionsmaps.items():
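To make the renamed constructor arguments concrete, here is a hedged sketch of building an ActionsMap by hand; the namespace name is hypothetical and the CLI parser class is used purely as an example.

    # Sketch only: ActionsMap with the new parser_class / parser_kwargs arguments.
    from moulinette.actionsmap import ActionsMap
    from moulinette.interfaces.cli import ActionsMapParser

    amap = ActionsMap(parser_class=ActionsMapParser,   # was: parser
                      namespaces=['mynamespace'],      # hypothetical namespace
                      use_cache=False,                 # re-parse the actions map file
                      parser_kwargs={})                # passed to ActionsMapParser(**kwargs)
    print(amap.parser)                                 # the constructed top-level parser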
@@ -151,7 +151,7 @@ class BaseAuthenticator(object):
        except IOError:
            logger.debug("unable to retrieve session", exc_info=1)
            raise MoulinetteError(errno.ENOENT,
                                  m18r.g('unable_retrieve_session'))
                                  m18n.g('unable_retrieve_session'))
        else:
            gpg = gnupg.GPG()
            gpg.encoding = 'utf-8'

@@ -161,5 +161,5 @@ class BaseAuthenticator(object):
            logger.error("unable to decrypt password for the session: %s",
                         decrypted.status)
            raise MoulinetteError(errno.EINVAL,
                                  m18r.g('unable_retrieve_session'))
                                  m18n.g('unable_retrieve_session'))
        return decrypted.data
@@ -31,7 +31,7 @@ class BaseActionsMapParser(object):
        - parent -- A parent BaseActionsMapParser derived object

    """
    def __init__(self, parent=None):
    def __init__(self, parent=None, **kwargs):
        if parent:
            self._o = parent
        else:
@@ -3,7 +3,6 @@
import os
import re
import errno
import logging
import argparse
from json import dumps as json_encode

@@ -17,14 +16,42 @@ from moulinette.core import MoulinetteError, clean_session
from moulinette.interfaces import (
    BaseActionsMapParser, BaseInterface, ExtendedArgumentParser,
)
from moulinette.utils import log
from moulinette.utils.serialize import JSONExtendedEncoder
from moulinette.utils.text import random_ascii

logger = logging.getLogger('moulinette.interface.api')
logger = log.getLogger('moulinette.interface.api')


# API helpers ----------------------------------------------------------

class LogQueues(dict):
    """Map of session id to queue."""
    pass

class APIQueueHandler(log.Handler):
    """
    A handler class which store logging records into a queue, to be used
    and retrieved from the API.
    """
    def __init__(self):
        log.Handler.__init__(self)
        self.queues = LogQueues()

    def emit(self, record):
        sid = request.get_cookie('session.id')
        try:
            queue = self.queues[sid]
        except KeyError:
            # Session is not initialized, abandon.
            return
        else:
            # Put the message as a 2-tuple in the queue
            queue.put_nowait((record.levelname.lower(), record.getMessage()))
            # Put the current greenlet to sleep for 0 second in order to
            # populate the new message in the queue
            sleep(0)

class _HTTPArgumentParser(object):
    """Argument parser for HTTP requests
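The APIQueueHandler above only collects records once it is registered on a logger; the Interface constructor further down then discovers it through log.getHandlersByClass(). A hedged sketch of registering it via the standard dictConfig mechanism; the logger names and levels are illustrative, not taken from the diff.

    # Illustrative logging configuration that registers the API queue handler.
    from moulinette.utils.log import configure_logging

    configure_logging({
        'version': 1,
        'disable_existing_loggers': False,
        'handlers': {
            'api': {
                # dotted path resolved by logging.config to the class defined above
                'class': 'moulinette.interfaces.api.APIQueueHandler',
                'level': 'INFO',
            },
        },
        'loggers': {
            'moulinette': {'level': 'INFO', 'handlers': ['api']},
        },
    })
    # Records emitted on 'moulinette.*' now end up in the handler's LogQueues,
    # keyed by the 'session.id' cookie of the request being served.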
@@ -126,7 +153,7 @@ class _ActionsMapPlugin(object):
    name = 'actionsmap'
    api = 2

    def __init__(self, actionsmap, use_websocket):
    def __init__(self, actionsmap, use_websocket, log_queues={}):
        # Connect signals to handlers
        msignals.set_handler('authenticate', self._do_authenticate)
        if use_websocket:

@@ -134,9 +161,9 @@ class _ActionsMapPlugin(object):

        self.actionsmap = actionsmap
        self.use_websocket = use_websocket
        self.log_queues = log_queues
        # TODO: Save and load secrets?
        self.secrets = {}
        self.queues = {}

    def setup(self, app):
        """Setup plugin on the application

@@ -308,11 +335,11 @@ class _ActionsMapPlugin(object):
        """
        s_id = request.get_cookie('session.id')
        try:
            queue = self.queues[s_id]
            queue = self.log_queues[s_id]
        except KeyError:
            # Create a new queue for the session
            queue = Queue()
            self.queues[s_id] = queue
            self.log_queues[s_id] = queue

        wsock = request.environ.get('wsgi.websocket')
        if not wsock:

@@ -326,7 +353,7 @@ class _ActionsMapPlugin(object):
                except TypeError:
                    if item == StopIteration:
                        # Delete the current queue and break
                        del self.queues[s_id]
                        del self.log_queues[s_id]
                        break
                    logger.exception("invalid item in the messages queue: %r", item)
                else:

@@ -358,7 +385,7 @@ class _ActionsMapPlugin(object):
        finally:
            # Close opened WebSocket by putting StopIteration in the queue
            try:
                queue = self.queues[request.get_cookie('session.id')]
                queue = self.log_queues[request.get_cookie('session.id')]
            except KeyError:
                pass
            else:

@@ -396,7 +423,7 @@ class _ActionsMapPlugin(object):
        """
        s_id = request.get_cookie('session.id')
        try:
            queue = self.queues[s_id]
            queue = self.log_queues[s_id]
        except KeyError:
            return

@@ -476,7 +503,7 @@ class ActionsMapParser(BaseActionsMapParser):
    the arguments is represented by a ExtendedArgumentParser object.

    """
    def __init__(self, parent=None):
    def __init__(self, parent=None, **kwargs):
        super(ActionsMapParser, self).__init__(parent)

        self._parsers = {}   # dict({(method, path): _HTTPArgumentParser})

@@ -621,11 +648,20 @@ class Interface(BaseInterface):
        - routes -- A dict of additional routes to add in the form of
            {(method, path): callback}
        - use_websocket -- Serve via WSGI to handle asynchronous responses
        - log_queues -- A LogQueues object or None to retrieve it from
            registered logging handlers

    """
    def __init__(self, actionsmap, routes={}, use_websocket=True):
    def __init__(self, actionsmap, routes={}, use_websocket=True,
                 log_queues=None):
        self.use_websocket = use_websocket

        # Attempt to retrieve log queues from an APIQueueHandler
        if log_queues is None:
            handler = log.getHandlersByClass(APIQueueHandler, limit=1)
            if handler:
                log_queues = handler.queues

        # TODO: Return OK to 'OPTIONS' xhr requests (l173)
        app = Bottle(autojson=True)

@@ -648,7 +684,7 @@ class Interface(BaseInterface):
        # Install plugins
        app.install(apiheader)
        app.install(api18n)
        app.install(_ActionsMapPlugin(actionsmap, use_websocket))
        app.install(_ActionsMapPlugin(actionsmap, use_websocket, log_queues))

        # Append default routes
        # app.route(['/api', '/api/<category:re:[a-z]+>'], method='GET',
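Alternatively, the queues can be handed over explicitly instead of being looked up from the registered handlers. A minimal sketch, assuming an ActionsMap instance named actionsmap is already available; host and port are arbitrary.

    # Sketch: wiring the API interface with an explicit LogQueues mapping.
    from moulinette.interfaces.api import Interface, LogQueues

    queues = LogQueues()                   # maps a session id to its message queue
    api = Interface(actionsmap,            # assumed: an already-built ActionsMap
                    routes={},             # extra {(method, path): callback} routes
                    use_websocket=True,    # serve log messages over WebSocket
                    log_queues=queues)
    api.run('localhost', 6787)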
@@ -1,18 +1,22 @@
# -*- coding: utf-8 -*-

import os
import sys
import errno
import getpass
import locale
import logging
from argparse import SUPPRESS

import argcomplete

from moulinette.core import MoulinetteError
from moulinette.interfaces import (
    BaseActionsMapParser, BaseInterface, ExtendedArgumentParser,
)
from moulinette.utils import log


logger = logging.getLogger('moulinette.cli')
logger = log.getLogger('moulinette.cli')


# CLI helpers ----------------------------------------------------------
@@ -126,6 +130,71 @@ def get_locale():

# CLI Classes Implementation -------------------------------------------

class TTYHandler(log.StreamHandler):
    """TTY log handler

    A handler class which prints logging records for a tty. The record is
    neverthemess formatted depending if it is connected to a tty(-like)
    device.
    If it's the case, the level name - optionnaly colorized - is prepended
    to the message and the result is stored in the record as `message_key`
    attribute. That way, a custom formatter can be defined. The default is
    to output just the formatted message.
    Anyway, if the stream is not a tty, just the message is output.

    Note that records with a level higher or equal to WARNING are sent to
    stderr. Otherwise, they are sent to stdout.

    """
    LEVELS_COLOR = {
        log.NOTSET   : 'white',
        log.DEBUG    : 'white',
        log.INFO     : 'cyan',
        log.SUCCESS  : 'green',
        log.WARNING  : 'yellow',
        log.ERROR    : 'red',
        log.CRITICAL : 'red',
    }

    def __init__(self, message_key='fmessage'):
        log.StreamHandler.__init__(self)
        self.message_key = message_key

    def format(self, record):
        """Enhance message with level and colors if supported."""
        msg = record.getMessage()
        if self.supports_color():
            level = ''
            if self.level <= log.DEBUG:
                # add level name before message
                level = '%s ' % record.levelname
            elif record.levelname in ['SUCCESS', 'WARNING', 'ERROR']:
                # add translated level name before message
                level = '%s ' % m18n.g(record.levelname.lower())
            color = self.LEVELS_COLOR.get(record.levelno, 'white')
            msg = '\033[{0}m\033[1m{1}\033[m{2}'.format(
                colors_codes[color], level, msg)
        if self.formatter:
            # use user-defined formatter
            record.__dict__[self.message_key] = msg
            return self.formatter.format(record)
        return msg

    def emit(self, record):
        # set proper stream first
        if record.levelno >= log.WARNING:
            self.stream = sys.stderr
        else:
            self.stream = sys.stdout
        log.StreamHandler.emit(self, record)

    def supports_color(self):
        """Check whether current stream supports color."""
        if hasattr(self.stream, 'isatty') and self.stream.isatty():
            return True
        return False


class ActionsMapParser(BaseActionsMapParser):
    """Actions map's Parser for the CLI
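A hedged sketch of attaching the new TTYHandler to the package logger, with a formatter that reads the 'fmessage' key the handler stores on each record. It assumes moulinette's i18n global (m18n) is already initialized, since format() uses it for translated level names; the level and format string are illustrative.

    # Illustrative wiring of the TTY handler (assumes m18n is already set up).
    import logging
    from moulinette.interfaces.cli import TTYHandler

    handler = TTYHandler()                  # stores the decorated text as record.fmessage
    handler.setFormatter(logging.Formatter('%(fmessage)s'))
    handler.setLevel(logging.INFO)

    logger = logging.getLogger('moulinette')
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)

    logger.warning('sent to stderr, colorized when attached to a tty')
    logger.info('sent to stdout')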
@@ -135,9 +204,12 @@ class ActionsMapParser(BaseActionsMapParser):
    Keyword arguments:
        - parser -- The ExtendedArgumentParser object to use
        - subparser_kwargs -- Arguments to pass to the sub-parser group
        - top_parser -- An ArgumentParser object whose arguments should
            be take into account but not parsed

    """
    def __init__(self, parent=None, parser=None, subparser_kwargs=None):
    def __init__(self, parent=None, parser=None, subparser_kwargs=None,
                 top_parser=None, **kwargs):
        super(ActionsMapParser, self).__init__(parent)

        if subparser_kwargs is None:

@@ -145,6 +217,14 @@ class ActionsMapParser(BaseActionsMapParser):

        self._parser = parser or ExtendedArgumentParser()
        self._subparsers = self._parser.add_subparsers(**subparser_kwargs)
        self._global_parser = parent._global_parser if parent else None

        if top_parser:
            # Append each top parser action to the global group
            glob = self.add_global_parser()
            for action in top_parser._actions:
                action.dest = SUPPRESS
                glob._add_action(action)


    ## Implement virtual properties
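The top_parser mechanism lets a frontend declare global options that should appear in the generated help without being parsed by the actions map parser itself. A hedged sketch of forwarding such a parser through parser_kwargs; the --debug and --quiet flags and the namespace/action names are hypothetical.

    # Hypothetical global options handed down as 'top_parser' via parser_kwargs.
    from argparse import ArgumentParser
    from moulinette import cli

    top_parser = ArgumentParser(add_help=False)
    top_parser.add_argument('--debug', action='store_true',
                            help='log and display debug messages')
    top_parser.add_argument('--quiet', action='store_true',
                            help='be less verbose')

    # ActionsMapParser appends these actions to its "global arguments" group
    # (see above) but leaves their parsing to the caller.
    cli(['mynamespace'], ['somecategory', 'someaction'],
        parser_kwargs={'top_parser': top_parser})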
@@ -161,7 +241,10 @@ class ActionsMapParser(BaseActionsMapParser):
        return [name]

    def add_global_parser(self, **kwargs):
        return self._parser.add_mutually_exclusive_group()
        if not self._global_parser:
            self._global_parser = self._parser.add_argument_group(
                "global arguments")
        return self._global_parser

    def add_category_parser(self, name, category_help=None, **kwargs):
        """Add a parser for a category

@@ -226,7 +309,7 @@ class Interface(BaseInterface):

        self.actionsmap = actionsmap

    def run(self, args, print_json=False, print_plain=False):
    def run(self, args, output_as=None):
        """Run the moulinette

        Process the action corresponding to the given arguments 'args'

@@ -234,13 +317,17 @@ class Interface(BaseInterface):

        Keyword arguments:
            - args -- A list of argument strings
            - print_json -- True to print result as a JSON encoded string
            - print_plain -- True to print result as a script-usable string
            - output_as -- Output result in another format. Possible values:
                - json: return a JSON encoded string
                - plain: return a script-readable output

        """
        if print_json and print_plain:
        if output_as and output_as not in ['json', 'plain']:
            raise MoulinetteError(errno.EINVAL, m18n.g('invalid_usage'))

        # auto-complete
        argcomplete.autocomplete(self.actionsmap.parser._parser)

        try:
            ret = self.actionsmap.process(args, timeout=5)
        except KeyboardInterrupt, EOFError:

@@ -250,11 +337,12 @@ class Interface(BaseInterface):
            return

        # Format and print result
        if print_json:
        if output_as:
            if output_as == 'json':
                import json
                from moulinette.utils.serialize import JSONExtendedEncoder
                print(json.dumps(ret, cls=JSONExtendedEncoder))
        elif print_plain:
            else:
                plain_print_dict(ret)
        elif isinstance(ret, dict):
            pretty_print_dict(ret)
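For completeness, the same run() call under each output mode; the actions map construction is elided and the argument lists are hypothetical.

    # Sketch: the reworked Interface.run() output dispatch.
    from moulinette.interfaces.cli import Interface

    iface = Interface(actionsmap)   # assumed: an already-built ActionsMap instance
    iface.run(['somecategory', 'someaction'])                     # pretty-printed dict
    iface.run(['somecategory', 'someaction'], output_as='json')   # JSON on stdout
    iface.run(['somecategory', 'someaction'], output_as='plain')  # script-readable lines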
@@ -1,9 +1,12 @@
import os
import logging
from logging import *


# Global configuration and functions -----------------------------------

SUCCESS = 25

DEFAULT_LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,

@@ -37,10 +40,71 @@ def configure_logging(logging_config=None):
    """
    from logging.config import dictConfig

    # add custom logging level and class
    addLevelName(SUCCESS, 'SUCCESS')
    setLoggerClass(MoulinetteLogger)

    # load configuration from dict
    dictConfig(DEFAULT_LOGGING)
    if logging_config:
        dictConfig(logging_config)


def getHandlersByClass(classinfo, limit=0):
    """Retrieve registered handlers of a given class."""
    handlers = []
    for ref in logging._handlers.itervaluerefs():
        o = ref()
        if o is not None and isinstance(o, classinfo):
            if limit == 1:
                return o
            handlers.append(o)
        if limit != 0 and len(handlers) > limit:
            return handlers[:limit-1]
    return handlers


class MoulinetteLogger(Logger):
    """Custom logger class

    Extend base Logger class to provide the SUCCESS custom log level with
    a convenient logging method. It also consider an optionnal action_id
    which corresponds to the associated logged action. It is added to the
    LogRecord extra and can be used with the ActionFilter.

    """
    action_id = None

    def success(self, msg, *args, **kwargs):
        """Log 'msg % args' with severity 'SUCCESS'."""
        if self.isEnabledFor(SUCCESS):
            self._log(SUCCESS, msg, args, **kwargs)

    def findCaller(self):
        """Override findCaller method to consider this source file."""
        f = logging.currentframe()
        if f is not None:
            f = f.f_back
        rv = "(unknown file)", 0, "(unknown function)"
        while hasattr(f, "f_code"):
            co = f.f_code
            filename = os.path.normcase(co.co_filename)
            if filename == logging._srcfile or filename == __file__:
                f = f.f_back
                continue
            rv = (co.co_filename, f.f_lineno, co.co_name)
            break
        return rv

    def _log(self, *args, **kwargs):
        """Append action_id if available to the extra."""
        if self.action_id is not None:
            extra = kwargs.get('extra', {})
            if not 'action_id' in extra:
                # FIXME: Get real action_id instead of logger/current one
                extra['action_id'] = _get_action_id()
            kwargs['extra'] = extra
        return Logger._log(self, *args, **kwargs)


# Action logging -------------------------------------------------------
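A short sketch of the new helpers in use: configure_logging() installs the SUCCESS level and the MoulinetteLogger class, after which loggers gain a success() method and named handlers can be looked up by class. The handler configuration here is illustrative.

    # Illustrative use of the new logging helpers.
    from moulinette.utils import log

    log.configure_logging({
        'version': 1,
        'disable_existing_loggers': False,
        'handlers': {
            'console': {'class': 'logging.StreamHandler', 'level': 'DEBUG'},
        },
        'loggers': {
            'moulinette': {'level': 'DEBUG', 'handlers': ['console']},
        },
    })

    logger = log.getLogger('moulinette.example')   # a MoulinetteLogger instance
    logger.success('operation completed')          # emitted at the custom SUCCESS (25) level
    print(log.getHandlersByClass(log.StreamHandler, limit=1))   # handler lookup by class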
@@ -62,39 +126,11 @@ def start_action_logging():

    return _get_action_id()


class ActionLoggerAdapter(logging.LoggerAdapter):
    """Adapter for action loggers

    Extend an action logging output by processing both the logging message and the
    contextual information. The action id is prepended to the message and the
    following keyword arguments are added:
        - action_id -- the current action id

    """
    def process(self, msg, kwargs):
        """Process the logging call for the action

        Process the logging call by retrieving the action id and prepending it to
        the log message. It will also be added to the 'extra' keyword argument.

        """
        try:
            action_id = self.extra['action_id']
        except KeyError:
            action_id = _get_action_id()

        # Extend current extra keyword argument
        extra = kwargs.get('extra', {})
        extra['action_id'] = action_id
        kwargs['extra'] = extra

        return '[{:s}] {:s}'.format(action_id, msg), kwargs


def getActionLogger(name=None, logger=None, action_id=None):
    """Get the logger adapter for an action

    Return an action logger adapter with the specified name or logger and
    optionally for a given action id, creating it if necessary.
    Return a logger for the specified name - or use given logger - and
    optionally for a given action id, retrieving it if necessary.

    Either a name or a logger must be specified.

@@ -102,5 +138,30 @@ def getActionLogger(name=None, logger=None, action_id=None):
    if not name and not logger:
        raise ValueError('Either a name or a logger must be specified')

    extra = {'action_id': action_id} if action_id else {}
    return ActionLoggerAdapter(logger or logging.getLogger(name), extra)
    logger = logger or getLogger(name)
    logger.action_id = action_id if action_id else _get_action_id()
    return logger


class ActionFilter(object):
    """Extend log record for an optionnal action

    Filter a given record and look for an `action_id` key. If it is not found
    and `strict` is True, the record will not be logged. Otherwise, the key
    specified by `message_key` will be added to the record, containing the
    message formatted for the action or just the original one.

    """
    def __init__(self, message_key='fmessage', strict=False):
        self.message_key = message_key
        self.strict = strict

    def filter(self, record):
        msg = record.getMessage()
        action_id = record.__dict__.get('action_id', None)
        if action_id is not None:
            msg = '[{:s}] {:s}'.format(action_id, msg)
        elif self.strict:
            return False
        record.__dict__[self.message_key] = msg
        return True
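To show how an action id ends up in the output, a hedged sketch combining getActionLogger() with an ActionFilter attached to a stream handler; the handler and formatter choices are illustrative.

    # Illustrative action logging: the filter exposes "[action_id] message"
    # under the 'fmessage' record attribute used by the formatter below.
    import logging
    from moulinette.utils import log

    log.configure_logging()                  # installs the MoulinetteLogger class

    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(fmessage)s'))
    handler.addFilter(log.ActionFilter(message_key='fmessage'))

    logger = log.getActionLogger('moulinette.example',
                                 action_id=log.start_action_logging())
    logger.addHandler(handler)
    logger.propagate = False                 # keep this sketch's output on one handler
    logger.setLevel(logging.INFO)
    logger.info('doing something')           # printed as "[<action id>] doing something"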
@@ -1,59 +1,45 @@
from threading import Thread
from Queue import Queue, Empty
import threading
import Queue


# Read from a stream ---------------------------------------------------

class NonBlockingStreamReader:
    """A non-blocking stream reader
class AsynchronousFileReader(threading.Thread):
    """
    Helper class to implement asynchronous reading of a file
    in a separate thread. Pushes read lines on a queue to
    be consumed in another thread.

    Open a separate thread which reads lines from the stream whenever data
    becomes available and stores the data in a queue.

    Based on: http://eyalarubas.com/python-subproc-nonblock.html

    Keyword arguments:
        - stream -- The stream to read from
    Based on:
    http://stefaanlippens.net/python-asynchronous-subprocess-pipe-reading

    """
    def __init__(self, stream):
        self._s = stream
        self._q = Queue()
    def __init__(self, fd, queue):
        assert isinstance(queue, Queue.Queue)
        assert callable(fd.readline)
        threading.Thread.__init__(self)
        self._fd = fd
        self._queue = queue

    def _populateQueue(stream, queue):
        """Collect lines from the stream and put them in the queue"""
        while True:
            line = stream.readline()
            if line:
                queue.put(line)
            else:
                break
    def run(self):
        """The body of the tread: read lines and put them on the queue."""
        for line in iter(self._fd.readline, ''):
            self._queue.put(line)

        self._t = Thread(target=_populateQueue, args=(self._s, self._q))
        self._t.daemon = True
        # Start collecting lines from the stream
        self._t.start()
    def eof(self):
        """Check whether there is no more content to expect."""
        return not self.is_alive() and self._queue.empty()

    def readline(self, block=False, timeout=None):
        """Read line from the stream
    def join(self, timeout=None, close=True):
        """Close the file and join the thread."""
        if close:
            self._fd.close()
        threading.Thread.join(self, timeout)

        Attempt to pull from the queue the data and return it. If no data is
        available or timeout has expired, it returns None.

        Keyword arguments:
            - block -- If True, block if necessary until data is available
            - timeout -- The number of seconds to block

        """
        try:
            return self._q.get(block=timeout is not None,
                               timeout=timeout)
        except Empty:
            return None

    def close(self):
        """Close the stream"""
        try:
            self._s.close()
        except IOError:
            pass
def start_async_file_reading(fd):
    """Helper which instantiate and run an AsynchronousFileReader."""
    queue = Queue.Queue()
    reader = AsynchronousFileReader(fd, queue)
    reader.start()
    return (reader, queue)
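Finally, a usage sketch of the replacement reader, following the pattern of the linked article: consuming a subprocess' output without blocking the main thread. The command is arbitrary and the code targets the Python 2 runtime used by the project.

    # Sketch: non-blocking consumption of a subprocess' stdout.
    import subprocess
    import time
    from moulinette.utils.stream import start_async_file_reading

    process = subprocess.Popen(['ping', '-c', '3', 'localhost'],
                               stdout=subprocess.PIPE)
    reader, queue = start_async_file_reading(process.stdout)

    while not reader.eof():
        while not queue.empty():
            print(queue.get().rstrip())   # handle each line as it arrives
        time.sleep(0.1)                   # free to do other work in the meantime
    reader.join()                         # closes the pipe and joins the thread
    process.wait()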