Merge pull request #228 from kay0u/enh-python3

Minimal change to support python3
This commit is contained in:
Alexandre Aubin 2021-01-19 23:05:21 +01:00 committed by GitHub
commit e4b70e3861
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
25 changed files with 329 additions and 350 deletions

View file

@ -21,7 +21,7 @@ matrix:
env: TOXENV=py37-lint env: TOXENV=py37-lint
- python: 3.7 - python: 3.7
env: TOXENV=format-check env: TOXENV=format-check
- python: 2.7 - python: 3.5
env: TOXENV=docs env: TOXENV=docs
install: install:

21
debian/control vendored
View file

@ -2,22 +2,21 @@ Source: moulinette
Section: python Section: python
Priority: optional Priority: optional
Maintainer: YunoHost Contributors <contrib@yunohost.org> Maintainer: YunoHost Contributors <contrib@yunohost.org>
Build-Depends: debhelper (>= 9), python (>= 2.7), dh-python, python-setuptools, python-psutil, python-all (>= 2.7) Build-Depends: debhelper (>= 9), python3 (>= 3.7), dh-python, python3-setuptools, python3-psutil, python3-all (>= 3.7)
Standards-Version: 3.9.6 Standards-Version: 3.9.6
X-Python-Version: >= 2.7
Homepage: https://github.com/YunoHost/moulinette Homepage: https://github.com/YunoHost/moulinette
Package: moulinette Package: moulinette
Architecture: all Architecture: all
Depends: ${misc:Depends}, ${python:Depends}, Depends: ${misc:Depends}, ${python3:Depends},
python-ldap, python3-ldap,
python-yaml, python3-yaml,
python-bottle (>= 0.12), python3-bottle (>= 0.12),
python-gevent-websocket, python3-gevent-websocket,
python-argcomplete, python3-argcomplete,
python-toml, python3-toml,
python-psutil, python3-psutil,
python-tz python3-tz
Breaks: yunohost (<< 4.1) Breaks: yunohost (<< 4.1)
Description: prototype interfaces with ease in Python Description: prototype interfaces with ease in Python
Quickly and easily prototype interfaces for your application. Quickly and easily prototype interfaces for your application.

4
debian/rules vendored
View file

@ -1,4 +1,6 @@
#!/usr/bin/make -f #!/usr/bin/make -f
export PYBUILD_NAME=moulinette
%: %:
dh $@ --with python2 --buildsystem=python_distutils dh $@ --with python3 --buildsystem=pybuild

View file

@ -31,15 +31,15 @@
"success": "Succès !", "success": "Succès !",
"unable_authenticate": "Impossible de vous authentifier", "unable_authenticate": "Impossible de vous authentifier",
"unable_retrieve_session": "Impossible de récupérer la session à cause de '{exception}'", "unable_retrieve_session": "Impossible de récupérer la session à cause de '{exception}'",
"unknown_group": "Le groupe «'{group}'» est inconnu", "unknown_group": "Le groupe « '{group}' » est inconnu",
"unknown_user": "L'utilisateur « {user} » est inconnu", "unknown_user": "L'utilisateur « {user} » est inconnu",
"values_mismatch": "Les valeurs ne correspondent pas", "values_mismatch": "Les valeurs ne correspondent pas",
"warning": "Attention :", "warning": "Attention :",
"websocket_request_expected": "Une requête WebSocket est attendue", "websocket_request_expected": "Une requête WebSocket est attendue",
"cannot_open_file": "Impossible douvrir le fichier {file:s} (raison : {error:s})", "cannot_open_file": "Impossible douvrir le fichier {file:s} (raison : {error:s})",
"cannot_write_file": "Ne peut pas écrire le fichier {file:s} (raison : {error:s})", "cannot_write_file": "Ne peut pas écrire le fichier {file:s} (raison : {error:s})",
"unknown_error_reading_file": "Erreur inconnue en essayant de lire le fichier {file:s} (cause:{error:s})", "unknown_error_reading_file": "Erreur inconnue en essayant de lire le fichier {file:s} (cause:{error:s})",
"corrupted_json": "Fichier JSON corrompu en lecture depuis {ressource:s} (raison : {error:s})", "corrupted_json": "Fichier JSON corrompu en lecture depuis {ressource:s} (raison : {error:s})",
"error_writing_file": "Erreur en écrivant le fichier {file:s} : {error:s}", "error_writing_file": "Erreur en écrivant le fichier {file:s} : {error:s}",
"error_removing": "Erreur lors de la suppression {path:s} : {error:s}", "error_removing": "Erreur lors de la suppression {path:s} : {error:s}",
"error_changing_file_permissions": "Erreur lors de la modification des autorisations pour {path:s} : {error:s}", "error_changing_file_permissions": "Erreur lors de la modification des autorisations pour {path:s} : {error:s}",
@ -48,8 +48,8 @@
"download_timeout": "{url:s} a pris trop de temps pour répondre : abandon.", "download_timeout": "{url:s} a pris trop de temps pour répondre : abandon.",
"download_unknown_error": "Erreur lors du téléchargement des données à partir de {url:s} : {error:s}", "download_unknown_error": "Erreur lors du téléchargement des données à partir de {url:s} : {error:s}",
"download_bad_status_code": "{url:s} renvoie le code d'état {code:s}", "download_bad_status_code": "{url:s} renvoie le code d'état {code:s}",
"command_unknown": "Commande '{command:s}' inconnue ?", "command_unknown": "Commande '{command:s}' inconnue ?",
"corrupted_yaml": "Fichier YAML corrompu en lecture depuis {ressource:s} (raison : {error:s})", "corrupted_yaml": "Fichier YAML corrompu en lecture depuis {ressource:s} (raison : {error:s})",
"info": "Info :", "info": "Info :",
"corrupted_toml": "Fichier TOML corrompu en lecture depuis {ressource:s} (cause : {error:s})", "corrupted_toml": "Fichier TOML corrompu en lecture depuis {ressource:s} (cause : {error:s})",
"warn_the_user_about_waiting_lock": "Une autre commande YunoHost est actuellement en cours, nous attendons qu'elle se termine avant de démarrer celle là", "warn_the_user_about_waiting_lock": "Une autre commande YunoHost est actuellement en cours, nous attendons qu'elle se termine avant de démarrer celle là",

View file

@ -5,7 +5,27 @@ import re
import logging import logging
import yaml import yaml
import glob import glob
import sys
if sys.version_info[0] == 3:
# python 3
import pickle as pickle
else:
# python 2
import cPickle as pickle import cPickle as pickle
import codecs
import warnings
def open(file, mode='r', buffering=-1, encoding=None,
errors=None, newline=None, closefd=True, opener=None):
if newline is not None:
warnings.warn('newline is not supported in py2')
if not closefd:
warnings.warn('closefd is not supported in py2')
if opener is not None:
warnings.warn('opener is not supported in py2')
return codecs.open(filename=file, mode=mode, encoding=encoding,
errors=errors, buffering=buffering)
from time import time from time import time
from collections import OrderedDict from collections import OrderedDict
from importlib import import_module from importlib import import_module
@ -189,7 +209,7 @@ class PatternParameter(_ExtraParameter):
# Use temporarly utf-8 encoded value # Use temporarly utf-8 encoded value
try: try:
v = unicode(arg_value, "utf-8") v = str(arg_value, "utf-8")
except Exception: except Exception:
v = arg_value v = arg_value
@ -294,7 +314,7 @@ class ExtraArgumentParser(object):
""" """
# Iterate over parameters to validate # Iterate over parameters to validate
for p, v in parameters.items(): for p in list(parameters):
klass = self.extra.get(p, None) klass = self.extra.get(p, None)
if not klass: if not klass:
# Remove unknown parameters # Remove unknown parameters
@ -302,7 +322,7 @@ class ExtraArgumentParser(object):
else: else:
try: try:
# Validate parameter value # Validate parameter value
parameters[p] = klass.validate(v, arg_name) parameters[p] = klass.validate(parameters[p], arg_name)
except Exception as e: except Exception as e:
error_message = ( error_message = (
"unable to validate extra parameter '%s' for argument '%s': %s" "unable to validate extra parameter '%s' for argument '%s': %s"
@ -436,7 +456,7 @@ class ActionsMap(object):
if os.path.exists(actionsmap_pkl): if os.path.exists(actionsmap_pkl):
try: try:
# Attempt to load cache # Attempt to load cache
with open(actionsmap_pkl) as f: with open(actionsmap_pkl, "rb") as f:
actionsmaps[n] = pickle.load(f) actionsmaps[n] = pickle.load(f)
self.from_cache = True self.from_cache = True
@ -655,7 +675,7 @@ class ActionsMap(object):
pkl = "%s-%d-%d.pkl" % (namespace, am_file_stat.st_size, am_file_stat.st_mtime) pkl = "%s-%d-%d.pkl" % (namespace, am_file_stat.st_size, am_file_stat.st_mtime)
with open_cachefile(pkl, "w", subdir="actionsmap") as f: with open_cachefile(pkl, "wb", subdir="actionsmap") as f:
pickle.dump(actionsmap, f) pickle.dump(actionsmap, f)
return actionsmap return actionsmap

View file

@ -167,7 +167,7 @@ class BaseAuthenticator(object):
"""Store a session to be able to use it later to reauthenticate""" """Store a session to be able to use it later to reauthenticate"""
# We store a hash of the session_id and the session_token (the token is assumed to be secret) # We store a hash of the session_id and the session_token (the token is assumed to be secret)
to_hash = "{id}:{token}".format(id=session_id, token=session_token) to_hash = "{id}:{token}".format(id=session_id, token=session_token).encode()
hash_ = hashlib.sha256(to_hash).hexdigest() hash_ = hashlib.sha256(to_hash).hexdigest()
with self._open_sessionfile(session_id, "w") as f: with self._open_sessionfile(session_id, "w") as f:
f.write(hash_) f.write(hash_)
@ -202,7 +202,7 @@ class BaseAuthenticator(object):
# re-hash the {id}:{token} and compare it to the previously stored hash for this session_id ... # re-hash the {id}:{token} and compare it to the previously stored hash for this session_id ...
# It it matches, then the user is authenticated. Otherwise, the token is invalid. # It it matches, then the user is authenticated. Otherwise, the token is invalid.
# #
to_hash = "{id}:{token}".format(id=session_id, token=session_token) to_hash = "{id}:{token}".format(id=session_id, token=session_token).encode()
hash_ = hashlib.sha256(to_hash).hexdigest() hash_ = hashlib.sha256(to_hash).hexdigest()
if not hmac.compare_digest(hash_, stored_hash): if not hmac.compare_digest(hash_, stored_hash):

View file

@ -15,7 +15,6 @@ from moulinette.authenticators import BaseAuthenticator
logger = logging.getLogger("moulinette.authenticator.ldap") logger = logging.getLogger("moulinette.authenticator.ldap")
# LDAP Class Implementation -------------------------------------------- # LDAP Class Implementation --------------------------------------------
@ -42,6 +41,7 @@ class Authenticator(BaseAuthenticator):
self.sasldn = "cn=external,cn=auth" self.sasldn = "cn=external,cn=auth"
self.adminuser = "admin" self.adminuser = "admin"
self.admindn = "cn=%s,dc=yunohost,dc=org" % self.adminuser self.admindn = "cn=%s,dc=yunohost,dc=org" % self.adminuser
self.admindn = "cn=%s,dc=yunohost,dc=org" % self.adminuser
logger.debug( logger.debug(
"initialize authenticator '%s' with: uri='%s', " "initialize authenticator '%s' with: uri='%s', "
"base_dn='%s', user_rdn='%s'", "base_dn='%s', user_rdn='%s'",
@ -59,7 +59,7 @@ class Authenticator(BaseAuthenticator):
def __del__(self): def __del__(self):
"""Disconnect and free ressources""" """Disconnect and free ressources"""
if self.con: if hasattr(self, "con") and self.con:
self.con.unbind_s() self.con.unbind_s()
# Implement virtual properties # Implement virtual properties
@ -149,6 +149,19 @@ class Authenticator(BaseAuthenticator):
for dn, entry in result: for dn, entry in result:
entry["dn"] = [dn] entry["dn"] = [dn]
result_list.append(entry) result_list.append(entry)
def decode(value):
if isinstance(value, bytes):
value = value.decode('utf-8')
return value
# result_list is for example :
# [{'virtualdomain': [b'test.com']}, {'virtualdomain': [b'yolo.test']},
for stuff in result_list:
if isinstance(stuff, dict):
for key, values in stuff.items():
stuff[key] = [decode(v) for v in values]
return result_list return result_list
def add(self, rdn, attr_dict): def add(self, rdn, attr_dict):
@ -165,6 +178,12 @@ class Authenticator(BaseAuthenticator):
""" """
dn = rdn + "," + self.basedn dn = rdn + "," + self.basedn
ldif = modlist.addModlist(attr_dict) ldif = modlist.addModlist(attr_dict)
for i, (k, v) in enumerate(ldif):
if isinstance(v, list):
v = [a.encode("utf-8") for a in v]
elif isinstance(v, str):
v = [v.encode("utf-8")]
ldif[i] = (k, v)
try: try:
self.con.add_s(dn, ldif) self.con.add_s(dn, ldif)
@ -227,6 +246,13 @@ class Authenticator(BaseAuthenticator):
new_base = dn.split(",", 1)[1] new_base = dn.split(",", 1)[1]
dn = new_rdn + "," + new_base dn = new_rdn + "," + new_base
for i, (a, k, vs) in enumerate(ldif):
if isinstance(vs, list):
vs = [v.encode("utf-8") for v in vs]
elif isinstance(vs, str):
vs = [vs.encode("utf-8")]
ldif[i] = (a, k, vs)
self.con.modify_ext_s(dn, ldif) self.con.modify_ext_s(dn, ldif)
except Exception as e: except Exception as e:
raise MoulinetteError( raise MoulinetteError(

View file

@ -8,6 +8,23 @@ import logging
import moulinette import moulinette
from moulinette.globals import init_moulinette_env from moulinette.globals import init_moulinette_env
import sys
if sys.version_info[0] == 3:
pass
else:
# python 2
import codecs
import warnings
def open(file, mode='r', buffering=-1, encoding=None,
errors=None, newline=None, closefd=True, opener=None):
if newline is not None:
warnings.warn('newline is not supported in py2')
if not closefd:
warnings.warn('closefd is not supported in py2')
if opener is not None:
warnings.warn('opener is not supported in py2')
return codecs.open(filename=file, mode=mode, encoding=encoding,
errors=errors, buffering=buffering)
logger = logging.getLogger("moulinette.core") logger = logging.getLogger("moulinette.core")
@ -98,13 +115,10 @@ class Translator(object):
try: try:
return ( return (
self._translations[self.locale][key] self._translations[self.locale][key]
.encode("utf-8")
.format(*args, **kwargs) .format(*args, **kwargs)
) )
except KeyError as e: except KeyError as e:
unformatted_string = self._translations[self.locale][key].encode( unformatted_string = self._translations[self.locale][key]
"utf-8"
)
error_message = ( error_message = (
"Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)" "Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)"
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e) % (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
@ -126,13 +140,12 @@ class Translator(object):
try: try:
return ( return (
self._translations[self.default_locale][key] self._translations[self.default_locale][key]
.encode("utf-8")
.format(*args, **kwargs) .format(*args, **kwargs)
) )
except KeyError as e: except KeyError as e:
unformatted_string = self._translations[self.default_locale][ unformatted_string = self._translations[self.default_locale][
key key
].encode("utf-8") ]
error_message = ( error_message = (
"Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)" "Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)"
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e) % (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
@ -142,7 +155,7 @@ class Translator(object):
else: else:
raise Exception(error_message) raise Exception(error_message)
return self._translations[self.default_locale][key].encode("utf-8") return self._translations[self.default_locale][key]
error_message = ( error_message = (
"unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)" "unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)"
@ -174,8 +187,11 @@ class Translator(object):
return True return True
try: try:
with open("%s/%s.json" % (self.locale_dir, locale), "r") as f: with open("%s/%s.json" % (self.locale_dir, locale), "r", encoding='utf-8') as f:
j = json.load(f, "utf-8") j = json.load(f)
import sys
if sys.version_info[0] == 2:
j = {k.encode("utf-8"): v.encode("utf-8") for k, v in j.items()}
except IOError: except IOError:
return False return False
else: else:

View file

@ -9,6 +9,13 @@ from collections import deque, OrderedDict
from moulinette import msettings, m18n from moulinette import msettings, m18n
from moulinette.core import MoulinetteError from moulinette.core import MoulinetteError
import sys
if sys.version_info[0] == 3:
pass
else:
# python 2
range = xrange
logger = logging.getLogger("moulinette.interface") logger = logging.getLogger("moulinette.interface")
GLOBAL_SECTION = "_global" GLOBAL_SECTION = "_global"
@ -475,7 +482,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
def dequeue_callbacks(self, namespace): def dequeue_callbacks(self, namespace):
queue = self._get_callbacks_queue(namespace, False) queue = self._get_callbacks_queue(namespace, False)
for _i in xrange(len(queue)): for _i in range(len(queue)):
c, v = queue.popleft() c, v = queue.popleft()
# FIXME: break dequeue if callback returns # FIXME: break dequeue if callback returns
c.execute(namespace, v) c.execute(namespace, v)

View file

@ -92,7 +92,7 @@ def plain_print_dict(d, depth=0):
print("{}{}".format("#" * (depth + 1), k)) print("{}{}".format("#" * (depth + 1), k))
plain_print_dict(v, depth + 1) plain_print_dict(v, depth + 1)
else: else:
if isinstance(d, unicode): if isinstance(d, str):
d = d.encode("utf-8") d = d.encode("utf-8")
print(d) print(d)
@ -156,15 +156,11 @@ def pretty_print_dict(d, depth=0):
elif isinstance(value, dict): elif isinstance(value, dict):
pretty_print_dict({key: value}, depth + 1) pretty_print_dict({key: value}, depth + 1)
else: else:
if isinstance(value, unicode): if isinstance(v, date):
value = value.encode("utf-8")
elif isinstance(v, date):
v = pretty_date(v) v = pretty_date(v)
print("{:s}- {}".format(" " * (depth + 1), value)) print("{:s}- {}".format(" " * (depth + 1), value))
else: else:
if isinstance(v, unicode): if isinstance(v, date):
v = v.encode("utf-8")
elif isinstance(v, date):
v = pretty_date(v) v = pretty_date(v)
print("{:s}{}: {}".format(" " * depth, k, v)) print("{:s}{}: {}".format(" " * depth, k, v))
@ -524,7 +520,7 @@ class Interface(BaseInterface):
if is_password: if is_password:
prompt = lambda m: getpass.getpass(colorize(m18n.g("colon", m), color)) prompt = lambda m: getpass.getpass(colorize(m18n.g("colon", m), color))
else: else:
prompt = lambda m: raw_input(colorize(m18n.g("colon", m), color)) prompt = lambda m: input(colorize(m18n.g("colon", m), color))
value = prompt(message) value = prompt(message)
if confirm: if confirm:
@ -540,8 +536,6 @@ class Interface(BaseInterface):
Handle the core.MoulinetteSignals.display signal. Handle the core.MoulinetteSignals.display signal.
""" """
if isinstance(message, unicode):
message = message.encode("utf-8")
if style == "success": if style == "success":
print("{} {}".format(colorize(m18n.g("success"), "green"), message)) print("{} {}".format(colorize(m18n.g("success"), "green"), message))
elif style == "warning": elif style == "warning":

View file

@ -22,11 +22,9 @@ def read_file(file_path):
Keyword argument: Keyword argument:
file_path -- Path to the text file file_path -- Path to the text file
""" """
assert isinstance( assert isinstance(file_path, str), (
file_path, basestring "Error: file_path '%s' should be a string but is of type '%s' instead"
), "Error: file_path '%s' should be a string but is of type '%s' instead" % ( % (file_path, type(file_path))
file_path,
type(file_path),
) )
# Check file exists # Check file exists
@ -153,11 +151,9 @@ def write_to_file(file_path, data, file_mode="w"):
file_mode -- Mode used when writing the file. Option meant to be used file_mode -- Mode used when writing the file. Option meant to be used
by append_to_file to avoid duplicating the code of this function. by append_to_file to avoid duplicating the code of this function.
""" """
assert isinstance(data, basestring) or isinstance( assert isinstance(data, str) or isinstance(data, list), (
data, list "Error: data '%s' should be either a string or a list but is of type '%s'"
), "Error: data '%s' should be either a string or a list but is of type '%s'" % ( % (data, type(data))
data,
type(data),
) )
assert not os.path.isdir(file_path), ( assert not os.path.isdir(file_path), (
"Error: file_path '%s' point to a dir, it should be a file" % file_path "Error: file_path '%s' point to a dir, it should be a file" % file_path
@ -170,13 +166,11 @@ def write_to_file(file_path, data, file_mode="w"):
) )
# If data is a list, check elements are strings and build a single string # If data is a list, check elements are strings and build a single string
if not isinstance(data, basestring): if not isinstance(data, str):
for element in data: for element in data:
assert isinstance( assert isinstance(element, str), (
element, basestring "Error: element '%s' should be a string but is of type '%s' instead"
), "Error: element '%s' should be a string but is of type '%s' instead" % ( % (element, type(element))
element,
type(element),
) )
data = "\n".join(data) data = "\n".join(data)
@ -211,11 +205,9 @@ def write_to_json(file_path, data, sort_keys=False, indent=None):
""" """
# Assumptions # Assumptions
assert isinstance( assert isinstance(file_path, str), (
file_path, basestring "Error: file_path '%s' should be a string but is of type '%s' instead"
), "Error: file_path '%s' should be a string but is of type '%s' instead" % ( % (file_path, type(file_path))
file_path,
type(file_path),
) )
assert isinstance(data, dict) or isinstance( assert isinstance(data, dict) or isinstance(
data, list data, list
@ -252,7 +244,7 @@ def write_to_yaml(file_path, data):
data -- The data to write (must be a dict or a list) data -- The data to write (must be a dict or a list)
""" """
# Assumptions # Assumptions
assert isinstance(file_path, basestring) assert isinstance(file_path, str)
assert isinstance(data, dict) or isinstance(data, list) assert isinstance(data, dict) or isinstance(data, list)
assert not os.path.isdir(file_path) assert not os.path.isdir(file_path)
assert os.path.isdir(os.path.dirname(file_path)) assert os.path.isdir(os.path.dirname(file_path))
@ -327,14 +319,14 @@ def chown(path, uid=None, gid=None, recursive=False):
raise ValueError("either uid or gid argument is required") raise ValueError("either uid or gid argument is required")
# Retrieve uid/gid # Retrieve uid/gid
if isinstance(uid, basestring): if isinstance(uid, str):
try: try:
uid = getpwnam(uid).pw_uid uid = getpwnam(uid).pw_uid
except KeyError: except KeyError:
raise MoulinetteError("unknown_user", user=uid) raise MoulinetteError("unknown_user", user=uid)
elif uid is None: elif uid is None:
uid = -1 uid = -1
if isinstance(gid, basestring): if isinstance(gid, str):
try: try:
gid = grp.getgrnam(gid).gr_gid gid = grp.getgrnam(gid).gr_gid
except KeyError: except KeyError:

View file

@ -101,7 +101,7 @@ class MoulinetteLogger(Logger):
if self.isEnabledFor(SUCCESS): if self.isEnabledFor(SUCCESS):
self._log(SUCCESS, msg, args, **kwargs) self._log(SUCCESS, msg, args, **kwargs)
def findCaller(self): def findCaller(self, *args):
"""Override findCaller method to consider this source file.""" """Override findCaller method to consider this source file."""
f = logging.currentframe() f = logging.currentframe()
if f is not None: if f is not None:
@ -125,7 +125,7 @@ class MoulinetteLogger(Logger):
# FIXME: Get real action_id instead of logger/current one # FIXME: Get real action_id instead of logger/current one
extra["action_id"] = _get_action_id() extra["action_id"] = _get_action_id()
kwargs["extra"] = extra kwargs["extra"] = extra
return Logger._log(self, *args, **kwargs) return super()._log(*args, **kwargs)
# Action logging ------------------------------------------------------- # Action logging -------------------------------------------------------

View file

@ -59,6 +59,6 @@ def download_json(url, timeout=30, expected_status_code=200):
try: try:
loaded_json = json.loads(text) loaded_json = json.loads(text)
except ValueError as e: except ValueError as e:
raise MoulinetteError("corrupted_json", ressource=url, error=e) raise MoulinetteError("corrupted_json", ressource=url, error=str(e))
return loaded_json return loaded_json

View file

@ -10,7 +10,7 @@ try:
except ImportError: except ImportError:
from shlex import quote # Python3 >= 3.3 from shlex import quote # Python3 >= 3.3
from .stream import async_file_reading from .stream import LogPipe
quote # This line is here to avoid W0611 PEP8 error (see comments above) quote # This line is here to avoid W0611 PEP8 error (see comments above)
@ -28,7 +28,7 @@ def check_output(args, stderr=subprocess.STDOUT, shell=True, **kwargs):
and use shell by default before calling subprocess.check_output. and use shell by default before calling subprocess.check_output.
""" """
return subprocess.check_output(args, stderr=stderr, shell=shell, **kwargs).strip() return subprocess.check_output(args, stderr=stderr, shell=shell, **kwargs).decode('utf-8').strip()
# Call with stream access ---------------------------------------------- # Call with stream access ----------------------------------------------
@ -59,71 +59,20 @@ def call_async_output(args, callback, **kwargs):
if a in kwargs: if a in kwargs:
raise ValueError("%s argument not allowed, " "it will be overridden." % a) raise ValueError("%s argument not allowed, " "it will be overridden." % a)
if "stdinfo" in kwargs and kwargs["stdinfo"] is not None: kwargs["stdout"] = LogPipe(callback[0])
assert len(callback) == 3 kwargs["stderr"] = LogPipe(callback[1])
stdinfo = kwargs.pop("stdinfo") stdinfo = LogPipe(callback[2]) if len(callback) >= 3 else None
os.mkfifo(stdinfo, 0o600)
# Open stdinfo for reading (in a nonblocking way, i.e. even
# if command does not write in the stdinfo pipe...)
stdinfo_f = os.open(stdinfo, os.O_RDONLY | os.O_NONBLOCK)
else:
if "stdinfo" in kwargs:
kwargs.pop("stdinfo")
stdinfo = None
# Validate callback argument
if isinstance(callback, tuple):
if len(callback) < 2:
raise ValueError("callback argument should be a 2-tuple")
kwargs["stdout"] = kwargs["stderr"] = subprocess.PIPE
separate_stderr = True
elif callable(callback):
kwargs["stdout"] = subprocess.PIPE
kwargs["stderr"] = subprocess.STDOUT
separate_stderr = False
callback = (callback,)
else:
raise ValueError("callback argument must be callable or a 2-tuple")
# Run the command
p = subprocess.Popen(args, **kwargs)
# Wrap and get command outputs
stdout_reader, stdout_consum = async_file_reading(p.stdout, callback[0])
if separate_stderr:
stderr_reader, stderr_consum = async_file_reading(p.stderr, callback[1])
if stdinfo: if stdinfo:
stdinfo_reader, stdinfo_consum = async_file_reading(stdinfo_f, callback[2]) kwargs["pass_fds"] = [stdinfo.fdWrite]
if "env" not in kwargs:
kwargs["env"] = os.environ
kwargs["env"]['YNH_STDINFO'] = str(stdinfo.fdWrite)
while not stdout_reader.eof() and not stderr_reader.eof(): with subprocess.Popen(args, **kwargs) as p:
while not stdout_consum.empty() or not stderr_consum.empty(): kwargs["stdout"].close()
# alternate between the 2 consumers to avoid desynchronisation kwargs["stderr"].close()
# this way is not 100% perfect but should do it
stdout_consum.process_next_line()
stderr_consum.process_next_line()
if stdinfo: if stdinfo:
stdinfo_consum.process_next_line() stdinfo.close()
time.sleep(0.1)
stderr_reader.join()
# clear the queues
stdout_consum.process_current_queue()
stderr_consum.process_current_queue()
if stdinfo:
stdinfo_consum.process_current_queue()
else:
while not stdout_reader.eof():
stdout_consum.process_current_queue()
time.sleep(0.1)
stdout_reader.join()
# clear the queue
stdout_consum.process_current_queue()
if stdinfo:
# Remove the stdinfo pipe
os.remove(stdinfo)
os.rmdir(os.path.dirname(stdinfo))
stdinfo_reader.join()
stdinfo_consum.process_current_queue()
# on slow hardware, in very edgy situations it is possible that the process # on slow hardware, in very edgy situations it is possible that the process
# isn't finished just after having closed stdout and stderr, so we wait a # isn't finished just after having closed stdout and stderr, so we wait a

View file

@ -1,116 +1,38 @@
import os import os
import time import threading
from multiprocessing.process import Process # Adapted from https://codereview.stackexchange.com/a/17959
from multiprocessing.queues import SimpleQueue
# Read from a stream --------------------------------------------------- class LogPipe(threading.Thread):
class AsynchronousFileReader(Process):
def __init__(self, log_callback):
"""Setup the object with a logger and a loglevel
and start the thread
""" """
Helper class to implement asynchronous reading of a file threading.Thread.__init__(self)
in a separate thread. Pushes read lines on a queue to self.daemon = False
be consumed in another thread. self.log_callback = log_callback
Based on: self.fdRead, self.fdWrite = os.pipe()
http://stefaanlippens.net/python-asynchronous-subprocess-pipe-reading self.pipeReader = os.fdopen(self.fdRead)
self.start()
def fileno(self):
"""Return the write file descriptor of the pipe
""" """
return self.fdWrite
def __init__(self, fd, queue):
assert hasattr(queue, "put")
assert hasattr(queue, "empty")
assert isinstance(fd, int) or callable(fd.readline)
Process.__init__(self)
self._fd = fd
self._queue = queue
def run(self): def run(self):
"""The body of the tread: read lines and put them on the queue.""" """Run the thread, logging everything.
"""
for line in iter(self.pipeReader.readline, ''):
self.log_callback(line.strip('\n'))
# If self._fd is a file opened with open()... self.pipeReader.close()
# Typically that's for stdout/stderr pipes
# We can read the stuff easily with 'readline'
if not isinstance(self._fd, int):
for line in iter(self._fd.readline, ""):
self._queue.put(line)
# Else, it got opened with os.open() and we have to read it def close(self):
# wit low level crap... """Close the write end of the pipe.
else: """
data = "" os.close(self.fdWrite)
while True:
try:
# Try to read (non-blockingly) a few bytes, append them to
# the buffer
data += os.read(self._fd, 50)
except Exception as e:
print(
"from moulinette.utils.stream: could not read file descriptor : %s"
% str(e)
)
continue
# If nobody's writing in there anymore, get out
if not data and os.fstat(self._fd).st_nlink == 0:
return
# If we have data, extract a line (ending with \n) and feed
# it to the consumer
if data and "\n" in data:
lines = data.split("\n")
self._queue.put(lines[0])
data = "\n".join(lines[1:])
else:
time.sleep(0.05)
def eof(self):
"""Check whether there is no more content to expect."""
return not self.is_alive() and self._queue.empty()
def join(self, timeout=None, close=True):
"""Close the file and join the thread."""
if close:
self._queue.put(StopIteration)
if isinstance(self._fd, int):
os.close(self._fd)
else:
self._fd.close()
Process.join(self, timeout)
class Consummer(object):
def __init__(self, queue, callback):
self.queue = queue
self.callback = callback
def empty(self):
return self.queue.empty()
def process_next_line(self):
if not self.empty():
line = self.queue.get()
if line:
if line == StopIteration:
return
self.callback(line)
def process_current_queue(self):
while not self.empty():
line = self.queue.get()
if line:
if line == StopIteration:
break
self.callback(line)
def async_file_reading(fd, callback):
"""Helper which instantiate and run an AsynchronousFileReader."""
queue = SimpleQueue()
reader = AsynchronousFileReader(fd, queue)
reader.start()
consummer = Consummer(queue, callback)
return (reader, consummer)

View file

@ -3,6 +3,23 @@ import re
import mmap import mmap
import binascii import binascii
import sys
if sys.version_info[0] == 3:
pass
else:
# python 2
import codecs
import warnings
def open(file, mode='r', buffering=-1, encoding=None,
errors=None, newline=None, closefd=True, opener=None):
if newline is not None:
warnings.warn('newline is not supported in py2')
if not closefd:
warnings.warn('closefd is not supported in py2')
if opener is not None:
warnings.warn('opener is not supported in py2')
return codecs.open(filename=file, mode=mode, encoding=encoding,
errors=errors, buffering=buffering)
# Pattern searching ---------------------------------------------------- # Pattern searching ----------------------------------------------------
@ -47,8 +64,11 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
content by using the search function. content by using the search function.
""" """
with open(path, "r+") as f: with open(path, "rb+") as f:
data = mmap.mmap(f.fileno(), 0) data = mmap.mmap(f.fileno(), 0)
if sys.version_info[0] == 3:
match = search(pattern, data.read().decode(), count, flags)
else:
match = search(pattern, data, count, flags) match = search(pattern, data, count, flags)
data.close() data.close()
return match return match

View file

@ -17,6 +17,31 @@ if "install" in sys.argv:
if f.endswith('.json'): if f.endswith('.json'):
locale_files.append('locales/%s' % f) locale_files.append('locales/%s' % f)
install_deps = [
'argcomplete',
'psutil',
'pytz',
'pyyaml',
'toml',
'python-ldap',
'gevent-websocket',
'bottle',
]
test_deps = [
'pytest',
'pytest-cov',
'pytest-env',
'pytest-mock',
'requests',
'requests-mock',
'webtest'
]
extras = {
'install': install_deps,
'tests': test_deps,
}
setup(name='Moulinette', setup(name='Moulinette',
version='2.0.0', version='2.0.0',
@ -27,24 +52,8 @@ setup(name='Moulinette',
license='AGPL', license='AGPL',
packages=find_packages(exclude=['test']), packages=find_packages(exclude=['test']),
data_files=[(LOCALES_DIR, locale_files)], data_files=[(LOCALES_DIR, locale_files)],
python_requires='>=2.7.*', python_requires='>=3.7.*, <3.8',
install_requires=[ install_requires=install_deps,
'argcomplete', tests_require=test_deps,
'psutil', extras_require=extras,
'pytz',
'pyyaml',
'toml',
'python-ldap',
'gevent-websocket',
'bottle',
],
tests_require=[
'pytest',
'pytest-cov',
'pytest-env',
'pytest-mock',
'requests',
'requests-mock',
'webtest'
],
) )

View file

@ -6,6 +6,7 @@ import json
import os import os
import shutil import shutil
import pytest import pytest
import sys
from .src.ldap_server import LDAPServer from .src.ldap_server import LDAPServer
@ -156,7 +157,7 @@ def moulinette_cli(moulinette, mocker):
def test_file(tmp_path): def test_file(tmp_path):
test_text = "foo\nbar\n" test_text = "foo\nbar\n"
test_file = tmp_path / "test.txt" test_file = tmp_path / "test.txt"
test_file.write_bytes(test_text) test_file.write_bytes(test_text.encode())
return test_file return test_file
@ -164,7 +165,7 @@ def test_file(tmp_path):
def test_json(tmp_path): def test_json(tmp_path):
test_json = json.dumps({"foo": "bar"}) test_json = json.dumps({"foo": "bar"})
test_file = tmp_path / "test.json" test_file = tmp_path / "test.json"
test_file.write_bytes(test_json) test_file.write_bytes(test_json.encode())
return test_file return test_file
@ -172,7 +173,7 @@ def test_json(tmp_path):
def test_yaml(tmp_path): def test_yaml(tmp_path):
test_yaml = yaml.dump({"foo": "bar"}) test_yaml = yaml.dump({"foo": "bar"})
test_file = tmp_path / "test.txt" test_file = tmp_path / "test.txt"
test_file.write_bytes(test_yaml) test_file.write_bytes(test_yaml.encode())
return test_file return test_file
@ -180,7 +181,7 @@ def test_yaml(tmp_path):
def test_toml(tmp_path): def test_toml(tmp_path):
test_toml = toml.dumps({"foo": "bar"}) test_toml = toml.dumps({"foo": "bar"})
test_file = tmp_path / "test.txt" test_file = tmp_path / "test.txt"
test_file.write_bytes(str(test_toml)) test_file.write_bytes(test_toml.encode())
return test_file return test_file
@ -189,14 +190,14 @@ def test_ldif(tmp_path):
test_file = tmp_path / "test.txt" test_file = tmp_path / "test.txt"
from ldif import LDIFWriter from ldif import LDIFWriter
writer = LDIFWriter(open(str(test_file), "wb")) writer = LDIFWriter(open(str(test_file), "w"))
writer.unparse( writer.unparse(
"mail=alice@example.com", "mail=alice@example.com",
{ {
"cn": ["Alice Alison"], "cn": ["Alice Alison".encode("utf-8")],
"mail": ["alice@example.com"], "mail": ["alice@example.com".encode("utf-8")],
"objectclass": ["top", "person"], "objectclass": ["top".encode("utf-8"), "person".encode("utf-8")],
}, },
) )
@ -219,3 +220,11 @@ def ldap_server():
server.start() server.start()
yield server yield server
server.stop() server.stop()
@pytest.fixture
def builtin_str():
if sys.version_info[0] == 3:
return "builtins"
else:
return "__builtin__"

View file

@ -5,13 +5,31 @@ except ImportError:
import os import os
from moulinette.authenticators import ldap as m_ldap from moulinette.authenticators import ldap as m_ldap
import sys
if sys.version_info[0] == 3:
pass
else:
# python 2
import codecs
import warnings
def open(file, mode='r', buffering=-1, encoding=None,
errors=None, newline=None, closefd=True, opener=None):
if newline is not None:
warnings.warn('newline is not supported in py2')
if not closefd:
warnings.warn('closefd is not supported in py2')
if opener is not None:
warnings.warn('opener is not supported in py2')
return codecs.open(filename=file, mode=mode, encoding=encoding,
errors=errors, buffering=buffering)
HERE = os.path.abspath(os.path.dirname(__file__)) HERE = os.path.abspath(os.path.dirname(__file__))
class LDAPServer: class LDAPServer:
def __init__(self): def __init__(self):
self.server_default = slapdtest.SlapdObject() self.server_default = slapdtest.SlapdObject()
with open(os.path.join(HERE, "..", "ldap_files", "slapd.conf.template")) as f: with open(os.path.join(HERE, "..", "ldap_files", "slapd.conf.template"), encoding="utf-8") as f:
SLAPD_CONF_TEMPLATE = f.read() SLAPD_CONF_TEMPLATE = f.read()
self.server_default.slapd_conf_template = SLAPD_CONF_TEMPLATE self.server_default.slapd_conf_template = SLAPD_CONF_TEMPLATE
self.server_default.suffix = "dc=yunohost,dc=org" self.server_default.suffix = "dc=yunohost,dc=org"
@ -33,8 +51,8 @@ class LDAPServer:
self.server = self.server_default self.server = self.server_default
self.server.start() self.server.start()
self.uri = self.server.ldapi_uri self.uri = self.server.ldapi_uri
with open(os.path.join(HERE, "..", "ldap_files", "tests.ldif")) as fp: with open(os.path.join(HERE, "..", "ldap_files", "tests.ldif"), encoding="utf-8") as fp:
ldif = fp.read().decode("utf-8") ldif = fp.read()
self.server.ldapadd(ldif) self.server.ldapadd(ldif)
self.tools_ldapinit() self.tools_ldapinit()
@ -54,7 +72,7 @@ class LDAPServer:
""" """
import yaml import yaml
with open(os.path.join(HERE, "..", "ldap_files", "ldap_scheme.yml")) as f: with open(os.path.join(HERE, "..", "ldap_files", "ldap_scheme.yml"), "rb") as f:
ldap_map = yaml.load(f) ldap_map = yaml.load(f)
def _get_ldap_interface(): def _get_ldap_interface():

View file

@ -244,7 +244,7 @@ def test_actions_map_api():
assert ("POST", "/test-auth/subcat/post") in amap.parser.routes assert ("POST", "/test-auth/subcat/post") in amap.parser.routes
def test_actions_map_import_error(mocker): def test_actions_map_import_error(mocker, builtin_str):
from moulinette.interfaces.api import ActionsMapParser from moulinette.interfaces.api import ActionsMapParser
amap = ActionsMap(ActionsMapParser()) amap = ActionsMap(ActionsMapParser())
@ -261,7 +261,7 @@ def test_actions_map_import_error(mocker):
raise ImportError raise ImportError
return orig_import(name, globals, locals, fromlist, level) return orig_import(name, globals, locals, fromlist, level)
mocker.patch("__builtin__.__import__", side_effect=import_mock) mocker.patch(builtin_str + ".__import__", side_effect=import_mock)
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
amap.process({}, timeout=30, route=("GET", "/test-auth/none")) amap.process({}, timeout=30, route=("GET", "/test-auth/none"))

View file

@ -39,10 +39,10 @@ def test_read_file_missing_file():
assert expected_msg in str(exception) assert expected_msg in str(exception)
def test_read_file_cannot_read_ioerror(test_file, mocker): def test_read_file_cannot_read_ioerror(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
read_file(str(test_file)) read_file(str(test_file))
@ -51,10 +51,10 @@ def test_read_file_cannot_read_ioerror(test_file, mocker):
assert expected_msg in str(exception) assert expected_msg in str(exception)
def test_read_file_cannot_read_exception(test_file, mocker): def test_read_file_cannot_read_exception(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=Exception(error)) mocker.patch(builtin_str + ".open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
read_file(str(test_file)) read_file(str(test_file))
@ -121,22 +121,22 @@ def test_read_ldif(test_ldif):
dn, entry = read_ldif(str(test_ldif))[0] dn, entry = read_ldif(str(test_ldif))[0]
assert dn == "mail=alice@example.com" assert dn == "mail=alice@example.com"
assert entry["mail"] == ["alice@example.com"] assert entry["mail"] == ["alice@example.com".encode("utf-8")]
assert entry["objectclass"] == ["top", "person"] assert entry["objectclass"] == ["top".encode("utf-8"), "person".encode("utf-8")]
assert entry["cn"] == ["Alice Alison"] assert entry["cn"] == ["Alice Alison".encode("utf-8")]
dn, entry = read_ldif(str(test_ldif), ["objectclass"])[0] dn, entry = read_ldif(str(test_ldif), ["objectclass"])[0]
assert dn == "mail=alice@example.com" assert dn == "mail=alice@example.com"
assert entry["mail"] == ["alice@example.com"] assert entry["mail"] == ["alice@example.com".encode("utf-8")]
assert "objectclass" not in entry assert "objectclass" not in entry
assert entry["cn"] == ["Alice Alison"] assert entry["cn"] == ["Alice Alison".encode("utf-8")]
def test_read_ldif_cannot_ioerror(test_ldif, mocker): def test_read_ldif_cannot_ioerror(test_ldif, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
read_ldif(str(test_ldif)) read_ldif(str(test_ldif))
@ -145,10 +145,10 @@ def test_read_ldif_cannot_ioerror(test_ldif, mocker):
assert expected_msg in str(exception) assert expected_msg in str(exception)
def test_read_ldif_cannot_exception(test_ldif, mocker): def test_read_ldif_cannot_exception(test_ldif, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=Exception(error)) mocker.patch(builtin_str + ".open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
read_ldif(str(test_ldif)) read_ldif(str(test_ldif))
@ -171,10 +171,10 @@ def test_write_to_new_file(tmp_path):
assert read_file(str(new_file)) == "yolo\nswag" assert read_file(str(new_file)) == "yolo\nswag"
def test_write_to_existing_file_bad_perms(test_file, mocker): def test_write_to_existing_file_bad_perms(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_file(str(test_file), "yolo\nswag") write_to_file(str(test_file), "yolo\nswag")
@ -183,10 +183,10 @@ def test_write_to_existing_file_bad_perms(test_file, mocker):
assert expected_msg in str(exception) assert expected_msg in str(exception)
def test_write_to_file_exception(test_file, mocker): def test_write_to_file_exception(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=Exception(error)) mocker.patch(builtin_str + ".open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_file(str(test_file), "yolo\nswag") write_to_file(str(test_file), "yolo\nswag")
@ -238,12 +238,12 @@ def test_write_dict_to_json(tmp_path):
assert _json["bar"] == ["a", "b", "c"] assert _json["bar"] == ["a", "b", "c"]
def test_write_json_to_existing_file_bad_perms(test_file, mocker): def test_write_json_to_existing_file_bad_perms(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]} dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_file), dummy_dict) write_to_json(str(test_file), dummy_dict)
@ -252,12 +252,12 @@ def test_write_json_to_existing_file_bad_perms(test_file, mocker):
assert expected_msg in str(exception) assert expected_msg in str(exception)
def test_write_json_to_file_exception(test_file, mocker): def test_write_json_to_file_exception(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]} dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=Exception(error)) mocker.patch(builtin_str + ".open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_file), dummy_dict) write_to_json(str(test_file), dummy_dict)
@ -276,10 +276,10 @@ def text_write_list_to_json(tmp_path):
assert _json == ["foo", "bar", "baz"] assert _json == ["foo", "bar", "baz"]
def test_write_to_json_bad_perms(test_json, mocker): def test_write_to_json_bad_perms(test_json, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_json), {"a": 1}) write_to_json(str(test_json), {"a": 1})
@ -307,12 +307,12 @@ def test_write_dict_to_yaml(tmp_path):
assert _yaml["bar"] == ["a", "b", "c"] assert _yaml["bar"] == ["a", "b", "c"]
def test_write_yaml_to_existing_file_bad_perms(test_file, mocker): def test_write_yaml_to_existing_file_bad_perms(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]} dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_yaml(str(test_file), dummy_dict) write_to_yaml(str(test_file), dummy_dict)
@ -321,12 +321,12 @@ def test_write_yaml_to_existing_file_bad_perms(test_file, mocker):
assert expected_msg in str(exception) assert expected_msg in str(exception)
def test_write_yaml_to_file_exception(test_file, mocker): def test_write_yaml_to_file_exception(test_file, mocker, builtin_str):
error = "foobar" error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]} dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=Exception(error)) mocker.patch(builtin_str + ".open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_yaml(str(test_file), dummy_dict) write_to_yaml(str(test_file), dummy_dict)
@ -345,10 +345,10 @@ def text_write_list_to_yaml(tmp_path):
assert _yaml == ["foo", "bar", "baz"] assert _yaml == ["foo", "bar", "baz"]
def test_write_to_yaml_bad_perms(test_yaml, mocker): def test_write_to_yaml_bad_perms(test_yaml, mocker, builtin_str):
error = "foobar" error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error)) mocker.patch(builtin_str + ".open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
write_to_yaml(str(test_yaml), {"a": 1}) write_to_yaml(str(test_yaml), {"a": 1})
@ -465,9 +465,9 @@ def test_chown_exception(test_file, mocker):
chown(str(test_file), 1) chown(str(test_file), 1)
translation = m18n.g( translation = m18n.g(
"error_changing_file_permissions", path=test_file, error=str(error) "error_changing_file_permissions", path=str(test_file), error=str(error)
) )
expected_msg = translation.format(path=test_file, error=str(error)) expected_msg = translation.format(path=str(test_file), error=str(error))
assert expected_msg in str(exception) assert expected_msg in str(exception)
@ -504,9 +504,9 @@ def test_chmod_exception(test_file, mocker):
chmod(str(test_file), 0o000) chmod(str(test_file), 0o000)
translation = m18n.g( translation = m18n.g(
"error_changing_file_permissions", path=test_file, error=str(error) "error_changing_file_permissions", path=str(test_file), error=str(error)
) )
expected_msg = translation.format(path=test_file, error=str(error)) expected_msg = translation.format(path=str(test_file), error=str(error))
assert expected_msg in str(exception) assert expected_msg in str(exception)

View file

@ -76,6 +76,7 @@ class TestLDAP:
# Now if slapd is down, moulinette tries to restart it # Now if slapd is down, moulinette tries to restart it
mocker.patch("os.system") mocker.patch("os.system")
mocker.patch("time.sleep")
with pytest.raises(MoulinetteError) as exception: with pytest.raises(MoulinetteError) as exception:
ldap_interface.authenticate(password="yunohost") ldap_interface.authenticate(password="yunohost")
@ -100,16 +101,16 @@ class TestLDAP:
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0] admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
assert "cn" in admin_info assert "cn" in admin_info
assert admin_info["cn"] == ["admin"] assert admin_info["cn"] == ["admin".encode("utf-8")]
assert "description" in admin_info assert "description" in admin_info
assert admin_info["description"] == ["LDAP Administrator"] assert admin_info["description"] == ["LDAP Administrator".encode("utf-8")]
assert "userPassword" in admin_info assert "userPassword" in admin_info
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$") assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
admin_info = ldap_interface.search( admin_info = ldap_interface.search(
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"] "cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
)[0] )[0]
assert admin_info.keys() == ["userPassword"] assert admin_info.keys() == ["userPassword".encode("utf-8")]
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$") assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
def test_sasl_read(self, ldap_server): def test_sasl_read(self, ldap_server):
@ -121,16 +122,16 @@ class TestLDAP:
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0] admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
assert "cn" in admin_info assert "cn" in admin_info
assert admin_info["cn"] == ["admin"] assert admin_info["cn"] == ["admin".encode("utf-8")]
assert "description" in admin_info assert "description" in admin_info
assert admin_info["description"] == ["LDAP Administrator"] assert admin_info["description"] == ["LDAP Administrator".encode("utf-8")]
assert "userPassword" in admin_info assert "userPassword" in admin_info
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$") assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
admin_info = ldap_interface.search( admin_info = ldap_interface.search(
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"] "cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
)[0] )[0]
assert admin_info.keys() == ["userPassword"] assert admin_info.keys() == ["userPassword".encode("utf-8")]
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$") assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
def test_anonymous_read(self, ldap_server): def test_anonymous_read(self, ldap_server):
@ -139,9 +140,9 @@ class TestLDAP:
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0] admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
assert "cn" in admin_info assert "cn" in admin_info
assert admin_info["cn"] == ["admin"] assert admin_info["cn"] == ["admin".encode("utf-8")]
assert "description" in admin_info assert "description" in admin_info
assert admin_info["description"] == ["LDAP Administrator"] assert admin_info["description"] == ["LDAP Administrator".encode("utf-8")]
assert "userPassword" not in admin_info assert "userPassword" not in admin_info
admin_info = ldap_interface.search( admin_info = ldap_interface.search(
@ -179,11 +180,11 @@ class TestLDAP:
new_user_info = self.add_new_user(ldap_interface) new_user_info = self.add_new_user(ldap_interface)
assert "cn" in new_user_info assert "cn" in new_user_info
assert new_user_info["cn"] == ["new_user"] assert new_user_info["cn"] == ["new_user".encode("utf-8")]
assert "sn" in new_user_info assert "sn" in new_user_info
assert new_user_info["sn"] == ["new_user"] assert new_user_info["sn"] == ["new_user".encode("utf-8")]
assert "uid" in new_user_info assert "uid" in new_user_info
assert new_user_info["uid"] == ["new_user"] assert new_user_info["uid"] == ["new_user".encode("utf-8")]
assert "objectClass" in new_user_info assert "objectClass" in new_user_info
assert "inetOrgPerson" in new_user_info["objectClass"] assert "inetOrgPerson" in new_user_info["objectClass"]
assert "posixAccount" in new_user_info["objectClass"] assert "posixAccount" in new_user_info["objectClass"]
@ -197,11 +198,11 @@ class TestLDAP:
new_user_info = self.add_new_user(ldap_interface) new_user_info = self.add_new_user(ldap_interface)
assert "cn" in new_user_info assert "cn" in new_user_info
assert new_user_info["cn"] == ["new_user"] assert new_user_info["cn"] == ["new_user".encode("utf-8")]
assert "sn" in new_user_info assert "sn" in new_user_info
assert new_user_info["sn"] == ["new_user"] assert new_user_info["sn"] == ["new_user".encode("utf-8")]
assert "uid" in new_user_info assert "uid" in new_user_info
assert new_user_info["uid"] == ["new_user"] assert new_user_info["uid"] == ["new_user".encode("utf-8")]
assert "objectClass" in new_user_info assert "objectClass" in new_user_info
assert "inetOrgPerson" in new_user_info["objectClass"] assert "inetOrgPerson" in new_user_info["objectClass"]
assert "posixAccount" in new_user_info["objectClass"] assert "posixAccount" in new_user_info["objectClass"]

View file

@ -23,7 +23,7 @@ def test_run_shell_bad_cmd_with_callback():
def callback(a, b, c): def callback(a, b, c):
assert isinstance(a, int) assert isinstance(a, int)
assert isinstance(b, str) assert isinstance(b, str)
assert isinstance(c, str) #assert isinstance(c, str)
return True return True
assert run_commands(["yolo swag", "yolo swag", "yolo swag"], callback=callback) == 3 assert run_commands(["yolo swag", "yolo swag", "yolo swag"], callback=callback) == 3
@ -31,7 +31,7 @@ def test_run_shell_bad_cmd_with_callback():
def callback(a, b, c): def callback(a, b, c):
assert isinstance(a, int) assert isinstance(a, int)
assert isinstance(b, str) assert isinstance(b, str)
assert isinstance(c, str) #assert isinstance(c, str)
return False return False
assert run_commands(["yolo swag", "yolo swag"], callback=callback) == 1 assert run_commands(["yolo swag", "yolo swag"], callback=callback) == 1
@ -115,6 +115,6 @@ def test_call_async_output_kwargs(test_file, mocker):
def test_check_output(test_file): def test_check_output(test_file):
assert check_output(["cat", str(test_file)], shell=False) == "foo\nbar" assert check_output(["cat", str(test_file)], shell=False) == "foo\nbar".encode("utf-8")
assert check_output("cat %s" % str(test_file)) == "foo\nbar" assert check_output("cat %s" % str(test_file)) == "foo\nbar".encode("utf-8")

View file

@ -19,4 +19,8 @@ def test_prependlines():
def test_random_ascii(): def test_random_ascii():
import sys
if sys.version_info[0] == 3:
assert isinstance(random_ascii(length=2), str)
else:
assert isinstance(random_ascii(length=2), unicode) assert isinstance(random_ascii(length=2), unicode)

21
tox.ini
View file

@ -1,6 +1,6 @@
[tox] [tox]
envlist = envlist =
py{27,3}-{pytest,lint} py37-{pytest,lint}
format format
format-check format-check
docs docs
@ -9,22 +9,13 @@ skipdist = True
[testenv] [testenv]
usedevelop = True usedevelop = True
passenv = * passenv = *
extras = tests
deps = deps =
py{27,3}-pytest: pytest >= 4.6.3, < 5.0 py37-pytest: .[tests]
pytest-cov >= 2.7.1, < 3.0 py37-lint: flake8
pytest-mock >= 1.10.4, < 2.0
pytest-env >= 0.6.2, < 1.0
requests >= 2.22.0, < 3.0
requests-mock >= 1.6.0, < 2.0
toml >= 0.10, < 0.11
gevent-websocket
bottle >= 0.12
WebTest >= 2.0, < 2.1
python-ldap >= 3.1.0
py{27,3}-lint: flake8
commands = commands =
py{27,3}-pytest: pytest {posargs} -c pytest.ini py37-pytest: pytest {posargs} -c pytest.ini
py{27,3}-lint: flake8 moulinette test py37-lint: flake8 moulinette test
[testenv:format] [testenv:format]
basepython = python3 basepython = python3