mirror of
https://github.com/YunoHost/moulinette.git
synced 2024-09-03 20:06:31 +02:00
fix tests
This commit is contained in:
parent
bd27799283
commit
1d73dac969
14 changed files with 177 additions and 78 deletions
|
@ -31,15 +31,15 @@
|
|||
"success": "Succès !",
|
||||
"unable_authenticate": "Impossible de vous authentifier",
|
||||
"unable_retrieve_session": "Impossible de récupérer la session à cause de '{exception}'",
|
||||
"unknown_group": "Le groupe « '{group}' » est inconnu",
|
||||
"unknown_user": "L'utilisateur « {user} » est inconnu",
|
||||
"unknown_group": "Le groupe « '{group}' » est inconnu",
|
||||
"unknown_user": "L'utilisateur « {user} » est inconnu",
|
||||
"values_mismatch": "Les valeurs ne correspondent pas",
|
||||
"warning": "Attention :",
|
||||
"websocket_request_expected": "Une requête WebSocket est attendue",
|
||||
"cannot_open_file": "Impossible d’ouvrir le fichier {file:s} (raison : {error:s})",
|
||||
"cannot_write_file": "Ne peut pas écrire le fichier {file:s} (raison : {error:s})",
|
||||
"cannot_open_file": "Impossible d’ouvrir le fichier {file:s} (raison : {error:s})",
|
||||
"cannot_write_file": "Ne peut pas écrire le fichier {file:s} (raison : {error:s})",
|
||||
"unknown_error_reading_file": "Erreur inconnue en essayant de lire le fichier {file:s} (cause:{error:s})",
|
||||
"corrupted_json": "Fichier JSON corrompu en lecture depuis {ressource:s} (raison : {error:s})",
|
||||
"corrupted_json": "Fichier JSON corrompu en lecture depuis {ressource:s} (raison : {error:s})",
|
||||
"error_writing_file": "Erreur en écrivant le fichier {file:s} : {error:s}",
|
||||
"error_removing": "Erreur lors de la suppression {path:s} : {error:s}",
|
||||
"error_changing_file_permissions": "Erreur lors de la modification des autorisations pour {path:s} : {error:s}",
|
||||
|
@ -48,8 +48,8 @@
|
|||
"download_timeout": "{url:s} a pris trop de temps pour répondre : abandon.",
|
||||
"download_unknown_error": "Erreur lors du téléchargement des données à partir de {url:s} : {error:s}",
|
||||
"download_bad_status_code": "{url:s} renvoie le code d'état {code:s}",
|
||||
"command_unknown": "Commande '{command:s}' inconnue ?",
|
||||
"corrupted_yaml": "Fichier YAML corrompu en lecture depuis {ressource:s} (raison : {error:s})",
|
||||
"command_unknown": "Commande '{command:s}' inconnue ?",
|
||||
"corrupted_yaml": "Fichier YAML corrompu en lecture depuis {ressource:s} (raison : {error:s})",
|
||||
"info": "Info :",
|
||||
"corrupted_toml": "Fichier TOML corrompu en lecture depuis {ressource:s} (cause : {error:s})",
|
||||
"warn_the_user_about_waiting_lock": "Une autre commande YunoHost est actuellement en cours, nous attendons qu'elle se termine avant de démarrer celle là",
|
||||
|
|
|
@ -42,6 +42,7 @@ class Authenticator(BaseAuthenticator):
|
|||
self.sasldn = "cn=external,cn=auth"
|
||||
self.adminuser = "admin"
|
||||
self.admindn = "cn=%s,dc=yunohost,dc=org" % self.adminuser
|
||||
self.admindn = "cn=%s,dc=yunohost,dc=org" % self.adminuser
|
||||
logger.debug(
|
||||
"initialize authenticator '%s' with: uri='%s', "
|
||||
"base_dn='%s', user_rdn='%s'",
|
||||
|
@ -165,6 +166,12 @@ class Authenticator(BaseAuthenticator):
|
|||
"""
|
||||
dn = rdn + "," + self.basedn
|
||||
ldif = modlist.addModlist(attr_dict)
|
||||
for i, (k, v) in enumerate(ldif):
|
||||
if isinstance(v, list):
|
||||
v = [a.encode("utf-8") for a in v]
|
||||
elif isinstance(v, str):
|
||||
v = [v.encode("utf-8")]
|
||||
ldif[i] = (k, v)
|
||||
|
||||
try:
|
||||
self.con.add_s(dn, ldif)
|
||||
|
@ -227,6 +234,13 @@ class Authenticator(BaseAuthenticator):
|
|||
new_base = dn.split(",", 1)[1]
|
||||
dn = new_rdn + "," + new_base
|
||||
|
||||
for i, (a, k, vs) in enumerate(ldif):
|
||||
if isinstance(vs, list):
|
||||
vs = [v.encode("utf-8") for v in vs]
|
||||
elif isinstance(vs, str):
|
||||
vs = [vs.encode("utf-8")]
|
||||
ldif[i] = (a, k, vs)
|
||||
|
||||
self.con.modify_ext_s(dn, ldif)
|
||||
except Exception as e:
|
||||
raise MoulinetteError(
|
||||
|
|
|
@ -8,6 +8,23 @@ import logging
|
|||
import moulinette
|
||||
from moulinette.globals import init_moulinette_env
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] == 3:
|
||||
pass
|
||||
else:
|
||||
# python 2
|
||||
import codecs
|
||||
import warnings
|
||||
def open(file, mode='r', buffering=-1, encoding=None,
|
||||
errors=None, newline=None, closefd=True, opener=None):
|
||||
if newline is not None:
|
||||
warnings.warn('newline is not supported in py2')
|
||||
if not closefd:
|
||||
warnings.warn('closefd is not supported in py2')
|
||||
if opener is not None:
|
||||
warnings.warn('opener is not supported in py2')
|
||||
return codecs.open(filename=file, mode=mode, encoding=encoding,
|
||||
errors=errors, buffering=buffering)
|
||||
|
||||
logger = logging.getLogger("moulinette.core")
|
||||
|
||||
|
@ -123,13 +140,12 @@ class Translator(object):
|
|||
try:
|
||||
return (
|
||||
self._translations[self.default_locale][key]
|
||||
.encode("utf-8")
|
||||
.format(*args, **kwargs)
|
||||
)
|
||||
except KeyError as e:
|
||||
unformatted_string = self._translations[self.default_locale][
|
||||
key
|
||||
].encode("utf-8")
|
||||
]
|
||||
error_message = (
|
||||
"Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)"
|
||||
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
|
||||
|
@ -139,7 +155,7 @@ class Translator(object):
|
|||
else:
|
||||
raise Exception(error_message)
|
||||
|
||||
return self._translations[self.default_locale][key].encode("utf-8")
|
||||
return self._translations[self.default_locale][key]
|
||||
|
||||
error_message = (
|
||||
"unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)"
|
||||
|
@ -173,6 +189,9 @@ class Translator(object):
|
|||
try:
|
||||
with open("%s/%s.json" % (self.locale_dir, locale), "r", encoding='utf-8') as f:
|
||||
j = json.load(f)
|
||||
import sys
|
||||
if sys.version_info[0] == 2:
|
||||
j = {k.encode("utf-8"): v.encode("utf-8") for k, v in j.items()}
|
||||
except IOError:
|
||||
return False
|
||||
else:
|
||||
|
|
|
@ -9,6 +9,13 @@ from collections import deque, OrderedDict
|
|||
from moulinette import msettings, m18n
|
||||
from moulinette.core import MoulinetteError
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] == 3:
|
||||
pass
|
||||
else:
|
||||
# python 2
|
||||
range = xrange
|
||||
|
||||
logger = logging.getLogger("moulinette.interface")
|
||||
|
||||
GLOBAL_SECTION = "_global"
|
||||
|
|
|
@ -380,7 +380,7 @@ class ActionsMapParser(BaseActionsMapParser):
|
|||
ret = self._parser.parse_args(args)
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
except Exception as e:
|
||||
logger.exception("unable to parse arguments '%s'", " ".join(args))
|
||||
raise MoulinetteError("error_see_log")
|
||||
|
||||
|
|
|
@ -1,8 +1,15 @@
|
|||
import os
|
||||
import time
|
||||
|
||||
from multiprocessing.process import BaseProcess as Process
|
||||
from multiprocessing.queues import SimpleQueue
|
||||
import sys
|
||||
if sys.version_info[0] == 3:
|
||||
from multiprocessing.process import BaseProcess as Process
|
||||
from multiprocessing import SimpleQueue
|
||||
else:
|
||||
# python 2
|
||||
from multiprocessing.process import Process
|
||||
from multiprocessing.queues import SimpleQueue
|
||||
|
||||
|
||||
|
||||
# Read from a stream ---------------------------------------------------
|
||||
|
|
|
@ -3,6 +3,23 @@ import re
|
|||
import mmap
|
||||
import binascii
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] == 3:
|
||||
pass
|
||||
else:
|
||||
# python 2
|
||||
import codecs
|
||||
import warnings
|
||||
def open(file, mode='r', buffering=-1, encoding=None,
|
||||
errors=None, newline=None, closefd=True, opener=None):
|
||||
if newline is not None:
|
||||
warnings.warn('newline is not supported in py2')
|
||||
if not closefd:
|
||||
warnings.warn('closefd is not supported in py2')
|
||||
if opener is not None:
|
||||
warnings.warn('opener is not supported in py2')
|
||||
return codecs.open(filename=file, mode=mode, encoding=encoding,
|
||||
errors=errors, buffering=buffering)
|
||||
|
||||
# Pattern searching ----------------------------------------------------
|
||||
|
||||
|
@ -49,7 +66,10 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
|
|||
"""
|
||||
with open(path, "rb+") as f:
|
||||
data = mmap.mmap(f.fileno(), 0)
|
||||
if sys.version_info[0] == 3:
|
||||
match = search(pattern, data.read().decode(), count, flags)
|
||||
else:
|
||||
match = search(pattern, data, count, flags)
|
||||
data.close()
|
||||
return match
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ import json
|
|||
import os
|
||||
import shutil
|
||||
import pytest
|
||||
import sys
|
||||
|
||||
from .src.ldap_server import LDAPServer
|
||||
|
||||
|
@ -172,7 +173,7 @@ def test_json(tmp_path):
|
|||
def test_yaml(tmp_path):
|
||||
test_yaml = yaml.dump({"foo": "bar"})
|
||||
test_file = tmp_path / "test.txt"
|
||||
test_file.write_bytes(test_yaml)
|
||||
test_file.write_bytes(test_yaml.encode())
|
||||
return test_file
|
||||
|
||||
|
||||
|
@ -180,7 +181,7 @@ def test_yaml(tmp_path):
|
|||
def test_toml(tmp_path):
|
||||
test_toml = toml.dumps({"foo": "bar"})
|
||||
test_file = tmp_path / "test.txt"
|
||||
test_file.write_bytes(str(test_toml))
|
||||
test_file.write_bytes(test_toml.encode())
|
||||
return test_file
|
||||
|
||||
|
||||
|
@ -189,14 +190,14 @@ def test_ldif(tmp_path):
|
|||
test_file = tmp_path / "test.txt"
|
||||
from ldif import LDIFWriter
|
||||
|
||||
writer = LDIFWriter(open(str(test_file), "wb"))
|
||||
writer = LDIFWriter(open(str(test_file), "w"))
|
||||
|
||||
writer.unparse(
|
||||
"mail=alice@example.com",
|
||||
{
|
||||
"cn": ["Alice Alison"],
|
||||
"mail": ["alice@example.com"],
|
||||
"objectclass": ["top", "person"],
|
||||
"cn": ["Alice Alison".encode("utf-8")],
|
||||
"mail": ["alice@example.com".encode("utf-8")],
|
||||
"objectclass": ["top".encode("utf-8"), "person".encode("utf-8")],
|
||||
},
|
||||
)
|
||||
|
||||
|
@ -219,3 +220,11 @@ def ldap_server():
|
|||
server.start()
|
||||
yield server
|
||||
server.stop()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def builtin_str():
|
||||
if sys.version_info[0] == 3:
|
||||
return "builtins"
|
||||
else:
|
||||
return "__builtin__"
|
|
@ -5,13 +5,31 @@ except ImportError:
|
|||
import os
|
||||
from moulinette.authenticators import ldap as m_ldap
|
||||
|
||||
import sys
|
||||
if sys.version_info[0] == 3:
|
||||
pass
|
||||
else:
|
||||
# python 2
|
||||
import codecs
|
||||
import warnings
|
||||
def open(file, mode='r', buffering=-1, encoding=None,
|
||||
errors=None, newline=None, closefd=True, opener=None):
|
||||
if newline is not None:
|
||||
warnings.warn('newline is not supported in py2')
|
||||
if not closefd:
|
||||
warnings.warn('closefd is not supported in py2')
|
||||
if opener is not None:
|
||||
warnings.warn('opener is not supported in py2')
|
||||
return codecs.open(filename=file, mode=mode, encoding=encoding,
|
||||
errors=errors, buffering=buffering)
|
||||
|
||||
HERE = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
class LDAPServer:
|
||||
def __init__(self):
|
||||
self.server_default = slapdtest.SlapdObject()
|
||||
with open(os.path.join(HERE, "..", "ldap_files", "slapd.conf.template")) as f:
|
||||
with open(os.path.join(HERE, "..", "ldap_files", "slapd.conf.template"), encoding="utf-8") as f:
|
||||
SLAPD_CONF_TEMPLATE = f.read()
|
||||
self.server_default.slapd_conf_template = SLAPD_CONF_TEMPLATE
|
||||
self.server_default.suffix = "dc=yunohost,dc=org"
|
||||
|
@ -33,8 +51,8 @@ class LDAPServer:
|
|||
self.server = self.server_default
|
||||
self.server.start()
|
||||
self.uri = self.server.ldapi_uri
|
||||
with open(os.path.join(HERE, "..", "ldap_files", "tests.ldif")) as fp:
|
||||
ldif = fp.read().decode("utf-8")
|
||||
with open(os.path.join(HERE, "..", "ldap_files", "tests.ldif"), encoding="utf-8") as fp:
|
||||
ldif = fp.read()
|
||||
self.server.ldapadd(ldif)
|
||||
self.tools_ldapinit()
|
||||
|
||||
|
@ -54,7 +72,7 @@ class LDAPServer:
|
|||
"""
|
||||
import yaml
|
||||
|
||||
with open(os.path.join(HERE, "..", "ldap_files", "ldap_scheme.yml")) as f:
|
||||
with open(os.path.join(HERE, "..", "ldap_files", "ldap_scheme.yml"), "rb") as f:
|
||||
ldap_map = yaml.load(f)
|
||||
|
||||
def _get_ldap_interface():
|
||||
|
|
|
@ -244,7 +244,7 @@ def test_actions_map_api():
|
|||
assert ("POST", "/test-auth/subcat/post") in amap.parser.routes
|
||||
|
||||
|
||||
def test_actions_map_import_error(mocker):
|
||||
def test_actions_map_import_error(mocker, builtin_str):
|
||||
from moulinette.interfaces.api import ActionsMapParser
|
||||
|
||||
amap = ActionsMap(ActionsMapParser())
|
||||
|
@ -261,7 +261,7 @@ def test_actions_map_import_error(mocker):
|
|||
raise ImportError
|
||||
return orig_import(name, globals, locals, fromlist, level)
|
||||
|
||||
mocker.patch("__builtin__.__import__", side_effect=import_mock)
|
||||
mocker.patch(builtin_str + ".__import__", side_effect=import_mock)
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
amap.process({}, timeout=30, route=("GET", "/test-auth/none"))
|
||||
|
||||
|
|
|
@ -39,10 +39,10 @@ def test_read_file_missing_file():
|
|||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
def test_read_file_cannot_read_ioerror(test_file, mocker):
|
||||
def test_read_file_cannot_read_ioerror(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("builtins.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
read_file(str(test_file))
|
||||
|
||||
|
@ -51,10 +51,10 @@ def test_read_file_cannot_read_ioerror(test_file, mocker):
|
|||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
def test_read_file_cannot_read_exception(test_file, mocker):
|
||||
def test_read_file_cannot_read_exception(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=Exception(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=Exception(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
read_file(str(test_file))
|
||||
|
||||
|
@ -121,22 +121,22 @@ def test_read_ldif(test_ldif):
|
|||
dn, entry = read_ldif(str(test_ldif))[0]
|
||||
|
||||
assert dn == "mail=alice@example.com"
|
||||
assert entry["mail"] == ["alice@example.com"]
|
||||
assert entry["objectclass"] == ["top", "person"]
|
||||
assert entry["cn"] == ["Alice Alison"]
|
||||
assert entry["mail"] == ["alice@example.com".encode("utf-8")]
|
||||
assert entry["objectclass"] == ["top".encode("utf-8"), "person".encode("utf-8")]
|
||||
assert entry["cn"] == ["Alice Alison".encode("utf-8")]
|
||||
|
||||
dn, entry = read_ldif(str(test_ldif), ["objectclass"])[0]
|
||||
|
||||
assert dn == "mail=alice@example.com"
|
||||
assert entry["mail"] == ["alice@example.com"]
|
||||
assert entry["mail"] == ["alice@example.com".encode("utf-8")]
|
||||
assert "objectclass" not in entry
|
||||
assert entry["cn"] == ["Alice Alison"]
|
||||
assert entry["cn"] == ["Alice Alison".encode("utf-8")]
|
||||
|
||||
|
||||
def test_read_ldif_cannot_ioerror(test_ldif, mocker):
|
||||
def test_read_ldif_cannot_ioerror(test_ldif, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
read_ldif(str(test_ldif))
|
||||
|
||||
|
@ -145,10 +145,10 @@ def test_read_ldif_cannot_ioerror(test_ldif, mocker):
|
|||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
def test_read_ldif_cannot_exception(test_ldif, mocker):
|
||||
def test_read_ldif_cannot_exception(test_ldif, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=Exception(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=Exception(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
read_ldif(str(test_ldif))
|
||||
|
||||
|
@ -171,10 +171,10 @@ def test_write_to_new_file(tmp_path):
|
|||
assert read_file(str(new_file)) == "yolo\nswag"
|
||||
|
||||
|
||||
def test_write_to_existing_file_bad_perms(test_file, mocker):
|
||||
def test_write_to_existing_file_bad_perms(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("builtins.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_file(str(test_file), "yolo\nswag")
|
||||
|
||||
|
@ -183,10 +183,10 @@ def test_write_to_existing_file_bad_perms(test_file, mocker):
|
|||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
def test_write_to_file_exception(test_file, mocker):
|
||||
def test_write_to_file_exception(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=Exception(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=Exception(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_file(str(test_file), "yolo\nswag")
|
||||
|
||||
|
@ -238,12 +238,12 @@ def test_write_dict_to_json(tmp_path):
|
|||
assert _json["bar"] == ["a", "b", "c"]
|
||||
|
||||
|
||||
def test_write_json_to_existing_file_bad_perms(test_file, mocker):
|
||||
def test_write_json_to_existing_file_bad_perms(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_json(str(test_file), dummy_dict)
|
||||
|
||||
|
@ -252,12 +252,12 @@ def test_write_json_to_existing_file_bad_perms(test_file, mocker):
|
|||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
def test_write_json_to_file_exception(test_file, mocker):
|
||||
def test_write_json_to_file_exception(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=Exception(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=Exception(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_json(str(test_file), dummy_dict)
|
||||
|
||||
|
@ -276,10 +276,10 @@ def text_write_list_to_json(tmp_path):
|
|||
assert _json == ["foo", "bar", "baz"]
|
||||
|
||||
|
||||
def test_write_to_json_bad_perms(test_json, mocker):
|
||||
def test_write_to_json_bad_perms(test_json, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("builtins.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_json(str(test_json), {"a": 1})
|
||||
|
||||
|
@ -307,12 +307,12 @@ def test_write_dict_to_yaml(tmp_path):
|
|||
assert _yaml["bar"] == ["a", "b", "c"]
|
||||
|
||||
|
||||
def test_write_yaml_to_existing_file_bad_perms(test_file, mocker):
|
||||
def test_write_yaml_to_existing_file_bad_perms(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_yaml(str(test_file), dummy_dict)
|
||||
|
||||
|
@ -321,12 +321,12 @@ def test_write_yaml_to_existing_file_bad_perms(test_file, mocker):
|
|||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
def test_write_yaml_to_file_exception(test_file, mocker):
|
||||
def test_write_yaml_to_file_exception(test_file, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=Exception(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=Exception(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_yaml(str(test_file), dummy_dict)
|
||||
|
||||
|
@ -345,10 +345,10 @@ def text_write_list_to_yaml(tmp_path):
|
|||
assert _yaml == ["foo", "bar", "baz"]
|
||||
|
||||
|
||||
def test_write_to_yaml_bad_perms(test_yaml, mocker):
|
||||
def test_write_to_yaml_bad_perms(test_yaml, mocker, builtin_str):
|
||||
error = "foobar"
|
||||
|
||||
mocker.patch("__builtin__.open", side_effect=IOError(error))
|
||||
mocker.patch(builtin_str + ".open", side_effect=IOError(error))
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
write_to_yaml(str(test_yaml), {"a": 1})
|
||||
|
||||
|
@ -465,9 +465,9 @@ def test_chown_exception(test_file, mocker):
|
|||
chown(str(test_file), 1)
|
||||
|
||||
translation = m18n.g(
|
||||
"error_changing_file_permissions", path=test_file, error=str(error)
|
||||
"error_changing_file_permissions", path=str(test_file), error=str(error)
|
||||
)
|
||||
expected_msg = translation.format(path=test_file, error=str(error))
|
||||
expected_msg = translation.format(path=str(test_file), error=str(error))
|
||||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
|
@ -504,9 +504,9 @@ def test_chmod_exception(test_file, mocker):
|
|||
chmod(str(test_file), 0o000)
|
||||
|
||||
translation = m18n.g(
|
||||
"error_changing_file_permissions", path=test_file, error=str(error)
|
||||
"error_changing_file_permissions", path=str(test_file), error=str(error)
|
||||
)
|
||||
expected_msg = translation.format(path=test_file, error=str(error))
|
||||
expected_msg = translation.format(path=str(test_file), error=str(error))
|
||||
assert expected_msg in str(exception)
|
||||
|
||||
|
||||
|
|
|
@ -74,6 +74,7 @@ class TestLDAP:
|
|||
|
||||
# Now if slapd is down, moulinette tries to restart it
|
||||
mocker.patch("os.system")
|
||||
mocker.patch("time.sleep")
|
||||
with pytest.raises(MoulinetteError) as exception:
|
||||
ldap_interface.authenticate(password="yunohost")
|
||||
|
||||
|
@ -98,16 +99,16 @@ class TestLDAP:
|
|||
|
||||
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
|
||||
assert "cn" in admin_info
|
||||
assert admin_info["cn"] == ["admin"]
|
||||
assert admin_info["cn"] == ["admin".encode("utf-8")]
|
||||
assert "description" in admin_info
|
||||
assert admin_info["description"] == ["LDAP Administrator"]
|
||||
assert admin_info["description"] == ["LDAP Administrator".encode("utf-8")]
|
||||
assert "userPassword" in admin_info
|
||||
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
|
||||
|
||||
admin_info = ldap_interface.search(
|
||||
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
|
||||
)[0]
|
||||
assert admin_info.keys() == ["userPassword"]
|
||||
assert admin_info.keys() == ["userPassword".encode("utf-8")]
|
||||
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
|
||||
|
||||
def test_sasl_read(self, ldap_server):
|
||||
|
@ -119,16 +120,16 @@ class TestLDAP:
|
|||
|
||||
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
|
||||
assert "cn" in admin_info
|
||||
assert admin_info["cn"] == ["admin"]
|
||||
assert admin_info["cn"] == ["admin".encode("utf-8")]
|
||||
assert "description" in admin_info
|
||||
assert admin_info["description"] == ["LDAP Administrator"]
|
||||
assert admin_info["description"] == ["LDAP Administrator".encode("utf-8")]
|
||||
assert "userPassword" in admin_info
|
||||
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
|
||||
|
||||
admin_info = ldap_interface.search(
|
||||
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
|
||||
)[0]
|
||||
assert admin_info.keys() == ["userPassword"]
|
||||
assert admin_info.keys() == ["userPassword".encode("utf-8")]
|
||||
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
|
||||
|
||||
def test_anonymous_read(self, ldap_server):
|
||||
|
@ -137,9 +138,9 @@ class TestLDAP:
|
|||
|
||||
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
|
||||
assert "cn" in admin_info
|
||||
assert admin_info["cn"] == ["admin"]
|
||||
assert admin_info["cn"] == ["admin".encode("utf-8")]
|
||||
assert "description" in admin_info
|
||||
assert admin_info["description"] == ["LDAP Administrator"]
|
||||
assert admin_info["description"] == ["LDAP Administrator".encode("utf-8")]
|
||||
assert "userPassword" not in admin_info
|
||||
|
||||
admin_info = ldap_interface.search(
|
||||
|
@ -177,11 +178,11 @@ class TestLDAP:
|
|||
|
||||
new_user_info = self.add_new_user(ldap_interface)
|
||||
assert "cn" in new_user_info
|
||||
assert new_user_info["cn"] == ["new_user"]
|
||||
assert new_user_info["cn"] == ["new_user".encode("utf-8")]
|
||||
assert "sn" in new_user_info
|
||||
assert new_user_info["sn"] == ["new_user"]
|
||||
assert new_user_info["sn"] == ["new_user".encode("utf-8")]
|
||||
assert "uid" in new_user_info
|
||||
assert new_user_info["uid"] == ["new_user"]
|
||||
assert new_user_info["uid"] == ["new_user".encode("utf-8")]
|
||||
assert "objectClass" in new_user_info
|
||||
assert "inetOrgPerson" in new_user_info["objectClass"]
|
||||
assert "posixAccount" in new_user_info["objectClass"]
|
||||
|
@ -195,11 +196,11 @@ class TestLDAP:
|
|||
|
||||
new_user_info = self.add_new_user(ldap_interface)
|
||||
assert "cn" in new_user_info
|
||||
assert new_user_info["cn"] == ["new_user"]
|
||||
assert new_user_info["cn"] == ["new_user".encode("utf-8")]
|
||||
assert "sn" in new_user_info
|
||||
assert new_user_info["sn"] == ["new_user"]
|
||||
assert new_user_info["sn"] == ["new_user".encode("utf-8")]
|
||||
assert "uid" in new_user_info
|
||||
assert new_user_info["uid"] == ["new_user"]
|
||||
assert new_user_info["uid"] == ["new_user".encode("utf-8")]
|
||||
assert "objectClass" in new_user_info
|
||||
assert "inetOrgPerson" in new_user_info["objectClass"]
|
||||
assert "posixAccount" in new_user_info["objectClass"]
|
||||
|
|
|
@ -23,7 +23,7 @@ def test_run_shell_bad_cmd_with_callback():
|
|||
def callback(a, b, c):
|
||||
assert isinstance(a, int)
|
||||
assert isinstance(b, str)
|
||||
assert isinstance(c, str)
|
||||
#assert isinstance(c, str)
|
||||
return True
|
||||
|
||||
assert run_commands(["yolo swag", "yolo swag", "yolo swag"], callback=callback) == 3
|
||||
|
@ -31,7 +31,7 @@ def test_run_shell_bad_cmd_with_callback():
|
|||
def callback(a, b, c):
|
||||
assert isinstance(a, int)
|
||||
assert isinstance(b, str)
|
||||
assert isinstance(c, str)
|
||||
#assert isinstance(c, str)
|
||||
return False
|
||||
|
||||
assert run_commands(["yolo swag", "yolo swag"], callback=callback) == 1
|
||||
|
@ -115,6 +115,6 @@ def test_call_async_output_kwargs(test_file, mocker):
|
|||
|
||||
|
||||
def test_check_output(test_file):
|
||||
assert check_output(["cat", str(test_file)], shell=False) == "foo\nbar"
|
||||
assert check_output(["cat", str(test_file)], shell=False) == "foo\nbar".encode("utf-8")
|
||||
|
||||
assert check_output("cat %s" % str(test_file)) == "foo\nbar"
|
||||
assert check_output("cat %s" % str(test_file)) == "foo\nbar".encode("utf-8")
|
||||
|
|
|
@ -19,4 +19,8 @@ def test_prependlines():
|
|||
|
||||
|
||||
def test_random_ascii():
|
||||
import sys
|
||||
if sys.version_info[0] == 3:
|
||||
assert isinstance(random_ascii(length=2), str)
|
||||
else:
|
||||
assert isinstance(random_ascii(length=2), unicode)
|
||||
|
|
Loading…
Reference in a new issue