Minimal change to support python3

Kay0u 2019-12-20 14:41:04 +08:00
parent 54b8cab133
commit 641126e344
GPG key ID: 7FF262C033518333
15 changed files with 52 additions and 55 deletions

@@ -2,13 +2,13 @@ language: python
matrix:
include:
- python: 2.7
env: TOXENV=py27
- python: 2.7
- python: 3.5
env: TOXENV=py35
- python: 3.5
env: TOXENV=lint
- python: 3.6
env: TOXENV=format-check
- python: 2.7
- python: 3.5
env: TOXENV=docs
install:

@@ -4,7 +4,7 @@ import os
import re
import logging
import yaml
import cPickle as pickle
import pickle as pickle
from time import time
from collections import OrderedDict
from importlib import import_module
@@ -186,7 +186,7 @@ class PatternParameter(_ExtraParameter):
# Use temporarily utf-8 encoded value
try:
v = unicode(arg_value, "utf-8")
v = str(arg_value, "utf-8")
except:
v = arg_value
@@ -440,7 +440,7 @@ class ActionsMap(object):
if use_cache and os.path.exists(actionsmap_pkl):
try:
# Attempt to load cache
with open(actionsmap_pkl) as f:
with open(actionsmap_pkl, "rb") as f:
actionsmaps[n] = pickle.load(f)
# TODO: Switch to python3 and catch proper exception
except (IOError, EOFError):
@@ -645,7 +645,7 @@ class ActionsMap(object):
pkl = "%s-%d-%d.pkl" % (n, am_file_stat.st_size, am_file_stat.st_mtime)
with open_cachefile(pkl, "w", subdir="actionsmap") as f:
with open_cachefile(pkl, "wb", subdir="actionsmap") as f:
pickle.dump(actionsmaps[n], f)
return actionsmaps
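For context: under Python 3, pickle reads and writes bytes, which is why the cache file is now opened in "rb"/"wb" rather than the default text mode. A minimal standalone sketch of that caching pattern, with an illustrative path and function names that are not taken from the codebase:

import os
import pickle

CACHE_PATH = "/tmp/example-cache.pkl"  # illustrative path

def save_cache(obj):
    # pickle.dump() produces bytes in Python 3, so the file must be opened in "wb"
    with open(CACHE_PATH, "wb") as f:
        pickle.dump(obj, f)

def load_cache():
    # Reading likewise requires "rb"; fall back to None on a missing or unreadable cache
    if not os.path.exists(CACHE_PATH):
        return None
    try:
        with open(CACHE_PATH, "rb") as f:
            return pickle.load(f)
    except (IOError, EOFError, pickle.UnpicklingError):
        return None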

@@ -161,7 +161,7 @@ class BaseAuthenticator(object):
"""Store a session to be able to use it later to reauthenticate"""
# We store a hash of the session_id and the session_token (the token is assumed to be secret)
to_hash = "{id}:{token}".format(id=session_id, token=session_token)
to_hash = "{id}:{token}".format(id=session_id, token=session_token).encode()
hash_ = hashlib.sha256(to_hash).hexdigest()
with self._open_sessionfile(session_id, "w") as f:
f.write(hash_)
@@ -194,7 +194,7 @@
# re-hash the {id}:{token} and compare it to the previously stored hash for this session_id ...
# If it matches, then the user is authenticated. Otherwise, the token is invalid.
#
to_hash = "{id}:{token}".format(id=session_id, token=session_token)
to_hash = "{id}:{token}".format(id=session_id, token=session_token).encode()
hash_ = hashlib.sha256(to_hash).hexdigest()
if not hmac.compare_digest(hash_, stored_hash):
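As a side note on these two hunks: hashlib.sha256() only accepts bytes in Python 3, which is why the formatted "{id}:{token}" string is now .encode()d before hashing. A rough sketch of the same pattern, with illustrative function names:

import hashlib
import hmac

def session_hash(session_id, session_token):
    # str.format() returns str; sha256() needs bytes, hence the .encode()
    to_hash = "{id}:{token}".format(id=session_id, token=session_token).encode()
    return hashlib.sha256(to_hash).hexdigest()

def session_matches(session_id, session_token, stored_hash):
    # hmac.compare_digest() gives a constant-time comparison of the two hex digests
    return hmac.compare_digest(session_hash(session_id, session_token), stored_hash)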

@@ -100,13 +100,10 @@ class Translator(object):
try:
return (
self._translations[self.locale][key]
.encode("utf-8")
.format(*args, **kwargs)
)
except KeyError as e:
unformatted_string = self._translations[self.locale][key].encode(
"utf-8"
)
unformatted_string = self._translations[self.locale][key]
error_message = (
"Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)"
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
@@ -176,8 +173,8 @@ class Translator(object):
return True
try:
with open("%s/%s.json" % (self.locale_dir, locale), "r") as f:
j = json.load(f, "utf-8")
with open("%s/%s.json" % (self.locale_dir, locale), "r", encoding='utf-8') as f:
j = json.load(f)
except IOError:
return False
else:
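In Python 3 the decoding moves from json to the file object itself: json.load() no longer takes an encoding argument, and open() is asked for UTF-8 text instead. A small sketch of that pattern (the path is illustrative):

import json

def load_locale(path="/tmp/en.json"):  # illustrative path
    try:
        # open() handles the decoding; json.load() only parses the already-decoded text
        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)
    except IOError:
        return None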

@@ -478,7 +478,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
def dequeue_callbacks(self, namespace):
queue = self._get_callbacks_queue(namespace, False)
for _i in xrange(len(queue)):
for _i in range(len(queue)):
c, v = queue.popleft()
# FIXME: break dequeue if callback returns
c.execute(namespace, v)

@@ -92,7 +92,7 @@ def plain_print_dict(d, depth=0):
print("{}{}".format("#" * (depth + 1), k))
plain_print_dict(v, depth + 1)
else:
if isinstance(d, unicode):
if isinstance(d, str):
d = d.encode("utf-8")
print(d)
@@ -154,13 +154,13 @@ def pretty_print_dict(d, depth=0):
elif isinstance(value, dict):
pretty_print_dict({key: value}, depth + 1)
else:
if isinstance(value, unicode):
if isinstance(value, str):
value = value.encode("utf-8")
elif isinstance(v, date):
v = pretty_date(v)
print("{:s}- {}".format(" " * (depth + 1), value))
else:
if isinstance(v, unicode):
if isinstance(v, str):
v = v.encode("utf-8")
elif isinstance(v, date):
v = pretty_date(v)
@@ -494,7 +494,7 @@ class Interface(BaseInterface):
if is_password:
prompt = lambda m: getpass.getpass(colorize(m18n.g("colon", m), color))
else:
prompt = lambda m: raw_input(colorize(m18n.g("colon", m), color))
prompt = lambda m: input(colorize(m18n.g("colon", m), color))
value = prompt(message)
if confirm:
@@ -510,7 +510,7 @@
Handle the core.MoulinetteSignals.display signal.
"""
if isinstance(message, unicode):
if isinstance(message, str):
message = message.encode("utf-8")
if style == "success":
print("{} {}".format(colorize(m18n.g("success"), "green"), message))

@@ -22,7 +22,7 @@ def read_file(file_path):
Keyword argument:
file_path -- Path to the text file
"""
assert isinstance(file_path, basestring), (
assert isinstance(file_path, str), (
"Error: file_path '%s' should be a string but is of type '%s' instead"
% (file_path, type(file_path))
)
@@ -37,7 +37,7 @@ def read_file(file_path):
file_content = f.read()
except IOError as e:
raise MoulinetteError("cannot_open_file", file=file_path, error=str(e))
except Exception:
except Exception as e:
raise MoulinetteError(
"unknown_error_reading_file", file=file_path, error=str(e)
)
@@ -153,7 +153,7 @@ def write_to_file(file_path, data, file_mode="w"):
file_mode -- Mode used when writing the file. Option meant to be used
by append_to_file to avoid duplicating the code of this function.
"""
assert isinstance(data, basestring) or isinstance(data, list), (
assert isinstance(data, str) or isinstance(data, list), (
"Error: data '%s' should be either a string or a list but is of type '%s'"
% (data, type(data))
)
@@ -166,9 +166,9 @@ def write_to_file(file_path, data, file_mode="w"):
)
# If data is a list, check elements are strings and build a single string
if not isinstance(data, basestring):
if not isinstance(data, str):
for element in data:
assert isinstance(element, basestring), (
assert isinstance(element, str), (
"Error: element '%s' should be a string but is of type '%s' instead"
% (element, type(element))
)
@@ -205,7 +205,7 @@ def write_to_json(file_path, data):
"""
# Assumptions
assert isinstance(file_path, basestring), (
assert isinstance(file_path, str), (
"Error: file_path '%s' should be a string but is of type '%s' instead"
% (file_path, type(file_path))
)
@@ -240,7 +240,7 @@ def write_to_yaml(file_path, data):
data -- The data to write (must be a dict or a list)
"""
# Assumptions
assert isinstance(file_path, basestring)
assert isinstance(file_path, str)
assert isinstance(data, dict) or isinstance(data, list)
assert not os.path.isdir(file_path)
assert os.path.isdir(os.path.dirname(file_path))
@@ -313,14 +313,14 @@ def chown(path, uid=None, gid=None, recursive=False):
raise ValueError("either uid or gid argument is required")
# Retrieve uid/gid
if isinstance(uid, basestring):
if isinstance(uid, str):
try:
uid = getpwnam(uid).pw_uid
except KeyError:
raise MoulinetteError("unknown_user", user=uid)
elif uid is None:
uid = -1
if isinstance(gid, basestring):
if isinstance(gid, str):
try:
gid = grp.getgrnam(gid).gr_gid
except KeyError:
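All of the hunks in this file replace Python 2's basestring with str; since Python 3 has a single text type, str now covers everything basestring used to. A small, purely illustrative helper showing the same kind of check (not a function from this module):

def ensure_text_or_list(data):
    # str replaces Python 2's basestring (which covered both str and unicode)
    assert isinstance(data, (str, list)), (
        "data should be a string or a list, not %s" % type(data)
    )
    if isinstance(data, list):
        assert all(isinstance(element, str) for element in data), (
            "every element should be a string"
        )
        data = "\n".join(data)
    return data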

@@ -59,6 +59,6 @@ def download_json(url, timeout=30, expected_status_code=200):
try:
loaded_json = json.loads(text)
except ValueError as e:
raise MoulinetteError("corrupted_json", ressource=url, error=e)
raise MoulinetteError("corrupted_json", ressource=url, error=str(e))
return loaded_json

@@ -1,7 +1,7 @@
import os
import time
from multiprocessing.process import Process
from multiprocessing.process import BaseProcess as Process
from multiprocessing.queues import SimpleQueue

@@ -47,9 +47,9 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
content by using the search function.
"""
with open(path, "r+") as f:
with open(path, "rb+") as f:
data = mmap.mmap(f.fileno(), 0)
match = search(pattern, data, count, flags)
match = search(pattern, data.read().decode(), count, flags)
data.close()
return match
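Under Python 3 an mmap exposes bytes, so the file is opened in "rb+" and the buffer is decoded before running a text regex over it. A rough standalone sketch of the pattern (the helper name is illustrative):

import mmap
import re

def search_in_file(pattern, path, flags=re.MULTILINE):
    # mmap needs a binary file object in Python 3; its content is bytes
    with open(path, "rb+") as f:
        data = mmap.mmap(f.fileno(), 0)
        try:
            # Decode the whole buffer so a str pattern can be matched against it
            return re.search(pattern, data.read().decode(), flags)
        finally:
            data.close()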

@@ -27,7 +27,7 @@ setup(name='Moulinette',
license='AGPL',
packages=find_packages(exclude=['test']),
data_files=[(LOCALES_DIR, locale_files)],
python_requires='==2.7.*',
python_requires='>=3.5',
install_requires=[
'argcomplete',
'psutil',

@@ -129,7 +129,7 @@ def moulinette_webapi(moulinette):
def test_file(tmp_path):
test_text = "foo\nbar\n"
test_file = tmp_path / "test.txt"
test_file.write_bytes(test_text)
test_file.write_bytes(test_text.encode())
return test_file
@@ -137,7 +137,7 @@ def test_file(tmp_path):
def test_json(tmp_path):
test_json = json.dumps({"foo": "bar"})
test_file = tmp_path / "test.json"
test_file.write_bytes(test_json)
test_file.write_bytes(test_json.encode())
return test_file

@@ -33,9 +33,9 @@ def test_read_file_missing_file():
def test_read_file_cannot_read_ioerror(test_file, mocker):
error = "foobar"
with mocker.patch("__builtin__.open", side_effect=IOError(error)):
with pytest.raises(MoulinetteError) as exception:
read_file(str(test_file))
mocker.patch("builtins.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
read_file(str(test_file))
translation = m18n.g("cannot_open_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
@@ -51,9 +51,9 @@ def test_read_json(test_json):
def test_read_json_cannot_read(test_json, mocker):
error = "foobar"
with mocker.patch("json.loads", side_effect=ValueError(error)):
with pytest.raises(MoulinetteError) as exception:
read_json(str(test_json))
mocker.patch("json.loads", side_effect=ValueError(error))
with pytest.raises(MoulinetteError) as exception:
read_json(str(test_json))
translation = m18n.g("corrupted_json", ressource=str(test_json), error=error)
expected_msg = translation.format(ressource=str(test_json), error=error)
@@ -77,9 +77,9 @@ def test_write_to_new_file(tmp_path):
def test_write_to_existing_file_bad_perms(test_file, mocker):
error = "foobar"
with mocker.patch("__builtin__.open", side_effect=IOError(error)):
with pytest.raises(MoulinetteError) as exception:
write_to_file(str(test_file), "yolo\nswag")
mocker.patch("builtins.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_file(str(test_file), "yolo\nswag")
translation = m18n.g("cannot_write_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
@@ -142,9 +142,9 @@ def text_write_list_to_json(tmp_path):
def test_write_to_json_bad_perms(test_json, mocker):
error = "foobar"
with mocker.patch("__builtin__.open", side_effect=IOError(error)):
with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_json), {"a": 1})
mocker.patch("builtins.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_json), {"a": 1})
translation = m18n.g("cannot_write_file", file=str(test_json), error=error)
expected_msg = translation.format(file=str(test_json), error=error)
@@ -165,9 +165,9 @@ def test_remove_file(test_file):
def test_remove_file_bad_perms(test_file, mocker):
error = "foobar"
with mocker.patch("os.remove", side_effect=OSError(error)):
with pytest.raises(MoulinetteError) as exception:
rm(str(test_file))
mocker.patch("os.remove", side_effect=OSError(error))
with pytest.raises(MoulinetteError) as exception:
rm(str(test_file))
translation = m18n.g("error_removing", path=str(test_file), error=error)
expected_msg = translation.format(path=str(test_file), error=error)
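Two things are going on in these test hunks: the __builtin__ module is named builtins in Python 3, and mocker.patch() from pytest-mock is applied directly (and undone automatically at teardown) instead of being used as a context manager. A hedged sketch of that shape, assuming the pytest-mock mocker fixture and a hypothetical helper standing in for the real functions under test:

import pytest

def read_config(path):
    # hypothetical stand-in for read_file() and friends
    with open(path) as f:
        return f.read()

def test_read_config_cannot_read(mocker):
    # "builtins" is the Python 3 name of Python 2's "__builtin__"
    mocker.patch("builtins.open", side_effect=IOError("foobar"))
    with pytest.raises(IOError):
        read_config("/tmp/anything")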

@@ -18,4 +18,4 @@ def test_prependlines():
def test_random_ascii():
assert isinstance(random_ascii(length=2), unicode)
assert isinstance(random_ascii(length=2), str)

@@ -1,6 +1,6 @@
[tox]
envlist =
py27
py35
lint
docs
skipdist = True