Simplify overly complex code about cache management
This commit is contained in:
parent 2bb9b9a2eb
commit 8d1025e32b
2 changed files with 26 additions and 97 deletions
moulinette/actionsmap.py

@@ -3,7 +3,6 @@
import os
import re
import logging
import yaml
import glob
import pickle as pickle


@@ -13,7 +12,6 @@ from importlib import import_module

from moulinette import m18n, Moulinette
from moulinette.globals import init_moulinette_env
from moulinette.cache import open_cachefile
from moulinette.core import (
    MoulinetteError,
    MoulinetteLock,

@@ -21,6 +19,7 @@ from moulinette.core import (
)
from moulinette.interfaces import BaseActionsMapParser, TO_RETURN_PROP
from moulinette.utils.log import start_action_logging
from moulinette.utils.filesystem import read_yaml

logger = logging.getLogger("moulinette.actionsmap")
@@ -380,18 +379,6 @@ class ExtraArgumentParser(object):

# Main class ----------------------------------------------------------


def ordered_yaml_load(stream):
    class OrderedLoader(yaml.SafeLoader):
        pass

    OrderedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: OrderedDict(loader.construct_pairs(node)),
    )
    return yaml.load(stream, OrderedLoader)


class ActionsMap(object):

    """Validate and process actions defined into an actions map
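For readers skimming the diff: the ordered_yaml_load helper shown at the top of the hunk above (and deleted later in this commit) wraps yaml.SafeLoader so that every YAML mapping is constructed as an OrderedDict instead of a plain dict. The following self-contained sketch restates that function together with a tiny usage check; it only assumes PyYAML is installed.

from collections import OrderedDict

import yaml


def ordered_yaml_load(stream):
    # SafeLoader subclass whose mapping constructor yields OrderedDict,
    # preserving the key order of the YAML document.
    class OrderedLoader(yaml.SafeLoader):
        pass

    OrderedLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: OrderedDict(loader.construct_pairs(node)),
    )
    return yaml.load(stream, OrderedLoader)


data = ordered_yaml_load("first: 1\nsecond: 2\nthird: 3\n")
assert isinstance(data, OrderedDict)
assert list(data) == ["first", "second", "third"]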
@@ -439,6 +426,29 @@ class ActionsMap(object):
                actionsmap_yml_stat.st_mtime,
            )

            def generate_cache():

                # Iterate over actions map namespaces
                logger.debug("generating cache for actions map namespace '%s'", n)

                # Read actions map from yaml file
                actionsmap = read_yaml(actionsmap_yml)

                # Delete old cache files
                for old_cache in glob.glob("%s/actionsmap/%s-*.pkl" % (CACHE_DIR, n)):
                    os.remove(old_cache)

                # at installation, cachedir might not exists
                dir_ = os.path.dirname(actionsmap_pkl)
                if not os.path.isdir(dir_):
                    os.makedirs(dir_)

                # Cache actions map into pickle file
                with open(actionsmap_pkl, "wb") as f:
                    pickle.dump(actionsmap, f)

                return actionsmap

            if os.path.exists(actionsmap_pkl):
                try:
                    # Attempt to load cache
@@ -448,9 +458,9 @@ class ActionsMap(object):
                    self.from_cache = True
                # TODO: Switch to python3 and catch proper exception
                except (IOError, EOFError):
                    actionsmaps[n] = self.generate_cache(n)
                    actionsmaps[n] = generate_cache()
            else: # cache file doesn't exists
                actionsmaps[n] = self.generate_cache(n)
                actionsmaps[n] = generate_cache()

        # If load_only_category is set, and *if* the target category
        # is in the actionsmap, we'll load only that one.
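Taken together, the two hunks above replace the former ActionsMap.generate_cache() classmethod (removed below) with a small generate_cache() closure defined directly in __init__: if a pickle cache exists it is loaded, otherwise (or when the pickle is unreadable) the YAML source is re-read and re-cached. The sketch below is a standalone approximation of that load-or-regenerate flow; the function name load_actionsmap and the explicit namespace/cache_dir parameters are illustrative, the real code works on variables such as n, actionsmap_yml and actionsmap_pkl set up earlier in __init__ and only partly visible in this hunk, and plain yaml.safe_load stands in for moulinette's read_yaml helper.

import glob
import os
import pickle

import yaml


def load_actionsmap(namespace, actionsmap_yml, cache_dir):
    # Cache file name is keyed on the YAML file's size and mtime, so editing
    # the source automatically invalidates the old pickle.
    stat = os.stat(actionsmap_yml)
    actionsmap_pkl = "%s/actionsmap/%s-%d-%d.pkl" % (
        cache_dir,
        namespace,
        stat.st_size,
        stat.st_mtime,
    )

    def generate_cache():
        with open(actionsmap_yml) as f:
            actionsmap = yaml.safe_load(f)  # the real code uses read_yaml()
        # Drop any stale cache files left over for this namespace
        for old_cache in glob.glob("%s/actionsmap/%s-*.pkl" % (cache_dir, namespace)):
            os.remove(old_cache)
        # At installation time the cache directory may not exist yet
        os.makedirs(os.path.dirname(actionsmap_pkl), exist_ok=True)
        with open(actionsmap_pkl, "wb") as f:
            pickle.dump(actionsmap, f)
        return actionsmap

    if not os.path.exists(actionsmap_pkl):
        return generate_cache()
    try:
        with open(actionsmap_pkl, "rb") as f:
            return pickle.load(f)
    except (IOError, EOFError):
        # Corrupt or truncated cache: rebuild it from the YAML source
        return generate_cache()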
@@ -625,43 +635,6 @@ class ActionsMap(object):

        return namespaces

    @classmethod
    def generate_cache(klass, namespace):
        """
        Generate cache for the actions map's file(s)

        Keyword arguments:
            - namespace -- The namespace to generate cache for

        Returns:
            The action map for the namespace

        """
        moulinette_env = init_moulinette_env()
        CACHE_DIR = moulinette_env["CACHE_DIR"]
        DATA_DIR = moulinette_env["DATA_DIR"]

        # Iterate over actions map namespaces
        logger.debug("generating cache for actions map namespace '%s'", namespace)

        # Read actions map from yaml file
        am_file = "%s/actionsmap/%s.yml" % (DATA_DIR, namespace)
        with open(am_file, "r") as f:
            actionsmap = ordered_yaml_load(f)

        # at installation, cachedir might not exists
        for old_cache in glob.glob("%s/actionsmap/%s-*.pkl" % (CACHE_DIR, namespace)):
            os.remove(old_cache)

        # Cache actions map into pickle file
        am_file_stat = os.stat(am_file)

        pkl = "%s-%d-%d.pkl" % (namespace, am_file_stat.st_size, am_file_stat.st_mtime)

        with open_cachefile(pkl, "wb", subdir="actionsmap") as f:
            pickle.dump(actionsmap, f)

        return actionsmap

    # Private methods

    def _construct_parser(self, actionsmaps, top_parser):
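One detail worth noting is unchanged between the removed classmethod above and its inline replacement: cache invalidation is implicit in the file name, which embeds the source YAML's size and mtime. A hypothetical helper spelling out that naming scheme:

import os


def actionsmap_cache_name(namespace, yml_path):
    # Any edit to the YAML changes its size or mtime, so the old pickle is
    # simply never looked up again and gets glob-deleted on regeneration.
    stat = os.stat(yml_path)
    return "%s-%d-%d.pkl" % (namespace, stat.st_size, stat.st_mtime)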
moulinette/cache.py

@@ -1,44 +0,0 @@
# -*- coding: utf-8 -*-

import os

from moulinette.globals import init_moulinette_env


def get_cachedir(subdir="", make_dir=True):
    """Get the path to a cache directory

    Return the path to the cache directory from an optional
    subdirectory and create it if needed.

    Keyword arguments:
        - subdir -- A cache subdirectory
        - make_dir -- False to not make directory if it not exists

    """
    CACHE_DIR = init_moulinette_env()["CACHE_DIR"]

    path = os.path.join(CACHE_DIR, subdir)

    if make_dir and not os.path.isdir(path):
        os.makedirs(path)
    return path


def open_cachefile(filename, mode="r", subdir=""):
    """Open a cache file and return a stream

    Attempt to open in 'mode' the cache file 'filename' from the
    default cache directory and in the subdirectory 'subdir' if
    given. Directories are created if needed and a stream is
    returned if the file can be written.

    Keyword arguments:
        - filename -- The cache filename
        - mode -- The mode in which the file is opened
        - **kwargs -- Optional arguments for get_cachedir

    """
    cache_dir = get_cachedir(subdir, make_dir=True if mode[0] == "w" else False)
    file_path = os.path.join(cache_dir, filename)
    return open(file_path, mode)
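With moulinette/cache.py gone, the one remaining call site in actionsmap.py inlines the two things open_cachefile used to do: make sure the cache directory exists, then open the file. A minimal sketch of that inlined behaviour, using a temporary directory in place of moulinette's configured CACHE_DIR:

import os
import pickle
import tempfile

# Illustrative location; the real path is built from CACHE_DIR, the namespace
# and the source YAML's size/mtime inside ActionsMap.__init__.
cache_root = tempfile.mkdtemp()
actionsmap_pkl = os.path.join(cache_root, "actionsmap", "example-123-456.pkl")

# Equivalent of open_cachefile(..., "wb", subdir="actionsmap"):
dir_ = os.path.dirname(actionsmap_pkl)
if not os.path.isdir(dir_):
    os.makedirs(dir_)

with open(actionsmap_pkl, "wb") as f:
    pickle.dump({"example": "actions map"}, f)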