Implement caching mechanism

This commit is contained in:
Alexandre Aubin 2018-08-30 13:36:43 +00:00
parent 7fb694dbcc
commit d34ddcaaf2
2 changed files with 42 additions and 2 deletions

View file

@ -1,11 +1,17 @@
#!/usr/bin/env python #!/usr/bin/env python
import os
from moulinette import m18n from moulinette import m18n
from moulinette.utils.network import download_text from moulinette.utils.network import download_text
from yunohost.diagnosis import Diagnoser from yunohost.diagnosis import Diagnoser
class IPDiagnoser(Diagnoser): class IPDiagnoser(Diagnoser):
id_ = os.path.splitext(os.path.basename(__file__))[0]
description = m18n.n("internet_connectivity")
cache_duration = 60
def validate_args(self, args): def validate_args(self, args):
if "version" not in args.keys(): if "version" not in args.keys():
return { "versions" : [4, 6] } return { "versions" : [4, 6] }

View file

@ -25,10 +25,13 @@
""" """
import errno import errno
import os
import time
from moulinette import m18n from moulinette import m18n
from moulinette.core import MoulinetteError from moulinette.core import MoulinetteError
from moulinette.utils import log from moulinette.utils import log
from moulinette.utils.filesystem import read_json, write_to_json
from yunohost.hook import hook_list, hook_exec from yunohost.hook import hook_list, hook_exec
@ -87,10 +90,41 @@ class Diagnoser():
self.env = env self.env = env
self.args = self.validate_args(args) self.args = self.validate_args(args)
@property
def cache_file(self):
    """Absolute path of this diagnoser's cached JSON report.

    One file per diagnoser, named after its id_, under DIAGNOSIS_CACHE.
    """
    filename = "%s.json" % self.id_
    return os.path.join(DIAGNOSIS_CACHE, filename)
def cached_time_ago(self):
    """Return the age of the cached report, in seconds.

    Returns a very large value (99999999) when no cache file exists yet,
    so that any comparison against cache_duration treats it as stale.
    """
    # EAFP instead of exists()+getmtime(): avoids the race where the
    # cache file is removed between the two calls.
    try:
        mtime = os.path.getmtime(self.cache_file)
    except OSError:
        # No cache yet (or it just vanished): report it as "infinitely old"
        return 99999999
    return time.time() - mtime
def get_cached_report(self):
    """Load and return the previously cached diagnosis report (JSON)."""
    path = self.cache_file
    return read_json(path)
def write_cache(self, report):
    """Persist `report` as JSON in this diagnoser's cache file.

    Creates the cache directory on first use. Returns whatever
    write_to_json returns.
    """
    # EAFP instead of exists()+makedirs(): a concurrent run may create
    # the directory between the two calls; EEXIST is then not an error.
    try:
        os.makedirs(DIAGNOSIS_CACHE)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    return write_to_json(self.cache_file, report)
def report(self):
    """Return the diagnosis report, reusing the cache when it is fresh.

    Unless a refresh is forced via args["force"], a cached report younger
    than cache_duration seconds is returned as-is. Otherwise the diagnosis
    is re-run and the cache is rewritten with the new result.
    """
    # Use the cache only when NOT forcing AND the cache is still fresh.
    # (Note: `force or fresh` would wrongly make force=True *use* the
    # cache instead of bypassing it.)
    if not self.args.get("force", False) and self.cached_time_ago() < self.cache_duration:
        self.logger_debug("Using cached report from %s" % self.cache_file)
        return self.get_cached_report()

    new_report = list(self.run())

    # TODO / FIXME : should handle the case where we only did a partial diagnosis
    self.logger_debug("Updating cache %s" % self.cache_file)
    self.write_cache(new_report)
    return new_report
def _list_diagnosis_categories(): def _list_diagnosis_categories():
hooks_raw = hook_list("diagnosis", list_by="priority", show_info=True)["hooks"] hooks_raw = hook_list("diagnosis", list_by="priority", show_info=True)["hooks"]