mirror of
https://github.com/YunoHost/yunohost.git
synced 2024-09-03 20:06:10 +02:00
Merge pull request #1907 from YunoHost/optimize-log-list-perf
log: optimize log list perf by creating a 'cache' symlink pointing to the log's parent
This commit is contained in:
commit
27ccd2918e
1 changed file with 52 additions and 15 deletions
67
src/log.py
67
src/log.py
|
@ -22,6 +22,7 @@ import re
|
||||||
import yaml
|
import yaml
|
||||||
import glob
|
import glob
|
||||||
import psutil
|
import psutil
|
||||||
|
import time
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
@ -82,6 +83,39 @@ BORING_LOG_LINES = [
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _update_log_parent_symlinks():
    """
    Maintain a '.<name>.parent.yml' symlink next to each operation log's
    metadata file, pointing to its parent log's metadata (or /dev/null when
    the log has no parent).

    This lets log_list() decide "is this a top-level operation?" with a cheap
    readlink instead of parsing every YAML metadata file.

    Best-effort: unreadable metadata or failed symlink creation is logged and
    skipped, never raised.
    """
    # Ignore files older than one year because reading a shitload of yml is not free
    one_year_ago = time.time() - 365 * 24 * 3600

    for log_md in glob.iglob(OPERATIONS_PATH + "*" + METADATA_FILE_EXT):
        if os.path.getctime(log_md) < one_year_ago:
            continue

        # log_md is a full path (glob pattern starts with OPERATIONS_PATH);
        # use only the basename so the symlink path joins correctly —
        # joining the full path would yield a broken '<OPERATIONS_PATH>/./...' path.
        name = os.path.basename(log_md)[: -len(METADATA_FILE_EXT)]
        parent_symlink = os.path.join(OPERATIONS_PATH, f".{name}.parent.yml")
        if os.path.islink(parent_symlink):
            # Symlink already computed on a previous run
            continue

        try:
            metadata = (
                read_yaml(log_md) or {}
            )  # Making sure this is a dict and not None..?
        except Exception as e:
            # If we can't read the yaml for some reason, report an error and ignore this entry...
            logger.error(m18n.n("log_corrupted_md_file", md_file=log_md, error=e))
            continue

        parent = metadata.get("parent")
        # Relative target is fine: the symlink lives in OPERATIONS_PATH,
        # next to the parent's metadata file.
        parent = parent + METADATA_FILE_EXT if parent else "/dev/null"
        try:
            os.symlink(parent, parent_symlink)
        except Exception as e:
            logger.warning(f"Failed to create symlink {parent_symlink} ? {e}")
||||||
def log_list(limit=None, with_details=False, with_suboperations=False):
|
def log_list(limit=None, with_details=False, with_suboperations=False):
|
||||||
"""
|
"""
|
||||||
List available logs
|
List available logs
|
||||||
|
@ -98,30 +132,35 @@ def log_list(limit=None, with_details=False, with_suboperations=False):
|
||||||
|
|
||||||
operations = {}
|
operations = {}
|
||||||
|
|
||||||
logs = [x for x in os.listdir(OPERATIONS_PATH) if x.endswith(METADATA_FILE_EXT)]
|
_update_log_parent_symlinks()
|
||||||
|
|
||||||
|
one_year_ago = (time.time() - 365 * 24 * 3600)
|
||||||
|
logs = [x for x in os.listdir(OPERATIONS_PATH) if x.endswith(METADATA_FILE_EXT) and os.path.getctime(x) > one_year_ago]
|
||||||
logs = list(reversed(sorted(logs)))
|
logs = list(reversed(sorted(logs)))
|
||||||
|
|
||||||
|
if not with_suboperations:
|
||||||
|
def parent_symlink_points_to_dev_null(log):
|
||||||
|
name = log[: -len(METADATA_FILE_EXT)]
|
||||||
|
parent_symlink = os.path.join(OPERATIONS_PATH, f".{name}.parent.yml")
|
||||||
|
return os.path.islink(parent_symlink) and os.path.realpath(parent_symlink) == "/dev/null"
|
||||||
|
|
||||||
|
logs = [log for log in logs if parent_symlink_points_to_dev_null(log)]
|
||||||
|
|
||||||
if limit is not None:
|
if limit is not None:
|
||||||
if with_suboperations:
|
logs = logs[:limit]
|
||||||
logs = logs[:limit]
|
|
||||||
else:
|
|
||||||
# If we displaying only parent, we are still gonna load up to limit * 5 logs
|
|
||||||
# because many of them are suboperations which are not gonna be kept
|
|
||||||
# Yet we still want to obtain ~limit number of logs
|
|
||||||
logs = logs[: limit * 5]
|
|
||||||
|
|
||||||
for log in logs:
|
for log in logs:
|
||||||
base_filename = log[: -len(METADATA_FILE_EXT)]
|
name = log[: -len(METADATA_FILE_EXT)]
|
||||||
md_path = os.path.join(OPERATIONS_PATH, log)
|
md_path = os.path.join(OPERATIONS_PATH, log)
|
||||||
|
|
||||||
entry = {
|
entry = {
|
||||||
"name": base_filename,
|
"name": name,
|
||||||
"path": md_path,
|
"path": md_path,
|
||||||
"description": _get_description_from_name(base_filename),
|
"description": _get_description_from_name(name),
|
||||||
}
|
}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
entry["started_at"] = _get_datetime_from_name(base_filename)
|
entry["started_at"] = _get_datetime_from_name(name)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -141,10 +180,8 @@ def log_list(limit=None, with_details=False, with_suboperations=False):
|
||||||
if with_suboperations:
|
if with_suboperations:
|
||||||
entry["parent"] = metadata.get("parent")
|
entry["parent"] = metadata.get("parent")
|
||||||
entry["suboperations"] = []
|
entry["suboperations"] = []
|
||||||
elif metadata.get("parent") is not None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
operations[base_filename] = entry
|
operations[name] = entry
|
||||||
|
|
||||||
# When displaying suboperations, we build a tree-like structure where
|
# When displaying suboperations, we build a tree-like structure where
|
||||||
# "suboperations" is a list of suboperations (each of them may also have a list of
|
# "suboperations" is a list of suboperations (each of them may also have a list of
|
||||||
|
|
Loading…
Add table
Reference in a new issue