Mirror of https://github.com/YunoHost/yunohost.git (synced 2024-09-03 20:06:10 +02:00)
[fix] Backup list and create
This commit is contained in: parent 04f85eb860, commit 3563e8dc10
7 changed files with 194 additions and 123 deletions
@@ -84,14 +84,14 @@
    "backup_archive_broken_link": "Could not access the backup archive (broken link to {path})",
    "backup_archive_cant_retrieve_info_json": "Could not load info for archive '{archive}'... The info.json file cannot be retrieved (or is not valid JSON).",
    "backup_archive_corrupted": "It looks like the backup archive '{archive}' is corrupted: {error}",
    "backup_archive_name_exists": "A backup archive with this name already exists.",
    "backup_archive_name_exists": "A backup archive with this name already exists in the repo '{repository}'.",
    "backup_archive_name_unknown": "Unknown local backup archive named '{name}'",
    "backup_archive_open_failed": "Could not open the backup archive",
    "backup_archive_system_part_not_available": "System part '{part}' unavailable in this backup",
    "backup_archive_writing_error": "Could not add the files '{source}' (named in the archive '{dest}') to be backed up into the compressed archive '{archive}'",
    "backup_ask_for_copying_if_needed": "Do you want to perform the backup using {size}MB temporarily? (This way is used since some files could not be prepared using a more efficient method.)",
    "backup_borg_init_error": "Unable to initialize the borg repository: {error}",
    "backup_borg_already_initialized": "The borg repository '{repository}' already exists; it has been added to the repositories managed by the YunoHost CLI.",
    "backup_borg_list_archive_error": "Unable to list files in the archive",
    "backup_cant_mount_uncompress_archive": "Could not mount the uncompressed archive as write protected",
    "backup_cleaning_failed": "Could not clean up the temporary backup folder",
    "backup_copying_to_organize_the_archive": "Copying {size}MB to organize the archive",
@@ -106,11 +106,14 @@
    "backup_delete_error": "Could not delete '{path}'",
    "backup_deleted": "Backup deleted",
    "backup_hook_unknown": "The backup hook '{hook}' is unknown",
    "backup_method_copy_finished": "Backup copy finalized",
    "backup_method_custom_finished": "Custom backup method '{method}' finished",
    "backup_method_tar_finished": "TAR backup archive created",
    "backuping_in_repository": "Backing up into repository '{repository}'",
    "backup_in_repository_finished": "Backup into repository '{repository}' is finished",
    "backup_in_repository_error": "Backup into repository '{repository}' failed: {error}",
    "backup_invalid_archive": "Invalid backup archive: {error}",
    "backup_mount_archive_for_restore": "Preparing archive for restoration...",
    "backup_no_uncompress_archive_dir": "There is no such uncompressed archive directory",
    "backup_not_sent": "Backup archive was not saved at all",
    "backup_partially_sent": "Backup archive was not sent to all of the listed repositories",
    "backup_nothings_done": "Nothing to save",
    "backup_output_directory_forbidden": "Pick a different output directory. Backups cannot be created in /bin, /boot, /dev, /etc, /lib, /root, /run, /sbin, /sys, /usr, /var or /home/yunohost.backup/archives sub-folders",
    "backup_output_directory_not_empty": "You should pick an empty output directory",
@@ -1019,11 +1019,10 @@ backup:
            action_help: Restore from a local backup archive. If neither --apps nor --system is given, this will restore all apps and all system parts in the archive. If only --apps is given, this will only restore apps and no system parts. Similarly, if only --system is given, this will only restore system parts and no apps.
            api: PUT /backups/<name>/restore
            arguments:
                repository:
                    help: Repository of the backup archive
                name:
                    help: Name of the local backup archive
                -r:
                    full: --repository
                    help: The archive repository (local borg repo used by default)
                --system:
                    help: List of system parts to restore (or all if none is given)
                    nargs: "*"
@@ -1036,9 +1035,15 @@ backup:

        ### backup_list()
        list:
            action_help: List available local backup archives
            action_help: List available local backup archives or list files in an archive
            api: GET /backups
            arguments:
                repository:
                    help: Repository of a backup archive
                    nargs: "?"
                name:
                    help: Name of a backup archive
                    nargs: "?"
                -r:
                    full: --repositories
                    help: List archives in these repositories
@@ -1057,8 +1062,10 @@ backup:
            action_help: Show info about a local backup archive
            api: GET /backups/<name>
            arguments:
                repository:
                    help: Repository of the backup archive
                name:
                    help: Name of the local backup archive
                    help: Name of the backup archive
                -d:
                    full: --with-details
                    help: Show additional backup information
@@ -1073,6 +1080,8 @@ backup:
            action_help: (API only) Request to download the file
            api: GET /backups/<name>/download
            arguments:
                repository:
                    help: Repository of the backup archive
                name:
                    help: Name of the local backup archive
@@ -1081,6 +1090,8 @@ backup:
            action_help: Delete a backup archive
            api: DELETE /backups/<name>
            arguments:
                repository:
                    help: Repository of the backup archive
                name:
                    help: Name of the archive to delete
                    extra:
src/backup.py
@@ -273,6 +273,8 @@ class BackupManager:
        description -- (string) A description for this future backup archive
                       (default: '')

        repositories -- (List<BackupRepository>) A list of repositories

        work_dir -- (None|string) A path where to prepare the archive. If None,
                    a temporary work_dir will be created (default: None)
        """
@@ -785,13 +787,22 @@ class BackupManager:
    #

    def backup(self):
        """Apply backup methods"""

        """Backup files in each repository"""
        result = {}
        for repo in self.repositories:
            logger.debug(m18n.n("backup_applying_method_" + repo.method_name))
            archive = BackupArchive(repo, name=self.name, manager=self)
            archive.organize_and_backup()
            logger.debug(m18n.n("backup_method_" + repo.method_name + "_finished"))
            logger.debug(m18n.n("backuping_in_repository", repository=repo.entity))
            try:
                archive = BackupArchive(repo, name=self.name, manager=self)
                archive.organize_and_backup()
            except Exception:
                import traceback
                result[repo.entity] = "Error"
                logger.error(m18n.n("backup_in_repository_error", repository=repo.entity, error=traceback.format_exc()))
            else:
                result[repo.entity] = "Sent"
                logger.debug(m18n.n("backup_in_repository_finished", repository=repo.entity))

        return result

    def _compute_backup_size(self):
        """
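Aside (illustrative, not part of the diff): backup() now returns a dict mapping each repository entity to "Sent" or "Error", which backup_create() reduces to an overall state further down. A minimal standalone sketch of that reduction, reusing the message keys introduced by this commit:

# Illustrative only: reduce the {repository: "Sent"|"Error"} dict returned
# by BackupManager.backup() to one of the message keys used by this commit.
def summarize_repo_results(repo_results):
    states = [state == "Sent" for state in repo_results.values()]
    if all(states):
        return "backup_created"        # every repository received the archive
    if not any(states):
        return "backup_not_sent"       # no repository received it
    return "backup_partially_sent"     # only some repositories received it

# summarize_repo_results({"local-borg": "Sent", "borgbase": "Error"})
# -> "backup_partially_sent"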
@@ -1626,9 +1637,9 @@ def backup_create(
    name -- Name of the backup archive
    description -- Short description of the backup
    repositories -- Repositories in which we want to save the backup
    output_directory -- Output directory for the backup
    system -- List of system elements to backup
    apps -- List of application names to backup
    dry_run -- Run the ynh backup scripts without sending the files to a repo
    """

    # TODO: Add a 'clean' argument to clean output directory
@@ -1637,9 +1648,19 @@ def backup_create(
    # Validate / parse arguments #
    #

    # Validate there is no archive with the same name
    # Add backup repositories

    if name and name in backup_list(repositories)["archives"]:
    if not repositories:
        repositories = ["local-borg"]

    # Validate there is no archive with the same name
    archives = backup_list(repositories=repositories)
    for repository in archives:
        if name and name in archives[repository]:
            repositories.remove(repository)
            logger.error(m18n.n("backup_archive_name_exists", repository=repository))

    if not repositories:
        raise YunohostValidationError("backup_archive_name_exists")

    # If no --system or --apps given, backup everything
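Illustrative sketch (not part of the commit) of the duplicate-name validation above, rewritten as a standalone helper; backup_list() and YunohostValidationError are the real YunoHost names, while the helper and the plain ValueError below are stand-ins:

# Illustrative only: keep the repositories that do not already contain an
# archive called `name`; fail if none remain (the real code raises
# YunohostValidationError("backup_archive_name_exists")).
def repositories_without_archive(name, repositories, archives_by_repo):
    remaining = [
        repo for repo in repositories
        if not (name and name in archives_by_repo.get(repo, []))
    ]
    if not remaining:
        raise ValueError("backup_archive_name_exists")
    return remaining

# repositories_without_archive("nightly", ["local-borg", "borgbase"],
#                              {"local-borg": ["nightly"], "borgbase": []})
# -> ["borgbase"]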
@@ -1654,14 +1675,6 @@ def backup_create(

    operation_logger.start()

    # Create the yunohost archives directory if it does not exist
    _create_archive_dir()  # FIXME

    # Add backup repositories

    if not repositories:
        repositories = ["local-borg"]

    repositories = [BackupRepository(repo) for repo in repositories]

    # Prepare files to backup
@@ -1669,7 +1682,6 @@ def backup_create(
        repositories=repositories)

    # Add backup targets (system and apps)

    backup_manager.set_system_targets(system)
    backup_manager.set_apps_targets(apps)
@@ -1684,6 +1696,12 @@ def backup_create(
    # Collect files to be backed up (by calling app backup scripts / system hooks)
    backup_manager.collect_files()

    parts_results = backup_manager.targets.results
    parts_results = list(parts_results["apps"].values()) + list(parts_results["system"].values())
    parts_states = [v in ["Success", "Skipped"] for v in parts_results]
    if not any(parts_states):
        raise YunohostError("backup_nothings_done")

    if dry_run:
        return {
            "size": backup_manager.size,
@@ -1698,19 +1716,36 @@ def backup_create(
                size=binary_to_human(backup_manager.size) + "B",
            )
        )
    backup_manager.backup()
    repo_results = backup_manager.backup()
    repo_states = [repo_result == "Sent" for repository, repo_result in repo_results.items()]

    logger.success(m18n.n("backup_created"))
    operation_logger.success()
    if all(repo_states) and all(parts_states):
        logger.success(m18n.n("backup_created"))
        operation_logger.success()
    else:
        if not any(repo_states):
            error = m18n.n("backup_not_sent")
        elif not all(repo_states):
            error = m18n.n("backup_partially_sent")

        if not all(parts_states):
            error += "\n" + m18n.n("backup_files_not_fully_collected")
            for repository, repo_result in repo_results.items():
                if repo_result == "Sent":
                    repo_results[repository] = "Incomplete"

        logger.error(error)
        operation_logger.error(error)

    return {
        "name": backup_manager.name,
        "size": backup_manager.size,
        "results": backup_manager.targets.results,
        "states": repo_results
    }


def backup_restore(name, repository, system=[], apps=[], force=False):
def backup_restore(repository, name, system=[], apps=[], force=False):
    """
    Restore from a local backup archive

@@ -1744,7 +1779,7 @@ def backup_restore(name, repository, system=[], apps=[], force=False):
        name = name[: -len(".tar")]

    repo = BackupRepository(repository)
    archive = BackupArchive(name, repo)
    archive = BackupArchive(repo, name)

    restore_manager = RestoreManager(archive)
@@ -1795,7 +1830,7 @@ def backup_restore(name, repository, system=[], apps=[], force=False):
    return restore_manager.targets.results


def backup_list(repositories=[], with_info=False, human_readable=False):
def backup_list(repository=None, name=None, repositories=[], with_info=False, human_readable=False):
    """
    List available local backup archives

@@ -1805,6 +1840,12 @@ def backup_list(repositories=[], with_info=False, human_readable=False):
    human_readable -- Print sizes in human readable format

    """
    if bool(repository) != bool(name):
        raise YunohostError("backup_list_bad_arguments")
    elif repository:
        repo = BackupRepository(repository)
        archive = BackupArchive(repo, name)
        return archive.list(with_info)

    return {
        name: BackupRepository(name).list_archives(with_info)
@@ -1813,10 +1854,10 @@
    }


def backup_download(name, repository):
def backup_download(repository, name):

    repo = BackupRepository(repository)
    archive = BackupArchive(name, repo)
    archive = BackupArchive(repo, name)

    return archive.download()
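Aside (illustrative, not part of the diff): the reworked backup_list() has two calling modes — list archives per repository, or list the files of one archive. A self-contained sketch of the same dispatch logic, with stand-in callables instead of BackupRepository/BackupArchive:

# Illustrative only: mirrors the dispatch in backup_list(); the two default
# callables are stand-ins so the sketch runs on its own.
def list_backups(repository=None, name=None, repositories=None,
                 list_archives=lambda repo: ["my_backup"],
                 list_files=lambda repo, name: {"info.json": {}}):
    if bool(repository) != bool(name):
        raise ValueError("backup_list_bad_arguments")
    if repository:
        # One repository + one archive name: list the files of that archive.
        return list_files(repository, name)
    # Otherwise: list archive names for every requested repository.
    return {repo: list_archives(repo) for repo in (repositories or ["local-borg"])}

# list_backups() -> {"local-borg": ["my_backup"]}
# list_backups(repository="local-borg", name="my_backup") -> {"info.json": {}}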
@@ -1824,12 +1865,12 @@ def backup_download(name, repository):
def backup_mount(name, repository, path):

    repo = BackupRepository(repository)
    archive = BackupArchive(name, repo)
    archive = BackupArchive(repo, name)

    return archive.mount(path)


def backup_info(name, repository=None, with_details=False, human_readable=False):
def backup_info(repository, name, with_details=False, human_readable=False):
    """
    Get info about a local backup archive

@@ -1840,12 +1881,12 @@ def backup_info(name, repository=None, with_details=False, human_readable=False)

    """
    repo = BackupRepository(repository)
    archive = BackupArchive(name, repo)
    archive = BackupArchive(repo, name)

    return archive.info(with_details=with_details, human_readable=human_readable)


def backup_delete(name, repository):
def backup_delete(repository, name):
    """
    Delete a backup

@@ -1854,7 +1895,7 @@ def backup_delete(name, repository):

    """
    repo = BackupRepository(repository)
    archive = BackupArchive(name, repo)
    archive = BackupArchive(repo, name)

    # FIXME Are those really useful?
    hook_callback("pre_backup_delete", args=[name])
@@ -38,7 +38,7 @@ class BorgBackupRepository(LocalBackupRepository):
    method_name = "borg"

    # TODO logs
    def _run_borg_command(self, cmd, stdout=None, stderr=None):
    def _run_borg_command(self, cmd, stdout=None, stderr=None, cwd=None):
        """ Call a borg subcommand in the right context
        """
        env = dict(os.environ)
@@ -59,13 +59,15 @@ class BorgBackupRepository(LocalBackupRepository):

        # Authorize to move the repository (borgbase does this)
        env["BORG_RELOCATED_REPO_ACCESS_IS_OK"] = "yes"

        kwargs = {}
        if cwd:
            kwargs["cwd"] = cwd
        return subprocess.Popen(cmd, env=env,
                                stdout=stdout, stderr=stderr)
                                stdout=stdout, stderr=stderr, **kwargs)

    def _call(self, action, cmd, json_output=False):
    def _call(self, action, cmd, json_output=False, cwd=None):
        borg = self._run_borg_command(cmd, stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
                                      stderr=subprocess.PIPE, cwd=cwd)
        out, err = borg.communicate()
        if borg.returncode:
            raise YunohostError(f"backup_borg_{action}_error", error=err)
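Context note (not part of the commit): the new cwd parameter matters because `borg create ... ./` stores paths relative to the working directory, so the caller now runs it from the prepared work_dir. A minimal, generic sketch of passing cwd through to subprocess (command and directory below are examples):

import subprocess

# Illustrative only: run a command from a given working directory, as the
# cwd plumbing above does.
def run_in_dir(cmd, cwd=None, env=None):
    kwargs = {"cwd": cwd} if cwd else {}
    return subprocess.Popen(cmd, env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            **kwargs)

# run_in_dir(["tar", "-cf", "/tmp/out.tar", "./"], cwd="/tmp/backup_workdir")
# archives the contents of the work dir rather than an absolute path.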
@@ -108,7 +110,7 @@ class BorgBackupRepository(LocalBackupRepository):
            self.new_values['location'] = self.location

            if not self.future_values.get('user'):
                raise YunohostError("")
                raise YunohostError("")  # TODO
        # Local
        else:
            super().install()
@@ -132,7 +134,7 @@ class BorgBackupRepository(LocalBackupRepository):
        except YunohostError:
            raise e

        logger.info(m18n.n("backup_borg_already_initialized", repository=self.location))
        logger.debug(f"The borg repository '{self.location}' already exists.")

    def update(self):
        raise NotImplementedError()
@@ -213,21 +215,30 @@ class BorgBackupArchive(BackupArchive):

    def backup(self):
        cmd = ['borg', 'create', self.archive_path, './']
        self.repo._call('backup', cmd)
        self.repo._call('backup', cmd, cwd=self.work_dir)

    def delete(self):
        cmd = ['borg', 'delete', '--force', self.archive_path]
        self.repo._call('delete_archive', cmd)

    def list(self):
    def list(self, with_info=False):
        """ Return a list of archive names

        Exceptions:
        backup_borg_list_error -- Raised if the borg script failed
        """
        cmd = ["borg", "list", "--json-lines", self.archive_path]
        cmd = ["borg", "list", "--json-lines" if with_info else "--short",
               self.archive_path]
        out = self.repo._call('list_archive', cmd)
        result = [json.loads(out) for line in out.splitlines()]

        if not with_info:
            return out.decode()

        result = {}
        for line in out.splitlines():
            _file = json.loads(line)
            filename = _file.pop("path")
            result[filename] = _file
        return result

    def download(self, exclude_paths=[]):
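For illustration (not part of the diff): `borg list --json-lines` emits one JSON object per file, and the new list(with_info=True) turns that into a {path: metadata} dict. A standalone sketch with made-up sample output:

import json

# Illustrative only: parse json-lines output into {path: metadata}.
sample_out = (
    b'{"path": "info.json", "size": 1234, "mode": "-rw-r--r--"}\n'
    b'{"path": "apps/wordpress/backup", "size": 42, "mode": "drwxr-xr-x"}\n'
)

result = {}
for line in sample_out.splitlines():
    entry = json.loads(line)
    path = entry.pop("path")    # key by path, keep the rest as metadata
    result[path] = entry

# result["info.json"] == {"size": 1234, "mode": "-rw-r--r--"}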
@@ -248,12 +259,12 @@ class BorgBackupArchive(BackupArchive):
        response.content_type = "application/x-tar"
        return HTTPResponse(reader, 200)

    def extract(self, paths=None, exclude_paths=[]):
        paths, exclude_paths = super().extract(paths, exclude_paths)
    def extract(self, paths=None, destination=None, exclude_paths=[]):
        paths, destination, exclude_paths = super().extract(paths, destination, exclude_paths)
        cmd = ['borg', 'extract', self.archive_path] + paths
        for path in exclude_paths:
            cmd += ['--exclude', path]
        return self.repo._call('extract_archive', cmd)
        return self.repo._call('extract_archive', cmd, cwd=destination)

    def mount(self, path):
        # FIXME How can we be sure the place where we mount is secure?
@@ -27,6 +27,7 @@ from moulinette.utils.log import getActionLogger
from moulinette import m18n

from yunohost.utils.error import YunohostError, YunohostValidationError
from yunohost.utils.filesystem import free_space_in_directory
from yunohost.repository import LocalBackupRepository
from yunohost.backup import BackupManager
from yunohost.utils.filesystem import space_used_in_directory
@@ -43,7 +44,13 @@ class TarBackupRepository(LocalBackupRepository):
        # (we do a realpath() to resolve symlinks)
        archives = glob(f"{self.location}/*.tar.gz") + glob(f"{self.location}/*.tar")
        archives = set([os.path.realpath(archive) for archive in archives])
        archives = sorted(archives, key=lambda x: os.path.getctime(x))
        broken_archives = set()
        for archive in archives:
            if not os.path.exists(archive):
                broken_archives.add(archive)
                logger.warning(m18n.n("backup_archive_broken_link", path=archive))

        archives = sorted(archives - broken_archives, key=lambda x: os.path.getctime(x))

        # Extract only filename without the extension
        def remove_extension(f):
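Aside (illustrative, not part of the diff): the new loop skips archives whose symlink target has disappeared (e.g. unmounted external storage). The distinction it relies on, in a tiny runnable example:

import os
import tempfile

# Illustrative only: a symlink with a missing target still "exists" as a link
# (lexists) but not as a file (exists), which is what the loop above checks.
with tempfile.TemporaryDirectory() as d:
    link = os.path.join(d, "backup.tar")
    os.symlink("/nonexistent/target.tar", link)
    print(os.path.lexists(link))   # True  - the link itself is there
    print(os.path.exists(link))    # False - its target is broken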
@@ -57,6 +64,9 @@ class TarBackupRepository(LocalBackupRepository):
    def compute_space_used(self):
        return space_used_in_directory(self.location)

    def compute_free_space(self):
        return free_space_in_directory(self.location)

    def prune(self):
        raise NotImplementedError()
@@ -42,12 +42,12 @@ from datetime import timedelta, datetime
import yunohost.repositories
from yunohost.utils.config import ConfigPanel
from yunohost.utils.error import YunohostError, YunohostValidationError
from yunohost.utils.filesystem import disk_usage, binary_to_human, free_space_in_directory
from yunohost.utils.filesystem import disk_usage, binary_to_human
from yunohost.utils.network import get_ssh_public_key, SHF_BASE_URL

logger = getActionLogger('yunohost.repository')
REPOSITORIES_DIR = '/etc/yunohost/repositories'
CACHE_INFO_DIR = "/var/cache/yunohost/{repository}"
CACHE_INFO_DIR = "/var/cache/yunohost/repositories/{repository}"
REPOSITORY_CONFIG_PATH = "/usr/share/yunohost/other/config_repository.toml"
MB_ALLOWED_TO_ORGANIZE = 10
# TODO split ConfigPanel.get to extract "Format result" part and be able to override it
@@ -113,6 +113,23 @@ class BackupRepository(ConfigPanel):

        return full_repositories

    def __init__(self, entity, config_path=None, save_path=None, creation=False):

        super().__init__(entity, config_path, save_path, creation)

        self._load_current_values()

        if self.__class__ == BackupRepository:
            if self.method == 'tar':
                from yunohost.repositories.tar import TarBackupRepository
                self.__class__ = TarBackupRepository
            elif self.method == 'borg':
                from yunohost.repositories.borg import BorgBackupRepository
                self.__class__ = BorgBackupRepository
            else:
                from yunohost.repositories.hook import HookBackupRepository
                self.__class__ = HookBackupRepository

    # =================================================
    # Config Panel Hooks
    # =================================================
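Aside (illustrative, not part of the diff): the `self.__class__ = ...` assignments above rebind an already-constructed instance to a method-specific subclass. A minimal, self-contained sketch of the pattern with generic stand-in classes:

# Illustrative only: re-typing an instance after construction, as
# BackupRepository.__init__ now does (stand-in class names).
class Repository:
    def __init__(self, method):
        self.method = method
        if self.__class__ is Repository:            # only dispatch from the base class
            self.__class__ = {"tar": TarRepository,
                              "borg": BorgRepository}.get(method, Repository)

    def backup(self):
        raise NotImplementedError()


class TarRepository(Repository):
    def backup(self):
        return "tar backup"


class BorgRepository(Repository):
    def backup(self):
        return "borg backup"


print(type(Repository("borg")).__name__)   # BorgRepository
print(Repository("tar").backup())          # tar backup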
@@ -154,7 +171,6 @@ class BackupRepository(ConfigPanel):

        if 'shf_id' in self.values:
            self.values['is_shf'] = bool(self.values['shf_id'])
        self._cast_by_method()

    def _parse_pre_answered(self, *args):
        super()._parse_pre_answered(*args)
@@ -165,7 +181,6 @@
            self.args['method'] = "borg"
        elif self.args.get('method') == 'tar':
            self.args['is_remote'] = False
        self._cast_by_method()

    def _apply(self):
        # Activate / update services
@@ -194,44 +209,34 @@ class BackupRepository(ConfigPanel):

        return f"ssh://{self.user}@{self.domain}:{self.port}/{self.path}"

    def _cast_by_method(self):
        if not self.future_values:
            return
    @property
    def is_deduplicated(self):
        return True

        if self.__class__ == BackupRepository:
            if self.method == 'tar':
                from yunohost.repositories.tar import TarBackupRepository
                self.__class__ = TarBackupRepository
            elif self.method == 'borg':
                from yunohost.repositories.borg import BorgBackupRepository
                self.__class__ = BorgBackupRepository
            else:
                from yunohost.repositories.hook import HookBackupRepository
                self.__class__ = HookBackupRepository

    def _check_is_enough_free_space(self):
    def check_is_enough_free_space(self, backup_size):
        """
        Check free space in the repository or output directory before backing up
        """
        # TODO How to do this with a distant repo or a deduplicated backup?
        backup_size = self.manager.size
        if self.is_deduplicated:
            return

        free_space = free_space_in_directory(self.repo)
        free_space = self.compute_free_space()

        if free_space < backup_size:
            logger.debug(
                "Not enough space at %s (free: %s / needed: %d)",
                self.repo,
                self.entity,
                free_space,
                backup_size,
            )
            raise YunohostValidationError("not_enough_disk_space", path=self.repo)
            raise YunohostValidationError("not_enough_disk_space", path=self.entity)

    def remove(self, purge=False):
        if purge:
            self._load_current_values()
            self.purge()

        rm(CACHE_INFO_DIR.format(repository=self.entity), recursive=True, force=True)
        rm(self.save_path, force=True)
        logger.success(m18n.n("repository_removed", repository=self.entity))
@@ -243,14 +248,13 @@ class BackupRepository(ConfigPanel):

        return {self.entity: result}

    def list_archives(self, with_info):
        self._cast_by_method()
    def list_archives(self, with_info=False):
        archives = self.list_archives_names()
        if with_info:
            d = {}
            for archive in archives:
                try:
                    d[archive] = BackupArchive(repo=self, name=archive).info()
                    d[archive] = BackupArchive(repo=self, name=archive).info(with_details=with_info)
                except YunohostError as e:
                    logger.warning(str(e))
                except Exception:
@@ -322,6 +326,9 @@ class BackupRepository(ConfigPanel):
    def compute_space_used(self):
        raise NotImplementedError()

    def compute_free_space(self):
        raise NotImplementedError()


class LocalBackupRepository(BackupRepository):
    def install(self):
@@ -354,7 +361,7 @@ class BackupArchive:
            self.__class__ = yunohost.repositories.hook.HookBackupArchive

        # Assert archive exists
        if self.manager.__class__.__name__ != "BackupManager" and self.name not in self.repo.list_archives():
        if self.manager.__class__.__name__ != "BackupManager" and self.name not in self.repo.list_archives(False):
            raise YunohostValidationError("backup_archive_name_unknown", name=name)

    @property
@@ -377,7 +384,7 @@ class BackupArchive:

    # This is not a property because it could be managed in a hook
    def need_organized_files(self):
        return self.repo.need_organised_files
        return self.repo.need_organized_files

    def organize_and_backup(self):
        """
@@ -392,7 +399,7 @@ class BackupArchive:
        self.repo.install()

        # Check free space in output
        self._check_is_enough_free_space()
        self.repo.check_is_enough_free_space(self.manager.size)
        try:
            self.backup()
        finally:
@@ -443,59 +450,45 @@ class BackupArchive:
            yield f"{leading_dot}apps/{app}"

    def _get_info_string(self):
        self.archive_file = "%s/%s.tar" % (self.repo.path, self.name)
        """Extract info file from archive if needed and read it"""

        # Check the file exists (even if it's a broken symlink)
        if not os.path.lexists(self.archive_file):
            self.archive_file += ".gz"
            if not os.path.lexists(self.archive_file):
                raise YunohostValidationError("backup_archive_name_unknown", name=self.name)

        # If symlink, retrieve the real path
        if os.path.islink(self.archive_file):
            archive_file = os.path.realpath(self.archive_file)

            # Raise exception if link is broken (e.g. on unmounted external storage)
            if not os.path.exists(archive_file):
                raise YunohostValidationError(
                    "backup_archive_broken_link", path=archive_file
                )
        info_file = CACHE_INFO_DIR.format(repository=self.repo.name)
        mkdir(info_file, mode=0o0700, parents=True, force=True)
        info_file += f"/{self.name}.info.json"
        cache_info_dir = CACHE_INFO_DIR.format(repository=self.repo.entity)
        mkdir(cache_info_dir, mode=0o0700, parents=True, force=True)
        info_file = f"{cache_info_dir}/{self.name}.info.json"

        if not os.path.exists(info_file):
            info_dir = tempfile.mkdtemp()
            tmp_dir = tempfile.mkdtemp()
            try:
                files_in_archive = self.list()
                if "info.json" in files_in_archive:
                    self.extract("info.json")
                    self.extract("info.json", destination=tmp_dir)
                elif "./info.json" in files_in_archive:
                    self.extract("./info.json")
                    self.extract("./info.json", destination=tmp_dir)
                else:
                    raise YunohostError(
                        "backup_archive_cant_retrieve_info_json", archive=self.archive_file
                        "backup_archive_cant_retrieve_info_json", archive=self.archive_path
                    )
                shutil.move(os.path.join(info_dir, "info.json"), info_file)
                # FIXME should we cache that there is no info.json?
                shutil.move(os.path.join(tmp_dir, "info.json"), info_file)
            finally:
                os.rmdir(info_dir)
                os.rmdir(tmp_dir)

        try:
            return read_file(info_file)
        except MoulinetteError:
        except MoulinetteError as e:
            logger.debug("unable to load '%s'", info_file, exc_info=1)
            raise YunohostError('backup_invalid_archive')
            raise YunohostError('backup_invalid_archive', error=e)

    def info(self, with_details, human_readable):
    def info(self, with_details=False, human_readable=False):

        info_json = self._get_info_string()
        if not self._info_json:
        if not info_json:
            raise YunohostError('backup_info_json_not_implemented')
        try:
            info = json.load(info_json)
        except Exception:
            info = json.loads(info_json)
        except Exception as e:
            logger.debug("unable to load info json", exc_info=1)
            raise YunohostError('backup_invalid_archive')
            raise YunohostError('backup_invalid_archive', error=e)

        # (legacy) Retrieve backup size
        # FIXME
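Aside (illustrative, not part of the diff): _get_info_string() now extracts info.json once into a per-repository cache directory and reads it from there on later calls. The same cache-then-read pattern as a standalone helper (extract_info stands in for BackupArchive.extract, the paths are examples):

import json
import os
import shutil
import tempfile

# Illustrative only: extract an archive's info.json once, cache it, then read
# it from the cache afterwards.
def load_cached_info(name, cache_dir, extract_info):
    os.makedirs(cache_dir, mode=0o700, exist_ok=True)
    info_file = os.path.join(cache_dir, f"{name}.info.json")
    if not os.path.exists(info_file):
        tmp_dir = tempfile.mkdtemp()
        try:
            extract_info(tmp_dir)                                  # writes <tmp_dir>/info.json
            shutil.move(os.path.join(tmp_dir, "info.json"), info_file)
        finally:
            shutil.rmtree(tmp_dir, ignore_errors=True)
    with open(info_file) as f:
        return json.load(f)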
@@ -509,7 +502,7 @@ class BackupArchive:
            )
            tar.close()
        result = {
            "path": self.repo.archive_path,
            "path": self.archive_path,
            "created_at": datetime.utcfromtimestamp(info["created_at"]),
            "description": info["description"],
            "size": size,
@@ -571,7 +564,7 @@ class BackupArchive:
        for path in self.manager.paths_to_backup:
            src = path["source"]

            if self.manager.__class__.__name__ != "RestoreManager":
            if self.manager.__class__.__name__ == "RestoreManager":
                # TODO Support running this before a restore (and not only before
                # a backup). To do that, RestoreManager.unorganized_work_dir should
                # be implemented
@@ -694,16 +687,16 @@ class BackupArchive:
            )
            return

    def extract(self, paths=None, exclude_paths=[]):
    def extract(self, paths=None, destination=None, exclude_paths=[]):
        if self.__class__ == BackupArchive:
            raise NotImplementedError()
        if isinstance(exclude_paths, str):
        if isinstance(paths, str):
            paths = [paths]
        elif paths is None:
            paths = self.select_files()
        if isinstance(exclude_paths, str):
            exclude_paths = [exclude_paths]
        return paths, exclude_paths
        return paths, destination, exclude_paths

    def mount(self):
        if self.__class__ == BackupArchive:
@@ -20,6 +20,8 @@
"""
import os

from moulinette.utils.process import check_output


def free_space_in_directory(dirpath):
    stat = os.statvfs(dirpath)
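For reference (not part of the diff): free space from statvfs is typically the fragment size times the blocks available to unprivileged users, which is the kind of computation free_space_in_directory() wraps. A minimal sketch:

import os

# Illustrative only: free bytes on the filesystem holding `dirpath`.
def free_bytes(dirpath):
    stat = os.statvfs(dirpath)
    return stat.f_frsize * stat.f_bavail

print(free_bytes("/tmp"))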