ljf (zamentur) 2022-10-24 21:18:43 +00:00 committed by GitHub
commit 209cf3edb3
15 changed files with 2521 additions and 946 deletions

debian/control

@ -29,6 +29,7 @@ Depends: ${python3:Depends}, ${misc:Depends}
, acl
, git, curl, wget, cron, unzip, jq, bc, at
, lsb-release, haveged, fake-hwclock, equivs, lsof, whois
, borgbackup
Recommends: yunohost-admin
, ntp, inetutils-ping | iputils-ping
, bash-completion, rsyslog

locale/en.json

@ -86,12 +86,15 @@
"backup_archive_broken_link": "Could not access the backup archive (broken link to {path})",
"backup_archive_cant_retrieve_info_json": "Could not load info for archive '{archive}'... The info.json file cannot be retrieved (or is not a valid json).",
"backup_archive_corrupted": "It looks like the backup archive '{archive}' is corrupted : {error}",
"backup_archive_name_exists": "A backup archive with this name already exists.",
"backup_archive_name_exists": "A backup archive with this name already exists in the repo '{repository}'.",
"backup_archive_name_unknown": "Unknown local backup archive named '{name}'",
"backup_archive_open_failed": "Could not open the backup archive",
"backup_archive_system_part_not_available": "System part '{part}' unavailable in this backup",
"backup_archive_writing_error": "Could not add the files '{source}' (named in the archive '{dest}') to be backed up into the compressed archive '{archive}'",
"backup_ask_for_copying_if_needed": "Do you want to perform the backup using {size}MB temporarily? (This way is used since some files could not be prepared using a more efficient method.)",
"backup_borg_init_error": "Unable initialize the borg repository: {error}",
"backup_borg_list_archive_error": "Unable to list files in the archive",
"backup_borg_mount_archive_error": "Unable to mount the archive here: {error}",
"backup_cant_mount_uncompress_archive": "Could not mount the uncompressed archive as write protected",
"backup_cleaning_failed": "Could not clean up the temporary backup folder",
"backup_copying_to_organize_the_archive": "Copying {size}MB to organize the archive",
@ -106,17 +109,23 @@
"backup_delete_error": "Could not delete '{path}'",
"backup_deleted": "Backup deleted",
"backup_hook_unknown": "The backup hook '{hook}' is unknown",
"backup_method_copy_finished": "Backup copy finalized",
"backup_method_custom_finished": "Custom backup method '{method}' finished",
"backup_method_tar_finished": "TAR backup archive created",
"backuping_in_repository": "Backuping into repository '{repository}'",
"backup_in_repository_finished": "Backup into repository '{repository}' is finished",
"backup_in_repository_error": "Backup into repository '{repository}' failed: {error}",
"backup_invalid_archive": "Invalid backup archive : {error}",
"backup_mount_archive_for_restore": "Preparing archive for restoration...",
"backup_no_uncompress_archive_dir": "There is no such uncompressed archive directory",
"backup_not_sent": "Backup archive was not saved at all",
"backup_partially_sent": "Backup archive was not sent into all repositories listed",
"backup_nothings_done": "Nothing to save",
"backup_nowhere_to_backup": "Nowhere to backup your file with this archive name",
"backup_output_directory_forbidden": "Pick a different output directory. Backups cannot be created in /bin, /boot, /dev, /etc, /lib, /root, /run, /sbin, /sys, /usr, /var or /home/yunohost.backup/archives sub-folders",
"backup_output_directory_not_empty": "You should pick an empty output directory",
"backup_output_directory_required": "You must provide an output directory for the backup",
"backup_output_symlink_dir_broken": "Your archive directory '{path}' is a broken symlink. Maybe you forgot to re/mount or plug in the storage medium it points to.",
"backup_permission": "Backup permission for {app}",
"backup_repository_exists": "Backup repository '{backup_repository}' already exists",
"backup_repository_unknown": "Backup repository '{backup_repository}' unknown",
"backup_running_hooks": "Running backup hooks...",
"backup_system_part_failed": "Could not backup the '{part}' system part",
"backup_unable_to_organize_files": "Could not use the quick method to organize files in the archive",
@ -473,6 +482,7 @@
"log_app_upgrade": "Upgrade the '{}' app",
"log_available_on_yunopaste": "This log is now available via {url}",
"log_backup_create": "Create a backup archive",
"log_backup_repository_add": "Add a backup repository",
"log_backup_restore_app": "Restore '{}' from a backup archive",
"log_backup_restore_system": "Restore system from a backup archive",
"log_corrupted_md_file": "The YAML metadata file associated with logs is damaged: '{md_file}\nError: {error}'",
@ -632,6 +642,22 @@
"regenconf_would_be_updated": "The configuration would have been updated for category '{category}'",
"regex_incompatible_with_tile": "/!\\ Packagers! Permission '{permission}' has show_tile set to 'true' and you therefore cannot define a regex URL as the main URL",
"regex_with_only_domain": "You can't use a regex for domain, only for path",
"repository_config_description": "Long name",
"repository_config_is_remote": "Remote repository",
"repository_config_is_shf": "It's a YunoHost",
"repository_config_is_shf_help": "Answer yes if the remote server is a YunoHost instance or an other F2F compatible provider",
"repository_config_domain": "Remote server domain",
"repository_config_public_key": "Public key to give to your BorgBackup provider : {public_key}",
"repository_config_alert": "Alert emails",
"repository_config_alert_help": "Declare emails to which sent inactivity alerts",
"repository_config_alert_delay": "Alert delay",
"repository_config_alert_delay_help": "After how many inactivity days send email alerts",
"repository_config_quota": "Quota",
"repository_config_port": "Port",
"repository_config_user": "User",
"repository_config_method": "Method",
"repository_config_path": "Archive path",
"repository_removed": "Repository '{repository}' removed",
"registrar_infos": "Registrar infos",
"restore_already_installed_app": "An app with the ID '{app}' is already installed",
"restore_already_installed_apps": "The following apps can't be restored because they are already installed: {apps}",

share/actionsmap.yml

@ -1045,16 +1045,21 @@ backup:
help: Name of the backup archive
extra:
pattern: &pattern_backup_archive_name
- !!str ^[\w\-\._]{1,50}(?<!\.)$
- !!str ^[\w\-\._]{1,50}$
- "pattern_backup_archive_name"
-p:
full: --prefix
help: Prefix of the backup archive
extra:
pattern: &pattern_backup_archive_prefix
- !!str ^[\w\-\._]{1,35}$
- "pattern_backup_archive_prefix"
-d:
full: --description
help: Short description of the backup
-o:
full: --output-directory
help: Output directory for the backup
--methods:
help: List of backup methods to apply (copy or tar by default)
-r:
full: --repositories
help: List of repositories to which backup files are sent (the local borg repo is used by default)
nargs: "*"
--system:
help: List of system parts to backup (or all if none given).
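The new archive-name pattern drops the negative lookbehind, so a trailing dot is now accepted; a quick check (a sketch, not part of the commit):

import re

old = re.compile(r"^[\w\-\._]{1,50}(?<!\.)$")
new = re.compile(r"^[\w\-\._]{1,50}$")

name = "my-backup.2022."
print(bool(old.match(name)))  # False: the lookbehind rejected the trailing dot
print(bool(new.match(name)))  # True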
@ -1071,6 +1076,8 @@ backup:
action_help: Restore from a local backup archive. If neither --apps nor --system is given, this will restore all apps and all system parts in the archive. If only --apps is given, this will only restore apps and no system parts. Similarly, if only --system is given, this will only restore system parts and no apps.
api: PUT /backups/<name>/restore
arguments:
repository:
help: Repository of the backup archive
name:
help: Name of the local backup archive
--system:
@ -1085,9 +1092,19 @@ backup:
### backup_list()
list:
action_help: List available local backup archives
action_help: List available local backup archives or list files in an archive
api: GET /backups
arguments:
repository:
help: Repository of a backup archive
nargs: "?"
name:
help: Name of a backup archive
nargs: "?"
-r:
full: --repositories
help: List archives in these repositories
nargs: "*"
-i:
full: --with-info
help: Show backup information for each archive
@ -1102,8 +1119,10 @@ backup:
action_help: Show info about a local backup archive
api: GET /backups/<name>
arguments:
repository:
help: Repository of the backup archive
name:
help: Name of the local backup archive
help: Name of the backup archive
-d:
full: --with-details
help: Show additional backup information
@ -1119,19 +1138,306 @@ backup:
action_help: (API only) Request to download the file
api: GET /backups/<name>/download
arguments:
repository:
help: Repository of the backup archive
name:
help: Name of the local backup archive
### backup_mount()
mount:
action_help: Mount a backup archive if possible
api: DELETE /backups/<name>
arguments:
repository:
help: Repository of the backup archive
name:
help: Name of the backup archive
path:
help: Path where the archive should be mounted
### backup_delete()
delete:
action_help: Delete a backup archive
api: DELETE /backups/<name>
arguments:
name:
repository:
help: Repository of the backup archive
archive_name:
help: Name of the archive to delete
extra:
pattern: *pattern_backup_archive_name
nargs: "*"
subcategories:
repository:
subcategory_help: Manage backup repositories
actions:
### backup_repository_list()
list:
action_help: List available repositories where archives can be stored
api: GET /backups/repositories
arguments:
--full:
help: Show more details
action: store_true
--space-used:
help: Display size used
action: store_true
### backup_repository_info()
info:
action_help: Show info about a repository
api: GET /backups/repository/<shortname>
arguments:
shortname:
help: ID of the repository
extra:
pattern: &pattern_backup_repository_shortname
- !!str ^[a-zA-Z0-9-_\.]+$
- "pattern_backup_repository_shortname"
--space-used:
help: Display size used
action: store_true
### backup_repository_add()
add:
action_help: Add a backup repository
api: POST /backups/repository/<shortname>
arguments:
shortname:
help: ID of the repository
extra:
pattern: *pattern_backup_repository_shortname
-n:
full: --name
help: Short description of the repository
-l:
full: --location
help: Location on this server or on another one
extra:
pattern: &pattern_backup_repository_location
- !!str ^((ssh://)?[a-z_]\w*@[\w\-\.]+:)?(~?/)?[\w/]*$
- "pattern_backup_repository_location"
-m:
full: --method
help: The 'borg' method is used by default; this can be 'tar' or a custom method
-q:
full: --quota
help: Quota to configure with this repository
-p:
full: --passphrase
help: A strong passphrase to encrypt/decrypt your backup (keep it somewhere safe)
action: store_true
-a:
full: --alert
help: List of email addresses to which inactivity alerts are sent
nargs: "*"
-d:
full: --alert-delay
help: Inactivity delay in days after which alert emails are sent
### backup_repository_update()
update:
action_help: Update a backup repository
api: PUT /backups/repository/<shortname>
arguments:
shortname:
help: Name of the backup repository to update
extra:
pattern: *pattern_backup_repository_shortname
-d:
full: --description
help: Short description of the repository
-q:
full: --quota
help: Quota to configure with this repository
-p:
full: --password
help: Change password
extra:
password: ask__password
pattern: *pattern_password
### backup_repository_remove()
remove:
action_help: Remove a backup repository
api: DELETE /backups/repository/<shortname>
arguments:
shortname:
help: Name of the backup repository to remove
extra:
pattern: *pattern_backup_repository_shortname
--purge:
help: Remove all archives and data inside repository
action: store_true
### backup_repository_prune()
prune:
action_help: Prune archives in a backup repository
api: POST /backups/repository/<shortname>/prune
arguments:
shortname:
help: Name of the backup repository to prune
extra:
pattern: *pattern_backup_repository_shortname
--prefix:
help: Only prune archives whose names start with this prefix
nargs: "?"
-H:
full: --keep-hourly
help: Number of hourly archives to keep
type: int
-d:
full: --keep-daily
help: Number of daily archives to keep
type: int
-w:
full: --keep-weekly
help: Number of weekly archives to keep
type: int
-m:
full: --keep-monthly
help: Number of monthly archives to keep
type: int
--keep-last:
help: Number of last archives to keep
type: int
--keep-within:
help: Keep all archives within this time interval
extra:
pattern: &pattern_interval
- !!str ^\d+[Hdwmy]$
- "pattern_interval"
timer:
subcategory_help: Manage backup timer
actions:
### backup_timer_list()
list:
action_help: List backup timers
api: GET /backup/timer
arguments:
--full:
help: Show more details
action: store_true
### backup_timer_create()
create:
action_help: Add a backup timer
api: POST /backup/timer/<name>
arguments:
name:
help: Short prefix of the backup archives
extra:
pattern: &pattern_backup_timer_name
- !!str ^[\w\-\._]{1,50}$
- "pattern_backup_timer_name"
-d:
full: --description
help: Short description of the backup
-r:
full: --repositories
help: List of repositories to which backup files are sent (the local borg repo is used by default)
nargs: "*"
--system:
help: List of system parts to backup (or all if none given).
nargs: "*"
--apps:
help: List of application names to backup (or all if none given)
nargs: "*"
--schedule:
help: Regular backup frequency (see systemd OnCalendar format)
--alert:
help: Email address to alert
--keep-hourly:
default: 0
--keep-daily:
default: 7
--keep-weekly:
default: 8
--keep-monthly:
default: 8
### backup_timer_update()
update:
action_help: Update a backup timer
api: PUT /backup/timer/<name>
arguments:
name:
help: Short prefix of the backup archives
extra:
pattern: *pattern_backup_timer_name
-d:
full: --description
help: Short description of the backup
-r:
full: --repositories
help: List of repositories to which backup files are sent (the local borg repo is used by default)
nargs: "*"
--system:
help: List of system parts to backup (or all if none given).
nargs: "*"
--apps:
help: List of application names to backup (or all if none given)
nargs: "*"
--schedule:
help: Regular backup frequency (see systemd OnCalendar format)
--alert:
help: Email address to alert
--keep-hourly:
default: 2
--keep-daily:
default: 7
--keep-weekly:
default: 8
--keep-monthly:
default: 12
### backup_timer_remove()
remove:
action_help: Remove a backup timer
api: DELETE /backup/timer/<name>
arguments:
name:
help: Short prefix of the backup archives
extra:
pattern: *pattern_backup_timer_name
### backup_timer_info()
info:
action_help: Get info about a backup timer
api: GET /backup/timer/<name>
arguments:
name:
help: Short prefix of the backup archives
extra:
pattern: *pattern_backup_timer_name
### backup_timer_start()
start:
action_help: Start a backup timer
api: POST /backup/timer/<name>/start
arguments:
name:
help: Backup timer to start
extra:
pattern: *pattern_backup_timer_name
--now:
help: Trigger a backup immediately
action: store_true
### backup_timer_pause()
pause:
action_help: Pause a backup timer
api: POST /backup/timer/<name>/pause
arguments:
name:
help: Backup timer to pause
extra:
pattern: *pattern_backup_timer_name
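For illustration (a sketch, not part of the commit), the --location pattern used by `backup repository add` accepts bare local paths as well as ssh-style remotes:

import re

location = re.compile(r"^((ssh://)?[a-z_]\w*@[\w\-\.]+:)?(~?/)?[\w/]*$")

for candidate in ("ssh://borg@backup.example.org:/data/repo",
                  "borg@backup.example.org:~/repo",
                  "/home/yunohost/archives"):
    print(candidate, "->", bool(location.match(candidate)))  # all True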
#############################
# Settings #

share/config_repository.toml

@ -0,0 +1,89 @@
version = "1.0"
i18n = "repository_config"
[main]
name.en = ""
[main.main]
name.en = ""
optional = false
# if method == "tar": question["value"] = False
[main.main.description]
type = "string"
default = ""
[main.main.is_remote]
type = "boolean"
yes = true
no = false
visible = "creation"
default = "no"
[main.main.domain]
type = "string"
visible = "creation && is_remote"
pattern.regexp = '^([^\W_A-Z]+([-]*[^\W_A-Z]+)*\.)+((xn--)?[^\W_]{2,})$'
pattern.error = 'domain_error' # TODO "Please provide a valid domain"
default = ""
# FIXME: shouldn't a domain of this very instance be rejected?
[main.main.is_shf]
help = ""
type = "boolean"
yes = true
no = false
visible = "creation && is_remote"
default = false
[main.main.public_key]
type = "alert"
style = "info"
visible = "creation && is_remote && ! is_shf"
[main.main.alert]
help = ''
type = "tags"
visible = "is_remote && is_shf"
pattern.regexp = '^[\w\+.-]+@([^\W_A-Z]+([-]*[^\W_A-Z]+)*\.)+((xn--)?[^\W_]{2,})$'
pattern.error = "alert_error"
default = []
# "value": alert,
[main.main.alert_delay]
help = ''
type = "number"
visible = "is_remote && is_shf"
min = 1
default = 7
[main.main.quota]
type = "string"
visible = "is_remote && is_shf"
pattern.regexp = '^\d+[MGT]$'
pattern.error = '' # TODO ""
default = ""
[main.main.port]
type = "number"
visible = "is_remote && !is_shf"
min = 1
max = 65535
default = 22
[main.main.user]
type = "string"
visible = "is_remote && !is_shf"
default = ""
[main.main.method]
type = "select"
# "value": method,
choices.borg = "BorgBackup (recommended)"
choices.tar = "Legacy tar archive mechanism"
default = "borg"
visible = "!is_remote"
[main.main.path]
type = "path"
visible = "!is_remote or (is_remote and !is_shf)"
default = "/home/yunohost.backup/archives"


@ -0,0 +1,63 @@
version = "1.0"
i18n = "backup_timer_config"
[main]
name.en = ""
[main.main]
name.en = ""
optional = false
# if method == "tar": question["value"] = False
[main.main.description]
type = "string"
default = ""
[main.main.repositories]
type = "tags"
visible = "creation"
default = []
[main.main.system]
type = "tags"
default = []
[main.main.apps]
type = "tags"
default = []
[main.main.schedule]
type = "string"
default = "Daily"
[main.main.alert]
help = ''
type = "tags"
pattern.regexp = '^[\w\+.-]+@([^\W_A-Z]+([-]*[^\W_A-Z]+)*\.)+((xn--)?[^\W_]{2,})$'
pattern.error = "alert_error"
default = []
# "value": alert,
[main.main.keep_hourly]
help = ''
type = "number"
min = 0
default = 0
[main.main.keep_daily]
help = ''
type = "number"
min = 0
default = 10
[main.main.keep_weekly]
help = ''
type = "number"
min = 0
default = 8
[main.main.keep_monthly]
help = ''
type = "number"
min = 0
default = 8
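Since `schedule` is documented as systemd OnCalendar syntax, a value can be validated by delegating to systemd. A sketch (not part of the commit; assumes systemd is available, which it is on a YunoHost host):

import subprocess

def is_valid_oncalendar(schedule: str) -> bool:
    # systemd-analyze exits non-zero on unparsable calendar specs
    return subprocess.run(
        ["systemd-analyze", "calendar", schedule],
        capture_output=True,
    ).returncode == 0

print(is_valid_oncalendar("daily"))            # True
print(is_valid_oncalendar("every 5 minutes"))  # False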

File diff suppressed because it is too large

src/repositories/borg.py (new file, 240 lines)

@ -0,0 +1,240 @@
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2013 Yunohost
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
import os
import subprocess
import json
from datetime import datetime, timedelta
from moulinette import m18n
from moulinette.utils.log import getActionLogger
from yunohost.utils.error import YunohostError
from yunohost.utils.network import shf_request
from yunohost.repository import LocalBackupRepository, BackupArchive
logger = getActionLogger("yunohost.repository")
class BorgBackupRepository(LocalBackupRepository):
need_organized_files = True
method_name = "borg"
# TODO logs
def _run_borg_command(self, cmd, stdout=None, stderr=None, cwd=None):
""" Call a submethod of borg with the good context
"""
env = dict(os.environ)
if self.domain:
# TODO Use the best/good key
private_key = "/etc/ssh/ssh_host_ed25519_key"
# Don't check ssh fingerprint strictly the first time
# TODO improve this by publishing and checking this with DNS
# FIXME known_host are hashed now
try:
strict = 'yes' if self.domain in open('/root/.ssh/known_hosts').read() else 'no'
except FileNotFoundError:
strict = 'no'
env['BORG_RSH'] = "ssh -i %s -oStrictHostKeyChecking=%s"
env['BORG_RSH'] = env['BORG_RSH'] % (private_key, strict)
# In case borg needs a passphrase to access the repo
if "passphrase" in self.future_values:
env['BORG_PASSPHRASE'] = self.passphrase
# Allow the repository to be moved (borgbase does this)
env["BORG_RELOCATED_REPO_ACCESS_IS_OK"] = "yes"
kwargs = {}
if cwd:
kwargs["cwd"] = cwd
return subprocess.Popen(cmd, env=env,
stdout=stdout, stderr=stderr, **kwargs)
def _call(self, action, cmd, json_output=False, cwd=None):
borg = self._run_borg_command(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, cwd=cwd)
out, err = borg.communicate()
if borg.returncode:
raise YunohostError(f"backup_borg_{action}_error", error=err)
if json_output:
try:
return json.loads(out)
except (json.decoder.JSONDecodeError, TypeError):
raise YunohostError(f"backup_borg_{action}_error")
return out
# =================================================
# Repository actions
# =================================================
def install(self):
# Remote
if self.is_remote:
if self.is_shf and not self.future_values.get('user'):
services = {
'borg': 'borgbackup'
}
response = shf_request(
domain=self.domain,
service=services[self.method],
shf_id=self.values.pop('shf_id', None),
data={
'origin': self.domain,
'public_key': self.public_key,
'quota': self.quota,
'alert': self.alert,
'alert_delay': self.alert_delay,
# password: "XXXXXXXX",
}
)
self.new_values['shf_id'] = response['id']
self.new_values['location'] = response['repository']
elif not self.is_shf:
self.new_values['location'] = self.location
if not self.future_values.get('user'):
raise YunohostError("") # TODO
# Local
else:
super().install()
# Initialize borg repo
encryption_mode = "repokey" if "passphrase" in self.future_values and self.future_values["passphrase"] else "none"
cmd = ["borg", "init", "--encryption", encryption_mode, self.location]
if "quota" in self.future_values and self.future_values["quota"]:
cmd += ['--storage-quota', self.quota]
logger.debug(cmd)
try:
self._call('init', cmd)
except YunohostError as e:
if e.key != "backup_borg_init_error":
raise
else:
# Check if it's possible to read the borg repo with current settings
try:
cmd = ["borg", "info", self.location]
self._call('info', cmd)
except YunohostError:
raise e
logger.debug("The borg repository '{self.location}' already exists.")
def update(self):
raise NotImplementedError()
def purge(self):
if self.is_shf:
shf_request(
domain=self.domain,
service="borgbackup",
shf_id=self.values.pop('shf_id', None),
data={
'origin': self.domain,
# password: "XXXXXXXX",
}
)
else:
cmd = ["borg", "delete", self.location]
self._call('purge', cmd)
if not self.is_remote:
super().purge()
def list_archives_names(self, prefix=None):
cmd = ["borg", "list", "--json", self.location]
if prefix:
cmd += ["-P", prefix]
response = self._call('list', cmd, True)
return [archive["name"] for archive in response['archives']]
def compute_space_used(self):
""" Return the size of this repo on the disk"""
# FIXME this size may be irrelevant; du and borg report sizes that don't match!
cmd = ["borg", "info", "--json", self.location]
response = self._call('info', cmd, json_output=True)
return response["cache"]["stats"]["unique_size"]
class BorgBackupArchive(BackupArchive):
""" Backup prepared files with borg """
def backup(self):
cmd = ['borg', 'create', self.archive_path, './']
self.repo._call('backup', cmd, cwd=self.work_dir)
def delete(self):
cmd = ['borg', 'delete', '--force', self.archive_path]
self.repo._call('delete_archive', cmd)
def list(self, with_info=False):
""" Return a list of archives names
Exceptions:
backup_borg_list_error -- Raised if the borg script failed
"""
cmd = ["borg", "list", "--json-lines" if with_info else "--short",
self.archive_path]
out = self.repo._call('list_archive', cmd)
if not with_info:
return out.decode()
result = {}
for line in out.splitlines():
_file = json.loads(line)
filename = _file.pop("path")
result[filename] = _file
return result
def download(self, exclude_paths=[]):
super().download()
paths = self.select_files()
if isinstance(exclude_paths, str):
exclude_paths = [exclude_paths]
# Here tar archive are not compressed, if we want to compress we
# should add --tar-filter=gzip.
cmd = ["borg", "export-tar", self.archive_path, "-"] + paths
for path in exclude_paths:
cmd += ['--exclude', path]
reader = self.repo._run_borg_command(cmd, stdout=subprocess.PIPE)
# We return a raw bottle HTTPresponse (instead of serializable data like
# list/dict, ...), which is gonna be picked and used directly by moulinette
from bottle import response, HTTPResponse
response.content_type = "application/x-tar"
return HTTPResponse(reader, 200)
def extract(self, paths=[], destination=None, exclude_paths=[]):
# TODO exclude_paths not available in actions map
paths, destination, exclude_paths = super().extract(paths, destination, exclude_paths)
cmd = ['borg', 'extract', self.archive_path] + paths
for path in exclude_paths:
cmd += ['--exclude', path]
return self.repo._call('extract_archive', cmd, cwd=destination)
def mount(self, path):
# FIXME How to be sure the place where we mount is secure ?
cmd = ['borg', 'mount', self.archive_path, path]
self.repo._call('mount_archive', cmd)
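A hypothetical usage sketch (not part of the commit; "local-borg" stands in for a shortname previously created with `yunohost backup repository add`):

from yunohost.repositories.borg import BorgBackupRepository

repo = BorgBackupRepository("local-borg")
print(repo.list_archives_names(prefix="auto_"))  # wraps `borg list --json`
print(repo.compute_space_used())                 # unique_size from `borg info --json`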

src/repositories/hook.py (new file, 158 lines)

@ -0,0 +1,158 @@
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2013 Yunohost
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette import m18n
from moulinette.utils.log import getActionLogger
from moulinette.utils.filesystem import rm
from yunohost.hook import hook_callback
from yunohost.utils.error import YunohostError
from yunohost.repository import BackupRepository, BackupArchive
logger = getActionLogger("yunohost.repository")
class HookBackupRepository(BackupRepository):
method_name = "hook"
# =================================================
# Repository actions
# =================================================
def install(self):
raise NotImplementedError()
def update(self):
raise NotImplementedError()
def remove(self, purge=False):
if self.__class__ == BackupRepository:
raise NotImplementedError() # purge
rm(self.save_path, force=True)
logger.success(m18n.n("repository_removed", repository=self.shortname))
def list(self):
raise NotImplementedError()
def info(self, space_used=False):
result = super().get(mode="export")
if self.__class__ == BackupRepository and space_used is True:
raise NotImplementedError() # purge
return {self.shortname: result}
def prune(self):
raise NotImplementedError()
class HookBackupArchive(BackupArchive):
# =================================================
# Archive actions
# =================================================
def backup(self):
"""
Launch a custom script to backup
"""
raise NotImplementedError()
self._call('backup', self.work_dir, self.name, self.repo.location, self.manager.size,
self.manager.description)
def restore(self):
raise NotImplementedError()
def delete(self):
raise NotImplementedError()
def list(self):
""" Return a list of archive names
Exceptions:
backup_custom_list_error -- Raised if the custom script failed
"""
raise NotImplementedError()
out = self._call('list', self.repo.location)
result = out.strip().splitlines()
return result
def info(self):
""" Return json string of the info.json file
Exceptions:
backup_custom_info_error -- Raised if the custom script failed
"""
raise NotImplementedError()  # compute_space_used
return self._call('info', self.name, self.repo.location)
def download(self):
raise NotImplementedError()
def mount(self):
"""
Launch a custom script to mount the custom archive
"""
raise NotImplementedError()
super().mount()
self._call('mount', self.work_dir, self.name, self.repo.location, self.manager.size,
self.manager.description)
def extract(self):
raise NotImplementedError()
def need_organized_files(self):
"""Call the backup_method hook to know if we need to organize files"""
if self._need_mount is not None:
return self._need_mount
try:
self._call('need_mount')
except YunohostError:
return False
return True
def _call(self, *args):
""" Call a submethod of backup method hook
Exceptions:
backup_custom_ACTION_error -- Raised if the custom script failed
"""
ret = hook_callback("backup_method", [self.method],
args=args)
ret_failed = [
hook
for hook, infos in ret.items()
if any(result["state"] == "failed" for result in infos.values())
]
if ret_failed:
raise YunohostError("backup_custom_" + args[0] + "_error")
return ret["succeed"][self.method]["stdreturn"]
def _get_args(self, action):
"""Return the arguments to give to the custom script"""
return [
action,
self.work_dir,
self.name,
self.repo,
self.manager.size,
self.manager.description,
]

src/repositories/tar.py (new file, 251 lines)

@ -0,0 +1,251 @@
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2013 Yunohost
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
import glob
import os
import tarfile
import shutil
from moulinette.utils.log import getActionLogger
from moulinette import m18n
from yunohost.utils.error import YunohostError, YunohostValidationError
from yunohost.utils.filesystem import free_space_in_directory
from yunohost.repository import LocalBackupRepository, BackupArchive
from yunohost.backup import BackupManager
from yunohost.utils.filesystem import space_used_in_directory
from yunohost.settings import settings_get
logger = getActionLogger("yunohost.repository")
class TarBackupRepository(LocalBackupRepository):
need_organized_files = False
method_name = "tar"
# =================================================
# Repository actions
# =================================================
def list_archives_names(self, prefix=None):
# Get local archives sorted according to last modification time
# (we do a realpath() to resolve symlinks)
archives = glob.glob(f"{self.location}/*.tar.gz") + glob.glob(f"{self.location}/*.tar")
archives = set([os.path.realpath(archive) for archive in archives])
broken_archives = set()
for archive in archives:
if not os.path.exists(archive):
broken_archives.add(archive)
logger.warning(m18n.n("backup_archive_broken_link", path=archive))
archives = sorted(archives - broken_archives, key=lambda x: os.path.getctime(x))
# Extract only filename without the extension
def remove_extension(f):
if f.endswith(".tar.gz"):
return os.path.basename(f)[: -len(".tar.gz")]
else:
return os.path.basename(f)[: -len(".tar")]
return [name for name in (remove_extension(f) for f in archives)
if prefix is None or name.startswith(prefix)]
def compute_space_used(self):
return space_used_in_directory(self.location)
def compute_free_space(self):
return free_space_in_directory(self.location)
def prune(self):
raise NotImplementedError()
class TarBackupArchive(BackupArchive):
@property
def archive_path(self):
if isinstance(self.manager, BackupManager) and settings_get(
"backup.compress_tar_archives"
):
return os.path.join(self.repo.location, self.name + ".tar.gz")
f = os.path.join(self.repo.path, self.name + ".tar")
if os.path.exists(f + ".gz"):
f += ".gz"
return f
def backup(self):
# Open archive file for writing
try:
tar = tarfile.open(
self.archive_path,
"w:gz" if self.archive_path.endswith(".gz") else "w",
)
except Exception:
logger.debug(
"unable to open '%s' for writing", self.archive_path, exc_info=1
)
raise YunohostError("backup_archive_open_failed")
# Add files to the archive
try:
for path in self.manager.paths_to_backup:
# Add the "source" into the archive and transform the path into
# "dest"
tar.add(path["source"], arcname=path["dest"])
except IOError:
logger.error(
m18n.n(
"backup_archive_writing_error",
source=path["source"],
archive=self.archive_path,
dest=path["dest"],
),
exc_info=1,
)
raise YunohostError("backup_creation_failed")
finally:
tar.close()
# Move info file
shutil.copy(
os.path.join(self.work_dir, "info.json"),
os.path.join(self.repo.location, self.name + ".info.json"),
)
# If backed up to a non-default location, keep a symlink of the archive
# to that location
link = os.path.join(self.repo.path, self.name + ".tar")
if not os.path.isfile(link):
os.symlink(self.archive_path, link)
def copy(self, file, target):
tar = tarfile.open(
self.archive_path, "r:gz" if self.archive_path.endswith(".gz") else "r"
)
file_to_extract = tar.getmember(file)
# Remove the path
file_to_extract.name = os.path.basename(file_to_extract.name)
tar.extract(file_to_extract, path=target)
tar.close()
def delete(self):
archive_file = f"{self.repo.location}/{self.name}.tar"
info_file = f"{self.repo.location}/{self.name}.info.json"
if os.path.exists(archive_file + ".gz"):
archive_file += ".gz"
files_to_delete = [archive_file, info_file]
# To handle the case where archive_file is in fact a symlink
if os.path.islink(archive_file):
actual_archive = os.path.realpath(archive_file)
files_to_delete.append(actual_archive)
for backup_file in files_to_delete:
if not os.path.exists(backup_file):
continue
try:
os.remove(backup_file)
except Exception:
logger.debug("unable to delete '%s'", backup_file, exc_info=1)
logger.warning(m18n.n("backup_delete_error", path=backup_file))
def list(self):
try:
tar = tarfile.open(
self.archive_path,
"r:gz" if self.archive_path.endswith(".gz") else "r",
)
except Exception:
logger.debug(
"cannot open backup archive '%s'", self.archive_path, exc_info=1
)
raise YunohostError("backup_archive_open_failed")
try:
return tar.getnames()
except (IOError, EOFError, tarfile.ReadError) as e:
tar.close()
raise YunohostError(
"backup_archive_corrupted", archive=self.archive_path, error=str(e)
)
def download(self):
super().download()
# If symlink, retrieve the real path
archive_file = self.archive_path
if os.path.islink(archive_file):
archive_file = os.path.realpath(archive_file)
# Raise exception if link is broken (e.g. on unmounted external storage)
if not os.path.exists(archive_file):
raise YunohostValidationError(
"backup_archive_broken_link", path=archive_file
)
# We return a raw bottle HTTPresponse (instead of serializable data like
# list/dict, ...), which is gonna be picked and used directly by moulinette
from bottle import static_file
archive_folder, archive_file_name = archive_file.rsplit("/", 1)
return static_file(archive_file_name, archive_folder, download=archive_file_name)
def extract(self, paths=None, exclude_paths=[]):
paths, exclude_paths = super().extract(paths, exclude_paths)
# Mount the tarball
try:
tar = tarfile.open(
self.archive_path,
"r:gz" if self.archive_path.endswith(".gz") else "r",
)
except Exception:
logger.debug(
"cannot open backup archive '%s'", self.archive_path, exc_info=1
)
raise YunohostError("backup_archive_open_failed")
subdir_and_files = [
tarinfo
for tarinfo in tar.getmembers()
if (
any([tarinfo.name.startswith(path) for path in paths])
and all([not tarinfo.name.startswith(path) for path in exclude_paths])
)
]
tar.extractall(members=subdir_and_files, path=self.work_dir)
tar.close()
def mount(self):
raise NotImplementedError()
def _archive_exists(self):
return os.path.lexists(self.archive_path)
def _assert_archive_exists(self):
if not self._archive_exists():
raise YunohostError('backup_archive_name_unknown', name=self.name)
# If symlink, retrieve the real path
if os.path.islink(self.archive_path):
archive_file = os.path.realpath(self.archive_path)
# Raise exception if link is broken (e.g. on unmounted external storage)
if not os.path.exists(archive_file):
raise YunohostError('backup_archive_broken_link',
path=archive_file)
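The ".gz"-suffix test recurs throughout TarBackupArchive to pick the tarfile mode; factored out as a sketch (not part of the commit):

import tarfile

def open_archive(path: str, write: bool = False) -> tarfile.TarFile:
    # ".gz" selects a gzip-compressed stream, otherwise a plain tar
    base = "w" if write else "r"
    return tarfile.open(path, f"{base}:gz" if path.endswith(".gz") else base)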

src/repository.py (new file, 748 lines)

@ -0,0 +1,748 @@
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2013 Yunohost
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
""" yunohost_repository.py
Manage backup repositories
"""
import json
import os
import re
import shutil
import subprocess
import tarfile
import tempfile
from functools import reduce
from moulinette import Moulinette, m18n
from moulinette.core import MoulinetteError
from moulinette.utils.log import getActionLogger
from moulinette.utils.filesystem import read_file, rm, mkdir
from moulinette.utils.network import download_text
from datetime import timedelta, datetime
import yunohost.repositories
from yunohost.utils.config import ConfigPanel
from yunohost.utils.error import YunohostError, YunohostValidationError
from yunohost.utils.system import disk_usage, binary_to_human
from yunohost.utils.network import get_ssh_public_key, SHF_BASE_URL
logger = getActionLogger('yunohost.repository')
REPOSITORIES_DIR = '/etc/yunohost/backup/repositories'
CACHE_INFO_DIR = "/var/cache/yunohost/repositories/{repository}"
REPOSITORY_CONFIG_PATH = "/usr/share/yunohost/other/config_repository.toml"
MB_ALLOWED_TO_ORGANIZE = 10
# TODO split ConfigPanel.get to extract "Format result" part and be able to override it
# TODO Migration
# TODO Remove BackupRepository.get_or_create()
# TODO Backup method
# TODO API params to get description of forms
# TODO tests
# TODO detect external hard drive already mounted and suggest it
# TODO F2F client delete
# TODO F2F server
# TODO i18n pattern error
class BackupRepository(ConfigPanel):
"""
BackupRepository manages all the repositories the admin added to the instance
"""
entity_type = "backup_repository"
save_path_tpl = REPOSITORIES_DIR + "/{entity}.yml"
save_mode = "full"
need_organized_files = True
method_name = ""
@staticmethod
def split_location(location):
"""
Split a repository location into protocol, user, domain and path
"""
if "/" not in location:
return {"domain": location}
location_regex = r'^((?P<protocol>ssh://)?(?P<user>[^@ ]+)@(?P<domain>[^: ]+):((?P<port>\d+)/)?)?(?P<path>[^:]+)$'
location_match = re.match(location_regex, location)
if location_match is None:
raise YunohostError('backup_repositories_invalid_location',
location=location)
return {
'protocol': location_match.group('protocol'),
'user': location_match.group('user'),
'domain': location_match.group('domain'),
'port': location_match.group('port'),
'path': location_match.group('path')
}
@classmethod
def list(cls, space_used=False, full=False):
"""
List available repositories where archives can be stored
"""
repositories = super().list()
if not full:
return repositories
full_repositories = {}
for repo in repositories:
try:
full_repositories.update(BackupRepository(repo).info(space_used))
except Exception as e:
logger.error(f"Unable to open repository {repo}: {e}")
return full_repositories
def __init__(self, entity, config_path=None, save_path=None, creation=False):
super().__init__(entity, config_path, save_path, creation)
self._load_current_values()
self._cast_by_backup_method()
def _cast_by_backup_method(self):
try:
if self.method == 'tar':
from yunohost.repositories.tar import TarBackupRepository
self.__class__ = TarBackupRepository
elif self.method == 'borg':
from yunohost.repositories.borg import BorgBackupRepository
self.__class__ = BorgBackupRepository
else:
from yunohost.repositories.hook import HookBackupRepository
self.__class__ = HookBackupRepository
except KeyError:
pass
# =================================================
# Config Panel Hooks
# =================================================
def post_ask__domain(self, question):
""" Detect if the domain support Self-Hosting Federation protocol
"""
# import requests
# FIXME What if remote server is self-signed ?
# FIXME What if remote server is unreachable temporarily ?
url = SHF_BASE_URL.format(domain=question.value) + "/"
try:
# r = requests.get(url, timeout=10)
download_text(url, timeout=10)
except MoulinetteError:
logger.debug("SHF not running")
return {'is_shf': False}
logger.debug("SHF running")
return {'is_shf': True}
def post_ask__is_remote(self, question):
if question.value:
self.method = 'borg'
self._cast_by_backup_method()
return {}
def post_ask__method(self, question):
self._cast_by_backup_method()
return {}
# =================================================
# Config Panel Override
# =================================================
def _get_default_values(self):
values = super()._get_default_values()
# TODO move that in a getter hooks ?
values["public_key"] = get_ssh_public_key()
return values
def _load_current_values(self):
super()._load_current_values()
if 'location' in self.values:
self.values.update(BackupRepository.split_location(self.values['location']))
self.values['is_remote'] = bool(self.values.get('domain'))
if self.values.get('method') == 'tar' and self.values['is_remote']:
raise YunohostError("repository_tar_only_local")
self.values['is_shf'] = bool(self.values['shf_id']) if 'shf_id' in self.values else False
def _parse_pre_answered(self, *args):
super()._parse_pre_answered(*args)
if 'location' in self.args:
self.args.update(BackupRepository.split_location(self.args['location']))
if 'domain' in self.args:
self.args['is_remote'] = bool(self.args['domain'])
self.args['method'] = "borg"
elif self.args.get('method') == 'tar':
self.args['is_remote'] = False
def _apply(self):
# Activate / update services
if not os.path.exists(self.save_path):
self.install()
else:
self.update()
# Clean redundant values before registering
for prop in ['is_remote', 'domain', 'port', 'user', 'path',
'creation', 'is_shf', 'shortname']:
self.values.pop(prop, None)
self.new_values.pop(prop, None)
super()._apply()
# =================================================
# BackupMethod encapsulation
# =================================================
@property
def location(self):
if not self.future_values:
return None
if not self.is_remote:
return self.path
return f"ssh://{self.user}@{self.domain}:{self.port}/{self.path}"
@property
def is_deduplicated(self):
return True
def check_is_enough_free_space(self, backup_size):
"""
Check free space in the repository or output directory before backing up
"""
if self.is_deduplicated:
return
free_space = self.compute_free_space()
if free_space < backup_size:
logger.debug(
"Not enough space at %s (free: %s / needed: %d)",
self.entity,
free_space,
backup_size,
)
raise YunohostValidationError("not_enough_disk_space", path=self.entity)
def remove(self, purge=False):
if purge:
self._load_current_values()
self.purge()
rm(CACHE_INFO_DIR.format(repository=self.entity), recursive=True, force=True)
rm(self.save_path, force=True)
logger.success(m18n.n("repository_removed", repository=self.entity))
def info(self, space_used=False):
result = super().get(mode="export")
if space_used is True:
result["space_used"] = self.compute_space_used()
return {self.entity: result}
def list_archives(self, with_info=False):
archives = self.list_archives_names()
if with_info:
d = {}
for archive in archives:
try:
d[archive] = BackupArchive(repo=self, name=archive).info(with_details=with_info)
except YunohostError as e:
logger.warning(str(e))
except Exception:
import traceback
logger.warning(
"Could not check infos for archive %s: %s"
% (archive, "\n" + traceback.format_exc())
)
archives = d
return archives
def prune(self, prefix=None, keep_last=None, keep_within=None, keep_hourly=None, keep_daily=None, keep_weekly=None, keep_monthly=None):
# Default prune options
keeps = [value is None for key, value in locals().items() if key.startswith("keep_")]
if all(keeps):
keep_hourly = 0
keep_daily = 10
keep_weekly = 8
keep_monthly = 8
logger.debug(f"Prune and keep one per each {keep_hourly} last hours, {keep_daily} last days, {keep_weekly} last weeks, {keep_monthly} last months")
keep_last = keep_last if keep_last else 0
# Convert keep_within to a date
# (the actionsmap pattern allows H/d/w/m/y, so months and years are
# approximated in days here)
units = {
"H": ("hours", 1),
"d": ("days", 1),
"w": ("weeks", 1),
"m": ("days", 30),
"y": ("days", 365),
}
now = datetime.utcnow()
if keep_within:
unit, factor = units[keep_within[-1]]
keep_within = now - timedelta(**{unit: int(keep_within[:-1]) * factor})
else:
keep_within = now
# List archives with creation date
archives = {}
for archive_name in self.list_archives_names(prefix):
archive = BackupArchive(repo=self, name=archive_name)
created_at = archive.info()["created_at"]
archives[created_at] = archive
if not archives:
return
# Generate periods in which keep one archive
now = datetime.utcnow()
now -= timedelta(
minutes=now.minute,
seconds=now.second,
microseconds=now.microsecond
)
periods = set([])
units = {
"keep_hourly": {"hours": 1},
"keep_daily": {"days": 1},
"keep_weekly": {"weeks": 1},
"keep_monthly": {"days": 30}
}
keeps_xly = {key: val for key, val in locals().items()
if key.startswith("keep_") and key.endswith("ly")}
for unit, qty in keeps_xly.items():
if not qty:
continue
period = timedelta(**units[unit])
periods.update(set([(now - period * i, now - period * (i - 1))
for i in range(qty)]))
# Delete unneeded archive
for created_at in sorted(archives, reverse=True):
date_created_at = datetime.utcfromtimestamp(created_at)
keep_for = set(filter(lambda period: period[0] <= date_created_at <= period[1], periods))
periods -= keep_for
if keep_for or keep_last > 0 or date_created_at >= keep_within:
keep_last -= 1
continue
archives[created_at].delete()
# =================================================
# Repository abstract actions
# =================================================
def install(self):
raise NotImplementedError()
def update(self):
raise NotImplementedError()
def purge(self):
raise NotImplementedError()
def list_archives_names(self, prefix=None):
raise NotImplementedError()
def compute_space_used(self):
raise NotImplementedError()
def compute_free_space(self):
raise NotImplementedError()
class LocalBackupRepository(BackupRepository):
def install(self):
self.new_values['location'] = self.location
mkdir(self.location, mode=0o0750, parents=True, uid="admin", gid="root", force=True)
def update(self):
self.install()
def purge(self):
rm(self.location, recursive=True, force=True)
class BackupArchive:
def __init__(self, repo, name=None, manager=None):
self.manager = manager
self.name = name or manager.name
if self.name.endswith(".tar.gz"):
self.name = self.name[: -len(".tar.gz")]
elif self.name.endswith(".tar"):
self.name = self.name[: -len(".tar")]
self.repo = repo
# Cast
if self.repo.method_name == 'tar':
self.__class__ = yunohost.repositories.tar.TarBackupArchive
elif self.repo.method_name == 'borg':
self.__class__ = yunohost.repositories.borg.BorgBackupArchive
else:
self.__class__ = yunohost.repositories.hook.HookBackupArchive
# Assert archive exists
if self.manager.__class__.__name__ != "BackupManager" and self.name not in self.repo.list_archives(False):
raise YunohostValidationError("backup_archive_name_unknown", name=name)
@property
def archive_path(self):
"""Return the archive path"""
return self.repo.location + '::' + self.name
@property
def work_dir(self):
"""
Return the working directory
For a BackupManager, it is the directory where we prepare the files to
backup
For a RestoreManager, it is the directory where we mount the archive
before restoring
"""
return self.manager.work_dir
# This is not a property cause it could be managed in a hook
def need_organized_files(self):
return self.repo.need_organized_files
def organize_and_backup(self):
"""
Run the backup on files listed by the BackupManager instance
This method shouldn't be overridden; prefer overriding self.backup() and
self.clean()
"""
if self.need_organized_files():
self._organize_files()
self.repo.install()
# Check free space in output
self.repo.check_is_enough_free_space(self.manager.size)
try:
self.backup()
finally:
self.clean()
def select_files(self):
files_in_archive = self.list()
if "info.json" in files_in_archive:
leading_dot = ""
yield "info.json"
elif "./info.json" in files_in_archive:
leading_dot = "./"
yield "./info.json"
else:
logger.debug(
"unable to retrieve 'info.json' inside the archive", exc_info=1
)
raise YunohostError(
"backup_archive_cant_retrieve_info_json", archive=self.archive_path
)
if f"{leading_dot}backup.csv" in files_in_archive:
yield f"{leading_dot}backup.csv"
else:
# Old backup archive have no backup.csv file
pass
# Extract system parts backup
conf_extracted = False
system_targets = self.manager.targets.list("system", exclude=["Skipped"])
apps_targets = self.manager.targets.list("apps", exclude=["Skipped"])
for system_part in system_targets:
if system_part.startswith("conf_"):
if conf_extracted:
continue
system_part = "conf/"
conf_extracted = True
else:
system_part = system_part.replace("_", "/") + "/"
yield leading_dot + system_part
yield f"{leading_dot}hook/restore/"
# Extract apps backup
for app in apps_targets:
yield f"{leading_dot}apps/{app}"
def _get_info_string(self):
"""Extract info file from archive if needed and read it"""
cache_info_dir = CACHE_INFO_DIR.format(repository=self.repo.entity)
mkdir(cache_info_dir, mode=0o0700, parents=True, force=True)
info_file = f"{cache_info_dir}/{self.name}.info.json"
if not os.path.exists(info_file):
tmp_dir = tempfile.mkdtemp()
try:
files_in_archive = self.list()
if "info.json" in files_in_archive:
self.extract("info.json", destination=tmp_dir)
elif "./info.json" in files_in_archive:
self.extract("./info.json", destination=tmp_dir)
else:
raise YunohostError(
"backup_archive_cant_retrieve_info_json", archive=self.archive_path
)
# FIXME should we cache there is no info.json ?
shutil.move(os.path.join(tmp_dir, "info.json"), info_file)
finally:
rm(tmp_dir, recursive=True, force=True)
try:
return read_file(info_file)
except MoulinetteError as e:
logger.debug("unable to load '%s'", info_file, exc_info=1)
raise YunohostError('backup_invalid_archive', error=e)
def info(self, with_details=False, human_readable=False):
info_json = self._get_info_string()
if not info_json:
raise YunohostError('backup_info_json_not_implemented')
try:
info = json.loads(info_json)
except Exception as e:
logger.debug("unable to load info json", exc_info=1)
raise YunohostError('backup_invalid_archive', error=e)
# (legacy) Retrieve backup size
# FIXME
size = info.get("size", 0)
if not size:
tar = tarfile.open(
self.archive_path, "r:gz" if self.archive_path.endswith(".gz") else "r"
)
size = reduce(
lambda x, y: getattr(x, "size", x) + getattr(y, "size", y), tar.getmembers()
)
tar.close()
result = {
"path": self.archive_path,
"created_at": datetime.utcfromtimestamp(info["created_at"]),
"description": info["description"],
"size": size,
}
if human_readable:
result['size'] = binary_to_human(result['size']) + 'B'
if with_details:
system_key = "system"
# Historically 'system' was 'hooks'
if "hooks" in info.keys():
system_key = "hooks"
if "size_details" in info.keys():
for category in ["apps", "system"]:
for name, key_info in info[category].items():
if category == "system":
# Stupid legacy fix for weird format between 3.5 and 3.6
if isinstance(key_info, dict):
key_info = key_info.keys()
info[category][name] = key_info = {"paths": key_info}
else:
info[category][name] = key_info
if name in info["size_details"][category].keys():
key_info["size"] = info["size_details"][category][name]
if human_readable:
key_info["size"] = binary_to_human(key_info["size"]) + "B"
else:
key_info["size"] = -1
if human_readable:
key_info["size"] = "?"
result["apps"] = info["apps"]
result["system"] = info[system_key]
result["from_yunohost_version"] = info.get("from_yunohost_version")
return info
def clean(self):
"""
Unmount subdirectories of the working directory and delete it if temporary
"""
self.manager.clean_work_dir(self.need_organized_files())
def _organize_files(self):
"""
Mount all csv sources at their related destination paths
The goal is to organize the files app by app and hook by hook, before
a custom backup method runs or before the restore operation (in the case
of an unorganized archive).
The use of bind mounts may look strange to a user, because the du -sb
command will report that the working directory is big.
"""
paths_needed_to_be_copied = []
for path in self.manager.paths_to_backup:
src = path["source"]
if self.manager.__class__.__name__ == "RestoreManager":
# TODO Support to run this before a restore (and not only before
# backup). To do that RestoreManager.unorganized_work_dir should
# be implemented
src = os.path.join(self.unorganized_work_dir, src)
dest = os.path.join(self.work_dir, path["dest"])
if dest == src:
continue
dest_dir = os.path.dirname(dest)
# Be sure the parent dir of destination exists
if not os.path.isdir(dest_dir):
mkdir(dest_dir, parents=True)
# For directory, attempt to mount bind
if os.path.isdir(src):
mkdir(dest, parents=True, force=True)
try:
subprocess.check_call(["mount", "--rbind", src, dest])
subprocess.check_call(["mount", "-o", "remount,ro,bind", dest])
except Exception:
logger.warning(m18n.n("backup_couldnt_bind", src=src, dest=dest))
# To check if dest is mounted, use /proc/mounts that
# escape spaces as \040
raw_mounts = read_file("/proc/mounts").strip().split("\n")
mounts = [m.split()[1] for m in raw_mounts]
mounts = [m.replace("\\040", " ") for m in mounts]
if dest in mounts:
subprocess.check_call(["umount", "-R", dest])
else:
# Success, go to next file to organize
continue
# For files, create a hardlink
elif os.path.isfile(src) or os.path.islink(src):
# Can create a hard link only if files are on the same fs
# (i.e. we can't if it's on a different fs)
if os.stat(src).st_dev == os.stat(dest_dir).st_dev:
# Don't hardlink /etc/cron.d files to avoid cron bug
# 'NUMBER OF HARD LINKS > 1' see #1043
cron_path = os.path.abspath("/etc/cron") + "."
if not os.path.abspath(src).startswith(cron_path):
try:
os.link(src, dest)
except Exception as e:
# This kind of situation may happen when src and dest are on different
# logical volume ... even though the st_dev check previously match...
# E.g. this happens when running an encrypted hard drive
# where everything is mapped to /dev/mapper/some-stuff
# yet there are different devices behind it or idk ...
logger.warning(
"Could not link %s to %s (%s) ... falling back to regular copy."
% (src, dest, str(e))
)
else:
# Success, go to next file to organize
continue
# If mountbind or hardlink couldnt be created,
# prepare a list of files that need to be copied
paths_needed_to_be_copied.append(path)
if len(paths_needed_to_be_copied) == 0:
return
# Manage the case where we are not able to use mount bind abilities
# It could be just for some small files on different filesystems or due
# to mounting error
# Compute size to copy
size = sum(disk_usage(path["source"]) for path in paths_needed_to_be_copied)
size /= 1024 * 1024 # Convert bytes to megabytes
# Ask confirmation for copying
if size > MB_ALLOWED_TO_ORGANIZE:
try:
i = Moulinette.prompt(
m18n.n(
"backup_ask_for_copying_if_needed",
answers="y/N",
size=str(size),
)
)
except NotImplementedError:
raise YunohostError("backup_unable_to_organize_files")
else:
if i != "y" and i != "Y":
raise YunohostError("backup_unable_to_organize_files")
# Copy unbound paths
logger.debug(m18n.n("backup_copying_to_organize_the_archive", size=str(size)))
for path in paths_needed_to_be_copied:
dest = os.path.join(self.work_dir, path["dest"])
if os.path.isdir(path["source"]):
shutil.copytree(path["source"], dest, symlinks=True)
else:
shutil.copy(path["source"], dest)
# =================================================
# Archive abstract actions
# =================================================
def backup(self):
if self.__class__ == BackupArchive:
raise NotImplementedError()
def delete(self):
if self.__class__ == BackupArchive:
raise NotImplementedError()
def list(self):
if self.__class__ == BackupArchive:
raise NotImplementedError()
def download(self):
if self.__class__ == BackupArchive:
raise NotImplementedError()
if Moulinette.interface.type != "api":
logger.error(
"This option is only meant for the API/webadmin and doesn't make sense for the command line."
)
return
def extract(self, paths=None, destination=None, exclude_paths=[]):
if self.__class__ == BackupArchive:
raise NotImplementedError()
if isinstance(paths, str):
paths = [paths]
elif paths is None:
paths = self.select_files()
if isinstance(exclude_paths, str):
exclude_paths = [exclude_paths]
return paths, destination, exclude_paths
def mount(self):
if self.__class__ == BackupArchive:
raise NotImplementedError()
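An illustrative round-trip of BackupRepository.split_location (expected values are derived from the regex above; a sketch rather than captured output):

BackupRepository.split_location("ssh://borg@backup.example.org:2222/data/repo")
# -> {'protocol': 'ssh://', 'user': 'borg', 'domain': 'backup.example.org',
#     'port': '2222', 'path': 'data/repo'}  # the port group consumes the '/'

BackupRepository.split_location("backup.example.org")
# -> {'domain': 'backup.example.org'}  # no '/' means a bare domain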

src/utils/config.py

@ -51,6 +51,11 @@ CONFIG_PANEL_VERSION_SUPPORTED = 1.0
# The goal is to evaluate in the same way than js simple-evaluate
# https://github.com/shepherdwind/simple-evaluate
def evaluate_simple_ast(node, context=None):
"""
These js-like evaluate functions are used to safely evaluate 'visible' attributes
The goal is to evaluate in the same way as the js simple-evaluate library
https://github.com/shepherdwind/simple-evaluate
"""
if context is None:
context = {}
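A usage sketch (an assumption, not from this commit: it presumes the function accepts a node from ast.parse(..., mode="eval"), which is how its callers appear to use it):

import ast

# Safely evaluate a visibility expression without eval()
node = ast.parse("a + 1 == 3", mode="eval").body
evaluate_simple_ast(node, context={"a": 2})  # -> True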
@@ -204,7 +209,7 @@ class ConfigPanel:
try:
entities = [
re.match(
"^" + cls.save_path_tpl.format(entity="(?p<entity>)") + "$", f
"^" + cls.save_path_tpl.format(entity="(?P<entity>[^/]*)") + "$", f
).group("entity")
for f in glob.glob(cls.save_path_tpl.format(entity="*"))
if os.path.isfile(f)
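The fix matters because (?p<entity>) is not valid Python regex syntax (the group marker must be an uppercase P, and the empty group would capture nothing). A quick check, using a hypothetical save path template:

import re

save_path_tpl = "/etc/yunohost/example/{entity}.yml"  # hypothetical template
pattern = "^" + save_path_tpl.format(entity="(?P<entity>[^/]*)") + "$"
m = re.match(pattern, "/etc/yunohost/example/foo.yml")
print(m.group("entity"))  # -> foo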
@@ -284,7 +289,7 @@ class ConfigPanel:
if "ask" in option:
ask = _value_for_locale(option["ask"])
elif "i18n" in self.config:
ask = m18n.n(self.config["i18n"] + "_" + option["id"])
ask = m18n.n(self.config["i18n"] + "_" + option["id"], **self.values)
if mode == "full":
option["ask"] = ask
@@ -577,7 +582,9 @@ class ConfigPanel:
logger.warning(f"Unknown key '{key}' found in config panel")
# Todo search all i18n keys
out[key] = (
value if key not in ["ask", "help", "name"] else {"en": value}
value
if key not in ["ask", "help", "name"] or isinstance(value, (dict, OrderedDict))
else {"en": value}
)
return out
@@ -666,10 +673,10 @@ class ConfigPanel:
if "i18n" in self.config:
for panel, section, option in self._iterate():
if "ask" not in option:
option["ask"] = m18n.n(self.config["i18n"] + "_" + option["id"])
option["ask"] = m18n.n(self.config["i18n"] + "_" + option["id"], **self.values)
# auto add i18n help text if present in locales
if m18n.key_exists(self.config["i18n"] + "_" + option["id"] + '_help'):
option["help"] = m18n.n(self.config["i18n"] + "_" + option["id"] + '_help')
option["help"] = m18n.n(self.config["i18n"] + "_" + option["id"] + '_help', **self.values)
def display_header(message):
"""CLI panel/section header display"""
@@ -690,7 +697,8 @@ class ConfigPanel:
if panel == obj:
name = _value_for_locale(panel["name"])
display_header(f"\n{'='*40}\n>>>> {name}\n{'='*40}")
if name:
display_header(f"\n{'='*40}\n>>>> {name}\n{'='*40}")
else:
name = _value_for_locale(section["name"])
if name:
@@ -771,7 +779,7 @@ class ConfigPanel:
logger.info("Saving the new configuration...")
dir_path = os.path.dirname(os.path.realpath(self.save_path))
if not os.path.exists(dir_path):
mkdir(dir_path, mode=0o700)
mkdir(dir_path, mode=0o700, parents=True)
values_to_save = self.future_values
if self.save_mode == "diff":
@@ -908,6 +916,10 @@ class Question:
# Normalize and validate
self.value = self.normalize(self.value, self)
self._prevalidate()
# Search for validator in hooks
validator = f"validate__{self.name}"
if validator in self.hooks:
self.hooks[validator](self)
except YunohostValidationError as e:
# If in interactive cli, re-ask the current question
if i < 4 and Moulinette.interface.type == "cli" and os.isatty(1):
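A sketch of such a hook, assuming hooks maps names of the form validate__<question name> to callables taking the Question instance (the hook body and the i18n key below are made up for illustration):

def validate__admin_password(question):
    # Hypothetical rule: refuse passwords shorter than 8 characters
    if len(str(question.value)) < 8:
        raise YunohostValidationError("password_too_simple_1")

hooks = {"validate__admin_password": validate__admin_password}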
@@ -951,7 +963,7 @@ class Question:
def _format_text_for_user_input_in_cli(self):
text_for_user_input_in_cli = _value_for_locale(self.ask)
text_for_user_input_in_cli = _value_for_locale(self.ask).format(**self.context)
if self.readonly:
text_for_user_input_in_cli = colorize(text_for_user_input_in_cli, "purple")
@@ -1068,10 +1080,17 @@ class TagsQuestion(Question):
@staticmethod
def normalize(value, option={}):
if isinstance(value, list):
option = option.__dict__ if isinstance(option, Question) else option
list_mode = "default" in option and isinstance(option["default"], list)
if isinstance(value, list) and not list_mode:
return ",".join(value)
if isinstance(value, str):
value = value.strip()
if list_mode:
value = value.split(",")
return value
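In effect, the option's default now decides the canonical type: a list default keeps (or turns) the value into a list, anything else is flattened to a comma-separated string. For example:

TagsQuestion.normalize(["a", "b"], {})          # -> "a,b"
TagsQuestion.normalize("a,b", {"default": []})  # -> ["a", "b"]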
def _prevalidate(self):
@@ -1086,7 +1105,7 @@ class TagsQuestion(Question):
self.value = values
def _post_parse_value(self):
if isinstance(self.value, list):
if isinstance(self.value, list) and not isinstance(self.default, list):
self.value = ",".join(self.value)
return super()._post_parse_value()
@@ -1143,8 +1162,10 @@ class PathQuestion(Question):
name=option.get("name"),
error="Question is mandatory",
)
return "/" + value.strip().strip(" /")
value = value.strip().strip(" /")
if not value.startswith("~"):
value = "/" + value
return value
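So regular paths still get a single leading slash enforced, while ~-prefixed values are now left untouched. For instance (assuming the usual (value, option={}) static signature):

PathQuestion.normalize("foo/bar/")   # -> "/foo/bar"
PathQuestion.normalize("~/backups")  # -> "~/backups"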
class BooleanQuestion(Question):
@@ -1260,9 +1281,9 @@ class DomainQuestion(Question):
@staticmethod
def normalize(value, option={}):
if value.startswith("https://"):
value = value[len("https://") :]
value = value[len("https://"):]
elif value.startswith("http://"):
value = value[len("http://") :]
value = value[len("http://"):]
# Remove trailing slashes
value = value.rstrip("/").lower()
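For instance:

DomainQuestion.normalize("https://Example.ORG/")  # -> "example.org"
DomainQuestion.normalize("sub.domain.tld/")       # -> "sub.domain.tld"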

View file

@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from collections import OrderedDict
from moulinette import m18n
@@ -30,7 +31,7 @@ def _value_for_locale(values):
A utf-8 encoded string
"""
if not isinstance(values, dict):
if not isinstance(values, (dict, OrderedDict)):
return values
for lang in [m18n.locale, m18n.default_locale]:
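Concretely, a translated dict yields the value for the current locale (falling back to the default locale), while any non-dict value passes through unchanged:

_value_for_locale({"en": "Hello", "fr": "Bonjour"})  # -> "Hello" when locale is "en"
_value_for_locale("Hello")                           # -> "Hello" (returned as-is)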

View file

@@ -27,6 +27,7 @@ from moulinette.utils.process import check_output
logger = logging.getLogger("yunohost.utils.network")
SHF_BASE_URL = "https://{domain}/.well-known/self-hosting-federation/v1"
def get_public_ip(protocol=4):
@@ -163,3 +164,60 @@ def _extract_inet(string, skip_netmask=False, skip_loopback=True):
break
return result
def get_ssh_public_key():
""" Return the prefered public key
This is used by the Self-Hosting Federation protocol
"""
# Local imports, in case they are not already available at module level
import os
from moulinette.utils.filesystem import read_file

keys = [
'/etc/ssh/ssh_host_ed25519_key.pub',
'/etc/ssh/ssh_host_rsa_key.pub'
]
for key in keys:
if os.path.exists(key):
# We return the key without user and machine name.
# Providers don't need this info.
return " ".join(read_file(key).split(" ")[0:2])
def shf_request(domain, service, shf_id=None, data={}):
# Get missing info from SHF protocol
import requests
# We try to get the repository URL through SHF
base_url = SHF_BASE_URL.format(domain=domain)
url = f"{base_url}/service/{service}"
# FIXME add signature mechanism and protection against replay attacks
# FIXME add a password to manage the service?
# FIXME support self-signed destination domain by asking validation to user
try:
if data is None:
r = requests.delete(url, timeout=30)
else:
if shf_id:
r = requests.put(f"{url}/{shf_id}", data=data, timeout=30)
else:
r = requests.post(url, data=data, timeout=30)
# SSL exceptions
except requests.exceptions.SSLError:
raise MoulinetteError("download_ssl_error", url=url)
# Invalid URL
except requests.exceptions.ConnectionError:
raise MoulinetteError("invalid_url", url=url)
# Timeout exceptions
except requests.exceptions.Timeout:
raise MoulinetteError("download_timeout", url=url)
# Unknown stuff
except Exception as e:
raise MoulinetteError("download_unknown_error", url=url, error=str(e))
if r.status_code in [401, 403]:
    # "creation" here means no shf_id was given (the original code's
    # `self.creation` is undefined in this module-level function)
    if not shf_id:
        raise YunohostError(f"repository_shf_creation_{r.status_code}")
    else:
        response = r.json()
        raise YunohostError(f"repository_shf_update_{r.status_code}", message=response['message'])
elif r.status_code in [200, 201, 202]:
return r.json()
# FIXME validate repository and id
else:
raise YunohostError("repository_shf_invalid")
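A hedged usage sketch of the three verbs as implemented above (the domain, service name, payload and the 'id' field in the response are placeholders):

# POST: create a new entry for the service
info = shf_request("provider.example", "backup", data={"quota": "10G"})

# PUT: update it later, using an id presumably returned at creation
shf_request("provider.example", "backup", shf_id=info.get("id"), data={"quota": "20G"})

# DELETE: data=None triggers a DELETE on the service URL
shf_request("provider.example", "backup", data=None)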

View file

@@ -58,6 +58,14 @@ def space_used_by_directory(dirpath, follow_symlinks=True):
return stat.f_frsize * stat.f_blocks # FIXME: this doesn't do what the function name suggests it does (it returns the size of the whole filesystem)
def disk_usage(path):
# We don't do this in Python with os.stat() because we don't
# want to follow symlinks
du_output = check_output(["du", "-sb", path], shell=False)
return int(du_output.split()[0])
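A quick illustration (sizes are apparent byte counts from du -sb, and a symlink's own size is reported rather than its target's):

disk_usage("/etc/hostname")  # e.g. -> 13 (apparent size in bytes)
disk_usage("/var/www")       # total apparent size of the directory tree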
def human_to_binary(size: str) -> int:
symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y")