2018-10-26 11:24:13 +00:00
|
|
|
import os
|
2019-01-06 00:22:11 +00:00
|
|
|
import subprocess
|
2018-10-28 09:35:25 +00:00
|
|
|
import tempfile
|
2021-10-27 04:37:28 +00:00
|
|
|
from datetime import datetime as dt
|
2023-05-01 08:28:11 +00:00
|
|
|
|
2023-03-12 07:05:46 +00:00
|
|
|
from vorta.config import LOG_DIR
|
|
|
|
from vorta.i18n import trans_late, translate
|
2023-05-01 08:28:11 +00:00
|
|
|
from vorta.store.models import (
|
|
|
|
ArchiveModel,
|
|
|
|
RepoModel,
|
|
|
|
SourceFileModel,
|
|
|
|
WifiSettingModel,
|
|
|
|
)
|
2020-09-09 00:22:54 +00:00
|
|
|
from vorta.utils import borg_compat, format_archive_name, get_network_status_monitor
|
2023-05-01 08:28:11 +00:00
|
|
|
|
2021-10-04 11:31:41 +00:00
|
|
|
from .borg_job import BorgJob
|
2018-10-28 09:35:25 +00:00
|
|
|
|
2018-10-26 11:24:13 +00:00
|
|
|
|
2021-10-04 11:31:41 +00:00
|
|
|
class BorgCreateJob(BorgJob):
    """Job that runs `borg create` for a profile and records the outcome.

    `prepare()` assembles the full borg command line and performs all
    pre-flight checks; the event/result hooks below persist the new archive
    and forward progress to the UI via `self.app` signals.
    """

    def process_result(self, result):
        """Persist a finished backup run and emit a final progress message.

        On success or warning (borg returncodes 0/1) with archive data present,
        store the archive row via `get_or_create`; if borg also reported cache
        stats and the row was newly created, refresh the repo's size counters.
        Returncode 1 (warnings) produces a message linking to the log folder.
        """
        if result['returncode'] in [0, 1] and 'archive' in result['data']:
            new_archive, created = ArchiveModel.get_or_create(
                snapshot_id=result['data']['archive']['id'],
                defaults={
                    'name': result['data']['archive']['name'],
                    # SQLite can't save timezone, so we remove it here. TODO: Keep as UTC?
                    'time': dt.fromisoformat(result['data']['archive']['start']).replace(tzinfo=None),
                    'repo': result['params']['repo_id'],
                    'duration': result['data']['archive']['duration'],
                    'size': result['data']['archive']['stats']['deduplicated_size'],
                },
            )
            new_archive.save()
            # Only refresh repo-level stats when this archive is genuinely new.
            if 'cache' in result['data'] and created:
                stats = result['data']['cache']['stats']
                repo = RepoModel.get(id=result['params']['repo_id'])
                repo.total_size = stats['total_size']
                # repo.unique_csize = stats['unique_csize']
                repo.unique_size = stats['unique_size']
                repo.total_unique_chunks = stats['total_unique_chunks']
                repo.save()

        if result['returncode'] == 1:
            # Borg exit code 1 means "finished with warnings" — point the user at the logs.
            self.app.backup_progress_event.emit(
                f"[{self.params['profile_name']}] "
                + translate(
                    'BorgCreateJob',
                    'Backup finished with warnings. See the <a href="{0}">logs</a> for details.',
                ).format(LOG_DIR.as_uri())
            )
        else:
            self.app.backup_progress_event.emit(f"[{self.params['profile_name']}] {self.tr('Backup finished.')}")

    def progress_event(self, fmt):
        """Forward a borg progress line to the UI, prefixed with the profile name."""
        self.app.backup_progress_event.emit(f"[{self.params['profile_name']}] {fmt}")

    def started_event(self):
        """Signal that the backup started and show an initial progress message."""
        self.app.backup_started_event.emit()
        self.app.backup_progress_event.emit(f"[{self.params['profile_name']}] {self.tr('Backup started.')}")

    def finished_event(self, result):
        """Emit completion signals, then run the user's post-backup command."""
        self.app.backup_finished_event.emit(result)
        self.result.emit(result)
        # The post-backup hook sees borg's exit status via the `returncode` env var.
        self.pre_post_backup_cmd(self.params, cmd='post_backup_cmd', returncode=result['returncode'])

    @classmethod
    def pre_post_backup_cmd(cls, params, cmd='pre_backup_cmd', returncode=0):
        """Run the profile's pre- or post-backup shell command, if one is set.

        `cmd` names the profile attribute holding the command string
        ('pre_backup_cmd' or 'post_backup_cmd'). The command runs through the
        shell (user-supplied, so `shell=True` is intentional here) with
        repo/profile details and `returncode` exposed as environment variables.

        Returns the command's exit code, or 0 when no command is configured.
        """
        cmd = getattr(params['profile'], cmd)
        if cmd:
            env = {
                **os.environ.copy(),
                'repo_url': params['repo'].url,
                'profile_name': params['profile'].name,
                'profile_slug': params['profile'].slug(),
                'returncode': str(returncode),
            }
            proc = subprocess.run(cmd, shell=True, env=env)
            return proc.returncode
        else:
            return 0  # 0 if no command was run.

    @classmethod
    def prepare(cls, profile):
        """
        `borg create` is called from different places and needs some preparation.
        Centralize it here and return the required arguments to the caller.

        Returns the dict from `super().prepare()` extended with `cmd`, `profile`
        and `repo`. `ret['ok']` is True only when every pre-flight check passed;
        otherwise `ret['message']` explains why the backup cannot start.
        """
        ret = super().prepare(profile)
        if not ret['ok']:
            return ret
        else:
            ret['ok'] = False  # Set back to False, so we can do our own checks here.

        n_backup_folders = SourceFileModel.select().where(SourceFileModel.profile == profile).count()

        # cmd options like `--paths-from-command` require a command
        # that is appended to the arguments
        # $ borg create --paths-from-command repo::archive1 -- find /home/user -type f -size -76M
        extra_cmd_options = []
        suffix_command = []
        if profile.repo.create_backup_cmd:
            # Everything before '-- ' becomes extra options; the rest (including
            # the '--' separator) is appended after the source dirs.
            s1, sep, s2 = profile.repo.create_backup_cmd.partition('-- ')
            extra_cmd_options = s1.split()
            suffix_command = (sep + s2).split()

        if n_backup_folders == 0 and '--paths-from-command' not in extra_cmd_options:
            ret['message'] = trans_late('messages', 'Add some folders to back up first.')
            return ret

        network_status_monitor = get_network_status_monitor()
        current_wifi = network_status_monitor.get_current_wifi()
        if current_wifi is not None:
            # Block remote backups when the current Wifi is explicitly disallowed
            # for this profile. (`== False` is required by the peewee query DSL.)
            wifi_is_disallowed = WifiSettingModel.select().where(
                (WifiSettingModel.ssid == current_wifi)
                & (WifiSettingModel.allowed == False)  # noqa
                & (WifiSettingModel.profile == profile)
            )
            if wifi_is_disallowed.count() > 0 and profile.repo.is_remote_repo():
                ret['message'] = trans_late('messages', 'Current Wifi is not allowed.')
                return ret

        if (
            profile.repo.is_remote_repo()
            and profile.dont_run_on_metered_networks
            and network_status_monitor.is_network_metered()
        ):
            ret['message'] = trans_late('messages', 'Not running backup over metered connection.')
            return ret

        ret['profile'] = profile
        ret['repo'] = profile.repo

        # Run user-supplied pre-backup command
        if cls.pre_post_backup_cmd(ret) != 0:
            ret['message'] = trans_late('messages', 'Pre-backup command returned non-zero exit code.')
            return ret

        if not profile.repo.is_remote_repo() and not os.path.exists(profile.repo.url):
            ret['message'] = trans_late('messages', 'Repo folder not mounted or moved.')
            return ret

        if 'zstd' in profile.compression and not borg_compat.check('ZSTD'):
            ret['message'] = trans_late(
                'messages',
                'Your current Borg version does not support ZStd compression.',
            )
            return ret

        cmd = [
            'borg',
            'create',
            '--list',
            '--progress',
            '--info',
            '--log-json',
            '--json',
            '--filter=AM',
            '-C',
            profile.compression,
        ]
        cmd += extra_cmd_options

        # Add excludes
        # Partly inspired by borgmatic/borgmatic/borg/create.py
        if profile.exclude_patterns is not None:
            exclude_dirs = []
            for p in profile.exclude_patterns.split('\n'):
                if p.strip():
                    expanded_directory = os.path.expanduser(p.strip())
                    exclude_dirs.append(expanded_directory)

            if exclude_dirs:
                # The temp file stays open (delete=True removes it on close) and
                # is handed to ret['cleanup_files'] so the caller can close it
                # only after the borg process has read it.
                pattern_file = tempfile.NamedTemporaryFile('w', delete=True)
                pattern_file.write('\n'.join(exclude_dirs))
                pattern_file.flush()
                cmd.extend(['--exclude-from', pattern_file.name])
                ret['cleanup_files'].append(pattern_file)

        if profile.exclude_if_present is not None:
            for f in profile.exclude_if_present.split('\n'):
                if f.strip():
                    cmd.extend(['--exclude-if-present', f.strip()])

        # Add repo url and source dirs.
        new_archive_name = format_archive_name(profile, profile.new_archive_name)

        if borg_compat.check('V2'):
            # Borg 2 separates the repo (-r) from the archive name.
            cmd += ["-r", profile.repo.url, new_archive_name]
        else:
            cmd.append(f"{profile.repo.url}::{new_archive_name}")

        for f in SourceFileModel.select().where(SourceFileModel.profile == profile.id):
            cmd.append(f.dir)

        cmd += suffix_command

        ret['message'] = trans_late('messages', 'Starting backup…')
        ret['ok'] = True
        ret['cmd'] = cmd

        return ret
|