Mirror of https://github.com/borgbase/vorta — commit 4c3e97a76c (parent 0c77fdde97): "Refactor: Split up vorta/models.py (#1112)".
|
@ -7,7 +7,7 @@ from vorta._version import __version__
|
|||
from vorta.i18n import trans_late, translate
|
||||
from vorta.config import SETTINGS_DIR
|
||||
from vorta.log import init_logger, logger
|
||||
from vorta.models import init_db
|
||||
from vorta.store.connection import init_db
|
||||
from vorta.updater import get_updater
|
||||
from vorta.utils import parse_args
|
||||
|
||||
|
|
|
@ -10,7 +10,8 @@ from vorta.borg.version import BorgVersionJob
|
|||
from vorta.borg.break_lock import BorgBreakJob
|
||||
from vorta.config import TEMP_DIR, PROFILE_BOOTSTRAP_FILE
|
||||
from vorta.i18n import init_translations, translate
|
||||
from vorta.models import BackupProfileModel, SettingsModel, cleanup_db
|
||||
from vorta.store.models import BackupProfileModel, SettingsModel
|
||||
from vorta.store.connection import cleanup_db
|
||||
from vorta.qt_single_application import QtSingleApplication
|
||||
from vorta.scheduler import VortaScheduler
|
||||
from vorta.borg.jobs_manager import JobsManager
|
||||
|
|
|
@ -16,7 +16,7 @@ from subprocess import Popen, PIPE, TimeoutExpired
|
|||
|
||||
from vorta.borg.jobs_manager import JobInterface
|
||||
from vorta.i18n import trans_late, translate
|
||||
from vorta.models import EventLogModel, BackupProfileMixin
|
||||
from vorta.store.models import EventLogModel, BackupProfileMixin
|
||||
from vorta.utils import borg_compat, pretty_bytes
|
||||
from vorta.keyring.abc import VortaKeyring
|
||||
from vorta.keyring.db import VortaDBKeyring
|
||||
|
@ -256,8 +256,8 @@ class BorgJob(JobInterface, BackupProfileMixin):
|
|||
msg = (
|
||||
f"{translate('BorgJob','Files')}: {parsed['nfiles']}, "
|
||||
f"{translate('BorgJob','Original')}: {pretty_bytes(parsed['original_size'])}, "
|
||||
f"{translate('BorgJob','Deduplicated')}: {pretty_bytes(parsed['deduplicated_size'])}, " # noqa: E501
|
||||
f"{translate('BorgJob','Compressed')}: {pretty_bytes(parsed['compressed_size'])}"
|
||||
f"{translate('BorgJob','Compressed')}: {pretty_bytes(parsed['compressed_size'])}, "
|
||||
f"{translate('BorgJob','Deduplicated')}: {pretty_bytes(parsed['deduplicated_size'])}" # noqa: E501
|
||||
)
|
||||
self.app.backup_progress_event.emit(msg)
|
||||
except json.decoder.JSONDecodeError:
|
||||
|
|
|
@ -5,7 +5,7 @@ from datetime import datetime as dt
|
|||
|
||||
from vorta.i18n import trans_late
|
||||
from vorta.utils import format_archive_name, borg_compat, get_network_status_monitor
|
||||
from vorta.models import SourceFileModel, ArchiveModel, WifiSettingModel, RepoModel
|
||||
from vorta.store.models import SourceFileModel, ArchiveModel, WifiSettingModel, RepoModel
|
||||
from .borg_job import BorgJob
|
||||
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from .borg_job import BorgJob
|
||||
from vorta.models import ArchiveModel, RepoModel
|
||||
from vorta.store.models import ArchiveModel, RepoModel
|
||||
|
||||
|
||||
class BorgInfoArchiveJob(BorgJob):
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from .borg_job import BorgJob, FakeProfile, FakeRepo
|
||||
from vorta.i18n import trans_late
|
||||
from vorta.models import RepoModel
|
||||
from vorta.store.models import RepoModel
|
||||
|
||||
|
||||
class BorgInfoRepoJob(BorgJob):
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from .borg_job import BorgJob, FakeProfile, FakeRepo
|
||||
from vorta.models import RepoModel
|
||||
from vorta.store.models import RepoModel
|
||||
|
||||
|
||||
class BorgInitJob(BorgJob):
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from datetime import datetime as dt
|
||||
from .borg_job import BorgJob
|
||||
from vorta.models import ArchiveModel, RepoModel
|
||||
from vorta.store.models import ArchiveModel, RepoModel
|
||||
|
||||
|
||||
class BorgListRepoJob(BorgJob):
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
from .borg_job import BorgJob
|
||||
from vorta.models import SettingsModel
|
||||
from vorta.store.models import SettingsModel
|
||||
|
||||
|
||||
class BorgMountJob(BorgJob):
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from vorta.models import ArchiveModel, RepoModel
|
||||
from vorta.store.models import ArchiveModel, RepoModel
|
||||
from .borg_job import BorgJob
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import peewee
|
||||
from .abc import VortaKeyring
|
||||
from vorta.models import SettingsModel
|
||||
from vorta.store.models import SettingsModel
|
||||
|
||||
|
||||
class VortaDBKeyring(VortaKeyring):
|
||||
|
@ -11,7 +11,7 @@ class VortaDBKeyring(VortaKeyring):
|
|||
"""
|
||||
|
||||
def set_password(self, service, repo_url, password):
|
||||
from vorta.models import RepoPassword
|
||||
from vorta.store.models import RepoPassword
|
||||
keyring_entry, created = RepoPassword.get_or_create(
|
||||
url=repo_url,
|
||||
defaults={'password': password}
|
||||
|
@ -20,7 +20,7 @@ class VortaDBKeyring(VortaKeyring):
|
|||
keyring_entry.save()
|
||||
|
||||
def get_password(self, service, repo_url):
|
||||
from vorta.models import RepoPassword
|
||||
from vorta.store.models import RepoPassword
|
||||
try:
|
||||
keyring_entry = RepoPassword.get(url=repo_url)
|
||||
return keyring_entry.password
|
||||
|
|
|
@ -1,453 +0,0 @@
|
|||
"""
|
||||
This module provides the app's data store using Peewee with SQLite.
|
||||
|
||||
At the bottom there is a simple schema migration system.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import peewee as pw
|
||||
from playhouse import signals
|
||||
from playhouse.migrate import SqliteMigrator, migrate
|
||||
|
||||
from vorta.autostart import open_app_at_startup
|
||||
from vorta.i18n import trans_late
|
||||
from vorta.utils import slugify
|
||||
|
||||
SCHEMA_VERSION = 18
|
||||
|
||||
db = pw.Proxy()
|
||||
|
||||
|
||||
class JSONField(pw.TextField):
|
||||
"""
|
||||
Class to "fake" a JSON field with a text field. Not efficient but works nicely.
|
||||
|
||||
From: https://gist.github.com/rosscdh/f4f26758b0228f475b132c688f15af2b
|
||||
"""
|
||||
|
||||
def db_value(self, value):
|
||||
"""Convert the python value for storage in the database."""
|
||||
return value if value is None else json.dumps(value)
|
||||
|
||||
def python_value(self, value):
|
||||
"""Convert the database value to a pythonic value."""
|
||||
return value if value is None else json.loads(value)
|
||||
|
||||
|
||||
class BaseModel(signals.Model):
|
||||
"""Common model superclass."""
|
||||
|
||||
|
||||
class RepoModel(BaseModel):
|
||||
"""A single remote repo with unique URL."""
|
||||
url = pw.CharField(unique=True)
|
||||
added_at = pw.DateTimeField(default=datetime.now)
|
||||
encryption = pw.CharField(null=True)
|
||||
unique_size = pw.IntegerField(null=True)
|
||||
unique_csize = pw.IntegerField(null=True)
|
||||
total_size = pw.IntegerField(null=True)
|
||||
total_unique_chunks = pw.IntegerField(null=True)
|
||||
create_backup_cmd = pw.CharField(default='')
|
||||
extra_borg_arguments = pw.CharField(default='')
|
||||
|
||||
def is_remote_repo(self):
|
||||
return not self.url.startswith('/')
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class RepoPassword(BaseModel):
|
||||
"""Fallback to save repo passwords. Only used if no Keyring available."""
|
||||
url = pw.CharField(unique=True)
|
||||
password = pw.CharField()
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class BackupProfileModel(BaseModel):
|
||||
"""Allows the user to switch between different configurations."""
|
||||
name = pw.CharField()
|
||||
added_at = pw.DateTimeField(default=datetime.now)
|
||||
repo = pw.ForeignKeyField(RepoModel, default=None, null=True)
|
||||
ssh_key = pw.CharField(default=None, null=True)
|
||||
compression = pw.CharField(default='lz4')
|
||||
exclude_patterns = pw.TextField(null=True)
|
||||
exclude_if_present = pw.TextField(null=True)
|
||||
schedule_mode = pw.CharField(default='off')
|
||||
schedule_interval_count = pw.IntegerField(default=3)
|
||||
schedule_interval_unit = pw.CharField(default='hours')
|
||||
schedule_fixed_hour = pw.IntegerField(default=3)
|
||||
schedule_fixed_minute = pw.IntegerField(default=42)
|
||||
schedule_interval_hours = pw.IntegerField(default=3) # no longer used
|
||||
schedule_interval_minutes = pw.IntegerField(default=42) # no longer used
|
||||
schedule_make_up_missed = pw.BooleanField(default=True)
|
||||
validation_on = pw.BooleanField(default=True)
|
||||
validation_weeks = pw.IntegerField(default=3)
|
||||
prune_on = pw.BooleanField(default=False)
|
||||
prune_hour = pw.IntegerField(default=2)
|
||||
prune_day = pw.IntegerField(default=7)
|
||||
prune_week = pw.IntegerField(default=4)
|
||||
prune_month = pw.IntegerField(default=6)
|
||||
prune_year = pw.IntegerField(default=2)
|
||||
prune_keep_within = pw.CharField(default='10H', null=True)
|
||||
new_archive_name = pw.CharField(default="{hostname}-{now:%Y-%m-%d-%H%M%S}")
|
||||
prune_prefix = pw.CharField(default="{hostname}-")
|
||||
pre_backup_cmd = pw.CharField(default='')
|
||||
post_backup_cmd = pw.CharField(default='')
|
||||
dont_run_on_metered_networks = pw.BooleanField(default=True)
|
||||
|
||||
def refresh(self):
|
||||
return type(self).get(self._pk_expr())
|
||||
|
||||
def slug(self):
|
||||
return slugify(self.name)
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class SourceFileModel(BaseModel):
|
||||
"""A folder to be backed up, related to a Backup Configuration."""
|
||||
dir = pw.CharField()
|
||||
dir_size = pw.BigIntegerField(default=-1)
|
||||
dir_files_count = pw.BigIntegerField(default=-1)
|
||||
path_isdir = pw.BooleanField(default=False)
|
||||
profile = pw.ForeignKeyField(BackupProfileModel, default=1)
|
||||
added_at = pw.DateTimeField(default=datetime.utcnow)
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
table_name = 'sourcedirmodel'
|
||||
|
||||
|
||||
class ArchiveModel(BaseModel):
|
||||
"""An archive in a remote repository."""
|
||||
snapshot_id = pw.CharField()
|
||||
name = pw.CharField()
|
||||
repo = pw.ForeignKeyField(RepoModel, backref='archives')
|
||||
time = pw.DateTimeField()
|
||||
duration = pw.FloatField(null=True)
|
||||
size = pw.IntegerField(null=True)
|
||||
|
||||
def formatted_time(self):
|
||||
return
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class WifiSettingModel(BaseModel):
|
||||
"""Save Wifi Settings"""
|
||||
ssid = pw.CharField()
|
||||
last_connected = pw.DateTimeField(null=True)
|
||||
allowed = pw.BooleanField(default=True)
|
||||
profile = pw.ForeignKeyField(BackupProfileModel, default=1)
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class EventLogModel(BaseModel):
|
||||
"""Keep a log of background jobs."""
|
||||
start_time = pw.DateTimeField(default=datetime.now)
|
||||
end_time = pw.DateTimeField(default=datetime.now)
|
||||
category = pw.CharField()
|
||||
subcommand = pw.CharField(null=True)
|
||||
message = pw.CharField(null=True)
|
||||
returncode = pw.IntegerField(default=1)
|
||||
params = JSONField(null=True)
|
||||
profile = pw.CharField(null=True)
|
||||
repo_url = pw.CharField(null=True)
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class SchemaVersion(BaseModel):
|
||||
"""Keep DB version to apply the correct migrations."""
|
||||
version = pw.IntegerField()
|
||||
changed_at = pw.DateTimeField(default=datetime.now)
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class SettingsModel(BaseModel):
|
||||
"""App settings unrelated to a single profile or repo"""
|
||||
key = pw.CharField(unique=True)
|
||||
value = pw.BooleanField(default=False)
|
||||
str_value = pw.CharField(default='')
|
||||
label = pw.CharField()
|
||||
type = pw.CharField()
|
||||
|
||||
class Meta:
|
||||
database = db
|
||||
|
||||
|
||||
class BackupProfileMixin:
|
||||
"""Extend to support multiple profiles later."""
|
||||
|
||||
def profile(self):
|
||||
return BackupProfileModel.get(id=self.window().current_profile.id)
|
||||
|
||||
|
||||
def _apply_schema_update(current_schema, version_after, *operations):
|
||||
with db.atomic():
|
||||
migrate(*operations)
|
||||
current_schema.version = version_after
|
||||
current_schema.changed_at = datetime.now()
|
||||
current_schema.save()
|
||||
|
||||
|
||||
def get_misc_settings():
|
||||
''' Global settings that apply per platform '''
|
||||
# Default settings for all platforms.
|
||||
settings = [
|
||||
{
|
||||
'key': 'enable_notifications', 'value': True, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Display notifications when background tasks fail')
|
||||
},
|
||||
{
|
||||
'key': 'enable_notifications_success', 'value': False, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Also notify about successful background tasks')
|
||||
},
|
||||
{
|
||||
'key': 'autostart', 'value': False, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Automatically start Vorta at login')
|
||||
},
|
||||
{
|
||||
'key': 'foreground', 'value': True, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Open main window on startup')
|
||||
},
|
||||
{
|
||||
'key': 'get_srcpath_datasize', 'value': True, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Get statistics of file/folder when added')
|
||||
},
|
||||
{
|
||||
'key': 'use_system_keyring', 'value': True, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Store repository passwords in system keychain, if available.')
|
||||
},
|
||||
{
|
||||
'key': 'override_mount_permissions', 'value': False, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Try to replace existing permissions when mounting an archive.')
|
||||
},
|
||||
{
|
||||
'key': 'previous_profile_id', 'str_value': '1', 'type': 'internal',
|
||||
'label': 'Previously selected profile'
|
||||
},
|
||||
{
|
||||
'key': 'previous_window_width', 'str_value': '800', 'type': 'internal',
|
||||
'label': 'Previous window width'
|
||||
},
|
||||
{
|
||||
'key': 'previous_window_height', 'str_value': '600', 'type': 'internal',
|
||||
'label': 'Previous window height'
|
||||
},
|
||||
]
|
||||
if sys.platform == 'darwin':
|
||||
settings += [
|
||||
{
|
||||
'key': 'check_for_updates', 'value': True, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Check for updates on startup')
|
||||
},
|
||||
{
|
||||
'key': 'updates_include_beta', 'value': False, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Include pre-release versions when checking for updates')
|
||||
},
|
||||
]
|
||||
else:
|
||||
settings += [
|
||||
{
|
||||
'key': 'enable_background_question', 'value': True, 'type': 'checkbox',
|
||||
'label': trans_late('settings',
|
||||
'Display background exit dialog')
|
||||
},
|
||||
{
|
||||
'key': 'disable_background_state', 'value': False, 'type': 'internal',
|
||||
'label': 'Previous background exit button state'
|
||||
}
|
||||
]
|
||||
return settings
|
||||
|
||||
|
||||
@signals.post_save(sender=SettingsModel)
|
||||
def setup_autostart(model_class, instance, created):
|
||||
if instance.key == 'autostart':
|
||||
open_app_at_startup(instance.value)
|
||||
|
||||
|
||||
def cleanup_db():
|
||||
# Clean up database
|
||||
db.execute_sql("VACUUM")
|
||||
db.close()
|
||||
|
||||
|
||||
def init_db(con=None):
|
||||
if con is not None:
|
||||
os.umask(0o0077)
|
||||
db.initialize(con)
|
||||
db.connect()
|
||||
db.create_tables([RepoModel, RepoPassword, BackupProfileModel, SourceFileModel, SettingsModel,
|
||||
ArchiveModel, WifiSettingModel, EventLogModel, SchemaVersion])
|
||||
|
||||
# Delete old log entries after 3 months.
|
||||
three_months_ago = datetime.now() - timedelta(days=180)
|
||||
EventLogModel.delete().where(EventLogModel.start_time < three_months_ago)
|
||||
|
||||
# Migrations
|
||||
# See http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#schema-migrations
|
||||
current_schema, created = SchemaVersion.get_or_create(id=1, defaults={'version': SCHEMA_VERSION})
|
||||
current_schema.save()
|
||||
if created or current_schema.version == SCHEMA_VERSION:
|
||||
pass
|
||||
else:
|
||||
migrator = SqliteMigrator(con)
|
||||
|
||||
if current_schema.version < 4: # version 3 to 4
|
||||
_apply_schema_update(
|
||||
current_schema, 4,
|
||||
migrator.add_column(ArchiveModel._meta.table_name, 'duration', pw.FloatField(null=True)),
|
||||
migrator.add_column(ArchiveModel._meta.table_name, 'size', pw.IntegerField(null=True))
|
||||
)
|
||||
if current_schema.version < 5:
|
||||
_apply_schema_update(
|
||||
current_schema, 5,
|
||||
migrator.drop_not_null(WifiSettingModel._meta.table_name, 'last_connected'),
|
||||
)
|
||||
|
||||
if current_schema.version < 6:
|
||||
_apply_schema_update(
|
||||
current_schema, 6,
|
||||
migrator.add_column(EventLogModel._meta.table_name, 'repo_url', pw.CharField(null=True))
|
||||
)
|
||||
|
||||
if current_schema.version < 7:
|
||||
_apply_schema_update(
|
||||
current_schema, 7,
|
||||
migrator.rename_column(SourceFileModel._meta.table_name, 'config_id', 'profile_id'),
|
||||
migrator.drop_column(EventLogModel._meta.table_name, 'profile_id'),
|
||||
migrator.add_column(EventLogModel._meta.table_name, 'profile', pw.CharField(null=True))
|
||||
)
|
||||
|
||||
if current_schema.version < 8:
|
||||
_apply_schema_update(
|
||||
current_schema, 8,
|
||||
migrator.add_column(BackupProfileModel._meta.table_name,
|
||||
'prune_keep_within', pw.CharField(null=True)))
|
||||
|
||||
if current_schema.version < 9:
|
||||
_apply_schema_update(
|
||||
current_schema, 9,
|
||||
migrator.add_column(BackupProfileModel._meta.table_name, 'new_archive_name',
|
||||
pw.CharField(default="{hostname}-{profile_slug}-{now:%Y-%m-%dT%H:%M:%S}")),
|
||||
migrator.add_column(BackupProfileModel._meta.table_name, 'prune_prefix',
|
||||
pw.CharField(default="{hostname}-{profile_slug}-")),
|
||||
)
|
||||
|
||||
if current_schema.version < 10:
|
||||
_apply_schema_update(
|
||||
current_schema, 10,
|
||||
migrator.add_column(BackupProfileModel._meta.table_name, 'pre_backup_cmd',
|
||||
pw.CharField(default='')),
|
||||
migrator.add_column(BackupProfileModel._meta.table_name, 'post_backup_cmd',
|
||||
pw.CharField(default='')),
|
||||
)
|
||||
|
||||
if current_schema.version < 11:
|
||||
_apply_schema_update(current_schema, 11)
|
||||
for profile in BackupProfileModel:
|
||||
if profile.compression == 'zstd':
|
||||
profile.compression = 'zstd,3'
|
||||
if profile.compression == 'lzma,6':
|
||||
profile.compression = 'auto,lzma,6'
|
||||
profile.save()
|
||||
|
||||
if current_schema.version < 12:
|
||||
_apply_schema_update(
|
||||
current_schema, 12,
|
||||
migrator.add_column(RepoModel._meta.table_name,
|
||||
'extra_borg_arguments', pw.CharField(default='')))
|
||||
|
||||
if current_schema.version < 13:
|
||||
# Migrate ArchiveModel data to new table to remove unique constraint from snapshot_id column.
|
||||
tables = db.get_tables()
|
||||
if ArchiveModel.select().count() == 0 and 'snapshotmodel' in tables:
|
||||
cursor = db.execute_sql('select * from snapshotmodel;')
|
||||
fields = [ArchiveModel.id, ArchiveModel.snapshot_id, ArchiveModel.name, ArchiveModel.repo,
|
||||
ArchiveModel.time, ArchiveModel.duration, ArchiveModel.size]
|
||||
data = [row for row in cursor.fetchall()]
|
||||
with db.atomic():
|
||||
size = 1000
|
||||
for i in range(0, len(data), size):
|
||||
ArchiveModel.insert_many(data[i: i + size], fields=fields).execute()
|
||||
|
||||
_apply_schema_update(current_schema, 13)
|
||||
|
||||
if current_schema.version < 14:
|
||||
_apply_schema_update(
|
||||
current_schema, 14,
|
||||
migrator.add_column(SettingsModel._meta.table_name,
|
||||
'str_value', pw.CharField(default='')))
|
||||
|
||||
if current_schema.version < 15:
|
||||
_apply_schema_update(
|
||||
current_schema, 15,
|
||||
migrator.add_column(BackupProfileModel._meta.table_name,
|
||||
'dont_run_on_metered_networks', pw.BooleanField(default=True))
|
||||
)
|
||||
|
||||
if current_schema.version < 16:
|
||||
_apply_schema_update(
|
||||
current_schema, 16,
|
||||
migrator.add_column(SourceFileModel._meta.table_name,
|
||||
'dir_size', pw.BigIntegerField(default=-1)),
|
||||
migrator.add_column(SourceFileModel._meta.table_name,
|
||||
'dir_files_count', pw.BigIntegerField(default=-1)),
|
||||
migrator.add_column(SourceFileModel._meta.table_name,
|
||||
'path_isdir', pw.BooleanField(default=False))
|
||||
)
|
||||
|
||||
if current_schema.version < 17:
|
||||
_apply_schema_update(
|
||||
current_schema, 17,
|
||||
migrator.add_column(RepoModel._meta.table_name,
|
||||
'create_backup_cmd', pw.CharField(default=''))
|
||||
)
|
||||
|
||||
if current_schema.version < 18:
|
||||
_apply_schema_update(
|
||||
current_schema, 18,
|
||||
migrator.add_column(BackupProfileModel._meta.table_name,
|
||||
'schedule_interval_unit', pw.CharField(default='hours')),
|
||||
migrator.add_column(BackupProfileModel._meta.table_name,
|
||||
'schedule_interval_count', pw.IntegerField(default=3)),
|
||||
migrator.add_column(BackupProfileModel._meta.table_name,
|
||||
'schedule_make_up_missed', pw.BooleanField(default=False)),
|
||||
migrator.add_column(EventLogModel._meta.table_name,
|
||||
'end_time', pw.DateTimeField(default=datetime.now))
|
||||
)
|
||||
|
||||
# Create missing settings and update labels. Leave setting values untouched.
|
||||
for setting in get_misc_settings():
|
||||
s, created = SettingsModel.get_or_create(key=setting['key'], defaults=setting)
|
||||
s.label = setting['label']
|
||||
s.save()
|
||||
|
||||
# Delete old log entries after 3 months.
|
||||
three_months_ago = datetime.now() - timedelta(days=3)
|
||||
EventLogModel.delete().where(EventLogModel.start_time < three_months_ago).execute()
|
|
@ -1,7 +1,7 @@
|
|||
import sys
|
||||
import logging
|
||||
from PyQt5 import QtCore, QtDBus
|
||||
from vorta.models import SettingsModel
|
||||
from vorta.store.models import SettingsModel
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -5,8 +5,9 @@ from json import JSONDecodeError
|
|||
from playhouse.shortcuts import model_to_dict, dict_to_model
|
||||
|
||||
from vorta.keyring.abc import VortaKeyring
|
||||
from vorta.models import RepoModel, SourceFileModel, WifiSettingModel, SchemaVersion, \
|
||||
SettingsModel, BackupProfileModel, db, SCHEMA_VERSION, init_db
|
||||
from vorta.store.models import RepoModel, SourceFileModel, WifiSettingModel, \
|
||||
SchemaVersion, SettingsModel, BackupProfileModel
|
||||
from vorta.store.connection import DB, SCHEMA_VERSION, init_db
|
||||
|
||||
|
||||
class ProfileExport:
|
||||
|
@ -114,8 +115,8 @@ class ProfileExport:
|
|||
|
||||
# Delete and recreate the tables to clear them
|
||||
if overwrite_settings:
|
||||
db.drop_tables([SettingsModel, WifiSettingModel])
|
||||
db.create_tables([SettingsModel, WifiSettingModel])
|
||||
DB.drop_tables([SettingsModel, WifiSettingModel])
|
||||
DB.create_tables([SettingsModel, WifiSettingModel])
|
||||
SettingsModel.insert_many(self._profile_dict['SettingsModel']).execute()
|
||||
WifiSettingModel.insert_many(self._profile_dict['WifiSettingModel']).execute()
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ from vorta.borg.list_repo import BorgListRepoJob
|
|||
from vorta.borg.prune import BorgPruneJob
|
||||
from vorta.i18n import translate
|
||||
|
||||
from vorta.models import BackupProfileModel, EventLogModel
|
||||
from vorta.store.models import BackupProfileModel, EventLogModel
|
||||
from vorta.notifications import VortaNotifications
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from playhouse import signals
|
||||
from vorta.autostart import open_app_at_startup
|
||||
from .models import (DB, RepoModel, RepoPassword, BackupProfileModel, SourceFileModel,
|
||||
SettingsModel, ArchiveModel, WifiSettingModel, EventLogModel, SchemaVersion)
|
||||
from .migrations import run_migrations
|
||||
from .settings import get_misc_settings
|
||||
|
||||
SCHEMA_VERSION = 18
|
||||
|
||||
|
||||
@signals.post_save(sender=SettingsModel)
def setup_autostart(model_class, instance, created):
    """Sync the OS login-item state whenever the 'autostart' setting is saved.

    Fires on every SettingsModel save; settings other than 'autostart'
    are ignored.
    """
    if instance.key != 'autostart':
        return
    open_app_at_startup(instance.value)
|
||||
|
||||
|
||||
def cleanup_db():
    """Compact and close the application database.

    Runs SQLite's VACUUM to reclaim space left by deleted rows, then
    closes the connection. Intended to run once at application shutdown.
    """
    # Clean up database
    DB.execute_sql("VACUUM")
    DB.close()
|
||||
|
||||
|
||||
def init_db(con=None):
    """Bind the database connection, create tables, migrate and seed settings.

    Args:
        con: a peewee database instance to bind to the ``DB`` proxy. When
             ``None``, the proxy is assumed to be initialized already
             (e.g. by tests).
    """
    if con is not None:
        os.umask(0o0077)  # restrict the SQLite file to owner-only permissions
        DB.initialize(con)
    DB.connect()
    DB.create_tables([RepoModel, RepoPassword, BackupProfileModel, SourceFileModel, SettingsModel,
                      ArchiveModel, WifiSettingModel, EventLogModel, SchemaVersion])

    # Migrations: only run when the stored schema version is behind.
    current_schema, created = SchemaVersion.get_or_create(id=1, defaults={'version': SCHEMA_VERSION})
    current_schema.save()
    if created or current_schema.version == SCHEMA_VERSION:
        pass
    else:
        run_migrations(current_schema, con)

    # Create missing settings and update labels. Leave setting values untouched.
    for setting in get_misc_settings():
        s, created = SettingsModel.get_or_create(key=setting['key'], defaults=setting)
        s.label = setting['label']
        s.save()

    # Delete old log entries after 3 months.
    # FIX: the original built this query twice — once with timedelta(days=180)
    # and no .execute() (a silent no-op), and once with timedelta(days=3),
    # which deleted far more history than the "3 months" comments claimed.
    # Consolidated into a single executed delete matching the documented
    # 3-month retention.
    three_months_ago = datetime.now() - timedelta(days=90)
    EventLogModel.delete().where(EventLogModel.start_time < three_months_ago).execute()
|
|
@ -0,0 +1,146 @@
|
|||
from datetime import datetime
|
||||
import peewee as pw
|
||||
from playhouse.migrate import SqliteMigrator, migrate
|
||||
from .models import (DB, RepoModel, BackupProfileModel, SourceFileModel,
|
||||
SettingsModel, ArchiveModel, WifiSettingModel, EventLogModel)
|
||||
|
||||
|
||||
def run_migrations(current_schema, db_connection):
    """
    Apply new schema versions to database.

    Steps are cumulative: a database at an old version passes through every
    block below in order until it reaches the newest schema. Each
    `_apply_schema_update` call runs its operations atomically and bumps the
    stored version, so a crash mid-way resumes from the last completed step.

    See http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#schema-migrations
    """
    migrator = SqliteMigrator(db_connection)

    if current_schema.version < 4:  # version 3 to 4
        # Track per-archive duration and size.
        _apply_schema_update(
            current_schema, 4,
            migrator.add_column(ArchiveModel._meta.table_name, 'duration', pw.FloatField(null=True)),
            migrator.add_column(ArchiveModel._meta.table_name, 'size', pw.IntegerField(null=True))
        )
    if current_schema.version < 5:
        # A wifi setting may exist without ever having been connected.
        _apply_schema_update(
            current_schema, 5,
            migrator.drop_not_null(WifiSettingModel._meta.table_name, 'last_connected'),
        )

    if current_schema.version < 6:
        _apply_schema_update(
            current_schema, 6,
            migrator.add_column(EventLogModel._meta.table_name, 'repo_url', pw.CharField(null=True))
        )

    if current_schema.version < 7:
        # Rename "config" to "profile"; store the profile as a plain string
        # on log entries instead of a foreign key.
        _apply_schema_update(
            current_schema, 7,
            migrator.rename_column(SourceFileModel._meta.table_name, 'config_id', 'profile_id'),
            migrator.drop_column(EventLogModel._meta.table_name, 'profile_id'),
            migrator.add_column(EventLogModel._meta.table_name, 'profile', pw.CharField(null=True))
        )

    if current_schema.version < 8:
        _apply_schema_update(
            current_schema, 8,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'prune_keep_within', pw.CharField(null=True)))

    if current_schema.version < 9:
        # User-configurable archive naming and prune prefix.
        _apply_schema_update(
            current_schema, 9,
            migrator.add_column(BackupProfileModel._meta.table_name, 'new_archive_name',
                                pw.CharField(default="{hostname}-{profile_slug}-{now:%Y-%m-%dT%H:%M:%S}")),
            migrator.add_column(BackupProfileModel._meta.table_name, 'prune_prefix',
                                pw.CharField(default="{hostname}-{profile_slug}-")),
        )

    if current_schema.version < 10:
        _apply_schema_update(
            current_schema, 10,
            migrator.add_column(BackupProfileModel._meta.table_name, 'pre_backup_cmd',
                                pw.CharField(default='')),
            migrator.add_column(BackupProfileModel._meta.table_name, 'post_backup_cmd',
                                pw.CharField(default='')),
        )

    if current_schema.version < 11:
        # Data-only migration (no DDL): normalize legacy compression names.
        _apply_schema_update(current_schema, 11)
        for profile in BackupProfileModel:
            if profile.compression == 'zstd':
                profile.compression = 'zstd,3'
            if profile.compression == 'lzma,6':
                profile.compression = 'auto,lzma,6'
            profile.save()

    if current_schema.version < 12:
        _apply_schema_update(
            current_schema, 12,
            migrator.add_column(RepoModel._meta.table_name,
                                'extra_borg_arguments', pw.CharField(default='')))

    if current_schema.version < 13:
        # Migrate ArchiveModel data to new table to remove unique constraint from snapshot_id column.
        tables = DB.get_tables()
        if ArchiveModel.select().count() == 0 and 'snapshotmodel' in tables:
            cursor = DB.execute_sql('select * from snapshotmodel;')
            fields = [ArchiveModel.id, ArchiveModel.snapshot_id, ArchiveModel.name, ArchiveModel.repo,
                      ArchiveModel.time, ArchiveModel.duration, ArchiveModel.size]
            data = [row for row in cursor.fetchall()]
            # Insert in batches of 1000 rows, presumably to stay under
            # SQLite's bound-variable limit — TODO confirm.
            with DB.atomic():
                size = 1000
                for i in range(0, len(data), size):
                    ArchiveModel.insert_many(data[i: i + size], fields=fields).execute()

        _apply_schema_update(current_schema, 13)

    if current_schema.version < 14:
        _apply_schema_update(
            current_schema, 14,
            migrator.add_column(SettingsModel._meta.table_name,
                                'str_value', pw.CharField(default='')))

    if current_schema.version < 15:
        _apply_schema_update(
            current_schema, 15,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'dont_run_on_metered_networks', pw.BooleanField(default=True))
        )

    if current_schema.version < 16:
        # Cache directory statistics on each source entry.
        _apply_schema_update(
            current_schema, 16,
            migrator.add_column(SourceFileModel._meta.table_name,
                                'dir_size', pw.BigIntegerField(default=-1)),
            migrator.add_column(SourceFileModel._meta.table_name,
                                'dir_files_count', pw.BigIntegerField(default=-1)),
            migrator.add_column(SourceFileModel._meta.table_name,
                                'path_isdir', pw.BooleanField(default=False))
        )

    if current_schema.version < 17:
        _apply_schema_update(
            current_schema, 17,
            migrator.add_column(RepoModel._meta.table_name,
                                'create_backup_cmd', pw.CharField(default=''))
        )

    if current_schema.version < 18:
        # Flexible schedule intervals plus end-time tracking for job logs.
        _apply_schema_update(
            current_schema, 18,
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'schedule_interval_unit', pw.CharField(default='hours')),
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'schedule_interval_count', pw.IntegerField(default=3)),
            migrator.add_column(BackupProfileModel._meta.table_name,
                                'schedule_make_up_missed', pw.BooleanField(default=False)),
            migrator.add_column(EventLogModel._meta.table_name,
                                'end_time', pw.DateTimeField(default=datetime.now))
        )
|
||||
|
||||
|
||||
def _apply_schema_update(current_schema, version_after, *operations):
    """Run *operations* and record the new schema version, atomically.

    The migration and the version bump share one transaction, so a failed
    migration leaves the stored schema version untouched.
    """
    with DB.atomic():
        migrate(*operations)
        current_schema.version, current_schema.changed_at = version_after, datetime.now()
        current_schema.save()
|
|
@ -0,0 +1,188 @@
|
|||
"""
|
||||
This module provides the app's data store using Peewee with SQLite.
|
||||
|
||||
At the bottom there is a simple schema migration system.
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime
|
||||
import peewee as pw
|
||||
from playhouse import signals
|
||||
from vorta.utils import slugify
|
||||
|
||||
DB = pw.Proxy()
|
||||
|
||||
|
||||
class JSONField(pw.TextField):
    """
    Store JSON-serializable values in a plain text column.

    Values are serialized with ``json.dumps`` when written and parsed
    with ``json.loads`` when read; ``None`` passes through unchanged in
    both directions. Not efficient, but works nicely.

    From: https://gist.github.com/rosscdh/f4f26758b0228f475b132c688f15af2b
    """

    def db_value(self, value):
        """Convert the python value for storage in the database."""
        if value is None:
            return None
        return json.dumps(value)

    def python_value(self, value):
        """Convert the database value to a pythonic value."""
        if value is None:
            return None
        return json.loads(value)
|
||||
|
||||
|
||||
class BaseModel(signals.Model):
    """Common model superclass.

    Inherits from playhouse ``signals.Model`` so that save/delete
    signal hooks can be attached to any model in the app.
    """
|
||||
|
||||
|
||||
class RepoModel(BaseModel):
    """A single remote repo with unique URL."""
    # Repo location: an absolute local path or a remote URL (e.g. user@host:path).
    url = pw.CharField(unique=True)
    added_at = pw.DateTimeField(default=datetime.now)
    # Borg encryption mode of the repo; null until known.
    encryption = pw.CharField(null=True)
    # Size/chunk statistics; presumably filled from `borg info` output —
    # TODO confirm against the job that updates them. Null until first refresh.
    unique_size = pw.IntegerField(null=True)
    unique_csize = pw.IntegerField(null=True)
    total_size = pw.IntegerField(null=True)
    total_unique_chunks = pw.IntegerField(null=True)
    # Per-repo command customization (added in schema v17).
    create_backup_cmd = pw.CharField(default='')
    extra_borg_arguments = pw.CharField(default='')

    def is_remote_repo(self):
        # Local repos are identified by an absolute path; everything else
        # is treated as remote.
        return not self.url.startswith('/')

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class RepoPassword(BaseModel):
    """Fallback to save repo passwords. Only used if no Keyring available.

    NOTE: passwords stored here are kept in the SQLite DB; the system
    keychain is preferred when present.
    """
    url = pw.CharField(unique=True)
    password = pw.CharField()

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class BackupProfileModel(BaseModel):
    """Allows the user to switch between different configurations."""
    name = pw.CharField()
    added_at = pw.DateTimeField(default=datetime.now)
    # A profile may exist without a repo attached yet.
    repo = pw.ForeignKeyField(RepoModel, default=None, null=True)
    ssh_key = pw.CharField(default=None, null=True)
    compression = pw.CharField(default='lz4')
    exclude_patterns = pw.TextField(null=True)
    exclude_if_present = pw.TextField(null=True)
    # Scheduling: 'off' by default; interval or fixed-time settings below.
    schedule_mode = pw.CharField(default='off')
    schedule_interval_count = pw.IntegerField(default=3)
    schedule_interval_unit = pw.CharField(default='hours')
    schedule_fixed_hour = pw.IntegerField(default=3)
    schedule_fixed_minute = pw.IntegerField(default=42)
    schedule_interval_hours = pw.IntegerField(default=3)  # no longer used
    schedule_interval_minutes = pw.IntegerField(default=42)  # no longer used
    # NOTE(review): default here is True, but the v18 migration adds this
    # column with default=False — fresh installs and migrated installs may
    # end up with different values. Confirm which default is intended.
    schedule_make_up_missed = pw.BooleanField(default=True)
    validation_on = pw.BooleanField(default=True)
    validation_weeks = pw.IntegerField(default=3)
    # Pruning: presumably mapped to `borg prune` keep-* options — TODO
    # confirm against the prune job that consumes these fields.
    prune_on = pw.BooleanField(default=False)
    prune_hour = pw.IntegerField(default=2)
    prune_day = pw.IntegerField(default=7)
    prune_week = pw.IntegerField(default=4)
    prune_month = pw.IntegerField(default=6)
    prune_year = pw.IntegerField(default=2)
    prune_keep_within = pw.CharField(default='10H', null=True)
    # Archive name template; placeholders are expanded elsewhere.
    new_archive_name = pw.CharField(default="{hostname}-{now:%Y-%m-%d-%H%M%S}")
    prune_prefix = pw.CharField(default="{hostname}-")
    pre_backup_cmd = pw.CharField(default='')
    post_backup_cmd = pw.CharField(default='')
    dont_run_on_metered_networks = pw.BooleanField(default=True)

    def refresh(self):
        # Re-fetch this row from the DB; peewee instances don't auto-refresh.
        return type(self).get(self._pk_expr())

    def slug(self):
        # File/URL-safe form of the profile name.
        return slugify(self.name)

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class SourceFileModel(BaseModel):
    """A folder to be backed up, related to a Backup Configuration."""
    dir = pw.CharField()
    # -1 means "not yet calculated" for the size/count statistics below.
    dir_size = pw.BigIntegerField(default=-1)
    dir_files_count = pw.BigIntegerField(default=-1)
    path_isdir = pw.BooleanField(default=False)
    profile = pw.ForeignKeyField(BackupProfileModel, default=1)
    # NOTE(review): uses naive utcnow while other models use datetime.now —
    # confirm whether the mismatch is intentional.
    added_at = pw.DateTimeField(default=datetime.utcnow)

    class Meta:
        database = DB
        # Keep the historical table name from before the model was renamed.
        table_name = 'sourcedirmodel'
|
||||
|
||||
|
||||
class ArchiveModel(BaseModel):
    """An archive in a remote repository."""
    snapshot_id = pw.CharField()
    name = pw.CharField()
    repo = pw.ForeignKeyField(RepoModel, backref='archives')
    time = pw.DateTimeField()
    duration = pw.FloatField(null=True)
    size = pw.IntegerField(null=True)

    def formatted_time(self):
        # NOTE(review): bare return — this always yields None. Looks like a
        # stripped/leftover implementation; confirm whether callers still
        # use it and what it should format.
        return

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class WifiSettingModel(BaseModel):
    """Save Wifi Settings.

    Tracks, per profile, which wifi networks backups are allowed to
    run on and when each network was last seen connected.
    """
    ssid = pw.CharField()
    last_connected = pw.DateTimeField(null=True)
    allowed = pw.BooleanField(default=True)
    profile = pw.ForeignKeyField(BackupProfileModel, default=1)

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class EventLogModel(BaseModel):
    """Keep a log of background jobs."""
    start_time = pw.DateTimeField(default=datetime.now)
    # end_time column was added in schema v18.
    end_time = pw.DateTimeField(default=datetime.now)
    category = pw.CharField()
    subcommand = pw.CharField(null=True)
    message = pw.CharField(null=True)
    # Defaults to 1 (failure) so only an explicit success writes 0 —
    # presumably mirrors the borg process return code; verify in BorgJob.
    returncode = pw.IntegerField(default=1)
    # Arbitrary job parameters stored as JSON text.
    params = JSONField(null=True)
    profile = pw.CharField(null=True)
    repo_url = pw.CharField(null=True)

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class SchemaVersion(BaseModel):
    """Keep DB version to apply the correct migrations."""
    version = pw.IntegerField()
    changed_at = pw.DateTimeField(default=datetime.now)

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class SettingsModel(BaseModel):
    """App settings unrelated to a single profile or repo.

    Each row is one setting; ``value`` holds checkbox-style booleans
    while ``str_value`` holds free-form string settings. ``type``
    distinguishes user-visible 'checkbox' settings from 'internal' ones.
    """
    key = pw.CharField(unique=True)
    value = pw.BooleanField(default=False)
    str_value = pw.CharField(default='')
    label = pw.CharField()
    type = pw.CharField()

    class Meta:
        database = DB
|
||||
|
||||
|
||||
class BackupProfileMixin:
    """Extend to support multiple profiles later."""

    def profile(self):
        # Resolves the currently selected profile via the Qt widget tree:
        # assumes the mixing-in class is a QWidget whose window() exposes
        # `current_profile` — TODO confirm for every class using this mixin.
        return BackupProfileModel.get(id=self.window().current_profile.id)
|
|
@ -0,0 +1,81 @@
|
|||
import sys
|
||||
from vorta.i18n import trans_late
|
||||
|
||||
|
||||
def get_misc_settings():
    """Return the list of default app settings.

    Each entry is a dict matching the columns of ``SettingsModel``
    (key, value/str_value, type, label). Checkbox settings carry a
    translatable label; 'internal' settings are not shown to the user.
    Platform-specific entries are appended depending on ``sys.platform``.
    """

    def checkbox(key, default, label):
        # Build one user-visible checkbox setting with a translatable label.
        return {
            'key': key, 'value': default, 'type': 'checkbox',
            'label': trans_late('settings', label),
        }

    settings = [
        checkbox('enable_notifications', True,
                 'Display notifications when background tasks fail'),
        checkbox('enable_notifications_success', False,
                 'Also notify about successful background tasks'),
        checkbox('autostart', False,
                 'Automatically start Vorta at login'),
        checkbox('foreground', True,
                 'Open main window on startup'),
        checkbox('get_srcpath_datasize', True,
                 'Get statistics of file/folder when added'),
        checkbox('use_system_keyring', True,
                 'Store repository passwords in system keychain, if available.'),
        checkbox('override_mount_permissions', False,
                 'Try to replace existing permissions when mounting an archive.'),
        # Internal settings: persisted state, never shown in the Misc tab.
        {
            'key': 'previous_profile_id', 'str_value': '1', 'type': 'internal',
            'label': 'Previously selected profile'
        },
        {
            'key': 'previous_window_width', 'str_value': '800', 'type': 'internal',
            'label': 'Previous window width'
        },
        {
            'key': 'previous_window_height', 'str_value': '600', 'type': 'internal',
            'label': 'Previous window height'
        },
    ]

    if sys.platform == 'darwin':
        # Sparkle-style update checks are only offered on macOS.
        settings.extend([
            checkbox('check_for_updates', True,
                     'Check for updates on startup'),
            checkbox('updates_include_beta', False,
                     'Include pre-release versions when checking for updates'),
        ])
    else:
        settings.extend([
            checkbox('enable_background_question', True,
                     'Display background exit dialog'),
            {
                'key': 'disable_background_state', 'value': False, 'type': 'internal',
                'label': 'Previous background exit button state'
            },
        ])
    return settings
|
|
@ -2,7 +2,7 @@ import os
|
|||
from PyQt5.QtWidgets import QMenu, QSystemTrayIcon
|
||||
from PyQt5.QtGui import QIcon
|
||||
|
||||
from vorta.models import BackupProfileModel
|
||||
from vorta.store.models import BackupProfileModel
|
||||
from vorta.utils import get_asset
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import sys
|
||||
import os
|
||||
from vorta.models import SettingsModel
|
||||
from vorta.store.models import SettingsModel
|
||||
|
||||
|
||||
def get_updater():
|
||||
|
|
|
@ -284,7 +284,7 @@ def get_sorted_wifis(profile):
|
|||
merge with networks from other profiles. Update last connected time.
|
||||
"""
|
||||
|
||||
from vorta.models import WifiSettingModel
|
||||
from vorta.store.models import WifiSettingModel
|
||||
|
||||
# Pull networks known to OS and all other backup profiles
|
||||
system_wifis = get_network_status_monitor().get_known_wifis()
|
||||
|
|
|
@ -20,7 +20,7 @@ from vorta.borg.prune import BorgPruneJob
|
|||
from vorta.borg.umount import BorgUmountJob
|
||||
from vorta.borg.rename import BorgRenameJob
|
||||
from vorta.i18n import trans_late
|
||||
from vorta.models import ArchiveModel, BackupProfileMixin
|
||||
from vorta.store.models import ArchiveModel, BackupProfileMixin
|
||||
from vorta.utils import (choose_file_dialog, format_archive_name, get_asset,
|
||||
get_mount_points, pretty_bytes)
|
||||
from vorta.views.source_tab import SizeItem
|
||||
|
|
|
@ -6,7 +6,7 @@ from PyQt5 import uic
|
|||
from PyQt5.QtWidgets import QFileDialog, QMessageBox
|
||||
|
||||
from vorta.keyring.abc import VortaKeyring
|
||||
from vorta.models import BackupProfileModel # noqa: F401
|
||||
from vorta.store.models import BackupProfileModel # noqa: F401
|
||||
from vorta.utils import get_asset
|
||||
from ..notifications import VortaNotifications
|
||||
from ..profile_export import ProfileExport
|
||||
|
|
|
@ -2,7 +2,8 @@ from PyQt5 import QtCore
|
|||
from PyQt5.QtWidgets import QMessageBox
|
||||
|
||||
from vorta.keyring.abc import VortaKeyring
|
||||
from vorta.models import BackupProfileModel, SCHEMA_VERSION
|
||||
from vorta.store.models import BackupProfileModel
|
||||
from vorta.store.connection import SCHEMA_VERSION
|
||||
from vorta.profile_export import VersionException
|
||||
from vorta.views.export_window import ImportWindowUI, ImportWindowBase, logger
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ from PyQt5.QtCore import QPoint
|
|||
from PyQt5.QtGui import QKeySequence
|
||||
from PyQt5.QtWidgets import QShortcut, QMessageBox, QCheckBox, QMenu, QToolTip, QFileDialog
|
||||
|
||||
from vorta.models import BackupProfileModel, SettingsModel
|
||||
from vorta.store.models import BackupProfileModel, SettingsModel
|
||||
from vorta.utils import borg_compat, get_asset, is_system_tray_available, get_network_status_monitor
|
||||
from vorta.views.partials.loading_button import LoadingButton
|
||||
from vorta.views.utils import get_colored_icon
|
||||
|
|
|
@ -4,7 +4,8 @@ from PyQt5.QtWidgets import QCheckBox
|
|||
from vorta._version import __version__
|
||||
from vorta.config import LOG_DIR
|
||||
from vorta.i18n import translate
|
||||
from vorta.models import SettingsModel, BackupProfileMixin, get_misc_settings
|
||||
from vorta.store.settings import get_misc_settings
|
||||
from vorta.store.models import SettingsModel, BackupProfileMixin
|
||||
from vorta.utils import get_asset
|
||||
|
||||
uifile = get_asset('UI/misctab.ui')
|
||||
|
@ -31,7 +32,7 @@ class MiscTab(MiscTabBase, MiscTabUI, BackupProfileMixin):
|
|||
# dynamically add widgets for settings
|
||||
for setting in SettingsModel.select().where(SettingsModel.type == 'checkbox'):
|
||||
x = filter(lambda s: s['key'] == setting.key, get_misc_settings())
|
||||
if not list(x): # Skip settings that aren't specified in vorta.models.
|
||||
if not list(x): # Skip settings that aren't specified in vorta.store.models.
|
||||
continue
|
||||
b = QCheckBox(translate('settings', setting.label))
|
||||
b.setCheckState(setting.value)
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
from PyQt5 import uic, QtCore
|
||||
from PyQt5.QtWidgets import QDialogButtonBox
|
||||
from ..i18n import translate, trans_late
|
||||
from ..utils import get_asset
|
||||
from ..models import BackupProfileModel
|
||||
from vorta.i18n import translate, trans_late
|
||||
from vorta.utils import get_asset
|
||||
from vorta.store.models import BackupProfileModel
|
||||
|
||||
uifile = get_asset('UI/profileadd.ui')
|
||||
AddProfileUI, AddProfileBase = uic.loadUiType(uifile)
|
||||
|
|
|
@ -9,7 +9,7 @@ from vorta.borg.init import BorgInitJob
|
|||
from vorta.borg.info_repo import BorgInfoRepoJob
|
||||
from vorta.i18n import translate
|
||||
from vorta.views.utils import get_colored_icon
|
||||
from vorta.models import RepoModel
|
||||
from vorta.store.models import RepoModel
|
||||
|
||||
uifile = get_asset('UI/repoadd.ui')
|
||||
AddRepoUI, AddRepoBase = uic.loadUiType(uifile)
|
||||
|
|
|
@ -3,7 +3,7 @@ import os
|
|||
from PyQt5 import uic, QtCore
|
||||
from PyQt5.QtWidgets import QApplication, QMessageBox
|
||||
|
||||
from vorta.models import RepoModel, ArchiveModel, BackupProfileMixin
|
||||
from vorta.store.models import RepoModel, ArchiveModel, BackupProfileMixin
|
||||
from vorta.utils import pretty_bytes, get_private_keys, get_asset, borg_compat
|
||||
from .repo_add_dialog import AddRepoWindow, ExistingRepoWindow
|
||||
from .ssh_dialog import SSHAddWindow
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from PyQt5 import uic, QtCore
|
||||
from PyQt5.QtWidgets import QListWidgetItem, QApplication, QTableView, QHeaderView, QTableWidgetItem
|
||||
from vorta.utils import get_asset, get_sorted_wifis
|
||||
from vorta.models import EventLogModel, WifiSettingModel, BackupProfileMixin
|
||||
from vorta.store.models import EventLogModel, WifiSettingModel, BackupProfileMixin
|
||||
from vorta.views.utils import get_colored_icon
|
||||
|
||||
uifile = get_asset('UI/scheduletab.ui')
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from PyQt5 import uic
|
||||
from ..models import SourceFileModel, BackupProfileMixin, SettingsModel
|
||||
from ..utils import get_asset, choose_file_dialog, pretty_bytes, sort_sizes, FilePathInfoAsync
|
||||
from vorta.store.models import SourceFileModel, BackupProfileMixin, SettingsModel
|
||||
from vorta.utils import get_asset, choose_file_dialog, pretty_bytes, sort_sizes, FilePathInfoAsync
|
||||
from PyQt5 import QtCore
|
||||
from PyQt5.QtCore import QFileInfo
|
||||
from PyQt5.QtWidgets import QApplication, QMessageBox, QTableWidgetItem, QHeaderView
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import sys
|
||||
import os
|
||||
|
||||
import vorta.models
|
||||
resource_file = os.path.join(os.path.dirname(vorta.models.__file__), 'assets/icons')
|
||||
import vorta._version
|
||||
resource_file = os.path.join(os.path.dirname(vorta._version.__file__), 'assets/icons')
|
||||
sys.path.append(resource_file)
|
||||
|
|
|
@ -8,8 +8,8 @@ from unittest.mock import MagicMock
|
|||
import vorta
|
||||
import vorta.application
|
||||
import vorta.borg.jobs_manager
|
||||
from vorta.models import (RepoModel, RepoPassword, BackupProfileModel, SourceFileModel,
|
||||
SettingsModel, ArchiveModel, WifiSettingModel, EventLogModel, SchemaVersion)
|
||||
from vorta.store.models import RepoModel, RepoPassword, BackupProfileModel, SourceFileModel, \
|
||||
SettingsModel, ArchiveModel, WifiSettingModel, EventLogModel, SchemaVersion
|
||||
from vorta.views.main_window import MainWindow
|
||||
|
||||
models = [RepoModel, RepoPassword, BackupProfileModel, SourceFileModel,
|
||||
|
@ -27,7 +27,7 @@ def qapp(tmpdir_factory):
|
|||
# DB is required to init QApplication. New DB used for every test.
|
||||
tmp_db = tmpdir_factory.mktemp('Vorta').join('settings.sqlite')
|
||||
mock_db = SqliteDatabase(str(tmp_db))
|
||||
vorta.models.init_db(mock_db)
|
||||
vorta.store.connection.init_db(mock_db)
|
||||
|
||||
from vorta.application import VortaApp
|
||||
VortaApp.set_borg_details_action = MagicMock() # Can't use pytest-mock in session scope
|
||||
|
@ -44,7 +44,7 @@ def qapp(tmpdir_factory):
|
|||
def init_db(qapp, qtbot, tmpdir_factory):
|
||||
tmp_db = tmpdir_factory.mktemp('Vorta').join('settings.sqlite')
|
||||
mock_db = SqliteDatabase(str(tmp_db), pragmas={'journal_mode': 'wal', })
|
||||
vorta.models.init_db(mock_db)
|
||||
vorta.store.connection.init_db(mock_db)
|
||||
|
||||
default_profile = BackupProfileModel(name='Default')
|
||||
default_profile.save()
|
||||
|
|
|
@ -2,7 +2,7 @@ import psutil
|
|||
from collections import namedtuple
|
||||
import pytest
|
||||
from PyQt5 import QtCore
|
||||
from vorta.models import BackupProfileModel, ArchiveModel
|
||||
from vorta.store.models import BackupProfileModel, ArchiveModel
|
||||
import vorta.borg
|
||||
import vorta.views.archive_tab
|
||||
import vorta.utils
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import pytest
|
||||
import vorta.borg
|
||||
import vorta.models
|
||||
import vorta.store.models
|
||||
from vorta.borg.prune import BorgPruneJob
|
||||
|
||||
|
||||
|
@ -9,7 +9,7 @@ def test_borg_prune(qapp, qtbot, mocker, borg_json_output):
|
|||
popen_result = mocker.MagicMock(stdout=stdout, stderr=stderr, returncode=0)
|
||||
mocker.patch.object(vorta.borg.borg_job, 'Popen', return_value=popen_result)
|
||||
|
||||
params = BorgPruneJob.prepare(vorta.models.BackupProfileModel.select().first())
|
||||
params = BorgPruneJob.prepare(vorta.store.models.BackupProfileModel.select().first())
|
||||
thread = BorgPruneJob(params['cmd'], params, qapp)
|
||||
|
||||
with qtbot.waitSignal(thread.result, **pytest._wait_defaults) as blocker:
|
||||
|
|
|
@ -5,7 +5,7 @@ import pytest
|
|||
from PyQt5 import QtCore
|
||||
from PyQt5.QtWidgets import QFileDialog, QDialogButtonBox, QMessageBox
|
||||
|
||||
from vorta.models import BackupProfileModel, SourceFileModel
|
||||
from vorta.store.models import BackupProfileModel, SourceFileModel
|
||||
from vorta.views.import_window import ImportWindow
|
||||
|
||||
VALID_IMPORT_FILE = Path(__file__).parent / 'profile_exports' / 'valid.json'
|
||||
|
|
|
@ -3,7 +3,6 @@ import pytest
|
|||
from PyQt5 import QtDBus
|
||||
|
||||
import vorta.borg
|
||||
import vorta.models
|
||||
import vorta.notifications
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import pytest
|
||||
from PyQt5 import QtCore
|
||||
from PyQt5.QtWidgets import QDialogButtonBox
|
||||
from vorta.models import BackupProfileModel
|
||||
from vorta.store.models import BackupProfileModel
|
||||
|
||||
|
||||
def test_profile_add(qapp, qtbot):
|
||||
|
|
|
@ -4,9 +4,8 @@ import pytest
|
|||
from PyQt5 import QtCore
|
||||
|
||||
import vorta.borg.borg_job
|
||||
import vorta.models
|
||||
from vorta.keyring.abc import VortaKeyring
|
||||
from vorta.models import EventLogModel, RepoModel, ArchiveModel
|
||||
from vorta.store.models import EventLogModel, RepoModel, ArchiveModel
|
||||
|
||||
LONG_PASSWORD = 'long-password-long'
|
||||
SHORT_PASSWORD = 'hunter2'
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import pytest
|
||||
import vorta.borg
|
||||
from vorta.models import EventLogModel
|
||||
from vorta.store.models import EventLogModel
|
||||
|
||||
|
||||
def test_scheduler_create_backup(qapp, qtbot, mocker, borg_json_output):
|
||||
|
|
Loading…
Reference in New Issue