Mirror of https://github.com/borgbase/vorta, synced 2024-12-21 23:33:13 +00:00
Integration Tests for Borg (#1716)
Move existing tests into the subfolder `tests/unit`. Write integration tests that actually run the installed borg executable; those tests live in `tests/integration`. Pytest fixtures shared by both kinds of tests remain in `tests/conftest.py`; the others live in `tests/integration/conftest.py` or `tests/unit/conftest.py`. This adds nox to the project and configures it to run the tests against different borg versions. It also updates the CI workflow to run the integration tests using nox.

* noxfile.py: Run pytest with a matrix of borg versions OR a specific borg version.
* Makefile: Run using nox. Add phonies `test-unit` and `test-integration`.
* tests/conftest.py: Move some fixtures/functions to `tests/unit/conftest.py`.
* tests/test_*.py --> tests/unit/: Move unit tests and assets into the subfolder.
* tests/integration/: Write integration tests.
* requirements.d/dev.txt: Add `nox` and `pkgconfig`. The latter is needed for installing new borg versions.
* .github/actions/setup/action.yml: Update to install pre-commit and nox when needed. The action no longer installs Vorta.
* .github/actions/install-dependencies/action.yml: Install borg's system dependencies with this new composite action.
* .github/workflows/test.yml: Rename the `test` CI job to `test-unit` and update it for the new test setup. Implement a `test-integration` job.

Signed-off-by: Chirag Aggarwal <thechiragaggarwal@gmail.com>
This commit is contained in:
parent
0e37e1cf90
commit
b015368fee
63 changed files with 1256 additions and 127 deletions
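For reference, the new targets can be exercised locally roughly like this (a sketch; it assumes nox and borg's system build dependencies are already installed, and it mirrors the Makefile and noxfile.py shown below):

    # run the unit tests under every supported borg version
    make test-unit
    # run the integration tests against one specific borg version
    BORG_VERSION=1.2.4 make test-integration
    # equivalent direct nox invocation; arguments after -- are passed to pytest
    nox -- --cov=vorta tests/integration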
.github/actions/install-dependencies/action.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
name: Install Dependencies
description: Installs system dependencies

runs:
  using: "composite"
  steps:
    - name: Install system dependencies (Linux)
      if: runner.os == 'Linux'
      shell: bash
      run: |
        sudo apt update && sudo apt install -y \
          xvfb libssl-dev openssl libacl1-dev libacl1 fuse3 build-essential \
          libxkbcommon-x11-0 dbus-x11 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 \
          libxcb-randr0 libxcb-render-util0 libxcb-xinerama0 libxcb-xfixes0 libxcb-shape0 \
          libegl1 libxcb-cursor0 libfuse-dev libsqlite3-dev libfuse3-dev pkg-config \
          python3-pkgconfig libxxhash-dev borgbackup

    - name: Install system dependencies (macOS)
      if: runner.os == 'macOS'
      shell: bash
      run: |
        brew install openssl readline xz xxhash pkg-config borgbackup
.github/actions/setup/action.yml (vendored, 17 lines changed)
@@ -15,7 +15,10 @@ inputs:
    description: The python version to install
    required: true
    default: "3.10"

  install-nox:
    description: Whether nox shall be installed
    required: false
    default: "" # == false
runs:
  using: "composite"
  steps:
@@ -37,16 +40,20 @@ runs:
        restore-keys: |
          ${{ runner.os }}-pip-

    - name: Install Vorta
    - name: Install pre-commit
      shell: bash
      run: |
        pip install -e .
        pip install -r requirements.d/dev.txt
      run: pip install pre-commit

    - name: Install nox
      if: ${{ inputs.install-nox }}
      shell: bash
      run: pip install nox

    - name: Hash python version
      if: ${{ inputs.setup-pre-commit }}
      shell: bash
      run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV

    - name: Caching for Pre-Commit
      if: ${{ inputs.setup-pre-commit }}
      uses: actions/cache@v3
.github/workflows/test.yml (vendored, 86 lines changed)
@@ -26,7 +26,7 @@ jobs:
        shell: bash
        run: make lint

  test:
  test-unit:
    timeout-minutes: 20
    runs-on: ${{ matrix.os }}
    strategy:
@@ -35,40 +35,92 @@
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        os: [ubuntu-latest, macos-latest]
        borg-version: ["1.2.4"]

    steps:
      - uses: actions/checkout@v3

      - name: Install system dependencies (Linux)
        if: runner.os == 'Linux'
        run: |
          sudo apt update && sudo apt install -y \
            xvfb libssl-dev openssl libacl1-dev libacl1 build-essential borgbackup \
            libxkbcommon-x11-0 dbus-x11 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 \
            libxcb-randr0 libxcb-render-util0 libxcb-xinerama0 libxcb-xfixes0 libxcb-shape0 \
            libegl1 libxcb-cursor0
      - name: Install system dependencies (macOS)
        if: runner.os == 'macOS'
        run: |
          brew install openssl readline xz borgbackup
      - name: Install system dependencies
        uses: ./.github/actions/install-dependencies

      - name: Setup python, vorta and dev deps
        uses: ./.github/actions/setup
        with:
          python-version: ${{ matrix.python-version }}
          install-nox: true

      - name: Setup tmate session
        uses: mxschmitt/action-tmate@v3
        if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.debug_enabled }}

      - name: Test with pytest (Linux)
      - name: Run Unit Tests with pytest (Linux)
        if: runner.os == 'Linux'
        env:
          BORG_VERSION: ${{ matrix.borg-version }}
        run: |
          xvfb-run --server-args="-screen 0 1024x768x24+32" \
            -a dbus-run-session -- make test
      - name: Test with pytest (macOS)
            -a dbus-run-session -- make test-unit

      - name: Run Unit Tests with pytest (macOS)
        if: runner.os == 'macOS'
        run: make test
        env:
          BORG_VERSION: ${{ matrix.borg-version }}
          PKG_CONFIG_PATH: /usr/local/opt/openssl@3/lib/pkgconfig
        run: echo $PKG_CONFIG_PATH && make test-unit

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        env:
          OS: ${{ runner.os }}
          python: ${{ matrix.python-version }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          env_vars: OS, python

  test-integration:
    timeout-minutes: 20
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false

      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        os: [ubuntu-latest, macos-latest]
        borg-version: ["1.1.18", "1.2.2", "1.2.4", "2.0.0b5"]
        exclude:
          - borg-version: "2.0.0b5"
            python-version: "3.8"

    steps:
      - uses: actions/checkout@v3

      - name: Install system dependencies
        uses: ./.github/actions/install-dependencies

      - name: Setup python, vorta and dev deps
        uses: ./.github/actions/setup
        with:
          python-version: ${{ matrix.python-version }}
          install-nox: true

      - name: Setup tmate session
        uses: mxschmitt/action-tmate@v3
        if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.debug_enabled }}

      - name: Run Integration Tests with pytest (Linux)
        if: runner.os == 'Linux'
        env:
          BORG_VERSION: ${{ matrix.borg-version }}
        run: |
          xvfb-run --server-args="-screen 0 1024x768x24+32" \
            -a dbus-run-session -- make test-integration

      - name: Run Integration Tests with pytest (macOS)
        if: runner.os == 'macOS'
        env:
          BORG_VERSION: ${{ matrix.borg-version }}
          PKG_CONFIG_PATH: /usr/local/opt/openssl@3/lib/pkgconfig
        run: echo $PKG_CONFIG_PATH && make test-integration

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
Makefile (8 lines changed)
@@ -67,7 +67,13 @@ lint:
	pre-commit run --all-files --show-diff-on-failure

test:
	pytest --cov=vorta
	nox -- --cov=vorta

test-unit:
	nox -- --cov=vorta tests/unit

test-integration:
	nox -- --cov=vorta tests/integration

help:
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
noxfile.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import os
import re
import sys

import nox

borg_version = os.getenv("BORG_VERSION")

if borg_version:
    # Use specified borg version
    supported_borgbackup_versions = [borg_version]
else:
    # Generate a list of borg versions compatible with system installed python version
    system_python_version = tuple(sys.version_info[:3])

    supported_borgbackup_versions = [
        borgbackup
        for borgbackup in ("1.1.18", "1.2.2", "1.2.4", "2.0.0b6")
        # Python version requirements for borgbackup versions
        if (borgbackup == "1.1.18" and system_python_version >= (3, 5, 0))
        or (borgbackup == "1.2.2" and system_python_version >= (3, 8, 0))
        or (borgbackup == "1.2.4" and system_python_version >= (3, 8, 0))
        or (borgbackup == "2.0.0b6" and system_python_version >= (3, 9, 0))
    ]


@nox.session
@nox.parametrize("borgbackup", supported_borgbackup_versions)
def run_tests(session, borgbackup):
    # install borgbackup
    if sys.platform == 'darwin':
        # in macOS there's currently no fuse package which works with borgbackup directly
        session.install(f"borgbackup=={borgbackup}")
    elif borgbackup == "1.1.18":
        # borgbackup 1.1.18 doesn't support pyfuse3
        session.install("llfuse")
        session.install(f"borgbackup[llfuse]=={borgbackup}")
    else:
        session.install(f"borgbackup[pyfuse3]=={borgbackup}")

    # install dependencies
    session.install("-r", "requirements.d/dev.txt")
    session.install("-e", ".")

    # check versions
    cli_version = session.run("borg", "--version", silent=True).strip()
    cli_version = re.search(r"borg (\S+)", cli_version).group(1)
    python_version = session.run("python", "-c", "import borg; print(borg.__version__)", silent=True).strip()

    session.log(f"Borg CLI version: {cli_version}")
    session.log(f"Borg Python version: {python_version}")

    assert cli_version == borgbackup
    assert python_version == borgbackup

    session.run("pytest", *session.posargs, env={"BORG_VERSION": borgbackup})
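As a usage note (not part of the diff): because the session above is parametrized, nox also lets a single borg version be selected directly from the command line, for example:

    # list the generated sessions
    nox --list
    # run only the session parametrized for borg 1.2.4, forwarding extra args to pytest
    nox -s "run_tests(borgbackup='1.2.4')" -- --cov=vorta tests/unit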
requirements.d/dev.txt
@@ -2,6 +2,8 @@ black==22.*
coverage
flake8
macholib
nox
pkgconfig
pre-commit
pyinstaller
pylint
tests/conftest.py
@@ -1,37 +1,11 @@
import os
import sys
from datetime import datetime as dt
from unittest.mock import MagicMock

import pytest
import vorta
import vorta.application
import vorta.borg.jobs_manager
from peewee import SqliteDatabase
from vorta.store.models import (
    ArchiveModel,
    BackupProfileModel,
    EventLogModel,
    RepoModel,
    RepoPassword,
    SchemaVersion,
    SettingsModel,
    SourceFileModel,
    WifiSettingModel,
)
from vorta.views.main_window import MainWindow

models = [
    RepoModel,
    RepoPassword,
    BackupProfileModel,
    SourceFileModel,
    SettingsModel,
    ArchiveModel,
    WifiSettingModel,
    EventLogModel,
    SchemaVersion,
]


def pytest_configure(config):
@@ -55,86 +29,8 @@ def qapp(tmpdir_factory):

    from vorta.application import VortaApp

    VortaApp.set_borg_details_action = MagicMock()  # Can't use pytest-mock in session scope
    VortaApp.scheduler = MagicMock()

    qapp = VortaApp([])  # Only init QApplication once to avoid segfaults while testing.

    yield qapp
    mock_db.close()
    qapp.quit()


@pytest.fixture(scope='function', autouse=True)
def init_db(qapp, qtbot, tmpdir_factory):
    tmp_db = tmpdir_factory.mktemp('Vorta').join('settings.sqlite')
    mock_db = SqliteDatabase(
        str(tmp_db),
        pragmas={
            'journal_mode': 'wal',
        },
    )
    vorta.store.connection.init_db(mock_db)

    default_profile = BackupProfileModel(name='Default')
    default_profile.save()

    new_repo = RepoModel(url='i0fi93@i593.repo.borgbase.com:repo')
    new_repo.encryption = 'none'
    new_repo.save()

    default_profile.repo = new_repo.id
    default_profile.dont_run_on_metered_networks = False
    default_profile.validation_on = False
    default_profile.save()

    test_archive = ArchiveModel(snapshot_id='99999', name='test-archive', time=dt(2000, 1, 1, 0, 0), repo=1)
    test_archive.save()

    test_archive1 = ArchiveModel(snapshot_id='99998', name='test-archive1', time=dt(2000, 1, 1, 0, 0), repo=1)
    test_archive1.save()

    source_dir = SourceFileModel(dir='/tmp/another', repo=new_repo, dir_size=100, dir_files_count=18, path_isdir=True)
    source_dir.save()

    qapp.main_window.deleteLater()
    del qapp.main_window
    qapp.main_window = MainWindow(qapp)  # Re-open main window to apply mock data in UI

    yield

    qapp.jobs_manager.cancel_all_jobs()
    qapp.backup_finished_event.disconnect()
    qapp.scheduler.schedule_changed.disconnect()
    qtbot.waitUntil(lambda: not qapp.jobs_manager.is_worker_running(), **pytest._wait_defaults)
    mock_db.close()


@pytest.fixture
def choose_file_dialog(*args):
    class MockFileDialog:
        def __init__(self, *args, **kwargs):
            pass

        def open(self, func):
            func()

        def selectedFiles(self):
            return ['/tmp']

    return MockFileDialog


@pytest.fixture
def borg_json_output():
    def _read_json(subcommand):
        stdout = open(f'tests/borg_json_output/{subcommand}_stdout.json')
        stderr = open(f'tests/borg_json_output/{subcommand}_stderr.json')
        return stdout, stderr

    return _read_json


@pytest.fixture
def rootdir():
    return os.path.dirname(os.path.abspath(__file__))
tests/integration/conftest.py (new file, 226 lines)
|
@ -0,0 +1,226 @@
|
|||
import os
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
import vorta
|
||||
import vorta.application
|
||||
import vorta.borg.jobs_manager
|
||||
from peewee import SqliteDatabase
|
||||
from pkg_resources import parse_version
|
||||
from vorta.store.models import (
|
||||
ArchiveModel,
|
||||
BackupProfileModel,
|
||||
EventLogModel,
|
||||
RepoModel,
|
||||
RepoPassword,
|
||||
SchemaVersion,
|
||||
SettingsModel,
|
||||
SourceFileModel,
|
||||
WifiSettingModel,
|
||||
)
|
||||
from vorta.utils import borg_compat
|
||||
from vorta.views.main_window import MainWindow
|
||||
|
||||
models = [
|
||||
RepoModel,
|
||||
RepoPassword,
|
||||
BackupProfileModel,
|
||||
SourceFileModel,
|
||||
SettingsModel,
|
||||
ArchiveModel,
|
||||
WifiSettingModel,
|
||||
EventLogModel,
|
||||
SchemaVersion,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope='function', autouse=True)
|
||||
def borg_version():
|
||||
borg_version = os.getenv('BORG_VERSION')
|
||||
if not borg_version:
|
||||
borg_version = subprocess.run(['borg', '--version'], stdout=subprocess.PIPE).stdout.decode('utf-8')
|
||||
borg_version = borg_version.split(' ')[1]
|
||||
|
||||
# test window does not automatically set borg version
|
||||
borg_compat.set_version(borg_version, borg_compat.path)
|
||||
|
||||
parsed_borg_version = parse_version(borg_version)
|
||||
return borg_version, parsed_borg_version
|
||||
|
||||
|
||||
@pytest.fixture(scope='function', autouse=True)
|
||||
def create_test_repo(tmpdir_factory, borg_version):
|
||||
repo_path = tmpdir_factory.mktemp('repo')
|
||||
source_files_dir = tmpdir_factory.mktemp('borg_src')
|
||||
|
||||
is_borg_v2 = borg_version[1] >= parse_version('2.0.0b1')
|
||||
|
||||
if is_borg_v2:
|
||||
subprocess.run(['borg', '-r', str(repo_path), 'rcreate', '--encryption=none'], check=True)
|
||||
else:
|
||||
subprocess.run(['borg', 'init', '--encryption=none', str(repo_path)], check=True)
|
||||
|
||||
def create_archive(timestamp, name):
|
||||
if is_borg_v2:
|
||||
subprocess.run(
|
||||
['borg', '-r', str(repo_path), 'create', '--timestamp', timestamp, name, str(source_files_dir)],
|
||||
cwd=str(repo_path),
|
||||
check=True,
|
||||
)
|
||||
else:
|
||||
subprocess.run(
|
||||
['borg', 'create', '--timestamp', timestamp, f'{repo_path}::{name}', str(source_files_dir)],
|
||||
cwd=str(repo_path),
|
||||
check=True,
|
||||
)
|
||||
|
||||
# /src/file
|
||||
file_path = os.path.join(source_files_dir, 'file')
|
||||
with open(file_path, 'w') as f:
|
||||
f.write('test')
|
||||
|
||||
# /src/dir/
|
||||
dir_path = os.path.join(source_files_dir, 'dir')
|
||||
os.mkdir(dir_path)
|
||||
|
||||
# /src/dir/file
|
||||
file_path = os.path.join(dir_path, 'file')
|
||||
with open(file_path, 'w') as f:
|
||||
f.write('test')
|
||||
|
||||
# Create first archive
|
||||
create_archive('2023-06-14T01:00:00', 'test-archive1')
|
||||
|
||||
# /src/dir/symlink
|
||||
symlink_path = os.path.join(dir_path, 'symlink')
|
||||
os.symlink(file_path, symlink_path)
|
||||
|
||||
# /src/dir/hardlink
|
||||
hardlink_path = os.path.join(dir_path, 'hardlink')
|
||||
os.link(file_path, hardlink_path)
|
||||
|
||||
# /src/dir/fifo
|
||||
fifo_path = os.path.join(dir_path, 'fifo')
|
||||
os.mkfifo(fifo_path)
|
||||
|
||||
# /src/dir/chrdev
|
||||
supports_chrdev = True
|
||||
try:
|
||||
chrdev_path = os.path.join(dir_path, 'chrdev')
|
||||
os.mknod(chrdev_path, mode=0o600 | 0o020000)
|
||||
except PermissionError:
|
||||
supports_chrdev = False
|
||||
|
||||
create_archive('2023-06-14T02:00:00', 'test-archive2')
|
||||
|
||||
# Rename dir to dir1
|
||||
os.rename(dir_path, os.path.join(source_files_dir, 'dir1'))
|
||||
|
||||
create_archive('2023-06-14T03:00:00', 'test-archive3')
|
||||
|
||||
# Rename all files under dir1
|
||||
for file in os.listdir(os.path.join(source_files_dir, 'dir1')):
|
||||
os.rename(os.path.join(source_files_dir, 'dir1', file), os.path.join(source_files_dir, 'dir1', file + '1'))
|
||||
|
||||
create_archive('2023-06-14T04:00:00', 'test-archive4')
|
||||
|
||||
# Delete all file under dir1
|
||||
for file in os.listdir(os.path.join(source_files_dir, 'dir1')):
|
||||
os.remove(os.path.join(source_files_dir, 'dir1', file))
|
||||
|
||||
create_archive('2023-06-14T05:00:00', 'test-archive5')
|
||||
|
||||
# change permission of dir1
|
||||
os.chmod(os.path.join(source_files_dir, 'dir1'), 0o700)
|
||||
|
||||
create_archive('2023-06-14T06:00:00', 'test-archive6')
|
||||
|
||||
return repo_path, source_files_dir, supports_chrdev
|
||||
|
||||
|
||||
@pytest.fixture(scope='function', autouse=True)
|
||||
def init_db(qapp, qtbot, tmpdir_factory, create_test_repo):
|
||||
tmp_db = tmpdir_factory.mktemp('Vorta').join('settings.sqlite')
|
||||
mock_db = SqliteDatabase(
|
||||
str(tmp_db),
|
||||
pragmas={
|
||||
'journal_mode': 'wal',
|
||||
},
|
||||
)
|
||||
vorta.store.connection.init_db(mock_db)
|
||||
|
||||
default_profile = BackupProfileModel(name='Default')
|
||||
default_profile.save()
|
||||
|
||||
repo_path, source_dir, _ = create_test_repo
|
||||
|
||||
new_repo = RepoModel(url=repo_path)
|
||||
new_repo.encryption = 'none'
|
||||
new_repo.save()
|
||||
|
||||
default_profile.repo = new_repo.id
|
||||
default_profile.dont_run_on_metered_networks = False
|
||||
default_profile.validation_on = False
|
||||
default_profile.save()
|
||||
|
||||
source_dir = SourceFileModel(dir=source_dir, repo=new_repo, dir_size=12, dir_files_count=3, path_isdir=True)
|
||||
source_dir.save()
|
||||
|
||||
qapp.main_window.deleteLater()
|
||||
del qapp.main_window
|
||||
qapp.main_window = MainWindow(qapp) # Re-open main window to apply mock data in UI
|
||||
|
||||
qapp.scheduler.schedule_changed.disconnect()
|
||||
|
||||
yield
|
||||
|
||||
qapp.jobs_manager.cancel_all_jobs()
|
||||
qapp.backup_finished_event.disconnect()
|
||||
qtbot.waitUntil(lambda: not qapp.jobs_manager.is_worker_running(), **pytest._wait_defaults)
|
||||
mock_db.close()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def choose_file_dialog(tmpdir):
|
||||
class MockFileDialog:
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.directory = kwargs.get('directory', None)
|
||||
self.subdirectory = kwargs.get('subdirectory', None)
|
||||
|
||||
def open(self, func):
|
||||
func()
|
||||
|
||||
def selectedFiles(self):
|
||||
if self.subdirectory:
|
||||
return [str(tmpdir.join(self.subdirectory))]
|
||||
elif self.directory:
|
||||
return [str(self.directory)]
|
||||
else:
|
||||
return [str(tmpdir)]
|
||||
|
||||
return MockFileDialog
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def rootdir():
|
||||
return os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def min_borg_version(borg_version, request):
|
||||
if request.node.get_closest_marker('min_borg_version'):
|
||||
parsed_borg_version = borg_version[1]
|
||||
|
||||
if parsed_borg_version < parse_version(request.node.get_closest_marker('min_borg_version').args[0]):
|
||||
pytest.skip(
|
||||
'skipped due to borg version requirement for test: {}'.format(
|
||||
request.node.get_closest_marker('min_borg_version').args[0]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
config.addinivalue_line(
|
||||
"markers",
|
||||
"min_borg_version(): set minimum required borg version for a test",
|
||||
)
|
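A usage note (illustration only, taken from the files in this commit): a test opts into the min_borg_version marker registered above exactly as the compaction test in tests/integration/test_archives.py below does, and the autouse fixture skips it when the running borg is older:

    @pytest.mark.min_borg_version('1.2.0a1')
    def test_repo_compact(qapp, qtbot):
        ...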
tests/integration/test_archives.py (new file, 185 lines)
|
@ -0,0 +1,185 @@
|
|||
"""
|
||||
This file contains tests for the Archive tab to test the various archive related borg commands.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from collections import namedtuple
|
||||
|
||||
import psutil
|
||||
import pytest
|
||||
import vorta.borg
|
||||
import vorta.utils
|
||||
import vorta.views.archive_tab
|
||||
from PyQt6 import QtCore
|
||||
from vorta.store.models import ArchiveModel
|
||||
|
||||
|
||||
def test_repo_list(qapp, qtbot):
|
||||
"""Test that the archives are created and repo list is populated correctly"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: not tab.bCheck.isEnabled(), **pytest._wait_defaults)
|
||||
|
||||
assert not tab.bCheck.isEnabled()
|
||||
|
||||
qtbot.waitUntil(lambda: 'Refreshing archives done.' in main.progressText.text(), **pytest._wait_defaults)
|
||||
assert ArchiveModel.select().count() == 6
|
||||
assert 'Refreshing archives done.' in main.progressText.text()
|
||||
assert tab.bCheck.isEnabled()
|
||||
|
||||
|
||||
def test_repo_prune(qapp, qtbot):
|
||||
"""Test for archive pruning"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
|
||||
qtbot.mouseClick(tab.bPrune, QtCore.Qt.MouseButton.LeftButton)
|
||||
qtbot.waitUntil(lambda: 'Pruning old archives' in main.progressText.text(), **pytest._wait_defaults)
|
||||
qtbot.waitUntil(lambda: 'Refreshing archives done.' in main.progressText.text(), **pytest._wait_defaults)
|
||||
|
||||
|
||||
@pytest.mark.min_borg_version('1.2.0a1')
|
||||
def test_repo_compact(qapp, qtbot):
|
||||
"""Test for archive compaction"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
|
||||
qtbot.waitUntil(lambda: tab.compactButton.isEnabled(), **pytest._wait_defaults)
|
||||
assert tab.compactButton.isEnabled()
|
||||
|
||||
qtbot.mouseClick(tab.compactButton, QtCore.Qt.MouseButton.LeftButton)
|
||||
qtbot.waitUntil(lambda: 'compaction freed about' in main.logText.text().lower(), **pytest._wait_defaults)
|
||||
|
||||
|
||||
def test_check(qapp, qtbot):
|
||||
"""Test for archive consistency check"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
|
||||
qapp.check_failed_event.disconnect()
|
||||
|
||||
qtbot.waitUntil(lambda: tab.bCheck.isEnabled(), **pytest._wait_defaults)
|
||||
qtbot.mouseClick(tab.bCheck, QtCore.Qt.MouseButton.LeftButton)
|
||||
success_text = 'INFO: Archive consistency check complete'
|
||||
|
||||
qtbot.waitUntil(lambda: success_text in main.logText.text(), **pytest._wait_defaults)
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == 'darwin', reason="Macos fuse support is uncertain")
|
||||
def test_mount(qapp, qtbot, monkeypatch, choose_file_dialog, tmpdir):
|
||||
"""Test for archive mounting and unmounting"""
|
||||
|
||||
def psutil_disk_partitions(**kwargs):
|
||||
DiskPartitions = namedtuple('DiskPartitions', ['device', 'mountpoint'])
|
||||
return [DiskPartitions('borgfs', str(tmpdir))]
|
||||
|
||||
monkeypatch.setattr(psutil, "disk_partitions", psutil_disk_partitions)
|
||||
monkeypatch.setattr(vorta.views.archive_tab, "choose_file_dialog", choose_file_dialog)
|
||||
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
tab.archiveTable.selectRow(0)
|
||||
|
||||
qtbot.waitUntil(lambda: tab.bMountRepo.isEnabled(), **pytest._wait_defaults)
|
||||
|
||||
qtbot.mouseClick(tab.bMountArchive, QtCore.Qt.MouseButton.LeftButton)
|
||||
qtbot.waitUntil(lambda: tab.mountErrors.text().startswith('Mounted'), **pytest._wait_defaults)
|
||||
|
||||
tab.bmountarchive_clicked()
|
||||
qtbot.waitUntil(lambda: tab.mountErrors.text().startswith('Un-mounted successfully.'), **pytest._wait_defaults)
|
||||
|
||||
tab.bmountrepo_clicked()
|
||||
qtbot.waitUntil(lambda: tab.mountErrors.text().startswith('Mounted'), **pytest._wait_defaults)
|
||||
|
||||
tab.bmountrepo_clicked()
|
||||
qtbot.waitUntil(lambda: tab.mountErrors.text().startswith('Un-mounted successfully.'), **pytest._wait_defaults)
|
||||
|
||||
|
||||
def test_archive_extract(qapp, qtbot, monkeypatch, choose_file_dialog, tmpdir):
|
||||
"""Test for archive extraction"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
|
||||
tab.archiveTable.selectRow(2)
|
||||
tab.extract_action()
|
||||
|
||||
qtbot.waitUntil(lambda: hasattr(tab, '_window'), **pytest._wait_defaults)
|
||||
|
||||
# Select all files
|
||||
tree_view = tab._window.treeView.model()
|
||||
tree_view.setData(tree_view.index(0, 0), QtCore.Qt.CheckState.Checked, QtCore.Qt.ItemDataRole.CheckStateRole)
|
||||
monkeypatch.setattr(vorta.views.archive_tab, "choose_file_dialog", choose_file_dialog)
|
||||
qtbot.mouseClick(tab._window.extractButton, QtCore.Qt.MouseButton.LeftButton)
|
||||
|
||||
qtbot.waitUntil(lambda: 'Restored files from archive.' in main.progressText.text(), **pytest._wait_defaults)
|
||||
|
||||
assert [item.basename for item in tmpdir.listdir()] == ['private' if sys.platform == 'darwin' else 'tmp']
|
||||
|
||||
|
||||
def test_archive_delete(qapp, qtbot, mocker):
|
||||
"""Test for archive deletion"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
|
||||
archivesCount = tab.archiveTable.rowCount()
|
||||
|
||||
mocker.patch.object(vorta.views.archive_tab.ArchiveTab, 'confirm_dialog', lambda x, y, z: True)
|
||||
|
||||
tab.archiveTable.selectRow(0)
|
||||
tab.delete_action()
|
||||
qtbot.waitUntil(lambda: 'Archive deleted.' in main.progressText.text(), **pytest._wait_defaults)
|
||||
|
||||
assert ArchiveModel.select().count() == archivesCount - 1
|
||||
assert tab.archiveTable.rowCount() == archivesCount - 1
|
||||
|
||||
|
||||
def test_archive_rename(qapp, qtbot, mocker):
|
||||
"""Test for archive renaming"""
|
||||
main = qapp.main_window
|
||||
tab = main.archiveTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(3)
|
||||
tab.refresh_archive_list()
|
||||
qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)
|
||||
|
||||
tab.archiveTable.selectRow(0)
|
||||
new_archive_name = 'idf89d8f9d8fd98'
|
||||
mocker.patch.object(vorta.views.archive_tab.QInputDialog, 'getText', return_value=(new_archive_name, True))
|
||||
tab.rename_action()
|
||||
|
||||
# Successful rename case
|
||||
qtbot.waitUntil(lambda: tab.mountErrors.text() == 'Archive renamed.', **pytest._wait_defaults)
|
||||
assert ArchiveModel.select().filter(name=new_archive_name).count() == 1
|
||||
|
||||
# Duplicate name case
|
||||
tab.archiveTable.selectRow(0)
|
||||
exp_text = 'An archive with this name already exists.'
|
||||
tab.rename_action()
|
||||
qtbot.waitUntil(lambda: tab.mountErrors.text() == exp_text, **pytest._wait_defaults)
|
tests/integration/test_borg.py (new file, 63 lines)
@@ -0,0 +1,63 @@
"""
This file contains tests that directly call borg commands and verify the exit code.
"""

from pathlib import Path

import pytest
import vorta.borg
import vorta.store.models
from vorta.borg.info_archive import BorgInfoArchiveJob
from vorta.borg.info_repo import BorgInfoRepoJob
from vorta.borg.prune import BorgPruneJob


def test_borg_prune(qapp, qtbot):
    """This test runs borg prune on a test repo directly without UI"""
    params = BorgPruneJob.prepare(vorta.store.models.BackupProfileModel.select().first())
    thread = BorgPruneJob(params['cmd'], params, qapp)

    with qtbot.waitSignal(thread.result, **pytest._wait_defaults) as blocker:
        blocker.connect(thread.updated)
        thread.run()

    assert blocker.args[0]['returncode'] == 0


# test borg info
def test_borg_repo_info(qapp, qtbot, tmpdir):
    """This test runs borg info on a test repo directly without UI"""
    repo_info = {
        'repo_url': str(Path(tmpdir).parent / 'repo0'),
        'repo_name': 'repo0',
        'extra_borg_arguments': '',
        'ssh_key': '',
        'password': '',
    }

    params = BorgInfoRepoJob.prepare(repo_info)
    thread = BorgInfoRepoJob(params['cmd'], params, qapp)

    with qtbot.waitSignal(thread.result, **pytest._wait_defaults) as blocker:
        blocker.connect(thread.result)
        thread.run()

    assert blocker.args[0]['returncode'] == 0


def test_borg_archive_info(qapp, qtbot, tmpdir):
    """Check that archive info command works"""
    main = qapp.main_window
    tab = main.archiveTab
    main.tabWidget.setCurrentIndex(3)
    tab.refresh_archive_list()
    qtbot.waitUntil(lambda: tab.archiveTable.rowCount() > 0, **pytest._wait_defaults)

    params = BorgInfoArchiveJob.prepare(vorta.store.models.BackupProfileModel.select().first(), "test-archive1")
    thread = BorgInfoArchiveJob(params['cmd'], params, qapp)

    with qtbot.waitSignal(thread.result, **pytest._wait_defaults) as blocker:
        blocker.connect(thread.result)
        thread.run()

    assert blocker.args[0]['returncode'] == 0
tests/integration/test_diff.py (new file, 385 lines)
|
@ -0,0 +1,385 @@
|
|||
"""
|
||||
These tests compare the output of the diff command with the expected output.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import vorta.borg
|
||||
import vorta.utils
|
||||
import vorta.views.archive_tab
|
||||
from pkg_resources import parse_version
|
||||
from vorta.borg.diff import BorgDiffJob
|
||||
from vorta.views.diff_result import (
|
||||
ChangeType,
|
||||
DiffTree,
|
||||
FileType,
|
||||
ParseThread,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'archive_name_1, archive_name_2, expected',
|
||||
[
|
||||
(
|
||||
'test-archive1',
|
||||
'test-archive2',
|
||||
[
|
||||
{
|
||||
'subpath': 'dir',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.MODIFIED,
|
||||
'modified': None,
|
||||
},
|
||||
'min_version': '1.2.4',
|
||||
'max_version': '1.2.4',
|
||||
},
|
||||
{
|
||||
'subpath': 'file',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.MODIFIED,
|
||||
'modified': (0, 0),
|
||||
},
|
||||
'min_version': '1.2.4',
|
||||
'max_version': '1.2.4',
|
||||
},
|
||||
{
|
||||
'subpath': 'chrdev',
|
||||
'data': {
|
||||
'file_type': FileType.CHRDEV,
|
||||
'change_type': ChangeType.ADDED,
|
||||
'modified': None,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'fifo',
|
||||
'data': {
|
||||
'file_type': FileType.FIFO,
|
||||
'change_type': ChangeType.ADDED,
|
||||
'modified': None,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'hardlink',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.ADDED,
|
||||
'modified': None,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'symlink',
|
||||
'data': {
|
||||
'file_type': FileType.LINK,
|
||||
'change_type': ChangeType.ADDED,
|
||||
'modified': None,
|
||||
},
|
||||
},
|
||||
],
|
||||
),
|
||||
(
|
||||
'test-archive2',
|
||||
'test-archive3',
|
||||
[
|
||||
{
|
||||
'subpath': 'borg_src',
|
||||
'match_startsWith': True,
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.MODIFIED,
|
||||
'modified': None,
|
||||
},
|
||||
'min_version': '1.2.4',
|
||||
'max_version': '1.2.4',
|
||||
},
|
||||
{
|
||||
'subpath': 'dir',
|
||||
'data': {
|
||||
'file_type': FileType.DIRECTORY,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
'modified': None,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'chrdev',
|
||||
'data': {
|
||||
'file_type': FileType.CHRDEV,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'fifo',
|
||||
'data': {
|
||||
'file_type': FileType.FIFO,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'file',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'hardlink',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'symlink',
|
||||
'data': {
|
||||
'file_type': FileType.LINK,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'dir1',
|
||||
'data': {
|
||||
'file_type': FileType.DIRECTORY,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'chrdev',
|
||||
'data': {
|
||||
'file_type': FileType.CHRDEV,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'fifo',
|
||||
'data': {
|
||||
'file_type': FileType.FIFO,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'file',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'hardlink',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'symlink',
|
||||
'data': {
|
||||
'file_type': FileType.LINK,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
],
|
||||
),
|
||||
(
|
||||
'test-archive3',
|
||||
'test-archive4',
|
||||
[
|
||||
{
|
||||
'subpath': 'dir1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.MODIFIED,
|
||||
},
|
||||
'min_version': '1.2.4',
|
||||
'max_version': '1.2.4',
|
||||
},
|
||||
{
|
||||
'subpath': 'chrdev',
|
||||
'data': {
|
||||
'file_type': FileType.CHRDEV,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'chrdev1',
|
||||
'data': {
|
||||
'file_type': FileType.CHRDEV,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'fifo',
|
||||
'data': {
|
||||
'file_type': FileType.FIFO,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'fifo1',
|
||||
'data': {
|
||||
'file_type': FileType.FIFO,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'file',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'file1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'hardlink',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'hardlink1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'symlink',
|
||||
'data': {
|
||||
'file_type': FileType.LINK,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'symlink1',
|
||||
'data': {
|
||||
'file_type': FileType.LINK,
|
||||
'change_type': ChangeType.ADDED,
|
||||
},
|
||||
},
|
||||
],
|
||||
),
|
||||
(
|
||||
'test-archive4',
|
||||
'test-archive5',
|
||||
[
|
||||
{
|
||||
'subpath': 'dir1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.MODIFIED,
|
||||
},
|
||||
'min_version': '1.2.4',
|
||||
'max_version': '1.2.4',
|
||||
},
|
||||
{
|
||||
'subpath': 'chrdev1',
|
||||
'data': {
|
||||
'file_type': FileType.CHRDEV,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'fifo1',
|
||||
'data': {
|
||||
'file_type': FileType.FIFO,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'file1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'hardlink1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
{
|
||||
'subpath': 'symlink1',
|
||||
'data': {
|
||||
'file_type': FileType.LINK,
|
||||
'change_type': ChangeType.REMOVED,
|
||||
},
|
||||
},
|
||||
],
|
||||
),
|
||||
(
|
||||
'test-archive5',
|
||||
'test-archive6',
|
||||
[
|
||||
{
|
||||
'subpath': 'dir1',
|
||||
'data': {
|
||||
'file_type': FileType.FILE,
|
||||
'change_type': ChangeType.MODIFIED,
|
||||
},
|
||||
'min_version': '1.2.4',
|
||||
'max_version': '1.2.4',
|
||||
},
|
||||
],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_archive_diff_lines(qapp, qtbot, borg_version, create_test_repo, archive_name_1, archive_name_2, expected):
|
||||
"""Test that the diff lines are parsed correctly for supported borg versions"""
|
||||
parsed_borg_version = borg_version[1]
|
||||
supports_fifo = parsed_borg_version > parse_version('1.1.18')
|
||||
supports_chrdev = create_test_repo[2]
|
||||
|
||||
params = BorgDiffJob.prepare(vorta.store.models.BackupProfileModel.select().first(), archive_name_1, archive_name_2)
|
||||
thread = BorgDiffJob(params['cmd'], params, qapp)
|
||||
|
||||
with qtbot.waitSignal(thread.result, **pytest._wait_defaults) as blocker:
|
||||
blocker.connect(thread.updated)
|
||||
thread.run()
|
||||
|
||||
diff_lines = blocker.args[0]['data']
|
||||
json_lines = blocker.args[0]['params']['json_lines']
|
||||
|
||||
model = DiffTree()
|
||||
model.setMode(model.DisplayMode.FLAT)
|
||||
|
||||
# Use ParseThread to parse the diff lines
|
||||
parse_thread = ParseThread(diff_lines, json_lines, model)
|
||||
parse_thread.start()
|
||||
qtbot.waitUntil(lambda: parse_thread.isFinished(), **pytest._wait_defaults)
|
||||
|
||||
expected = [
|
||||
item
|
||||
for item in expected
|
||||
if (
|
||||
('min_version' not in item or parse_version(item['min_version']) <= parsed_borg_version)
|
||||
and ('max_version' not in item or parse_version(item['max_version']) >= parsed_borg_version)
|
||||
and (item['data']['file_type'] != FileType.FIFO or supports_fifo)
|
||||
and (item['data']['file_type'] != FileType.CHRDEV or supports_chrdev)
|
||||
)
|
||||
]
|
||||
|
||||
# diff versions of borg produce inconsistent ordering of diff lines so we sort the expected and model
|
||||
expected = sorted(expected, key=lambda item: item['subpath'])
|
||||
sorted_model = sorted(
|
||||
[model.index(index, 0).internalPointer() for index in range(model.rowCount())],
|
||||
key=lambda item: item.subpath,
|
||||
)
|
||||
|
||||
assert len(sorted_model) == len(expected)
|
||||
|
||||
for index, item in enumerate(expected):
|
||||
if 'match_startsWith' in item and item['match_startsWith']:
|
||||
assert sorted_model[index].subpath.startswith(item['subpath'])
|
||||
else:
|
||||
assert sorted_model[index].subpath == item['subpath']
|
||||
|
||||
for key, value in item['data'].items():
|
||||
assert getattr(sorted_model[index].data, key) == value
|
tests/integration/test_init.py (new file, 98 lines)
|
@ -0,0 +1,98 @@
|
|||
"""
|
||||
Test initialization of new repositories and adding existing ones.
|
||||
"""
|
||||
|
||||
import os
|
||||
from pathlib import PurePath
|
||||
|
||||
import pytest
|
||||
import vorta.borg
|
||||
import vorta.utils
|
||||
import vorta.views.repo_add_dialog
|
||||
from PyQt6.QtCore import Qt
|
||||
from PyQt6.QtWidgets import QMessageBox
|
||||
|
||||
LONG_PASSWORD = 'long-password-long'
|
||||
TEST_REPO_NAME = 'TEST - REPONAME'
|
||||
|
||||
|
||||
def test_create_repo(qapp, qtbot, monkeypatch, choose_file_dialog, tmpdir):
|
||||
"""Test initializing a new repository"""
|
||||
main = qapp.main_window
|
||||
main.repoTab.new_repo()
|
||||
add_repo_window = main.repoTab._window
|
||||
main.show()
|
||||
|
||||
# create new folder in tmpdir
|
||||
new_repo_path = tmpdir.join('new_repo')
|
||||
new_repo_path.mkdir()
|
||||
|
||||
monkeypatch.setattr(
|
||||
vorta.views.repo_add_dialog,
|
||||
"choose_file_dialog",
|
||||
lambda *args, **kwargs: choose_file_dialog(*args, **kwargs, subdirectory=new_repo_path.basename),
|
||||
)
|
||||
qtbot.mouseClick(add_repo_window.chooseLocalFolderButton, Qt.MouseButton.LeftButton)
|
||||
|
||||
# clear auto input of repo name from url
|
||||
add_repo_window.repoName.selectAll()
|
||||
add_repo_window.repoName.del_()
|
||||
qtbot.keyClicks(add_repo_window.repoName, TEST_REPO_NAME)
|
||||
|
||||
qtbot.keyClicks(add_repo_window.passwordInput.passwordLineEdit, LONG_PASSWORD)
|
||||
qtbot.keyClicks(add_repo_window.passwordInput.confirmLineEdit, LONG_PASSWORD)
|
||||
|
||||
add_repo_window.run()
|
||||
|
||||
qtbot.waitUntil(lambda: main.repoTab.repoSelector.count() == 2, **pytest._wait_defaults)
|
||||
|
||||
# Check if repo was created in tmpdir
|
||||
repo_url = (
|
||||
vorta.store.models.RepoModel.select().where(vorta.store.models.RepoModel.name == TEST_REPO_NAME).get().url
|
||||
)
|
||||
assert PurePath(repo_url).parent == tmpdir
|
||||
assert PurePath(repo_url).name == 'new_repo'
|
||||
|
||||
# check that new_repo_path contains folder data
|
||||
assert os.path.exists(new_repo_path.join('data'))
|
||||
assert os.path.exists(new_repo_path.join('config'))
|
||||
assert os.path.exists(new_repo_path.join('README'))
|
||||
|
||||
|
||||
def test_add_existing_repo(qapp, qtbot, monkeypatch, choose_file_dialog):
|
||||
"""Test adding an existing repository"""
|
||||
main = qapp.main_window
|
||||
tab = main.repoTab
|
||||
|
||||
main.tabWidget.setCurrentIndex(0)
|
||||
current_repo_path = vorta.store.models.RepoModel.select().first().url
|
||||
|
||||
monkeypatch.setattr(QMessageBox, "show", lambda *args: True)
|
||||
qtbot.mouseClick(main.repoTab.repoRemoveToolbutton, Qt.MouseButton.LeftButton)
|
||||
qtbot.waitUntil(
|
||||
lambda: tab.repoSelector.count() == 1 and tab.repoSelector.currentText() == "No repository selected",
|
||||
**pytest._wait_defaults,
|
||||
)
|
||||
|
||||
# add existing repo again
|
||||
main.repoTab.add_existing_repo()
|
||||
add_repo_window = main.repoTab._window
|
||||
|
||||
monkeypatch.setattr(
|
||||
vorta.views.repo_add_dialog,
|
||||
"choose_file_dialog",
|
||||
lambda *args, **kwargs: choose_file_dialog(*args, **kwargs, directory=current_repo_path),
|
||||
)
|
||||
qtbot.mouseClick(add_repo_window.chooseLocalFolderButton, Qt.MouseButton.LeftButton)
|
||||
|
||||
# clear auto input of repo name from url
|
||||
add_repo_window.repoName.selectAll()
|
||||
add_repo_window.repoName.del_()
|
||||
qtbot.keyClicks(add_repo_window.repoName, TEST_REPO_NAME)
|
||||
|
||||
add_repo_window.run()
|
||||
|
||||
# check that repo was added
|
||||
qtbot.waitUntil(lambda: tab.repoSelector.count() == 1, **pytest._wait_defaults)
|
||||
assert vorta.store.models.RepoModel.select().first().url == str(current_repo_path)
|
||||
assert vorta.store.models.RepoModel.select().first().name == TEST_REPO_NAME
|
tests/integration/test_repo.py (new file, 24 lines)
@@ -0,0 +1,24 @@
"""
Test backup creation
"""

import pytest
from PyQt6 import QtCore
from vorta.store.models import ArchiveModel, EventLogModel


def test_create(qapp, qtbot):
    """Test for manual archive creation"""
    main = qapp.main_window
    main.archiveTab.refresh_archive_list()
    qtbot.waitUntil(lambda: main.archiveTab.archiveTable.rowCount() > 0, **pytest._wait_defaults)

    qtbot.mouseClick(main.createStartBtn, QtCore.Qt.MouseButton.LeftButton)
    qtbot.waitUntil(lambda: 'Backup finished.' in main.progressText.text(), **pytest._wait_defaults)
    qtbot.waitUntil(lambda: main.createStartBtn.isEnabled(), **pytest._wait_defaults)

    assert EventLogModel.select().count() == 2
    assert ArchiveModel.select().count() == 7
    assert main.createStartBtn.isEnabled()
    assert main.archiveTab.archiveTable.rowCount() == 7
    assert main.scheduleTab.logTableWidget.rowCount() == 2
tests/unit/borg_json_output/rename_stderr.json (new empty file)
tests/unit/borg_json_output/rename_stdout.json (new empty file)
tests/unit/conftest.py (new file, 107 lines)
|
@ -0,0 +1,107 @@
|
|||
import os
|
||||
from datetime import datetime as dt
|
||||
|
||||
import pytest
|
||||
import vorta
|
||||
import vorta.application
|
||||
import vorta.borg.jobs_manager
|
||||
from peewee import SqliteDatabase
|
||||
from vorta.store.models import (
|
||||
ArchiveModel,
|
||||
BackupProfileModel,
|
||||
EventLogModel,
|
||||
RepoModel,
|
||||
RepoPassword,
|
||||
SchemaVersion,
|
||||
SettingsModel,
|
||||
SourceFileModel,
|
||||
WifiSettingModel,
|
||||
)
|
||||
from vorta.views.main_window import MainWindow
|
||||
|
||||
models = [
|
||||
RepoModel,
|
||||
RepoPassword,
|
||||
BackupProfileModel,
|
||||
SourceFileModel,
|
||||
SettingsModel,
|
||||
ArchiveModel,
|
||||
WifiSettingModel,
|
||||
EventLogModel,
|
||||
SchemaVersion,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope='function', autouse=True)
|
||||
def init_db(qapp, qtbot, tmpdir_factory):
|
||||
tmp_db = tmpdir_factory.mktemp('Vorta').join('settings.sqlite')
|
||||
mock_db = SqliteDatabase(
|
||||
str(tmp_db),
|
||||
pragmas={
|
||||
'journal_mode': 'wal',
|
||||
},
|
||||
)
|
||||
vorta.store.connection.init_db(mock_db)
|
||||
|
||||
default_profile = BackupProfileModel(name='Default')
|
||||
default_profile.save()
|
||||
|
||||
new_repo = RepoModel(url='i0fi93@i593.repo.borgbase.com:repo')
|
||||
new_repo.encryption = 'none'
|
||||
new_repo.save()
|
||||
|
||||
default_profile.repo = new_repo.id
|
||||
default_profile.dont_run_on_metered_networks = False
|
||||
default_profile.validation_on = False
|
||||
default_profile.save()
|
||||
|
||||
test_archive = ArchiveModel(snapshot_id='99999', name='test-archive', time=dt(2000, 1, 1, 0, 0), repo=1)
|
||||
test_archive.save()
|
||||
|
||||
test_archive1 = ArchiveModel(snapshot_id='99998', name='test-archive1', time=dt(2000, 1, 1, 0, 0), repo=1)
|
||||
test_archive1.save()
|
||||
|
||||
source_dir = SourceFileModel(dir='/tmp/another', repo=new_repo, dir_size=100, dir_files_count=18, path_isdir=True)
|
||||
source_dir.save()
|
||||
|
||||
qapp.main_window.deleteLater()
|
||||
del qapp.main_window
|
||||
qapp.main_window = MainWindow(qapp) # Re-open main window to apply mock data in UI
|
||||
|
||||
yield
|
||||
|
||||
qapp.jobs_manager.cancel_all_jobs()
|
||||
qapp.backup_finished_event.disconnect()
|
||||
qapp.scheduler.schedule_changed.disconnect()
|
||||
qtbot.waitUntil(lambda: not qapp.jobs_manager.is_worker_running(), **pytest._wait_defaults)
|
||||
mock_db.close()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def choose_file_dialog(*args):
|
||||
class MockFileDialog:
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def open(self, func):
|
||||
func()
|
||||
|
||||
def selectedFiles(self):
|
||||
return ['/tmp']
|
||||
|
||||
return MockFileDialog
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def borg_json_output():
|
||||
def _read_json(subcommand):
|
||||
stdout = open(f'tests/unit/borg_json_output/{subcommand}_stdout.json')
|
||||
stderr = open(f'tests/unit/borg_json_output/{subcommand}_stderr.json')
|
||||
return stdout, stderr
|
||||
|
||||
return _read_json
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def rootdir():
|
||||
return os.path.dirname(os.path.abspath(__file__))
|