Mirror of https://github.com/borgbackup/borg.git
Merge pull request #2648 from textshell/feature/mandatory-features-master

Add minimal version of in-repository mandatory feature flags. (master)

Commit: 86363dcd4b
7 changed files with 299 additions and 45 deletions
conftest.py (36 lines changed)

@@ -2,10 +2,20 @@
 import pytest

+# IMPORTANT keep this above all other borg imports to avoid inconsistent values
+# for `from borg.constants import PBKDF2_ITERATIONS` (or star import) usages before
+# this is executed
+from borg import constants
+# no fixture-based monkey-patching since star-imports are used for the constants module
+constants.PBKDF2_ITERATIONS = 1
+
 # needed to get pretty assertion failures in unit tests:
 if hasattr(pytest, 'register_assert_rewrite'):
     pytest.register_assert_rewrite('borg.testsuite')


+import borg.cache
 from borg.logger import setup_logging

 # Ensure that the loggers exist for all tests

@@ -14,12 +24,7 @@
 from borg.testsuite import has_lchflags, has_llfuse
 from borg.testsuite import are_symlinks_supported, are_hardlinks_supported, is_utime_fully_supported
 from borg.testsuite.platform import fakeroot_detected, are_acls_working
-from borg import xattr, constants
+from borg import xattr


-def pytest_configure(config):
-    # no fixture-based monkey-patching since star-imports are used for the constants module
-    constants.PBKDF2_ITERATIONS = 1
-
-
 @pytest.fixture(autouse=True)

@@ -53,3 +58,22 @@ def pytest_report_header(config, startdir):
     output = "Tests enabled: " + ", ".join(enabled) + "\n"
     output += "Tests disabled: " + ", ".join(disabled)
     return output
+
+
+class DefaultPatches:
+    def __init__(self, request):
+        self.org_cache_wipe_cache = borg.cache.Cache.wipe_cache
+
+        def wipe_should_not_be_called(*a, **kw):
+            raise AssertionError("Cache wipe was triggered, if this is part of the test add @pytest.mark.allow_cache_wipe")
+        if 'allow_cache_wipe' not in request.keywords:
+            borg.cache.Cache.wipe_cache = wipe_should_not_be_called
+        request.addfinalizer(self.undo)
+
+    def undo(self):
+        borg.cache.Cache.wipe_cache = self.org_cache_wipe_cache
+
+
+@pytest.fixture(autouse=True)
+def default_patches(request):
+    return DefaultPatches(request)
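A minimal standalone sketch, independent of borg, of why the PBKDF2_ITERATIONS patch above has to run before any name or star import copies the binding (the module name m is made up for illustration):

    # `from m import X` copies the current binding, so a later reassignment of
    # m.X is invisible to code that imported X by name (as borg's star-importing
    # test modules do). Patching in a pytest_configure hook would run too late.
    import sys
    import types

    m = types.ModuleType('m')          # stand-in for borg.constants
    m.PBKDF2_ITERATIONS = 100_000
    sys.modules['m'] = m

    from m import PBKDF2_ITERATIONS    # binds the value that exists right now

    m.PBKDF2_ITERATIONS = 1            # a "too late" patch, like the old pytest_configure hook

    print(PBKDF2_ITERATIONS)           # 100000 -- the imported name kept the old value
    print(m.PBKDF2_ITERATIONS)         # 1 -- only attribute access sees the patch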
src/borg/__init__.py

@@ -1,5 +1,7 @@
 from distutils.version import LooseVersion

+# IMPORTANT keep imports from borg here to a minimum because our testsuite depends on
+# beeing able to import borg.constants and then monkey patching borg.constants.PBKDF2_ITERATIONS
 from ._version import version as __version__
src/borg/archive.py

@@ -1152,7 +1152,7 @@ def check(self, repository, repair=False, archive=None, first=0, last=0, sort_by
             self.manifest = self.rebuild_manifest()
         else:
             try:
-                self.manifest, _ = Manifest.load(repository, key=self.key)
+                self.manifest, _ = Manifest.load(repository, (Manifest.Operation.CHECK,), key=self.key)
             except IntegrityError as exc:
                 logger.error('Repository manifest is corrupted: %s', exc)
                 self.error_found = True
src/borg/archiver.py

@@ -87,8 +87,9 @@ def argument(args, str_or_bool):
     return str_or_bool


-def with_repository(fake=False, invert_fake=False, create=False, lock=True, exclusive=False, manifest=True, cache=False,
-                    secure=True):
+def with_repository(fake=False, invert_fake=False, create=False, lock=True,
+                    exclusive=False, manifest=True, cache=False, secure=True,
+                    compatibility=None):
     """
     Method decorator for subcommand-handling methods: do_XYZ(self, args, repository, …)

@@ -100,7 +101,20 @@ def with_repository(fake=False, invert_fake=False, create=False, lock=True, excl
    :param manifest: load manifest and key, pass them as keyword arguments
    :param cache: open cache, pass it as keyword argument (implies manifest)
    :param secure: do assert_secure after loading manifest
+    :param compatibility: mandatory if not create and (manifest or cache), specifies mandatory feature categories to check
    """
+
+    if not create and (manifest or cache):
+        if compatibility is None:
+            raise AssertionError("with_repository decorator used without compatibility argument")
+        if type(compatibility) is not tuple:
+            raise AssertionError("with_repository decorator compatibility argument must be of type tuple")
+    else:
+        if compatibility is not None:
+            raise AssertionError("with_repository called with compatibility argument but would not check" + repr(compatibility))
+        if create:
+            compatibility = Manifest.NO_OPERATION_CHECK
+
     def decorator(method):
         @functools.wraps(method)
         def wrapper(self, args, **kwargs):

@@ -117,7 +131,7 @@ def wrapper(self, args, **kwargs):
                                         append_only=append_only)
             with repository:
                 if manifest or cache:
-                    kwargs['manifest'], kwargs['key'] = Manifest.load(repository)
+                    kwargs['manifest'], kwargs['key'] = Manifest.load(repository, compatibility)
                 if 'compression' in args:
                     kwargs['key'].compressor = args.compression.compressor
                 if secure:
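A simplified sketch, not the borg implementation, of the contract the updated decorator enforces: a command that loads a manifest must declare the operation categories it performs, and those are what get handed to Manifest.load() for the feature check (all names below are stand-ins):

    import functools

    NO_OPERATION_CHECK = ()  # stand-in for Manifest.NO_OPERATION_CHECK

    def with_repository(create=False, manifest=True, cache=False, compatibility=None):
        # same validation rule as the hunk above: loading a manifest without
        # declaring operations is treated as a programming error
        if not create and (manifest or cache):
            if not isinstance(compatibility, tuple):
                raise AssertionError('compatibility must be a tuple of operations')
        else:
            if compatibility is not None:
                raise AssertionError('compatibility would not be checked here')
            if create:
                compatibility = NO_OPERATION_CHECK

        def decorator(method):
            @functools.wraps(method)
            def wrapper(self, args, **kwargs):
                # the real wrapper opens the repository and calls
                # Manifest.load(repository, compatibility) at this point
                kwargs['operations'] = compatibility
                return method(self, args, **kwargs)
            return wrapper
        return decorator

    class DemoArchiver:
        @with_repository(compatibility=('read',))   # borg passes (Manifest.Operation.READ,)
        def do_list(self, args, **kwargs):
            return kwargs['operations']

    print(DemoArchiver().do_list(None))              # ('read',)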
@@ -276,7 +290,7 @@ def do_check(self, args, repository):
             return EXIT_WARNING
         return EXIT_SUCCESS

-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.CHECK,))
     def do_change_passphrase(self, args, repository, manifest, key):
         """Change repository key file passphrase"""
         if not hasattr(key, 'change_passphrase'):

@@ -413,7 +427,7 @@ def test_files(path, count, size, random):
            print(fmt % ('U', msg, total_size_MB / dt_update, count, file_size_formatted, content, dt_update))
            print(fmt % ('D', msg, total_size_MB / dt_delete, count, file_size_formatted, content, dt_delete))

-    @with_repository(fake='dry_run', exclusive=True)
+    @with_repository(fake='dry_run', exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     def do_create(self, args, repository, manifest=None, key=None):
         """Create new archive"""
         matcher = PatternMatcher(fallback=True)

@@ -632,7 +646,7 @@ def item_filter(item):
                 return matched
         return item_filter

-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
     def do_extract(self, args, repository, manifest, key, archive):
         """Extract archive contents"""

@@ -714,7 +728,7 @@ def peek_and_store_hardlink_masters(item, matched):
            pi.finish()
        return self.exit_code

-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
     def do_export_tar(self, args, repository, manifest, key, archive):
         """Export archive contents as a tarball"""

@@ -927,7 +941,7 @@ def item_to_tarinfo(item, original_path):
            self.print_warning("Include pattern '%s' never matched.", pattern)
        return self.exit_code

-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
     def do_diff(self, args, repository, manifest, key, archive):
         """Diff contents of two archives"""

@@ -1140,7 +1154,7 @@ def compare_or_defer(item1, item2):

        return self.exit_code

-    @with_repository(exclusive=True, cache=True)
+    @with_repository(exclusive=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
     @with_archive
     def do_rename(self, args, repository, manifest, key, cache, archive):
         """Rename an existing archive"""

@@ -1161,7 +1175,7 @@ def do_delete(self, args, repository):

     def _delete_archives(self, args, repository):
         """Delete archives"""
-        manifest, key = Manifest.load(repository)
+        manifest, key = Manifest.load(repository, (Manifest.Operation.DELETE,))

         if args.location.archive:
             archive_names = (args.location.archive,)

@@ -1219,7 +1233,7 @@ def _delete_repository(self, args, repository):
        if not args.cache_only:
            msg = []
            try:
-                manifest, key = Manifest.load(repository)
+                manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
            except NoManifestError:
                msg.append("You requested to completely DELETE the repository *including* all archives it may "
                           "contain.")

@@ -1258,7 +1272,7 @@ def do_mount(self, args):

        return self._do_mount(args)

-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     def _do_mount(self, args, repository, manifest, key):
         from .fuse import FuseOperations

@@ -1276,7 +1290,7 @@ def do_umount(self, args):
        """un-mount the FUSE filesystem"""
        return umount(args.mountpoint)

-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     def do_list(self, args, repository, manifest, key):
         """List archive or repository contents"""
         if not hasattr(sys.stdout, 'buffer'):

@@ -1348,7 +1362,7 @@ def _list_repository(self, args, manifest, write):

        return self.exit_code

-    @with_repository(cache=True)
+    @with_repository(cache=True, compatibility=(Manifest.Operation.READ,))
     def do_info(self, args, repository, manifest, key, cache):
         """Show archive details such as disk space used"""
         if any((args.location.archive, args.first, args.last, args.prefix)):

@@ -1438,7 +1452,7 @@ def _info_repository(self, args, repository, manifest, key, cache):
        print(str(cache))
        return self.exit_code

-    @with_repository(exclusive=True)
+    @with_repository(exclusive=True, compatibility=(Manifest.Operation.DELETE,))
     def do_prune(self, args, repository, manifest, key):
         """Prune repository archives according to specified rules"""
         if not any((args.secondly, args.minutely, args.hourly, args.daily,

@@ -1517,7 +1531,7 @@ def do_prune(self, args, repository, manifest, key):
     def do_upgrade(self, args, repository, manifest=None, key=None):
         """upgrade a repository from a previous version"""
         if args.tam:
-            manifest, key = Manifest.load(repository, force_tam_not_required=args.force)
+            manifest, key = Manifest.load(repository, (Manifest.Operation.CHECK,), force_tam_not_required=args.force)

             if not hasattr(key, 'change_passphrase'):
                 print('This repository is not encrypted, cannot enable TAM.')

@@ -1542,7 +1556,7 @@ def do_upgrade(self, args, repository, manifest=None, key=None):
                open(tam_file, 'w').close()
                print('Updated security database')
        elif args.disable_tam:
-            manifest, key = Manifest.load(repository, force_tam_not_required=True)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK, force_tam_not_required=True)
            if tam_required(repository):
                os.unlink(tam_required_file(repository))
            if key.tam_required:

@@ -1570,7 +1584,7 @@ def do_upgrade(self, args, repository, manifest=None, key=None):
                print("warning: %s" % e)
        return self.exit_code

-    @with_repository(cache=True, exclusive=True)
+    @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.CHECK,))
     def do_recreate(self, args, repository, manifest, key, cache):
         """Re-create archives"""
         msg = ("recreate is an experimental feature.\n"

@@ -1647,7 +1661,7 @@ def do_debug_info(self, args):
        print('Process ID:', get_process_id())
        return EXIT_SUCCESS

-    @with_repository()
+    @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
         """dump (decrypted, decompressed) archive items metadata (not: data)"""
         archive = Archive(repository, key, manifest, args.location.archive,

@@ -1661,7 +1675,7 @@ def do_debug_dump_archive_items(self, args, repository, manifest, key):
        print('Done.')
        return EXIT_SUCCESS

-    @with_repository()
+    @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive(self, args, repository, manifest, key):
         """dump decoded archive metadata (not: data)"""

@@ -1714,7 +1728,7 @@ def output(fd):
            output(fd)
        return EXIT_SUCCESS

-    @with_repository()
+    @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_manifest(self, args, repository, manifest, key):
         """dump decoded repository manifest"""

@@ -1729,7 +1743,7 @@ def do_debug_dump_manifest(self, args, repository, manifest, key):
            json.dump(meta, fd, indent=4)
        return EXIT_SUCCESS

-    @with_repository()
+    @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_repo_objs(self, args, repository, manifest, key):
         """dump (decrypted, decompressed) repo objects"""
         marker = None

@@ -1803,7 +1817,7 @@ def do_debug_delete_obj(self, args, repository):
        print('Done.')
        return EXIT_SUCCESS

-    @with_repository(manifest=False, exclusive=True, cache=True)
+    @with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_refcount_obj(self, args, repository, manifest, key, cache):
         """display refcounts for the objects with the given IDs"""
         for hex_id in args.ids:
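Taken together, the hunks above give every subcommand exactly one operation category. Summarized below as a plain mapping (an editorial summary of this diff, not code from the repository):

    # Operation category per command, as assigned by the decorator changes above.
    OPERATION_BY_COMMAND = {
        'create': 'WRITE',
        'extract': 'READ',
        'export-tar': 'READ',
        'diff': 'READ',
        'list': 'READ',
        'info': 'READ',
        'mount': 'READ',
        'rename': 'CHECK',
        'change-passphrase': 'CHECK',
        'recreate': 'CHECK',
        'upgrade --tam': 'CHECK',
        'delete (archives)': 'DELETE',
        'prune': 'DELETE',
        'delete (whole repository)': 'NO_OPERATION_CHECK',
        'upgrade --disable-tam': 'NO_OPERATION_CHECK',
        'debug-* commands': 'NO_OPERATION_CHECK',
    }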
src/borg/cache.py

@@ -15,8 +15,9 @@
 from .hashindex import ChunkIndex, ChunkIndexEntry, CacheSynchronizer
 from .helpers import Location
 from .helpers import Error
+from .helpers import Manifest
 from .helpers import get_cache_dir, get_security_dir
-from .helpers import int_to_bigint, bigint_to_int, bin_to_hex
+from .helpers import int_to_bigint, bigint_to_int, bin_to_hex, parse_stringified_list
 from .helpers import format_file_size
 from .helpers import safe_ns
 from .helpers import yes, hostname_is_unique

@@ -257,6 +258,8 @@ def load(self):
         self.manifest_id = unhexlify(self._config.get('cache', 'manifest'))
         self.timestamp = self._config.get('cache', 'timestamp', fallback=None)
         self.key_type = self._config.get('cache', 'key_type', fallback=None)
+        self.ignored_features = set(parse_stringified_list(self._config.get('cache', 'ignored_features', fallback='')))
+        self.mandatory_features = set(parse_stringified_list(self._config.get('cache', 'mandatory_features', fallback='')))
         try:
             self.integrity = dict(self._config.items('integrity'))
             if self._config.get('cache', 'manifest') != self.integrity.pop('manifest'):

@@ -281,6 +284,8 @@ def save(self, manifest=None, key=None):
         if manifest:
             self._config.set('cache', 'manifest', manifest.id_str)
             self._config.set('cache', 'timestamp', manifest.timestamp)
+            self._config.set('cache', 'ignored_features', ','.join(self.ignored_features))
+            self._config.set('cache', 'mandatory_features', ','.join(self.mandatory_features))
         if not self._config.has_section('integrity'):
             self._config.add_section('integrity')
         for file, integrity_data in self.integrity.items():

@@ -370,6 +375,12 @@ def __init__(self, repository, key, manifest, path=None, sync=True, do_files=Fal
         self.open()
         try:
             self.security_manager.assert_secure(manifest, key, cache_config=self.cache_config)
+
+            if not self.check_cache_compatibility():
+                self.wipe_cache()
+
+            self.update_compatibility()
+
             if sync and self.manifest.id != self.cache_config.manifest_id:
                 self.sync()
                 self.commit()

@@ -718,6 +729,43 @@ def legacy_cleanup():
             self.do_cache = os.path.isdir(archive_path)
             self.chunks = create_master_idx(self.chunks)

+    def check_cache_compatibility(self):
+        my_features = Manifest.SUPPORTED_REPO_FEATURES
+        if self.cache_config.ignored_features & my_features:
+            # The cache might not contain references of chunks that need a feature that is mandatory for some operation
+            # and which this version supports. To avoid corruption while executing that operation force rebuild.
+            return False
+        if not self.cache_config.mandatory_features <= my_features:
+            # The cache was build with consideration to at least one feature that this version does not understand.
+            # This client might misinterpret the cache. Thus force a rebuild.
+            return False
+        return True
+
+    def wipe_cache(self):
+        logger.warning("Discarding incompatible cache and forcing a cache rebuild")
+        archive_path = os.path.join(self.path, 'chunks.archive.d')
+        if os.path.isdir(archive_path):
+            shutil.rmtree(os.path.join(self.path, 'chunks.archive.d'))
+            os.makedirs(os.path.join(self.path, 'chunks.archive.d'))
+        self.chunks = ChunkIndex()
+        with open(os.path.join(self.path, 'files'), 'wb'):
+            pass  # empty file
+        self.cache_config.manifest_id = ''
+        self.cache_config._config.set('cache', 'manifest', '')
+
+        self.cache_config.ignored_features = set()
+        self.cache_config.mandatory_features = set()
+
+    def update_compatibility(self):
+        operation_to_features_map = self.manifest.get_all_mandatory_features()
+        my_features = Manifest.SUPPORTED_REPO_FEATURES
+        repo_features = set()
+        for operation, features in operation_to_features_map.items():
+            repo_features.update(features)
+
+        self.cache_config.ignored_features.update(repo_features - my_features)
+        self.cache_config.mandatory_features.update(repo_features & my_features)
+
     def add_chunk(self, id, chunk, stats, overwrite=False, wait=True):
         if not self.txn_active:
             self.begin_txn()
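A standalone sketch, not borg code, of the set arithmetic behind check_cache_compatibility() and update_compatibility() above (the feature name is made up; in this commit SUPPORTED_REPO_FEATURES is still empty):

    SUPPORTED_REPO_FEATURES = frozenset()   # matches Manifest.SUPPORTED_REPO_FEATURES in this commit

    def cache_is_compatible(ignored_features, mandatory_features):
        if ignored_features & SUPPORTED_REPO_FEATURES:
            # a feature that was ignored while the cache was built is supported now,
            # so the cache may lack chunk references: force a rebuild
            return False
        if not mandatory_features <= SUPPORTED_REPO_FEATURES:
            # the cache was built assuming a feature this client does not understand
            return False
        return True

    def split_repo_features(repo_mandatory_features):
        # what update_compatibility() records after looking at the manifest
        ignored = repo_mandatory_features - SUPPORTED_REPO_FEATURES
        mandatory = repo_mandatory_features & SUPPORTED_REPO_FEATURES
        return ignored, mandatory

    # a cache last written by a newer client that understands 'some-new-feature':
    print(cache_is_compatible(set(), {'some-new-feature'}))   # False -> wipe_cache()
    print(split_repo_features({'some-new-feature'}))          # ({'some-new-feature'}, set())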
src/borg/helpers.py

@@ -1,6 +1,7 @@
 import argparse
 import contextlib
 import collections
+import enum
 import grp
 import hashlib
 import logging

@@ -123,6 +124,10 @@ def check_python():
         raise PythonLibcTooOld


+class MandatoryFeatureUnsupported(Error):
+    """Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""
+
+
 def check_extension_modules():
     from . import platform, compress, item
     if hashindex.API_VERSION != '1.1_03':
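The new MandatoryFeatureUnsupported error carries the offending feature names as its argument; in borg, an Error subclass's docstring serves as the message template. Sketched below with a simplified stand-in for the Error base class (not the real one):

    class Error(Exception):
        """Error base class"""
        def get_message(self):
            # simplified stand-in: the subclass docstring is formatted with the args
            return type(self).__doc__.format(*self.args)

    class MandatoryFeatureUnsupported(Error):
        """Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""

    print(MandatoryFeatureUnsupported(['unknown-feature']).get_message())
    # Unsupported repository feature(s) ['unknown-feature']. A newer version of borg is required to access this repository.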
@@ -222,6 +227,34 @@ def get_raw_dict(self):


 class Manifest:

+    @enum.unique
+    class Operation(enum.Enum):
+        # The comments here only roughly describe the scope of each feature. In the end, additions need to be
+        # based on potential problems older clients could produce when accessing newer repositories and the
+        # tradeofs of locking version out or still allowing access. As all older versions and their exact
+        # behaviours are known when introducing new features sometimes this might not match the general descriptions
+        # below.
+
+        # The READ operation describes which features are needed to safely list and extract the archives in the
+        # repository.
+        READ = 'read'
+        # The CHECK operation is for all operations that need either to understand every detail
+        # of the repository (for consistency checks and repairs) or are seldom used functions that just
+        # should use the most restrictive feature set because more fine grained compatibility tracking is
+        # not needed.
+        CHECK = 'check'
+        # The WRITE operation is for adding archives. Features here ensure that older clients don't add archives
+        # in an old format, or is used to lock out clients that for other reasons can no longer safely add new
+        # archives.
+        WRITE = 'write'
+        # The DELETE operation is for all operations (like archive deletion) that need a 100% correct reference
+        # count and the need to be able to find all (directly and indirectly) referenced chunks of a given archive.
+        DELETE = 'delete'
+
+    NO_OPERATION_CHECK = tuple()
+
+    SUPPORTED_REPO_FEATURES = frozenset([])
+
     MANIFEST_ID = b'\0' * 32

     def __init__(self, key, repository, item_keys=None):
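An illustrative sketch (not borg code) of the manifest entry these Operation values key into: feature_flags maps an operation name to a dict with a b'mandatory' list, with byte keys and values because the manifest config comes from msgpack; the feature name below is made up:

    # Shape of the manifest's feature_flags entry and the check an old client performs.
    feature_flags = {
        b'write': {b'mandatory': [b'some-new-archive-format']},
    }

    SUPPORTED_REPO_FEATURES = frozenset()   # a client that knows no extra features

    def unsupported_features(operation_value):
        requirements = feature_flags.get(operation_value.encode(), {})
        return set(requirements.get(b'mandatory', [])) - SUPPORTED_REPO_FEATURES

    print(unsupported_features('write'))  # {b'some-new-archive-format'} -> MandatoryFeatureUnsupported would be raised
    print(unsupported_features('read'))   # set() -> reading this repository is still allowed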
@@ -242,7 +275,7 @@ def last_timestamp(self):
         return datetime.strptime(self.timestamp, "%Y-%m-%dT%H:%M:%S.%f")

     @classmethod
-    def load(cls, repository, key=None, force_tam_not_required=False):
+    def load(cls, repository, operations, key=None, force_tam_not_required=False):
         from .item import ManifestItem
         from .crypto.key import key_factory, tam_required_file, tam_required
         from .repository import Repository

@@ -257,7 +290,7 @@ def load(cls, repository, key=None, force_tam_not_required=False):
         manifest_dict, manifest.tam_verified = key.unpack_and_verify_manifest(data, force_tam_not_required=force_tam_not_required)
         m = ManifestItem(internal_dict=manifest_dict)
         manifest.id = key.id_hash(data)
-        if m.get('version') != 1:
+        if m.get('version') not in (1, 2):
             raise ValueError('Invalid manifest version')
         manifest.archives.set_raw_dict(m.archives)
         manifest.timestamp = m.get('timestamp')

@@ -275,8 +308,34 @@ def load(cls, repository, key=None, force_tam_not_required=False):
         if not manifest_required and security_required:
             logger.debug('Manifest is TAM verified and says TAM is *not* required, updating security database...')
             os.unlink(tam_required_file(repository))
+        manifest.check_repository_compatibility(operations)
         return manifest, key

+    def check_repository_compatibility(self, operations):
+        for operation in operations:
+            assert isinstance(operation, self.Operation)
+            feature_flags = self.config.get(b'feature_flags', None)
+            if feature_flags is None:
+                return
+            if operation.value.encode() not in feature_flags:
+                continue
+            requirements = feature_flags[operation.value.encode()]
+            if b'mandatory' in requirements:
+                unsupported = set(requirements[b'mandatory']) - self.SUPPORTED_REPO_FEATURES
+                if unsupported:
+                    raise MandatoryFeatureUnsupported([f.decode() for f in unsupported])
+
+    def get_all_mandatory_features(self):
+        result = {}
+        feature_flags = self.config.get(b'feature_flags', None)
+        if feature_flags is None:
+            return result
+
+        for operation, requirements in feature_flags.items():
+            if b'mandatory' in requirements:
+                result[operation.decode()] = set([feature.decode() for feature in requirements[b'mandatory']])
+        return result
+
     def write(self):
         from .item import ManifestItem
         if self.key.tam_required:
|
||||||
return hexlify(binary).decode('ascii')
|
return hexlify(binary).decode('ascii')
|
||||||
|
|
||||||
|
|
||||||
|
def parse_stringified_list(s):
|
||||||
|
l = re.split(" *, *", s)
|
||||||
|
return [item for item in l if item != '']
|
||||||
|
|
||||||
|
|
||||||
class Location:
|
class Location:
|
||||||
"""Object representing a repository / archive location
|
"""Object representing a repository / archive location
|
||||||
"""
|
"""
|
||||||
|
|
|
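The helper added above is what the cache config round trip in cache.py relies on: feature sets are stored as one comma separated string and parsed back into a list. A small usage sketch:

    import re

    def parse_stringified_list(s):
        l = re.split(" *, *", s)
        return [item for item in l if item != '']

    saved = ','.join({'feature-a', 'feature-b'})      # what CacheConfig.save() writes
    loaded = set(parse_stringified_list(saved))       # what CacheConfig.load() reads back
    assert loaded == {'feature-a', 'feature-b'}
    print(parse_stringified_list(''))                 # [] -- the fallback='' default stays empty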
src/borg/testsuite/archiver.py

@@ -41,7 +41,7 @@
 from ..crypto.keymanager import RepoIdMismatch, NotABorgKeyFile
 from ..crypto.file_integrity import FileIntegrityError
 from ..helpers import Location, get_security_dir
-from ..helpers import Manifest
+from ..helpers import Manifest, MandatoryFeatureUnsupported
 from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
 from ..helpers import bin_to_hex
 from ..helpers import MAX_S

@@ -289,7 +289,7 @@ def create_src_archive(self, name):
     def open_archive(self, name):
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             archive = Archive(repository, key, manifest, name)
         return archive, repository

@@ -1248,7 +1248,7 @@ def test_rename(self):
         self.cmd('extract', '--dry-run', self.repository_location + '::test.4')
         # Make sure both archives have been renamed
         with Repository(self.repository_path) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             self.assert_equal(len(manifest.archives), 2)
             self.assert_in('test.3', manifest.archives)
             self.assert_in('test.4', manifest.archives)

@@ -1349,7 +1349,7 @@ def test_delete_force(self):
         self.cmd('init', '--encryption=none', self.repository_location)
         self.create_src_archive('test')
         with Repository(self.repository_path, exclusive=True) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             archive = Archive(repository, key, manifest, 'test')
             for item in archive.iter_items():
                 if 'chunks' in item:

@@ -1367,7 +1367,7 @@ def test_delete_double_force(self):
         self.cmd('init', '--encryption=none', self.repository_location)
         self.create_src_archive('test')
         with Repository(self.repository_path, exclusive=True) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             archive = Archive(repository, key, manifest, 'test')
             id = archive.metadata.items[0]
             repository.put(id, b'corrupted items metadata stream chunk')
@@ -1417,9 +1417,111 @@ def test_create_dry_run(self):
         self.cmd('create', '--dry-run', self.repository_location + '::test', 'input')
         # Make sure no archive has been created
         with Repository(self.repository_path) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             self.assert_equal(len(manifest.archives), 0)

+    def add_unknown_feature(self, operation):
+        with Repository(self.repository_path, exclusive=True) as repository:
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            manifest.config[b'feature_flags'] = {operation.value.encode(): {b'mandatory': [b'unknown-feature']}}
+            manifest.write()
+            repository.commit()
+
+    def cmd_raises_unknown_feature(self, args):
+        if self.FORK_DEFAULT:
+            self.cmd(*args, exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises(MandatoryFeatureUnsupported) as excinfo:
+                self.cmd(*args)
+            assert excinfo.value.args == (['unknown-feature'],)
+
+    def test_unknown_feature_on_create(self):
+        print(self.cmd('init', '--encryption=repokey', self.repository_location))
+        self.add_unknown_feature(Manifest.Operation.WRITE)
+        self.cmd_raises_unknown_feature(['create', self.repository_location + '::test', 'input'])
+
+    def test_unknown_feature_on_change_passphrase(self):
+        print(self.cmd('init', '--encryption=repokey', self.repository_location))
+        self.add_unknown_feature(Manifest.Operation.CHECK)
+        self.cmd_raises_unknown_feature(['change-passphrase', self.repository_location])
+
+    def test_unknown_feature_on_read(self):
+        print(self.cmd('init', '--encryption=repokey', self.repository_location))
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.READ)
+        with changedir('output'):
+            self.cmd_raises_unknown_feature(['extract', self.repository_location + '::test'])
+
+        self.cmd_raises_unknown_feature(['list', self.repository_location])
+        self.cmd_raises_unknown_feature(['info', self.repository_location + '::test'])
+
+    def test_unknown_feature_on_rename(self):
+        print(self.cmd('init', '--encryption=repokey', self.repository_location))
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.CHECK)
+        self.cmd_raises_unknown_feature(['rename', self.repository_location + '::test', 'other'])
+
+    def test_unknown_feature_on_delete(self):
+        print(self.cmd('init', '--encryption=repokey', self.repository_location))
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.DELETE)
+        # delete of an archive raises
+        self.cmd_raises_unknown_feature(['delete', self.repository_location + '::test'])
+        self.cmd_raises_unknown_feature(['prune', '--keep-daily=3', self.repository_location])
+        # delete of the whole repository ignores features
+        self.cmd('delete', self.repository_location)
+
+    @unittest.skipUnless(has_llfuse, 'llfuse not installed')
+    def test_unknown_feature_on_mount(self):
+        self.cmd('init', '--encryption=repokey', self.repository_location)
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.READ)
+        mountpoint = os.path.join(self.tmpdir, 'mountpoint')
+        os.mkdir(mountpoint)
+        # XXX this might hang if it doesn't raise an error
+        self.cmd_raises_unknown_feature(['mount', self.repository_location + '::test', mountpoint])
+
+    @pytest.mark.allow_cache_wipe
+    def test_unknown_mandatory_feature_in_cache(self):
+        if self.prefix:
+            path_prefix = 'ssh://__testsuite__'
+        else:
+            path_prefix = ''
+
+        print(self.cmd('init', '--encryption=repokey', self.repository_location))
+
+        with Repository(self.repository_path, exclusive=True) as repository:
+            if path_prefix:
+                repository._location = Location(self.repository_location)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            with Cache(repository, key, manifest) as cache:
+                cache.begin_txn()
+                cache.cache_config.mandatory_features = set(['unknown-feature'])
+                cache.commit()
+
+        if self.FORK_DEFAULT:
+            self.cmd('create', self.repository_location + '::test', 'input')
+        else:
+            called = False
+            wipe_cache_safe = Cache.wipe_cache
+
+            def wipe_wrapper(*args):
+                nonlocal called
+                called = True
+                wipe_cache_safe(*args)
+
+            with patch.object(Cache, 'wipe_cache', wipe_wrapper):
+                self.cmd('create', self.repository_location + '::test', 'input')
+
+            assert called
+
+        with Repository(self.repository_path, exclusive=True) as repository:
+            if path_prefix:
+                repository._location = Location(self.repository_location)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            with Cache(repository, key, manifest) as cache:
+                assert cache.cache_config.mandatory_features == set([])
+
     def test_progress_on(self):
         self.create_regular_file('file1', size=1024 * 80)
         self.cmd('init', '--encryption=repokey', self.repository_location)
@@ -2091,7 +2193,7 @@ def check_cache(self):
         self.cmd('check', self.repository_location)
         # Then check that the cache on disk matches exactly what's in the repo.
         with self.open_repository() as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             with Cache(repository, key, manifest, sync=False) as cache:
                 original_chunks = cache.chunks
         cache.destroy(repository)

@@ -2112,7 +2214,7 @@ def test_check_cache(self):
         self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('create', self.repository_location + '::test', 'input')
         with self.open_repository() as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             with Cache(repository, key, manifest, sync=False) as cache:
                 cache.begin_txn()
                 cache.chunks.incref(list(cache.chunks.iteritems())[0][0])

@@ -2750,7 +2852,7 @@ def as_dict():

         archive, repository = self.open_archive('archive1')
         with repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             with Cache(repository, key, manifest) as cache:
                 archive = Archive(repository, key, manifest, '0.13', cache=cache, create=True)
                 archive.items_buffer.add(Attic013Item)

@@ -2762,7 +2864,7 @@ def as_dict():
 class ManifestAuthenticationTest(ArchiverTestCaseBase):
     def spoof_manifest(self, repository):
         with repository:
-            _, key = Manifest.load(repository)
+            _, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},

@@ -2775,7 +2877,7 @@ def test_fresh_init_tam_required(self):
         self.cmd('init', '--encryption=repokey', self.repository_location)
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},

@@ -2792,7 +2894,7 @@ def test_not_required(self):
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
             shutil.rmtree(get_security_dir(bin_to_hex(repository.id)))
-            _, key = Manifest.load(repository)
+            _, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             key.tam_required = False
             key.change_passphrase(key._passphrase)