borgbackup/borg (https://github.com/borgbackup/borg.git)

Commit 5eeca3493b (parent 3c8257432a): TestAdHocCache
3 changed files with 94 additions and 5 deletions
@@ -2827,7 +2827,7 @@ def define_common_options(add_common_option):
         subparser.add_argument('--json', action='store_true',
                                help='output stats as JSON (implies --stats)')
         subparser.add_argument('--no-cache-sync', dest='no_cache_sync', action='store_true',
-                               help='experimental: do not synchronize the cache')
+                               help='experimental: do not synchronize the cache. Implies --no-files-cache.')

         exclude_group = subparser.add_argument_group('Exclusion options')
         exclude_group.add_argument('-e', '--exclude', dest='patterns',
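The change above only touches the help string. For readers who want to see how such a store_true flag behaves at parse time, here is a minimal, self-contained argparse sketch; it is illustrative only and does not reproduce borg's actual subparser wiring:

    import argparse

    # Illustrative only: a bare parser, not borg's subcommand setup.
    parser = argparse.ArgumentParser()
    parser.add_argument('--no-cache-sync', dest='no_cache_sync', action='store_true',
                        help='experimental: do not synchronize the cache. Implies --no-files-cache.')

    args = parser.parse_args(['--no-cache-sync'])
    assert args.no_cache_sync is True                     # flag present -> True
    assert parser.parse_args([]).no_cache_sync is False   # flag omitted -> False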
@@ -587,7 +587,7 @@ def sync(self):
         archive indexes.
         """
         archive_path = os.path.join(self.path, 'chunks.archive.d')
-        # An index of chunks were the size had to be fetched
+        # An index of chunks whose size had to be fetched
         chunks_fetched_size_index = ChunkIndex()
         # Instrumentation
         processed_item_metadata_bytes = 0
@@ -965,7 +965,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
     do_files = False

     def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
-        pass
+        return None

     def memorize_file(self, path_hash, st, ids):
         pass
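Switching the stub from pass to an explicit return None makes the files-cache contract visible: callers get either a list of chunk ids (file known and unchanged) or None (file must be processed again), and the AdHocCache always answers None. A small, hypothetical caller sketch under that assumption (this is not borg's actual code path):

    class NoFilesCache:
        """Stand-in with the AdHocCache behaviour: there is no files cache at all."""
        def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
            return None


    def chunk_ids_for(cache, path_hash, st):
        # Hypothetical caller: reuse recorded chunks if the cache knows the file,
        # otherwise signal that the file has to be read and chunked again.
        ids = cache.file_known_and_unchanged(path_hash, st)
        if ids is not None:
            return ids
        return []   # placeholder for "chunk the file from scratch"


    assert chunk_ids_for(NoFilesCache(), bytes(32), None) == []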
@@ -986,7 +986,15 @@ def add_chunk(self, id, chunk, stats, overwrite=False, wait=True):
         return ChunkListEntry(id, size, csize)

     def seen_chunk(self, id, size=None):
-        return self.chunks.get(id, ChunkIndexEntry(0, None, None)).refcount
+        if not self._txn_active:
+            self._begin_txn()
+        entry = self.chunks.get(id, ChunkIndexEntry(0, None, None))
+        if entry.refcount and size and not entry.size:
+            # The LocalCache has existing size information and uses *size* to make an effort at detecting collisions.
+            # This is of course not possible for the AdHocCache.
+            # Here *size* is used to update the chunk's size information, which will be zero for existing chunks.
+            self.chunks[id] = entry._replace(size=size)
+        return entry.refcount

     def chunk_incref(self, id, stats, size_=None):
         if not self._txn_active:
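The new seen_chunk() backfills the size of an already-known chunk via ChunkIndexEntry._replace(). The sketch below illustrates that mechanic with collections.namedtuple standing in for the real ChunkIndexEntry; names and values are made up for the example:

    from collections import namedtuple

    # Stand-in for borg's ChunkIndexEntry (refcount, size, csize); illustrative only.
    ChunkIndexEntry = namedtuple('ChunkIndexEntry', 'refcount size csize')

    chunks = {b'chunk-id': ChunkIndexEntry(refcount=3, size=0, csize=0)}

    size = 4096  # size supplied by a caller for a chunk that is already known
    entry = chunks.get(b'chunk-id', ChunkIndexEntry(0, None, None))
    if entry.refcount and size and not entry.size:
        # _replace returns a new tuple with only the given field swapped out,
        # which is how seen_chunk() records the missing size of an existing chunk.
        chunks[b'chunk-id'] = entry._replace(size=size)

    assert chunks[b'chunk-id'] == ChunkIndexEntry(3, 4096, 0)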
@@ -1,11 +1,19 @@
 import io
+import os.path

 from msgpack import packb

 import pytest

-from ..hashindex import ChunkIndex, CacheSynchronizer
 from .hashindex import H
+from .key import TestKey
+from ..archive import Statistics
+from ..cache import AdHocCache
+from ..compress import CompressionSpec
+from ..crypto.key import RepoKey
+from ..hashindex import ChunkIndex, CacheSynchronizer
+from ..helpers import Manifest
+from ..repository import Repository


 class TestCacheSynchronizer:
@@ -196,3 +204,76 @@ def test_refcount_one_below_max_value(self):
         assert index[H(0)] == (ChunkIndex.MAX_VALUE, 1234, 5678)
         sync.feed(data)
         assert index[H(0)] == (ChunkIndex.MAX_VALUE, 1234, 5678)
+
+
+class TestAdHocCache:
+    @pytest.yield_fixture
+    def repository(self, tmpdir):
+        self.repository_location = os.path.join(str(tmpdir), 'repository')
+        with Repository(self.repository_location, exclusive=True, create=True) as repository:
+            repository.put(H(1), b'1234')
+            repository.put(Manifest.MANIFEST_ID, b'5678')
+            yield repository
+
+    @pytest.fixture
+    def key(self, repository, monkeypatch):
+        monkeypatch.setenv('BORG_PASSPHRASE', 'test')
+        key = RepoKey.create(repository, TestKey.MockArgs())
+        key.compressor = CompressionSpec('none').compressor
+        return key
+
+    @pytest.fixture
+    def manifest(self, repository, key):
+        Manifest(key, repository).write()
+        return Manifest.load(repository, key=key, operations=Manifest.NO_OPERATION_CHECK)[0]
+
+    @pytest.fixture
+    def cache(self, repository, key, manifest):
+        return AdHocCache(repository, key, manifest)
+
+    def test_does_not_contain_manifest(self, cache):
+        assert not cache.seen_chunk(Manifest.MANIFEST_ID)
+
+    def test_does_not_delete_existing_chunks(self, repository, cache):
+        assert cache.seen_chunk(H(1)) == ChunkIndex.MAX_VALUE
+        cache.chunk_decref(H(1), Statistics())
+        assert repository.get(H(1)) == b'1234'
+
+    def test_does_not_overwrite(self, cache):
+        with pytest.raises(AssertionError):
+            cache.add_chunk(H(1), b'5678', Statistics(), overwrite=True)
+
+    def test_seen_chunk_add_chunk_size(self, cache):
+        assert cache.add_chunk(H(1), b'5678', Statistics()) == (H(1), 4, 0)
+
+    def test_deletes_chunks_during_lifetime(self, cache, repository):
+        """E.g. checkpoint archives"""
+        cache.add_chunk(H(5), b'1010', Statistics())
+        assert cache.seen_chunk(H(5)) == 1
+        cache.chunk_decref(H(5), Statistics())
+        assert not cache.seen_chunk(H(5))
+        with pytest.raises(Repository.ObjectNotFound):
+            repository.get(H(5))
+
+    def test_files_cache(self, cache):
+        assert cache.file_known_and_unchanged(bytes(32), None) is None
+        assert not cache.do_files
+        assert cache.files is None
+
+    def test_txn(self, cache):
+        assert not cache._txn_active
+        cache.seen_chunk(H(5))
+        assert cache._txn_active
+        assert cache.chunks
+        cache.rollback()
+        assert not cache._txn_active
+        assert not hasattr(cache, 'chunks')
+
+    def test_incref_after_add_chunk(self, cache):
+        assert cache.add_chunk(H(3), b'5678', Statistics()) == (H(3), 4, 47)
+        assert cache.chunk_incref(H(3), Statistics()) == (H(3), 4, 47)
+
+    def test_existing_incref_after_add_chunk(self, cache):
+        """This case occurs with part files, see Archive.chunk_file."""
+        assert cache.add_chunk(H(1), b'5678', Statistics()) == (H(1), 4, 0)
+        assert cache.chunk_incref(H(1), Statistics()) == (H(1), 4, 0)