
when building the chunk index, merge all we have in the cache

Thomas Waldmann 2024-11-08 20:24:33 +01:00
parent 4a6fcc26d7
commit 00f8cdc9a7


@@ -739,28 +739,45 @@ def write_chunkindex_to_repo_cache(repository, chunks, *, clear=False, force_write
     return new_hash
 
 
+def read_chunkindex_from_repo_cache(repository, hash):
+    cache_name = f"cache/chunks.{hash}"
+    logger.debug(f"trying to load {cache_name} from the repo...")
+    try:
+        chunks_data = repository.store_load(cache_name)
+    except (Repository.ObjectNotFound, StoreObjectNotFound):
+        # TODO: ^ seem like RemoteRepository raises Repository.ONF instead of StoreONF
+        logger.debug(f"{cache_name} not found in the repository.")
+    else:
+        if xxh64(chunks_data, seed=CHUNKINDEX_HASH_SEED) == hex_to_bin(hash):
+            logger.debug(f"{cache_name} is valid.")
+            with io.BytesIO(chunks_data) as f:
+                chunks = ChunkIndex.read(f)
+            return chunks
+        else:
+            logger.debug(f"{cache_name} is invalid.")
+
+
 def build_chunkindex_from_repo(repository, *, disable_caches=False, cache_immediately=False):
     try_upgrade_to_b14(repository)
-    # first, try to load a pre-computed and centrally cached chunks index:
+    # first, try to build a fresh, mostly complete chunk index from centrally cached chunk indexes:
     if not disable_caches:
         hashes = list_chunkindex_hashes(repository)
-        assert len(hashes) <= 1, f"chunk indexes: {hashes}"  # later we change to multiple chunkindexes...
-        for hash in hashes:
-            cache_name = f"cache/chunks.{hash}"
-            logger.debug(f"trying to load {cache_name} from the repo...")
-            try:
-                chunks_data = repository.store_load(cache_name)
-            except (Repository.ObjectNotFound, StoreObjectNotFound):
-                # TODO: ^ seem like RemoteRepository raises Repository.ONF instead of StoreONF
-                logger.debug(f"{cache_name} not found in the repository.")
-            else:
-                if xxh64(chunks_data, seed=CHUNKINDEX_HASH_SEED) == hex_to_bin(hash):
-                    logger.debug(f"{cache_name} is valid.")
-                    with io.BytesIO(chunks_data) as f:
-                        chunks = ChunkIndex.read(f)
-                    return chunks
-                else:
-                    logger.debug(f"{cache_name} is invalid.")
+        if hashes:  # we have at least one cached chunk index!
+            merged = 0
+            chunks = ChunkIndex()  # we'll merge all we find into this
+            for hash in hashes:
+                chunks_to_merge = read_chunkindex_from_repo_cache(repository, hash)
+                if chunks_to_merge is not None:
+                    logger.debug(f"cached chunk index {hash} gets merged...")
+                    for k, v in chunks_to_merge.items():
+                        chunks[k] = v
+                    merged += 1
+                    chunks_to_merge.clear()
+            if merged > 0:
+                if merged > 1 and cache_immediately:
+                    # immediately update cache/chunks, so we don't have to merge these again:
+                    write_chunkindex_to_repo_cache(repository, chunks, clear=False, force_write=True, delete_other=True)
+                return chunks
     # if we didn't get anything from the cache, compute the ChunkIndex the slow way:
     logger.debug("querying the chunk IDs list from the repo...")
     chunks = ChunkIndex()
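
Taken together, this hunk changes build_chunkindex_from_repo from accepting at most one cached chunk index to merging every cache/chunks.<hash> object it can read, and then writing the merged result back (when more than one index was merged and cache_immediately is set) so later runs find a single consolidated index again. A minimal sketch of the merging idea, using plain dicts in place of borg's ChunkIndex and a hypothetical load_cached callable in place of read_chunkindex_from_repo_cache:

# Sketch only: plain dicts stand in for borg's ChunkIndex; load_cached is a
# hypothetical callable returning a dict for a cached index hash, or None if
# that cached index is missing or corrupt.
def merge_cached_indexes(hashes, load_cached):
    merged_count = 0
    merged = {}  # all readable cached indexes get merged into this
    for h in hashes:
        part = load_cached(h)
        if part is not None:
            merged.update(part)  # later entries win for duplicate chunk IDs
            merged_count += 1
    # only a merge of at least one readable index is useful; otherwise the
    # caller falls back to rebuilding the index the slow way (not shown here):
    return merged if merged_count > 0 else None

# usage (all data made up for illustration):
caches = {"aa": {b"chunk1": (1, 100)}, "bb": {b"chunk2": (1, 200)}}
print(merge_cached_indexes(caches, caches.get))
# -> {b'chunk1': (1, 100), b'chunk2': (1, 200)}
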
@@ -858,7 +875,7 @@ def add_chunk(
 
     def _write_chunks_cache(self, chunks):
         # this is called from .close, so we can clear here:
-        write_chunkindex_to_repo_cache(self.repository, self._chunks, clear=True, delete_other=True)
+        write_chunkindex_to_repo_cache(self.repository, self._chunks, clear=True, delete_other=False)
         self._chunks = None  # nothing there (cleared!)
 
     def refresh_lock(self, now):
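
The second hunk is the counterpart: when a cache instance persists its chunk index on close, it now keeps the other cached indexes in the repository (delete_other=False) instead of deleting them, so indexes written by parallel or earlier borg processes survive and get merged by build_chunkindex_from_repo on the next run; only the consolidated write in the first hunk still passes delete_other=True. A rough illustration of that interplay, using an in-memory dict as the repository store and hypothetical helper names (store_index, consolidate), not borg's actual API:

import hashlib

store = {}  # stands in for the repository's key/value store

def store_index(index, *, delete_other):
    # derive a deterministic name, similar in spirit to cache/chunks.<hash>
    blob = repr(sorted(index.items())).encode()
    key = f"cache/chunks.{hashlib.sha256(blob).hexdigest()[:16]}"
    if delete_other:
        for k in [k for k in store if k.startswith("cache/chunks.")]:
            del store[k]
    store[key] = dict(index)

def consolidate():
    merged = {}
    for k in [k for k in store if k.startswith("cache/chunks.")]:
        merged.update(store[k])
    # write the merged index and drop the partial ones (delete_other=True):
    store_index(merged, delete_other=True)
    return merged

# two independent runs each write their own index without deleting the other:
store_index({b"id1": (1, 100)}, delete_other=False)
store_index({b"id2": (1, 200)}, delete_other=False)
merged = consolidate()  # the next run merges both into one cached index
assert merged == {b"id1": (1, 100), b"id2": (1, 200)}
assert len(store) == 1  # only the consolidated cache object remains
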