
cache sync: cleanup progress handling, unused parameters

commit 67b97f2223
parent 7f04e00ba2
Author: Marian Beermann
Date:   2017-05-31 20:46:57 +02:00


@@ -564,7 +564,7 @@ def cleanup_cached_archive(id):
             except FileNotFoundError:
                 pass

-        def fetch_and_build_idx(archive_id, decrypted_repository, key, chunk_idx):
+        def fetch_and_build_idx(archive_id, decrypted_repository, chunk_idx):
             csize, data = decrypted_repository.get(archive_id)
             chunk_idx.add(archive_id, 1, len(data), csize)
             archive = ArchiveItem(internal_dict=msgpack.unpackb(data))
@@ -595,6 +595,7 @@ def get_archive_ids_to_names(archive_ids):
             for info in self.manifest.archives.list():
                 if info.id in archive_ids:
                     archive_names[info.id] = info.name
+            assert len(archive_names) == len(archive_ids)
             return archive_names

         def create_master_idx(chunk_idx):
@@ -612,15 +613,12 @@ def create_master_idx(chunk_idx):
             master_index_capacity = int(len(self.repository) / ChunkIndex.MAX_LOAD_FACTOR)
             if archive_ids:
                 chunk_idx = None
-                if self.progress:
-                    pi = ProgressIndicatorPercent(total=len(archive_ids), step=0.1,
-                                                  msg='%3.0f%% Syncing chunks cache. Processing archive %s',
-                                                  msgid='cache.sync')
+                pi = ProgressIndicatorPercent(total=len(archive_ids), step=0.1,
+                                              msg='%3.0f%% Syncing chunks cache. Processing archive %s',
+                                              msgid='cache.sync')
                 archive_ids_to_names = get_archive_ids_to_names(archive_ids)
-                for archive_id in archive_ids:
-                    archive_name = archive_ids_to_names.pop(archive_id)
-                    if self.progress:
-                        pi.show(info=[remove_surrogates(archive_name)])
+                for archive_id, archive_name in archive_ids_to_names.items():
+                    pi.show(info=[remove_surrogates(archive_name)])
                     if self.do_cache:
                         if archive_id in cached_ids:
                             archive_chunk_idx_path = mkpath(archive_id)
@@ -639,7 +637,7 @@ def create_master_idx(chunk_idx):
                             # above can remove *archive_id* from *cached_ids*.
                             logger.info('Fetching and building archive index for %s ...', archive_name)
                             archive_chunk_idx = ChunkIndex()
-                            fetch_and_build_idx(archive_id, decrypted_repository, self.key, archive_chunk_idx)
+                            fetch_and_build_idx(archive_id, decrypted_repository, archive_chunk_idx)
                         logger.info("Merging into master chunks index ...")
                         if chunk_idx is None:
                             # we just use the first archive's idx as starting point,
@ -651,9 +649,8 @@ def create_master_idx(chunk_idx):
else:
chunk_idx = chunk_idx or ChunkIndex(master_index_capacity)
logger.info('Fetching archive index for %s ...', archive_name)
fetch_and_build_idx(archive_id, decrypted_repository, self.key, chunk_idx)
if self.progress:
pi.finish()
fetch_and_build_idx(archive_id, decrypted_repository, chunk_idx)
pi.finish()
logger.info('Done.')
return chunk_idx
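
The idea behind the progress cleanup: ProgressIndicatorPercent emits through borg's logging/progress configuration, so whether anything is shown is the indicator's concern, and the `if self.progress:` guards at every call site become unnecessary. Below is a minimal sketch of that pattern, not borg's actual implementation: the ProgressIndicator class, sync_archives(), build_idx and the `enabled` flag are hypothetical stand-ins for illustration.

class ProgressIndicator:
    """Progress reporter that no-ops when disabled, so callers need no guards."""

    def __init__(self, total, msg, enabled=True):
        self.total = total
        self.msg = msg
        self.enabled = enabled
        self.count = 0

    def show(self, info):
        self.count += 1
        if self.enabled:  # the on/off decision lives here, not at the call site
            print(self.msg % (100.0 * self.count / self.total, *info))

    def finish(self):
        if self.enabled:
            print('done')

def sync_archives(archive_ids_to_names, build_idx, progress=False):
    # The indicator is created and driven unconditionally, mirroring the hunks above.
    pi = ProgressIndicator(total=len(archive_ids_to_names),
                           msg='%3.0f%% Syncing chunks cache. Processing archive %s',
                           enabled=progress)
    for archive_id, archive_name in archive_ids_to_names.items():
        pi.show(info=[archive_name])
        build_idx(archive_id)
    pi.finish()

Pushing the on/off decision into the indicator is what lets this commit delete four `if self.progress:` branches from the sync loop while keeping the output behavior unchanged.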