cache: no archive caches => work directly on master cache (no merges)

This commit is contained in:
Marian Beermann 2016-12-03 12:06:22 +01:00
parent e169510116
commit be18418b74
1 changed file with 21 additions and 17 deletions

View File

@ -418,8 +418,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
for id in ids: for id in ids:
os.unlink(mkpath(id)) os.unlink(mkpath(id))
def fetch_and_build_idx(archive_id, repository, key): def fetch_and_build_idx(archive_id, repository, key, chunk_idx):
chunk_idx = ChunkIndex()
cdata = repository.get(archive_id) cdata = repository.get(archive_id)
_, data = key.decrypt(archive_id, cdata) _, data = key.decrypt(archive_id, cdata)
chunk_idx.add(archive_id, 1, len(data), len(cdata)) chunk_idx.add(archive_id, 1, len(data), len(cdata))
@ -446,7 +445,6 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
os.unlink(fn_tmp) os.unlink(fn_tmp)
else: else:
os.rename(fn_tmp, fn) os.rename(fn_tmp, fn)
return chunk_idx
def lookup_name(archive_id): def lookup_name(archive_id):
for info in self.manifest.archives.list(): for info in self.manifest.archives.list():
@ -472,13 +470,15 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
archive_name = lookup_name(archive_id) archive_name = lookup_name(archive_id)
if self.progress: if self.progress:
pi.show(info=[remove_surrogates(archive_name)]) pi.show(info=[remove_surrogates(archive_name)])
if self.do_cache:
if archive_id in cached_ids: if archive_id in cached_ids:
archive_chunk_idx_path = mkpath(archive_id) archive_chunk_idx_path = mkpath(archive_id)
logger.info("Reading cached archive chunk index for %s ..." % archive_name) logger.info("Reading cached archive chunk index for %s ..." % archive_name)
archive_chunk_idx = ChunkIndex.read(archive_chunk_idx_path) archive_chunk_idx = ChunkIndex.read(archive_chunk_idx_path)
else: else:
logger.info('Fetching and building archive index for %s ...' % archive_name) logger.info('Fetching and building archive index for %s ...' % archive_name)
archive_chunk_idx = fetch_and_build_idx(archive_id, repository, self.key) archive_chunk_idx = ChunkIndex()
fetch_and_build_idx(archive_id, repository, self.key, archive_chunk_idx)
logger.info("Merging into master chunks index ...") logger.info("Merging into master chunks index ...")
if chunk_idx is None: if chunk_idx is None:
# we just use the first archive's idx as starting point, # we just use the first archive's idx as starting point,
@ -487,6 +487,10 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
chunk_idx = archive_chunk_idx chunk_idx = archive_chunk_idx
else: else:
chunk_idx.merge(archive_chunk_idx) chunk_idx.merge(archive_chunk_idx)
else:
chunk_idx = chunk_idx or ChunkIndex()
logger.info('Fetching archive index for %s ...' % archive_name)
fetch_and_build_idx(archive_id, repository, self.key, chunk_idx)
if self.progress: if self.progress:
pi.finish() pi.finish()
logger.info('Done.') logger.info('Done.')