Add cache.sync progress display

Abogical 2016-11-26 22:15:59 +02:00
parent be6341b956
commit f3ce6be30b
2 changed files with 16 additions and 5 deletions
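
The change threads a new progress keyword from the command line (args.progress) into Cache, and Cache.sync uses it to drive a percentage display while it rebuilds the chunks cache, updating once per archive. Below is a minimal sketch of that pattern, assuming an installed borg whose helpers behave as they are used in the diff; the archive list here is made up for illustration, and the real sync() derives it from the manifest and merges per-archive chunk indexes inside the loop:

    from borg.helpers import ProgressIndicatorPercent, remove_surrogates

    progress = True  # stands in for the new Cache(..., progress=...) keyword
    archive_names = ['docs-2016-11-24', 'docs-2016-11-25']  # hypothetical archive list

    if progress:
        pi = ProgressIndicatorPercent(total=len(archive_names), step=0.1,
                                      msg='%3.0f%% Syncing chunks cache. Processing archive %s')
    for name in archive_names:
        if progress:
            # one progress update per archive, as in the Cache.sync hunk below
            pi.show(info=[remove_surrogates(name)])
        # ... read or build the per-archive chunk index and merge it here ...
    if progress:
        pi.finish()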

@@ -95,7 +95,8 @@ def with_repository(fake=False, create=False, lock=True, exclusive=False, manife
                     kwargs['manifest'], kwargs['key'] = Manifest.load(repository)
                 if cache:
                     with Cache(repository, kwargs['key'], kwargs['manifest'],
-                               do_files=getattr(args, 'cache_files', False), lock_wait=self.lock_wait) as cache_:
+                               do_files=getattr(args, 'cache_files', False),
+                               progress=getattr(args, 'progress', False), lock_wait=self.lock_wait) as cache_:
                         return method(self, args, repository=repository, cache=cache_, **kwargs)
                 else:
                     return method(self, args, repository=repository, **kwargs)
@@ -341,7 +342,8 @@ class Archiver:
         dry_run = args.dry_run
         t0 = datetime.utcnow()
         if not dry_run:
-            with Cache(repository, key, manifest, do_files=args.cache_files, lock_wait=self.lock_wait) as cache:
+            with Cache(repository, key, manifest, do_files=args.cache_files, progress=args.progress,
+                       lock_wait=self.lock_wait) as cache:
                 archive = Archive(repository, key, manifest, args.location.archive, cache=cache,
                                   create=True, checkpoint_interval=args.checkpoint_interval,
                                   numeric_owner=args.numeric_owner, progress=args.progress,
@@ -794,7 +796,7 @@ class Archiver:
         if args.stats:
             log_multi(DASHES, STATS_HEADER, logger=stats_logger)
-        with Cache(repository, key, manifest, lock_wait=self.lock_wait) as cache:
+        with Cache(repository, key, manifest, progress=args.progress, lock_wait=self.lock_wait) as cache:
             for i, archive_name in enumerate(archive_names, 1):
                 logger.info('Deleting {} ({}/{}):'.format(archive_name, i, len(archive_names)))
                 archive = Archive(repository, key, manifest, archive_name, cache=cache)

@@ -18,7 +18,8 @@ from .helpers import get_cache_dir
 from .helpers import decode_dict, int_to_bigint, bigint_to_int, bin_to_hex
 from .helpers import format_file_size
 from .helpers import yes
-from .helpers import ProgressIndicatorMessage
+from .helpers import remove_surrogates
+from .helpers import ProgressIndicatorPercent, ProgressIndicatorMessage
 from .item import Item, ArchiveItem
 from .key import PlaintextKey
 from .locking import Lock
@@ -62,7 +63,7 @@ class Cache:
             shutil.rmtree(path)

     def __init__(self, repository, key, manifest, path=None, sync=True, do_files=False, warn_if_unencrypted=True,
-                 lock_wait=None):
+                 progress=False, lock_wait=None):
         """
         :param do_files: use file metadata cache
         :param warn_if_unencrypted: print warning if accessing unknown unencrypted repository
@@ -76,6 +77,7 @@ class Cache:
         self.repository = repository
         self.key = key
         self.manifest = manifest
+        self.progress = progress
         self.path = path or os.path.join(get_cache_dir(), repository.id_str)
         self.hostname_is_unique = yes(env_var_override='BORG_HOSTNAME_IS_UNIQUE', prompt=False, env_msg=None)
         if self.hostname_is_unique:
@@ -379,8 +381,13 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
             cleanup_outdated(cached_ids - archive_ids)
             if archive_ids:
                 chunk_idx = None
+                if self.progress:
+                    pi = ProgressIndicatorPercent(total=len(archive_ids), step=0.1,
+                                                  msg='%3.0f%% Syncing chunks cache. Processing archive %s')
                 for archive_id in archive_ids:
                     archive_name = lookup_name(archive_id)
+                    if self.progress:
+                        pi.show(info=[remove_surrogates(archive_name)])
                     if archive_id in cached_ids:
                         archive_chunk_idx_path = mkpath(archive_id)
                         logger.info("Reading cached archive chunk index for %s ..." % archive_name)
@@ -396,6 +403,8 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
                         chunk_idx = archive_chunk_idx
                     else:
                         chunk_idx.merge(archive_chunk_idx)
+                if self.progress:
+                    pi.finish()
             logger.info('Done.')
             return chunk_idx