
Merge pull request #6785 from ThomasWaldmann/new-info

simplified stats
Commit b14bf8110f by TW, 2022-06-23 23:01:52 +02:00 (committed by GitHub)
5 changed files with 36 additions and 53 deletions

src/borg/archive.py (View file)

@@ -84,10 +84,12 @@ def __add__(self, other):
         stats.nfiles_parts = self.nfiles_parts + other.nfiles_parts
         return stats

-    summary = "{label:15} {stats.osize_fmt:>20s} {stats.usize_fmt:>20s}"
-
     def __str__(self):
-        return self.summary.format(stats=self, label='This archive:')
+        return """\
+Number of files: {stats.nfiles}
+Original size: {stats.osize_fmt}
+Deduplicated size: {stats.usize_fmt}
+""".format(stats=self)

     def __repr__(self):
         return "<{cls} object at {hash:#x} ({self.osize}, {self.usize})>".format(
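Note: the new __str__ builds the whole report with a single str.format call, using dotted attribute lookup on one stats keyword argument. A minimal standalone sketch of the pattern (the class and its values here are illustrative, not borg's actual Statistics):

    class Stats:
        nfiles = 1
        osize_fmt = "20 B"
        usize_fmt = "20 B"

        def __str__(self):
            # {stats.nfiles} etc. resolve via attribute lookup on the single
            # keyword argument passed to str.format().
            return """\
    Number of files: {stats.nfiles}
    Original size: {stats.osize_fmt}
    Deduplicated size: {stats.usize_fmt}
    """.format(stats=self)

    print(Stats())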
@@ -538,7 +540,6 @@ def __str__(self):
 Time (start): {start}
 Time (end): {end}
 Duration: {0.duration}
-Number of files: {0.stats.nfiles}
 Utilization of max. archive size: {csize_max:.0%}
 '''.format(
             self,
@@ -2141,12 +2142,8 @@ def save(self, archive, target, comment=None, replace_original=True):
         if self.stats:
             target.start = _start
             target.end = datetime.utcnow()
-            log_multi(DASHES,
-                      str(target),
-                      DASHES,
-                      str(target.stats),
-                      str(self.cache),
-                      DASHES)
+            log_multi(str(target),
+                      str(target.stats))

     def matcher_add_tagged_dirs(self, archive):
         """Add excludes to the matcher created by exclude_cache and exclude_if_present."""

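For context: judging from the old templates in this diff, DASHES was the horizontal separator line and STATS_HEADER the "Original size / Deduplicated size" column banner of the old table layout; with both gone, the summary is just the archive description plus the stats block. log_multi logs each argument line by line; a sketch of how such a helper behaves, inferred from its usage in these hunks rather than copied from borg's helpers:

    import logging

    def log_multi(*msgs, level=logging.INFO, logger=logging.getLogger('borg.output.stats')):
        # Emit every line of every message as its own log record, so that
        # multi-line blocks like str(stats) pass cleanly through logging.
        for msg in msgs:
            for line in msg.splitlines():
                logger.log(level, line)

    logging.basicConfig(format='%(message)s', level=logging.INFO)
    log_multi("line one\nline two", "line three")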
src/borg/archiver.py (View file)

@@ -993,13 +993,9 @@ def create_inner(archive, cache, fso):
                         'archive': archive,
                     }))
                 else:
-                    log_multi(DASHES,
-                              str(archive),
-                              DASHES,
-                              STATS_HEADER,
+                    log_multi(str(archive),
                               str(archive.stats),
-                              str(cache),
-                              DASHES, logger=logging.getLogger('borg.output.stats'))
+                              logger=logging.getLogger('borg.output.stats'))

         self.output_filter = args.output_filter
         self.output_list = args.output_list
@@ -1643,11 +1639,8 @@ def do_delete(self, args, repository):
                     repository.commit(compact=False, save_space=args.save_space)
                     cache.commit()
             if args.stats:
-                log_multi(DASHES,
-                          STATS_HEADER,
-                          stats.summary.format(label='Deleted data:', stats=stats),
-                          str(cache),
-                          DASHES, logger=logging.getLogger('borg.output.stats'))
+                log_multi(str(stats),
+                          logger=logging.getLogger('borg.output.stats'))

         return self.exit_code
@@ -1767,8 +1760,6 @@ def do_rinfo(self, args, repository, manifest, key, cache):
                 id=bin_to_hex(repository.id),
                 location=repository._location.canonical_path(),
                 **info))
-            print(DASHES)
-            print(STATS_HEADER)
             print(str(cache))
         return self.exit_code
@@ -1801,14 +1792,12 @@ def format_cmdline(cmdline):
             Time (start): {start}
             Time (end): {end}
             Duration: {duration}
-            Number of files: {stats[nfiles]}
             Command line: {command_line}
             Utilization of maximum supported archive size: {limits[max_archive_size]:.0%}
-            ------------------------------------------------------------------------------
-                                 Original size    Deduplicated size
-            This archive: {stats[original_size]:>20s} {stats[deduplicated_size]:>20s}
-            {cache}
-            """).strip().format(cache=cache, **info))
+            Number of files: {stats[nfiles]}
+            Original size: {stats[original_size]}
+            Deduplicated size: {stats[deduplicated_size]}
+            """).strip().format(**info))
             if self.exit_code:
                 break
             if not args.json and len(archive_names) - i:
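The info template uses str.format's item access ({stats[nfiles]} indexes into the stats dict), so a single **info expansion fills every field once textwrap.dedent has stripped the indentation. A tiny runnable sketch of that combination, with made-up values standing in for the real info dict:

    import textwrap

    # Invented values; borg builds this dict from the archive metadata.
    info = {
        'stats': {'nfiles': 1, 'original_size': '20 B', 'deduplicated_size': '20 B'},
    }
    print(textwrap.dedent("""
        Number of files: {stats[nfiles]}
        Original size: {stats[original_size]}
        Deduplicated size: {stats[deduplicated_size]}
        """).strip().format(**info))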
@@ -1899,11 +1888,8 @@ def do_prune(self, args, repository, manifest, key):
                     repository.commit(compact=False, save_space=args.save_space)
                     cache.commit()
             if args.stats:
-                log_multi(DASHES,
-                          STATS_HEADER,
-                          stats.summary.format(label='Deleted data:', stats=stats),
-                          str(cache),
-                          DASHES, logger=logging.getLogger('borg.output.stats'))
+                log_multi(str(stats),
+                          logger=logging.getLogger('borg.output.stats'))

         return self.exit_code

     @with_repository(fake=('tam', 'disable_tam'), invert_fake=True, manifest=False, exclusive=True)
@@ -2066,13 +2052,9 @@ def _import_tar(self, args, repository, manifest, key, cache, tarstream):
                     'archive': archive,
                 }))
             else:
-                log_multi(DASHES,
-                          str(archive),
-                          DASHES,
-                          STATS_HEADER,
+                log_multi(str(archive),
                           str(archive.stats),
-                          str(archive.cache),
-                          DASHES, logger=logging.getLogger('borg.output.stats'))
+                          logger=logging.getLogger('borg.output.stats'))

     @with_repository(manifest=False, exclusive=True)
     def do_with_lock(self, args, repository):

src/borg/cache.py (View file)

@@ -405,10 +405,11 @@ def adhoc():

 class CacheStatsMixin:
     str_format = """\
-All archives:   {0.total_size:>20s} {0.unique_size:>20s}
-
-                       Unique chunks         Total chunks
-Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
+Original size: {0.total_size}
+Deduplicated size: {0.unique_size}
+Unique chunks: {0.total_unique_chunks}
+Total chunks: {0.total_chunks}
+"""

     def __init__(self, iec=False):
         self.iec = iec
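CacheStatsMixin's template uses the positional form, {0.total_size}, i.e. attribute access on the first argument passed to format(). A quick illustration with a stand-in object (values invented; borg derives the real ones from the cache's chunk index):

    class FakeCache:
        # Invented values for demonstration only.
        total_size = "1.05 kB"
        unique_size = "20 B"
        total_unique_chunks = 1
        total_chunks = 2

    str_format = """\
    Original size: {0.total_size}
    Deduplicated size: {0.unique_size}
    Unique chunks: {0.total_unique_chunks}
    Total chunks: {0.total_chunks}
    """
    print(str_format.format(FakeCache()), end="")

Swapping the aligned two-column table for key: value lines keeps the output stable regardless of value widths and makes single fields easy to grep for.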

src/borg/testsuite/archive.py (View file)

@@ -20,6 +20,7 @@
 def stats():
     stats = Statistics()
     stats.update(20, unique=True)
+    stats.nfiles = 1
     return stats
@@ -35,26 +36,29 @@ def tests_stats_progress(stats, monkeypatch, columns=80):
     monkeypatch.setenv('COLUMNS', str(columns))
     out = StringIO()
     stats.show_progress(stream=out)
-    s = '20 B O 20 B U 0 N '
+    s = '20 B O 20 B U 1 N '
     buf = ' ' * (columns - len(s))
     assert out.getvalue() == s + buf + "\r"

     out = StringIO()
     stats.update(10 ** 3, unique=False)
     stats.show_progress(item=Item(path='foo'), final=False, stream=out)
-    s = '1.02 kB O 20 B U 0 N foo'
+    s = '1.02 kB O 20 B U 1 N foo'
     buf = ' ' * (columns - len(s))
     assert out.getvalue() == s + buf + "\r"

     out = StringIO()
     stats.show_progress(item=Item(path='foo'*40), final=False, stream=out)
-    s = '1.02 kB O 20 B U 0 N foofoofoofoofoofoofoofoofo...foofoofoofoofoofoofoofoofoofoo'
+    s = '1.02 kB O 20 B U 1 N foofoofoofoofoofoofoofoofo...foofoofoofoofoofoofoofoofoofoo'
     buf = ' ' * (columns - len(s))
     assert out.getvalue() == s + buf + "\r"


 def test_stats_format(stats):
     assert str(stats) == """\
-This archive:                       20 B                 20 B"""
+Number of files: 1
+Original size: 20 B
+Deduplicated size: 20 B
+"""
     s = f"{stats.osize_fmt}"
     assert s == "20 B"
@@ -72,7 +76,7 @@ def test_stats_progress_json(stats):
     assert result['finished'] is False
     assert result['path'] == 'foo'
     assert result['original_size'] == 20
-    assert result['nfiles'] == 0  # this counter gets updated elsewhere
+    assert result['nfiles'] == 1
     out = StringIO()
     stats.show_progress(stream=out, final=True)
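The progress line these tests assert is fixed-width: per the fixture, 'O' is the original size, 'U' the deduplicated (unique) size, and 'N' the file count; the line is padded out to the terminal width and ends in '\r' so the next update overwrites it in place. An illustrative reconstruction of just that padding logic (not borg's actual show_progress):

    import shutil

    def render_progress(osize_fmt, usize_fmt, nfiles, path='', columns=None):
        # Default to the real terminal width, as the COLUMNS monkeypatch
        # in the test suggests borg does.
        columns = columns or shutil.get_terminal_size().columns
        s = f'{osize_fmt} O {usize_fmt} U {nfiles} N {path}'
        # Pad with spaces to the full width, then emit a carriage return so
        # the next call draws over this line in place.
        return s + ' ' * (columns - len(s)) + '\r'

    assert render_progress('20 B', '20 B', 1, columns=80).startswith('20 B O 20 B U 1 N ')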

src/borg/testsuite/archiver.py (View file)

@@ -397,7 +397,6 @@ def test_basic_functionality(self):
         self.cmd(f'--repo={self.repository_location}', 'create', '--exclude-nodump', 'test', 'input')
         output = self.cmd(f'--repo={self.repository_location}', 'create', '--exclude-nodump', '--stats', 'test.2', 'input')
         self.assert_in('Archive name: test.2', output)
-        self.assert_in('This archive: ', output)
         with changedir('output'):
             self.cmd(f'--repo={self.repository_location}', 'extract', 'test')
         list_output = self.cmd(f'--repo={self.repository_location}', 'rlist', '--short')
@@ -1525,7 +1524,7 @@ def test_info(self):
         self.cmd(f'--repo={self.repository_location}', 'rcreate', '--encryption=repokey')
         self.cmd(f'--repo={self.repository_location}', 'create', 'test', 'input')
         info_repo = self.cmd(f'--repo={self.repository_location}', 'rinfo')
-        assert 'All archives:' in info_repo
+        assert 'Original size:' in info_repo
         info_archive = self.cmd(f'--repo={self.repository_location}', 'info', '-a', 'test')
         assert 'Archive name: test\n' in info_archive
         info_archive = self.cmd(f'--repo={self.repository_location}', 'info', '--first', '1')
@@ -1604,7 +1603,7 @@ def test_delete(self):
         self.cmd(f'--repo={self.repository_location}', 'delete', '-a', 'test')
         self.cmd(f'--repo={self.repository_location}', 'extract', 'test.2', '--dry-run')
         output = self.cmd(f'--repo={self.repository_location}', 'delete', '-a', 'test.2', '--stats')
-        self.assert_in('Deleted data:', output)
+        self.assert_in('Original size: -', output)  # negative size == deleted data
         # Make sure all data except the manifest has been deleted
         with Repository(self.repository_path) as repository:
             self.assert_equal(len(repository), 1)
@@ -3514,7 +3513,7 @@ def test_can_read_repo_even_if_nonce_is_deleted(self):
         # The repo should still be readable
         repo_info = self.cmd(f'--repo={self.repository_location}', 'rinfo')
-        assert 'All archives:' in repo_info
+        assert 'Original size:' in repo_info
         repo_list = self.cmd(f'--repo={self.repository_location}', 'rlist')
         assert 'test' in repo_list
         # The archive should still be readable