mirror of
https://github.com/borgbackup/borg.git
synced 2025-02-22 22:22:27 +00:00
create: add --json option
This commit is contained in:
parent
cc26bdf810
commit
7cbade2f8c
2 changed files with 38 additions and 8 deletions
|
@ -68,6 +68,14 @@ def __repr__(self):
|
|||
return "<{cls} object at {hash:#x} ({self.osize}, {self.csize}, {self.usize})>".format(
|
||||
cls=type(self).__name__, hash=id(self), self=self)
|
||||
|
||||
def as_dict(self):
    """Return these statistics as a plain dict, suitable for JSON output.

    Keys mirror the human-readable stats: original, compressed and
    deduplicated sizes in bytes, plus the number of files.
    """
    stats = {}
    stats['original_size'] = self.osize
    stats['compressed_size'] = self.csize
    stats['deduplicated_size'] = self.usize
    stats['nfiles'] = self.nfiles
    return stats
|
||||
|
||||
@property
def osize_fmt(self):
    """Original (uncompressed) size as a human-readable string."""
    raw_size = self.osize
    return format_file_size(raw_size)
|
||||
|
@ -343,6 +351,19 @@ def duration(self):
|
|||
def duration_from_meta(self):
    """Archive duration derived from metadata timestamps, formatted for display."""
    elapsed = self.ts_end - self.ts
    return format_timedelta(elapsed)
|
||||
|
||||
def info(self):
    """Return a dict describing this archive, for machine-readable (JSON) output.

    Timestamps are converted from UTC to local time before formatting;
    ``duration`` is the wall-clock elapsed time in seconds.
    """
    # Convert the naive UTC timestamps to local time once, up front.
    local_start = to_localtime(self.start.replace(tzinfo=timezone.utc))
    local_end = to_localtime(self.end.replace(tzinfo=timezone.utc))
    elapsed = self.end - self.start
    # Fraction of the hard archive-metadata size limit already used.
    usage = self.cache.chunks[self.id].csize / MAX_DATA_SIZE
    return {
        'name': self.name,
        'id': self.fpr,
        'start': format_time(local_start),
        'end': format_time(local_end),
        'duration': elapsed.total_seconds(),
        'nfiles': self.stats.nfiles,
        'limits': {
            'max_archive_size': usage,
        },
    }
|
||||
|
||||
def __str__(self):
|
||||
return '''\
|
||||
Archive name: {0.name}
|
||||
|
|
|
@ -369,13 +369,20 @@ def create_inner(archive, cache):
|
|||
if args.progress:
|
||||
archive.stats.show_progress(final=True)
|
||||
if args.stats:
|
||||
log_multi(DASHES,
|
||||
str(archive),
|
||||
DASHES,
|
||||
STATS_HEADER,
|
||||
str(archive.stats),
|
||||
str(cache),
|
||||
DASHES, logger=logging.getLogger('borg.output.stats'))
|
||||
if args.json:
|
||||
print_as_json({
|
||||
'cache_stats': cache.stats(),
|
||||
'stats': archive.stats.as_dict(),
|
||||
'archive': archive.info(),
|
||||
})
|
||||
else:
|
||||
log_multi(DASHES,
|
||||
str(archive),
|
||||
DASHES,
|
||||
STATS_HEADER,
|
||||
str(archive.stats),
|
||||
str(cache),
|
||||
DASHES, logger=logging.getLogger('borg.output.stats'))
|
||||
|
||||
self.output_filter = args.output_filter
|
||||
self.output_list = args.output_list
|
||||
|
@ -1027,7 +1034,7 @@ def _info_repository(self, args, repository, key, cache):
|
|||
}
|
||||
|
||||
if args.json:
|
||||
info['cache-stats'] = cache.stats()
|
||||
info['cache_stats'] = cache.stats()
|
||||
print_as_json(info)
|
||||
else:
|
||||
print(textwrap.dedent("""
|
||||
|
@ -2174,6 +2181,8 @@ def process_epilog(epilog):
|
|||
help='output verbose list of items (files, dirs, ...)')
|
||||
subparser.add_argument('--filter', dest='output_filter', metavar='STATUSCHARS',
|
||||
help='only display items with the given status characters')
|
||||
subparser.add_argument('--json', action='store_true',
|
||||
help='output stats as JSON')
|
||||
|
||||
exclude_group = subparser.add_argument_group('Exclusion options')
|
||||
exclude_group.add_argument('-e', '--exclude', dest='patterns',
|
||||
|
|
Loading…
Reference in a new issue