info: --json for archives

commit 6180f5055c (parent 2ab5d0f213)
mirror of https://github.com/borgbackup/borg.git
2 changed files with 53 additions and 24 deletions
src/borg/archive.py

@@ -290,7 +290,8 @@ def __init__(self, repository, key, manifest, name, cache=None, create=False,
         self.end = end
         self.consider_part_files = consider_part_files
         self.pipeline = DownloadPipeline(self.repository, self.key)
-        if create:
+        self.create = create
+        if self.create:
             self.file_compression_logger = create_logger('borg.debug.file-compression')
             self.items_buffer = CacheChunkBuffer(self.cache, self.key, self.stats)
             self.chunker = Chunker(self.key.chunk_seed, *chunker_params)
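Previously `create` was consumed only as a local argument inside `__init__`, so no other method could tell a freshly written archive from one loaded for inspection; storing it as `self.create` is what lets the new `info()` below branch on the mode. A minimal sketch of the pattern (the names mirror the diff; the bodies are invented):

    class Archive:
        def __init__(self, create=False):
            self.create = create       # keep the mode around instead of only testing the argument
            if self.create:
                self.items_buffer = [] # stand-in for the write-side machinery set up here

        def info(self):
            # later methods can now branch on how the archive was opened
            return {'mode': 'create' if self.create else 'read'}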
@@ -352,17 +353,34 @@ def duration_from_meta(self):
         return format_timedelta(self.ts_end - self.ts)

     def info(self):
-        return {
+        if self.create:
+            stats = self.stats
+            start = self.start.replace(tzinfo=timezone.utc)
+            end = self.end.replace(tzinfo=timezone.utc)
+        else:
+            stats = self.calc_stats(self.cache)
+            start = self.ts
+            end = self.ts_end
+        info = {
             'name': self.name,
             'id': self.fpr,
-            'start': format_time(to_localtime(self.start.replace(tzinfo=timezone.utc))),
-            'end': format_time(to_localtime(self.end.replace(tzinfo=timezone.utc))),
-            'duration': (self.end - self.start).total_seconds(),
-            'nfiles': self.stats.nfiles,
+            'start': format_time(to_localtime(start)),
+            'end': format_time(to_localtime(end)),
+            'duration': (end - start).total_seconds(),
+            'stats': stats.as_dict(),
             'limits': {
                 'max_archive_size': self.cache.chunks[self.id].csize / MAX_DATA_SIZE,
             },
         }
+        if self.create:
+            info['command_line'] = sys.argv
+        else:
+            info.update({
+                'command_line': self.metadata.cmdline,
+                'hostname': self.metadata.hostname,
+                'username': self.metadata.username,
+            })
+        return info

     def __str__(self):
         return '''\
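Taken together, `info()` now returns one JSON-ready dict per archive. In create mode the `start`/`end` timestamps are naive UTC values, so `.replace(tzinfo=timezone.utc)` attaches the UTC zone without shifting the time, and `to_localtime()` then converts for display. An illustrative example of the resulting shape (every value below is invented, and the exact keys under 'stats' come from the stats object's as_dict(), which this diff does not show):

    # Illustrative shape only -- not output from a real run.
    archive_info = {
        'name': 'host-2017-02-27',
        'id': '1cafe3a8...',                       # archive fingerprint (self.fpr)
        'start': 'Mon, 2017-02-27 21:21:58',       # format_time(to_localtime(start))
        'end': 'Mon, 2017-02-27 21:22:01',
        'duration': 3.0,                           # (end - start).total_seconds()
        'stats': {'nfiles': 1234},                 # stats.as_dict(); replaces the old flat 'nfiles'
        'limits': {'max_archive_size': 0.0001},    # csize of the archive chunk / MAX_DATA_SIZE
        'command_line': ['borg', 'create', '...'], # sys.argv in create mode, metadata.cmdline otherwise
    }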
src/borg/archiver.py

@@ -390,7 +390,6 @@ def create_inner(archive, cache):
             print_as_json({
                 'repository': repository,
                 'cache': cache,
-                'stats': archive.stats.as_dict(),
                 'archive': archive,
             })
         else:
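The create-side JSON drops its separate top-level 'stats' key, presumably because the serialized 'archive' object now carries its own 'stats' via the new `info()` above. A hedged sketch of the resulting `borg create --json` shape (only the three top-level keys are from the diff; the nested values are invented):

    # Hypothetical output shape -- top-level keys per the diff, nested content invented.
    create_output = {
        'repository': {'location': '/path/to/repo'},
        'cache': {'stats': {'total_size': 0}},
        'archive': {'name': 'host-2017-02-27', 'stats': {'nfiles': 1234}},
    }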
@@ -1002,29 +1001,41 @@ def format_cmdline(cmdline):
         if not archive_names:
             return self.exit_code

+        output_data = []
+
         for i, archive_name in enumerate(archive_names, 1):
             archive = Archive(repository, key, manifest, archive_name, cache=cache,
                               consider_part_files=args.consider_part_files)
-            stats = archive.calc_stats(cache)
-            print('Archive name: %s' % archive.name)
-            print('Archive fingerprint: %s' % archive.fpr)
-            print('Comment: %s' % archive.metadata.get('comment', ''))
-            print('Hostname: %s' % archive.metadata.hostname)
-            print('Username: %s' % archive.metadata.username)
-            print('Time (start): %s' % format_time(to_localtime(archive.ts)))
-            print('Time (end): %s' % format_time(to_localtime(archive.ts_end)))
-            print('Duration: %s' % archive.duration_from_meta)
-            print('Number of files: %d' % stats.nfiles)
-            print('Command line: %s' % format_cmdline(archive.metadata.cmdline))
-            print('Utilization of max. archive size: %d%%' % (100 * cache.chunks[archive.id].csize / MAX_DATA_SIZE))
-            print(DASHES)
-            print(STATS_HEADER)
-            print(str(stats))
-            print(str(cache))
+            if args.json:
+                output_data.append(archive.info())
+            else:
+                stats = archive.calc_stats(cache)
+                print('Archive name: %s' % archive.name)
+                print('Archive fingerprint: %s' % archive.fpr)
+                print('Comment: %s' % archive.metadata.get('comment', ''))
+                print('Hostname: %s' % archive.metadata.hostname)
+                print('Username: %s' % archive.metadata.username)
+                print('Time (start): %s' % format_time(to_localtime(archive.ts)))
+                print('Time (end): %s' % format_time(to_localtime(archive.ts_end)))
+                print('Duration: %s' % archive.duration_from_meta)
+                print('Number of files: %d' % stats.nfiles)
+                print('Command line: %s' % format_cmdline(archive.metadata.cmdline))
+                print('Utilization of max. archive size: %d%%' % (100 * cache.chunks[archive.id].csize / MAX_DATA_SIZE))
+                print(DASHES)
+                print(STATS_HEADER)
+                print(str(stats))
+                print(str(cache))
             if self.exit_code:
                 break
-            if len(archive_names) - i:
+            if not args.json and len(archive_names) - i:
                 print()
+
+        if args.json:
+            print_as_json({
+                'repository': repository,
+                'cache': cache,
+                'archives': output_data,
+            })
         return self.exit_code

     def _info_repository(self, args, repository, key, cache):
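On the command line this surfaces as `borg info --json`, emitting one entry per queried archive under 'archives'. A minimal consumer sketch (the repository and archive names are hypothetical; the keys read from each entry are the ones `Archive.info()` builds above):

    import json
    import subprocess

    # Hypothetical repo/archive; '--json' is the flag this commit adds to `borg info`.
    proc = subprocess.run(
        ['borg', 'info', '--json', '/path/to/repo::my-archive'],
        stdout=subprocess.PIPE, check=True,
    )
    data = json.loads(proc.stdout)
    for archive in data['archives']:   # one dict per archive, built by Archive.info()
        print(archive['name'], archive['duration'], archive['stats'])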