Merge pull request #2197 from enkore/f/info-limits

info: show utilization of maximum archive size
This commit is contained in:
enkore 2017-02-23 09:18:04 +01:00 committed by GitHub
commit e3a8ccdf3b
4 changed files with 14 additions and 4 deletions

View File

@@ -350,10 +350,13 @@ Archive fingerprint: {0.fpr}
Time (start): {start} Time (start): {start}
Time (end): {end} Time (end): {end}
Duration: {0.duration} Duration: {0.duration}
Number of files: {0.stats.nfiles}'''.format( Number of files: {0.stats.nfiles}
Utilization of max. archive size: {csize_max:.0%}
'''.format(
self, self,
start=format_time(to_localtime(self.start.replace(tzinfo=timezone.utc))), start=format_time(to_localtime(self.start.replace(tzinfo=timezone.utc))),
end=format_time(to_localtime(self.end.replace(tzinfo=timezone.utc)))) end=format_time(to_localtime(self.end.replace(tzinfo=timezone.utc))),
csize_max=self.cache.chunks[self.id].csize / MAX_DATA_SIZE)
def __repr__(self): def __repr__(self):
return 'Archive(%r)' % self.name return 'Archive(%r)' % self.name

View File

@@ -987,6 +987,7 @@ class Archiver:
print('Duration: %s' % archive.duration_from_meta) print('Duration: %s' % archive.duration_from_meta)
print('Number of files: %d' % stats.nfiles) print('Number of files: %d' % stats.nfiles)
print('Command line: %s' % format_cmdline(archive.metadata.cmdline)) print('Command line: %s' % format_cmdline(archive.metadata.cmdline))
print('Utilization of max. archive size: %d%%' % (100 * cache.chunks[archive.id].csize / MAX_DATA_SIZE))
print(DASHES) print(DASHES)
print(STATS_HEADER) print(STATS_HEADER)
print(str(stats)) print(str(stats))

View File

@@ -27,6 +27,10 @@ CACHE_TAG_CONTENTS = b'Signature: 8a477f597d28d172789f06886806bc55'
# bytes. That's why it's 500 MiB instead of 512 MiB. # bytes. That's why it's 500 MiB instead of 512 MiB.
DEFAULT_MAX_SEGMENT_SIZE = 500 * 1024 * 1024 DEFAULT_MAX_SEGMENT_SIZE = 500 * 1024 * 1024
# 20 MiB minus 41 bytes for a Repository header (because the "size" field in the Repository includes
# the header, and the total size was set to 20 MiB).
MAX_DATA_SIZE = 20971479
# A few hundred files per directory to go easy on filesystems which don't like too many files per dir (NTFS) # A few hundred files per directory to go easy on filesystems which don't like too many files per dir (NTFS)
DEFAULT_SEGMENTS_PER_DIR = 500 DEFAULT_SEGMENTS_PER_DIR = 500

View File

@@ -26,7 +26,6 @@ from .crc32 import crc32
logger = create_logger(__name__) logger = create_logger(__name__)
MAX_OBJECT_SIZE = 20 * 1024 * 1024
MAGIC = b'BORG_SEG' MAGIC = b'BORG_SEG'
MAGIC_LEN = len(MAGIC) MAGIC_LEN = len(MAGIC)
TAG_PUT = 0 TAG_PUT = 0
@@ -1204,4 +1203,7 @@ class LoggedIO:
return self.segment - 1 # close_segment() increments it return self.segment - 1 # close_segment() increments it
MAX_DATA_SIZE = MAX_OBJECT_SIZE - LoggedIO.put_header_fmt.size # MAX_OBJECT_SIZE = <20 MiB (MAX_DATA_SIZE) + 41 bytes for a Repository PUT header, which consists of
# a 1 byte tag ID, 4 byte CRC, 4 byte size and 32 bytes for the ID.
MAX_OBJECT_SIZE = MAX_DATA_SIZE + LoggedIO.put_header_fmt.size
assert MAX_OBJECT_SIZE == 20971520 == 20 * 1024 * 1024