Mirror of https://github.com/borgbackup/borg.git
Add dsize and dcsize keys
These keys show the deduplicated size and the deduplicated compressed size of each file in the archive.
parent f043b966da
commit 5aa74abedf

1 changed file with 13 additions and 1 deletion
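The new keys plug into the existing --format machinery of borg list, so they can be requested per file when listing an archive. A minimal usage sketch (the repository path and archive name are hypothetical, and the {NL} newline placeholder is assumed to be available):

# show deduplicated size, deduplicated compressed size and path for each file
borg list --format '{dsize} {dcsize} {path}{NL}' /path/to/repo::my-archive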
@@ -1425,12 +1425,14 @@ class ItemFormatter(BaseFormatter):
         'source': 'link target for links (identical to linktarget)',
         'extra': 'prepends {source} with " -> " for soft links and " link to " for hard links',
         'csize': 'compressed size',
+        'dsize': 'deduplicated size',
+        'dcsize': 'deduplicated compressed size',
         'num_chunks': 'number of chunks in this file',
         'unique_chunks': 'number of unique chunks in this file',
     }
     KEY_GROUPS = (
         ('type', 'mode', 'uid', 'gid', 'user', 'group', 'path', 'bpath', 'source', 'linktarget', 'flags'),
-        ('size', 'csize', 'num_chunks', 'unique_chunks'),
+        ('size', 'csize', 'dsize', 'dcsize', 'num_chunks', 'unique_chunks'),
         ('mtime', 'ctime', 'atime', 'isomtime', 'isoctime', 'isoatime'),
         tuple(sorted(hashlib.algorithms_guaranteed)),
         ('archiveid', 'archivename', 'extra'),
@@ -1479,6 +1481,8 @@ def __init__(self, archive, format):
         self.call_keys = {
             'size': self.calculate_size,
             'csize': self.calculate_csize,
+            'dsize': self.calculate_dsize,
+            'dcsize': self.calculate_dcsize,
             'num_chunks': self.calculate_num_chunks,
             'unique_chunks': self.calculate_unique_chunks,
             'isomtime': partial(self.format_time, 'mtime'),
@@ -1540,6 +1544,14 @@ def calculate_size(self, item):
     def calculate_csize(self, item):
         return sum(c.csize for c in item.get('chunks', []))
 
+    def calculate_dsize(self, item):
+        chunk_index = self.archive.cache.chunks
+        return sum(c.size for c in item.get('chunks', []) if chunk_index[c.id].refcount == 1)
+
+    def calculate_dcsize(self, item):
+        chunk_index = self.archive.cache.chunks
+        return sum(c.csize for c in item.get('chunks', []) if chunk_index[c.id].refcount == 1)
+
     def hash_item(self, hash_function, item):
         if 'chunks' not in item:
             return ""
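To make the refcount test in calculate_dsize/calculate_dcsize concrete: a chunk only contributes to the deduplicated totals when the repository-wide chunk index references it exactly once, i.e. no other file or archive shares it. A self-contained Python sketch with toy stand-ins for the chunk list and chunk index (not borg's actual classes):

from collections import namedtuple

# toy stand-ins for a file's chunk list entries and the repo chunk index entries
Chunk = namedtuple('Chunk', 'id size csize')
IndexEntry = namedtuple('IndexEntry', 'refcount size csize')

def deduplicated_sizes(chunks, chunk_index):
    """Return (dsize, dcsize): sizes of chunks referenced exactly once."""
    dsize = sum(c.size for c in chunks if chunk_index[c.id].refcount == 1)
    dcsize = sum(c.csize for c in chunks if chunk_index[c.id].refcount == 1)
    return dsize, dcsize

# chunk 'a' is shared with another file (refcount 2), chunk 'b' is unique
chunks = [Chunk('a', 100, 60), Chunk('b', 50, 30)]
chunk_index = {'a': IndexEntry(2, 100, 60), 'b': IndexEntry(1, 50, 30)}
print(deduplicated_sizes(chunks, chunk_index))  # -> (50, 30)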