Mirror of https://github.com/borgbackup/borg.git (synced 2024-12-25 09:19:31 +00:00)
Improved stats and delete performance on remote stores
parent b294ceba67
commit 6c77ce53d9
2 changed files with 42 additions and 37 deletions
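In short: Archive.stats() and Archive.delete() used to fetch every item-metadata chunk with a blocking self.store.get(NS_CHUNK, id), which on a remote store costs one network round trip per chunk. Both methods now queue their gets with a callback and callback_data and finish with self.store.flush_rpc(), so a remote store can batch the requests instead of paying each round trip separately; do_info is adjusted to the new dict returned by stats(). Below is a minimal sketch of that queue-and-flush pattern using hypothetical names (PipelinedStore, backend); it only illustrates the idea and is not the project's actual remote store implementation.

class PipelinedStore:
    """Toy illustration of callback-based gets plus flush_rpc()."""

    def __init__(self, backend):
        self.backend = backend      # anything with a blocking get(ns, id)
        self._pending = []          # queued (ns, id, callback, callback_data)

    def get(self, ns, id, callback=None, callback_data=None):
        if callback is None:
            return self.backend.get(ns, id)   # old, blocking style
        # New style: remember the request; nothing is sent or awaited yet.
        self._pending.append((ns, id, callback, callback_data))

    def flush_rpc(self):
        # A real remote store would send the whole batch before reading the
        # replies; here the queue is simply drained to show the contract:
        # every callback has run by the time flush_rpc() returns.
        for ns, id, callback, callback_data in self._pending:
            try:
                chunk, error = self.backend.get(ns, id), None
            except KeyError as exc:
                chunk, error = None, exc
            callback(chunk, error, callback_data)
        self._pending = []

The first hunk below rewrites stats() along these lines.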
@@ -102,40 +102,39 @@ def save(self, name, cache):
         self.store.commit()
         cache.commit()
 
-    def get_chunks(self):
-        for item in self.get_items():
-            try:
-                for chunk in item['chunks']:
-                    yield chunk
-            except KeyError:
-                pass
-
     def stats(self, cache):
         # This function is a bit evil since it abuses the cache to calculate
         # the stats. The cache transaction must be rolled back afterwards
-        unpacker = msgpack.Unpacker()
-        cache.begin_txn()
-        osize = zsize = usize = 0
-        for id, size, csize in self.metadata['items']:
-            osize += size
-            zsize += csize
-            unique = self.cache.seen_chunk(id) == 1
-            if unique:
-                usize += csize
-            data, items_hash = self.key.decrypt(self.store.get(NS_CHUNK, id))
+        def cb(chunk, error, (id, unique)):
+            assert not error
+            data, items_hash = self.key.decrypt(chunk)
             assert self.key.id_hash(data) == id
             unpacker.feed(data)
             for item in unpacker:
                 try:
                     for id, size, csize in item['chunks']:
-                        osize += size
-                        zsize += csize
-                        if unique and self.cache.seen_chunk(id) == 1:
-                            usize += csize
+                        count, _, _ = self.cache.chunks[id]
+                        stats['osize'] += size
+                        stats['csize'] += csize
+                        if unique and count == 1:
+                            stats['usize'] += csize
+                        self.cache.chunks[id] = count - 1, size, csize
                 except KeyError:
                     pass
+        unpacker = msgpack.Unpacker()
+        cache.begin_txn()
+        stats = {'osize': 0, 'csize': 0, 'usize': 0}
+        for id, size, csize in self.metadata['items']:
+            stats['osize'] += size
+            stats['csize'] += csize
+            unique = self.cache.seen_chunk(id) == 1
+            if unique:
+                stats['usize'] += csize
+            self.store.get(NS_CHUNK, id, callback=cb, callback_data=(id, unique))
+            self.cache.chunk_decref(id)
+        self.store.flush_rpc()
         cache.rollback()
-        return osize, zsize, usize
+        return stats
 
     def extract_item(self, item, dest=None, start_cb=None):
         dest = dest or os.getcwdu()
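The new stats() keeps its totals in a dict and, inside the callback, reads the reference counts it already holds in self.cache.chunks instead of calling seen_chunk() per chunk, decrementing them as it goes; that happens inside a cache transaction which cache.rollback() discards afterwards, which is what the "a bit evil" comment refers to. A simplified standalone stand-in for that accounting (not the project's code, and it ignores the per-item "unique" flag the real method also checks):

def accumulate_stats(chunk_refs, refcounts):
    """chunk_refs: iterable of (id, size, csize); refcounts: dict id -> count."""
    stats = {'osize': 0, 'csize': 0, 'usize': 0}
    for id, size, csize in chunk_refs:
        count = refcounts[id]
        stats['osize'] += size
        stats['csize'] += csize
        if count == 1:              # nothing else references this chunk
            stats['usize'] += csize
        refcounts[id] = count - 1   # consume one reference as we go, so a chunk
                                    # repeated in this archive is not counted as
                                    # unique more than once
    return stats

The next hunk gives delete() the same callback treatment.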
@@ -237,19 +236,25 @@ def verify_chunk(chunk, error, (id, i, last)):
             self.store.get(NS_CHUNK, id, callback=verify_chunk, callback_data=(id, i, i==n-1))
 
     def delete(self, cache):
+        def cb(chunk, error, id):
+            assert not error
+            data, items_hash = self.key.decrypt(chunk)
+            assert self.key.id_hash(data) == id
+            unpacker.feed(data)
+            for item in unpacker:
+                try:
+                    for chunk_id, size, csize in item['chunks']:
+                        self.cache.chunk_decref(chunk_id)
+                except KeyError:
+                    pass
+            self.cache.chunk_decref(id)
         unpacker = msgpack.Unpacker()
         for id, size, csize in self.metadata['items']:
             if self.cache.seen_chunk(id) == 1:
-                data, items_hash = self.key.decrypt(self.store.get(NS_CHUNK, id))
-                assert self.key.id_hash(data) == id
-                unpacker.feed(data)
-                for item in unpacker:
-                    try:
-                        for chunk_id, size, csize in item['chunks']:
-                            self.cache.chunk_decref(chunk_id)
-                    except KeyError:
-                        pass
-            self.cache.chunk_decref(id)
+                self.store.get(NS_CHUNK, id, callback=cb, callback_data=id)
+            else:
+                self.cache.chunk_decref(id)
+        self.store.flush_rpc()
         self.store.delete(NS_ARCHIVE_METADATA, self.id)
         self.store.commit()
         cache.commit()
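delete() follows the same shape: only item-metadata chunks that no other archive still references (seen_chunk(id) == 1) are fetched, via callback-based gets that are drained by self.store.flush_rpc() before the archive metadata is deleted and the store commits; every other reference is simply dropped with chunk_decref(). The chunk_decref()/seen_chunk() pair implies reference-counted chunks; a hedged sketch of what such a table looks like in general (the project's Cache may differ in detail):

class RefcountedChunks:
    """Toy chunk table: a chunk stays alive while its refcount is above zero."""

    def __init__(self):
        self.chunks = {}            # id -> (refcount, size, csize)

    def chunk_incref(self, id, size=0, csize=0):
        count, old_size, old_csize = self.chunks.get(id, (0, size, csize))
        self.chunks[id] = (count + 1, old_size, old_csize)

    def seen_chunk(self, id):
        return self.chunks.get(id, (0, 0, 0))[0]

    def chunk_decref(self, id):
        count, size, csize = self.chunks[id]
        if count <= 1:
            del self.chunks[id]     # last reference gone: chunk can be reclaimed
        else:
            self.chunks[id] = (count - 1, size, csize)

The remaining hunk is in the second changed file and adapts the info command to the new return value of stats().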
@@ -196,15 +196,15 @@ def do_info(self, args):
         key = Key(store)
         cache = Cache(store, key)
         archive = Archive(store, key, args.archive.archive, cache=cache)
-        osize, csize, usize = archive.stats(cache)
+        stats = archive.stats(cache)
         print 'Name:', archive.metadata['name']
         print 'Hostname:', archive.metadata['hostname']
         print 'Username:', archive.metadata['username']
         print 'Time:', archive.metadata['time']
         print 'Command line:', ' '.join(archive.metadata['cmdline'])
-        print 'Original size:', format_file_size(osize)
-        print 'Compressed size:', format_file_size(csize)
-        print 'Unique data:', format_file_size(usize)
+        print 'Original size:', format_file_size(stats['osize'])
+        print 'Compressed size:', format_file_size(stats['csize'])
+        print 'Unique data:', format_file_size(stats['usize'])
         return self.exit_code
 
     def run(self, args=None):
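do_info now indexes the returned dict rather than unpacking a tuple. For completeness, a small stand-in that turns such byte counts into readable output; it is illustrative only and not necessarily how the project's format_file_size formats sizes:

def human_size(num_bytes):
    # Walk up the units until the value fits; purely illustrative formatting.
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if num_bytes < 1024 or unit == 'TB':
            return '%.2f %s' % (num_bytes, unit)
        num_bytes /= 1024.0

stats = {'osize': 123456789, 'csize': 23456789, 'usize': 3456789}   # example values
print('Original size:', human_size(stats['osize']))      # 117.74 MB
print('Compressed size:', human_size(stats['csize']))    # 22.37 MB
print('Unique data:', human_size(stats['usize']))        # 3.30 MB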