borg/dedupestore/archiver.py

import os
import hashlib
import zlib
import cPickle
from optparse import OptionParser

from chunkifier import chunkify
from cache import Cache, NS_ARCHIVES, NS_CHUNKS
#from sqlitestore import SqliteStore
from bandstore import BandStore

# Chunk size passed to chunkify() when splitting files into chunks
CHUNK_SIZE = 55001

class Archive(object):

    def __init__(self, store, name=None):
        self.store = store
        self.items = []
        self.chunks = []
        self.chunk_idx = {}
        if name:
            self.open(name)

    def add_chunk(self, id, csize, osize):
        # Return the archive-local index for a chunk id, reusing the
        # existing index when the chunk has already been added
        try:
            return self.chunk_idx[id]
        except KeyError:
            idx = len(self.chunks)
            self.chunks.append((id, csize, osize))
            self.chunk_idx[id] = idx
            return idx

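    # Minimal sketch of the deduplication bookkeeping above (chunk ids
    # and sizes are illustrative; real ids are SHA-1 digests):
    #
    #   archive = Archive(store)
    #   archive.add_chunk('id-a', 10, 20)   # -> 0, new chunk
    #   archive.add_chunk('id-b', 11, 22)   # -> 1, new chunk
    #   archive.add_chunk('id-a', 10, 20)   # -> 0 again, deduplicated
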
    def open(self, name):
        archive = cPickle.loads(zlib.decompress(self.store.get(NS_ARCHIVES, name)))
        self.items = archive['items']
        self.name = archive['name']
        self.chunks = archive['chunks']
        # When reading an archive back, chunk_idx maps index -> chunk id,
        # the reverse of the id -> index mapping used while creating one
        for i, (id, csize, osize) in enumerate(archive['chunks']):
            self.chunk_idx[i] = id

    def save(self, name):
        archive = {'name': name, 'items': self.items, 'chunks': self.chunks}
        self.store.put(NS_ARCHIVES, name, zlib.compress(cPickle.dumps(archive)))
        self.store.commit()

    def stats(self, cache):
        # osize: original data size, csize: compressed size in the store,
        # usize: compressed size of the chunks unique to this archive
        total_osize = 0
        total_csize = 0
        total_usize = 0
        chunk_count = {}
        for item in self.items:
            if item['type'] == 'FILE':
                total_osize += item['size']
                for idx in item['chunks']:
                    id = self.chunk_idx[idx]
                    chunk_count.setdefault(id, 0)
                    chunk_count[id] += 1
        for id, c in chunk_count.items():
            count, csize, osize = cache.chunkmap[id]
            total_csize += csize
            if c == count:
                total_usize += csize
        return dict(osize=total_osize, csize=total_csize, usize=total_usize)

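    # Worked example for stats() (figures are illustrative): a chunk whose
    # cache refcount is 3, with all 3 references coming from this archive
    # (c == count), counts toward both csize and usize, since deleting the
    # archive would actually free that space. A chunk also referenced by
    # another archive counts toward csize only.
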
    def list(self):
        for item in self.items:
            print item['path']

    def extract(self):
        for item in self.items:
            # Refuse absolute or drive-qualified paths
            assert item['path'][0] not in ('/', '\\', ':')
            print item['path']
            if item['type'] == 'DIR':
                if not os.path.exists(item['path']):
                    os.makedirs(item['path'])
            if item['type'] == 'FILE':
                path = item['path']
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                with open(item['path'], 'wb') as fd:
                    for chunk in item['chunks']:
                        id = self.chunk_idx[chunk]
                        data = self.store.get(NS_CHUNKS, id)
                        # Chunk ids are the SHA-1 digests of the stored data
                        if hashlib.sha1(data).digest() != id:
                            raise Exception('Invalid chunk checksum')
                        fd.write(zlib.decompress(data))

    def verify(self):
        for item in self.items:
            if item['type'] == 'FILE':
                print item['path'], '...',
                for chunk in item['chunks']:
                    id = self.chunk_idx[chunk]
                    data = self.store.get(NS_CHUNKS, id)
                    if hashlib.sha1(data).digest() != id:
                        print 'ERROR'
                        break
                else:
                    print 'OK'

    def delete(self, cache):
        self.store.delete(NS_ARCHIVES, self.name)
        for item in self.items:
            if item['type'] == 'FILE':
                for c in item['chunks']:
                    id = self.chunk_idx[c]
                    cache.chunk_decref(id)
        self.store.commit()
        cache.archives.remove(self.name)
        cache.save()

    def create(self, name, paths, cache):
        for path in paths:
            for root, dirs, files in os.walk(path):
                for d in dirs:
                    p = os.path.join(root, d)
                    print p
                    self.items.append(self.process_dir(p, cache))
                for f in files:
                    p = os.path.join(root, f)
                    print p
                    self.items.append(self.process_file(p, cache))
        self.save(name)
        cache.archives.append(name)
        cache.save()

    def process_dir(self, path, cache):
        # Store paths relative to the filesystem root
        path = path.lstrip('/\\:')
        return {'type': 'DIR', 'path': path}

    def process_file(self, path, cache):
        with open(path, 'rb') as fd:
            path = path.lstrip('/\\:')
            chunks = []
            size = 0
            for chunk in chunkify(fd, CHUNK_SIZE, 30):
                size += len(chunk)
                chunks.append(self.add_chunk(*cache.add_chunk(chunk)))
        return {'type': 'FILE', 'path': path, 'chunks': chunks, 'size': size}
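
    # Note: this assumes chunkify(fd, CHUNK_SIZE, 30) yields the file's
    # data as byte strings, and that cache.add_chunk(chunk) returns an
    # (id, csize, osize) triple matching Archive.add_chunk's signature.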

class Archiver(object):

    def pretty_size(self, v):
        if v > 1024 * 1024 * 1024:
            return '%.2f GB' % (v / 1024. / 1024. / 1024.)
        elif v > 1024 * 1024:
            return '%.2f MB' % (v / 1024. / 1024.)
        elif v > 1024:
            return '%.2f kB' % (v / 1024.)
        else:
            return str(v)

    def create_archive(self, name, paths):
        archive = Archive(self.store)
        archive.create(name, paths, self.cache)

    def delete_archive(self, archive_name):
        archive = Archive(self.store, archive_name)
        archive.delete(self.cache)

    def list_archives(self):
        print 'Archives:'
        for archive in sorted(self.cache.archives):
            print archive

    def list_archive(self, archive_name):
        archive = Archive(self.store, archive_name)
        archive.list()

    def verify_archive(self, archive_name):
        archive = Archive(self.store, archive_name)
        archive.verify()

    def extract_archive(self, archive_name):
        archive = Archive(self.store, archive_name)
        archive.extract()

    def archive_stats(self, archive_name):
        archive = Archive(self.store, archive_name)
        stats = archive.stats(self.cache)
        print 'Original size:', self.pretty_size(stats['osize'])
        print 'Compressed size:', self.pretty_size(stats['csize'])
        print 'Unique data:', self.pretty_size(stats['usize'])

    def run(self):
        parser = OptionParser()
        parser.add_option("-s", "--store", dest="store",
                          help="path to dedupe store", metavar="STORE")
        parser.add_option("-c", "--create", dest="create_archive",
                          help="create ARCHIVE", metavar="ARCHIVE")
        parser.add_option("-d", "--delete", dest="delete_archive",
                          help="delete ARCHIVE", metavar="ARCHIVE")
        parser.add_option("-l", "--list-archives", dest="list_archives",
                          action="store_true", default=False,
                          help="list archives")
        parser.add_option("-V", "--verify", dest="verify_archive",
                          help="verify ARCHIVE consistency", metavar="ARCHIVE")
        parser.add_option("-e", "--extract", dest="extract_archive",
                          help="extract ARCHIVE", metavar="ARCHIVE")
        parser.add_option("-L", "--list-archive", dest="list_archive",
                          help="list ARCHIVE contents", metavar="ARCHIVE")
        parser.add_option("-S", "--stats", dest="archive_stats",
                          help="display archive statistics", metavar="ARCHIVE")
        (options, args) = parser.parse_args()
        if options.store:
            self.store = BandStore(options.store)
        else:
            parser.error('No store path specified')
        self.cache = Cache(self.store)
        if options.list_archives:
            self.list_archives()
        elif options.list_archive:
            self.list_archive(options.list_archive)
        elif options.verify_archive:
            self.verify_archive(options.verify_archive)
        elif options.extract_archive:
            self.extract_archive(options.extract_archive)
        elif options.delete_archive:
            self.delete_archive(options.delete_archive)
        elif options.create_archive:
            self.create_archive(options.create_archive, args)
        elif options.archive_stats:
            self.archive_stats(options.archive_stats)

def main():
    archiver = Archiver()
    archiver.run()

if __name__ == '__main__':
    main()
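
# Example session (store path and archive name are illustrative):
#
#   python archiver.py -s /tmp/demo.store -c docs ~/Documents
#   python archiver.py -s /tmp/demo.store -l
#   python archiver.py -s /tmp/demo.store -S docs
#   python archiver.py -s /tmp/demo.store -e docs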