borg/dedupestore/archiver.py

import os
import hashlib
import logging
import zlib
import argparse
import sys
import stat
from datetime import datetime

import msgpack

from chunkifier import chunkify
from cache import Cache, NS_ARCHIVES, NS_CHUNKS
from bandstore import BandStore
from helpers import location_validator, pretty_size
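
# Target chunk size in bytes passed to chunkify(); the chunker decides the
# actual cut points (the second argument at the call site, 30, is presumably
# the size of its rolling-checksum window -- see chunkifier for details).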
CHUNK_SIZE = 55001


class LevelFilter(logging.Filter):
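    """Logging filter that counts records per log level.

    Nothing is filtered out; the ERROR count is later used to derive the
    process exit code (see Archiver.exit_code_from_logger).
    """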

    def __init__(self, *args, **kwargs):
        logging.Filter.__init__(self, *args, **kwargs)
        self.count = {}

    def filter(self, record):
        self.count.setdefault(record.levelname, 0)
        self.count[record.levelname] += 1
        # Never drop a record; this filter only counts per-level totals.
        return True


class Archive(object):
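    """A single named backup archive.

    The item list and chunk table are msgpack-serialized, zlib-compressed
    and stored under the SHA-256 digest of the compressed blob (see save()).
    """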

    def __init__(self, store, cache, name=None):
        self.store = store
        self.cache = cache
        self.items = []
        self.chunks = []
        self.chunk_idx = {}
        if name:
            self.open(name)

    def open(self, name):
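        """Load and verify the named archive's metadata from the store."""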
        id = self.cache.archives[name]
        data = self.store.get(NS_ARCHIVES, id)
        if hashlib.sha256(data).digest() != id:
            raise Exception('Archive hash did not match')
        archive = msgpack.unpackb(zlib.decompress(data))
        self.items = archive['items']
        self.name = archive['name']
        self.chunks = archive['chunks']
        # When reading an existing archive, chunk_idx maps the archive-local
        # chunk index to the chunk id (the reverse of what create() needs).
        for i, chunk in enumerate(archive['chunks']):
            self.chunk_idx[i] = chunk[0]

    def save(self, name):
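        """Serialize the archive and store it under its SHA-256 digest."""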
        archive = {
            'name': name,
            'ts': datetime.utcnow().isoformat(),
            'items': self.items,
            'chunks': self.chunks
        }
        data = zlib.compress(msgpack.packb(archive))
        self.id = hashlib.sha256(data).digest()
        self.store.put(NS_ARCHIVES, self.id, data)
        self.store.commit()

    def add_chunk(self, id, size):
        # Reuse the existing index if the chunk is already referenced by
        # this archive; otherwise append it and record its index.
        try:
            return self.chunk_idx[id]
        except KeyError:
            idx = len(self.chunks)
            self.chunks.append((id, size))
            self.chunk_idx[id] = idx
            return idx

    def stats(self, cache):
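        """Compute original, compressed and unique-data sizes for this archive."""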
        total_osize = 0
        total_csize = 0
        total_usize = 0
        chunk_count = {}
        for item in self.items:
            if item['type'] == 'FILE':
                total_osize += item['size']
                for idx in item['chunks']:
                    id = self.chunk_idx[idx]
                    chunk_count.setdefault(id, 0)
                    chunk_count[id] += 1
        for id, c in chunk_count.items():
            count, size = cache.chunkmap[id]
            total_csize += size
            # A chunk counts as unique data when every reference to it
            # comes from this archive.
            if c == count:
                total_usize += size
        return dict(osize=total_osize, csize=total_csize, usize=total_usize)

    def list(self):
        for item in self.items:
            print item['path']

    def extract(self, dest=None):
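        """Restore the archive's contents under dest (default: current directory)."""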
        dest = dest or os.getcwdu()
        for item in self.items:
            # Paths were stored relative, so extraction cannot escape dest.
            assert item['path'][0] not in ('/', '\\', ':')
            path = os.path.join(dest, item['path'].decode('utf-8'))
            if item['type'] == 'DIRECTORY':
                logging.info(path)
                if not os.path.exists(path):
                    os.makedirs(path)
            elif item['type'] == 'SYMLINK':
                logging.info('%s => %s', path, item['source'])
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                os.symlink(item['source'], path)
            elif item['type'] == 'FILE':
                logging.info(path)
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                with open(path, 'wb') as fd:
                    for chunk in item['chunks']:
                        id = self.chunk_idx[chunk]
                        data = self.store.get(NS_CHUNKS, id)
                        # A stored chunk is a 32-byte SHA-256 digest followed
                        # by the zlib-compressed payload.
                        cid = data[:32]
                        data = data[32:]
                        if hashlib.sha256(data).digest() != cid:
                            raise Exception('Invalid chunk checksum')
                        data = zlib.decompress(data)
                        fd.write(data)

    def verify(self):
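        """Check the checksum of every file chunk without writing anything."""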
        for item in self.items:
            if item['type'] == 'FILE':
                item['path'] = item['path'].decode('utf-8')
                for chunk in item['chunks']:
                    id = self.chunk_idx[chunk]
                    data = self.store.get(NS_CHUNKS, id)
                    cid = data[:32]
                    data = data[32:]
                    if hashlib.sha256(data).digest() != cid:
                        logging.error('%s ... ERROR', item['path'])
                        break
                else:
                    logging.info('%s ... OK', item['path'])

    def delete(self, cache):
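        """Remove the archive and release the chunks it references."""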
        self.store.delete(NS_ARCHIVES, cache.archives[self.name])
        for item in self.items:
            if item['type'] == 'FILE':
                for c in item['chunks']:
                    id = self.chunk_idx[c]
                    # Decrement the cache's reference count for each chunk.
                    cache.chunk_decref(id)
        self.store.commit()
        del cache.archives[self.name]
        cache.save()

    def walk(self, path):
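        """Yield (path, stat result) for path and everything below it."""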
        st = os.lstat(path)
        # Yield the entry itself as well; otherwise create() would never
        # see directories and process_dir() would be unreachable.
        yield path, st
        if stat.S_ISDIR(st.st_mode):
            for f in os.listdir(path):
                for x in self.walk(os.path.join(path, f)):
                    yield x

    def create(self, name, paths, cache):
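        """Archive the given paths under name and register it in the cache."""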
        if name in cache.archives:
            raise NameError('Archive already exists')
        for path in paths:
            for path, st in self.walk(unicode(path)):
                if stat.S_ISDIR(st.st_mode):
                    self.process_dir(path, st)
                elif stat.S_ISLNK(st.st_mode):
                    self.process_link(path, st)
                elif stat.S_ISREG(st.st_mode):
                    self.process_file(path, st)
                else:
                    logging.error('Unknown file type: %s', path)
        self.save(name)
        cache.archives[name] = self.id
        cache.save()

    def process_dir(self, path, st):
        # Strip leading path separators so stored paths are always relative.
        path = path.lstrip('/\\:')
        logging.info(path)
        self.items.append({'type': 'DIRECTORY', 'path': path})

    def process_link(self, path, st):
        source = os.readlink(path)
        path = path.lstrip('/\\:')
        logging.info('%s => %s', path, source)
        self.items.append({'type': 'SYMLINK', 'path': path, 'source': source})

    def process_file(self, path, st):
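        """Chunk the file's contents and add it to the archive's item list."""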
        try:
            fd = open(path, 'rb')
        except IOError as e:
            logging.error(e)
            return
        with fd:
            path = path.lstrip('/\\:')
            logging.info(path)
            chunks = []
            size = 0
            for chunk in chunkify(fd, CHUNK_SIZE, 30):
                size += len(chunk)
                # cache.add_chunk() stores the chunk (if new) and returns
                # (id, size); add_chunk() maps that id to an archive index.
                chunks.append(self.add_chunk(*self.cache.add_chunk(chunk)))
            self.items.append({'type': 'FILE', 'path': path, 'chunks': chunks, 'size': size})


class Archiver(object):
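    """Command-line front-end mapping subcommands to archive operations."""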

    def open_store(self, location):
        store = BandStore(location.path)
        cache = Cache(store)
        return store, cache

    def exit_code_from_logger(self):
        # Exit code 1 if any ERROR was logged during the run, 0 otherwise.
        if not self.level_filter.count.get('ERROR'):
            return 0
        else:
            return 1

    def do_create(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache)
        archive.create(args.archive.archive, args.paths, cache)
        return self.exit_code_from_logger()

    def do_extract(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        archive.extract(args.dest)
        return self.exit_code_from_logger()

    def do_delete(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        archive.delete(cache)
        return self.exit_code_from_logger()

    def do_list(self, args):
        store, cache = self.open_store(args.src)
        if args.src.archive:
            archive = Archive(store, cache, args.src.archive)
            archive.list()
        else:
            for archive in sorted(cache.archives):
                print archive
        return self.exit_code_from_logger()

    def do_verify(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        archive.verify()
        return self.exit_code_from_logger()

    def do_info(self, args):
        store, cache = self.open_store(args.archive)
        archive = Archive(store, cache, args.archive.archive)
        stats = archive.stats(cache)
        print 'Original size:', pretty_size(stats['osize'])
        print 'Compressed size:', pretty_size(stats['csize'])
        print 'Unique data:', pretty_size(stats['usize'])
        return self.exit_code_from_logger()

    def run(self, args=None):
        parser = argparse.ArgumentParser(description='Dedupestore')
        parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                            default=False,
                            help='Verbose output')
        subparsers = parser.add_subparsers(title='Available subcommands')
        subparser = subparsers.add_parser('create')
        subparser.set_defaults(func=self.do_create)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to create')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='Paths to add to archive')
        subparser = subparsers.add_parser('extract')
        subparser.set_defaults(func=self.do_extract)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to extract')
        subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
                               help='Where to extract files')
        subparser = subparsers.add_parser('delete')
        subparser.set_defaults(func=self.do_delete)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to delete')
        subparser = subparsers.add_parser('list')
        subparser.set_defaults(func=self.do_list)
        subparser.add_argument('src', metavar='SRC', type=location_validator(),
                               help='Store/Archive to list contents of')
        subparser = subparsers.add_parser('verify')
        subparser.set_defaults(func=self.do_verify)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to verify integrity of')
        subparser = subparsers.add_parser('info')
        subparser.set_defaults(func=self.do_info)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='Archive to display information about')
        args = parser.parse_args(args)
        if args.verbose:
            logging.basicConfig(level=logging.INFO, format='%(message)s')
        else:
            logging.basicConfig(level=logging.WARNING, format='%(message)s')
        # Count log records per level so the exit code can reflect errors.
        self.level_filter = LevelFilter()
        logging.getLogger('').addFilter(self.level_filter)
        return args.func(args)


def main():
    archiver = Archiver()
    sys.exit(archiver.run())


if __name__ == '__main__':
    main()
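
# Example invocations (a sketch; the exact STORE/ARCHIVE location syntax is
# whatever helpers.location_validator accepts, which is not shown here):
#
#   archiver.py -v create <archive-location> /home/user/docs
#   archiver.py list <store-location>
#   archiver.py verify <archive-location>
#   archiver.py info <archive-location>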