borg/darc/archiver.py

293 lines
12 KiB
Python
Raw Normal View History

2010-10-15 18:35:49 +00:00
import argparse
2010-10-30 11:44:25 +00:00
from datetime import datetime
2010-10-25 18:22:20 +00:00
import os
2010-10-30 11:44:25 +00:00
import stat
2010-10-16 09:45:36 +00:00
import sys
2010-02-23 21:12:22 +00:00
2010-10-20 17:59:15 +00:00
from .archive import Archive
2010-10-26 19:25:25 +00:00
from .store import Store
2010-10-20 17:59:15 +00:00
from .cache import Cache
from .keychain import Keychain
2010-11-02 21:47:39 +00:00
from .helpers import location_validator, format_file_size, format_time,\
format_file_mode, IncludePattern, ExcludePattern, exclude_path
2010-11-15 21:18:47 +00:00
from .remote import StoreServer, RemoteStore
class Archiver(object):
    """Command-line front end for darc.

    Parses arguments, dispatches to the do_* subcommand handlers and
    tracks an overall process exit code.
    """

    def __init__(self):
        # Stays 0 until the first error is reported via print_error(),
        # which sets it to 1; handlers return it as the process status.
        self.exit_code = 0
def open_store(self, location, create=False):
if location.proto == 'ssh':
2010-11-15 21:18:47 +00:00
return RemoteStore(location, create=create)
else:
return Store(location.path, create=create)
2010-02-23 20:34:28 +00:00
2010-10-30 11:44:25 +00:00
def print_error(self, msg, *args):
msg = args and msg % args or msg
if hasattr(sys.stderr, 'encoding'):
msg = msg.encode(sys.stderr.encoding, 'ignore')
2010-10-30 11:44:25 +00:00
self.exit_code = 1
print >> sys.stderr, msg
def print_verbose(self, msg, *args, **kw):
if self.verbose:
msg = args and msg % args or msg
if hasattr(sys.stdout, 'encoding'):
msg = msg.encode(sys.stdout.encoding, 'ignore')
2010-10-30 11:44:25 +00:00
if kw.get('newline', True):
print msg
else:
print msg,
2010-10-26 19:48:43 +00:00
    def do_init(self, args):
        """Initialize a new, empty store at args.store.

        Returns the accumulated exit code (0 unless open_store reported
        an error through print_error).
        """
        self.open_store(args.store, create=True)
        return self.exit_code
    def do_serve(self, args):
        """Run the store-server side of a remote connection.

        NOTE(review): presumably StoreServer.serve() speaks the remote
        protocol over this process's stdio (the ssh remote end) --
        confirm against remote.py.
        """
        return StoreServer().serve()
def do_create(self, args):
store = self.open_store(args.archive)
keychain = Keychain(args.keychain)
2010-10-30 11:44:25 +00:00
try:
Archive(store, keychain, args.archive.archive)
2010-10-30 11:44:25 +00:00
except Archive.DoesNotExist:
pass
else:
self.print_error('Archive already exists')
return self.exit_code
archive = Archive(store, keychain)
cache = Cache(store, keychain)
# Add darc cache dir to inode_skip list
skip_inodes = []
try:
st = os.stat(Cache.cache_dir_path())
skip_inodes.append((st.st_ino, st.st_dev))
except IOError:
pass
# Add local store dir to inode_skip list
if not args.archive.host:
try:
st = os.stat(args.archive.path)
skip_inodes.append((st.st_ino, st.st_dev))
except IOError:
pass
2010-10-30 11:44:25 +00:00
for path in args.paths:
self._process(archive, cache, args.patterns, unicode(path))
2010-11-23 13:46:53 +00:00
archive.save(args.archive.archive, cache)
2010-10-30 11:44:25 +00:00
return self.exit_code
2010-02-20 21:28:46 +00:00
def _process(self, archive, cache, patterns, path):
if exclude_path(path, patterns):
return
try:
st = os.lstat(path)
except OSError, e:
self.print_error('%s: %s', path, e)
return
self.print_verbose(path)
if stat.S_ISDIR(st.st_mode):
archive.process_dir(path, st)
try:
entries = os.listdir(path)
except OSError, e:
self.print_error('%s: %s', path, e)
else:
for filename in entries:
self._process(archive, cache, patterns,
os.path.join(path, filename))
elif stat.S_ISLNK(st.st_mode):
archive.process_symlink(path, st)
elif stat.S_ISFIFO(st.st_mode):
archive.process_fifo(path, st)
elif stat.S_ISREG(st.st_mode):
try:
archive.process_file(path, st, cache)
except IOError, e:
self.print_error('%s: %s', path, e)
else:
self.print_error('Unknown file type: %s', path)
2010-10-15 18:35:49 +00:00
    def do_extract(self, args):
        """Extract an archive's contents into args.dest.

        Directories are kept on a stack and extracted a second time
        once all of their children have been written, because writing
        the children clobbers the directory's restored mtime.  Relies
        on get_items() yielding paths in depth-first order -- TODO
        confirm against archive.py.
        """
        store = self.open_store(args.archive)
        keychain = Keychain(args.keychain)
        archive = Archive(store, keychain, args.archive.archive)
        dirs = []
        for item in archive.get_items():
            if exclude_path(item['path'], args.patterns):
                continue
            self.print_verbose(item['path'].decode('utf-8'))
            archive.extract_item(item, args.dest)
            if stat.S_ISDIR(item['mode']):
                dirs.append(item)
            # Left the most recent directory's subtree: finalize its mtime.
            if dirs and not item['path'].startswith(dirs[-1]['path']):
                # Extract directories twice to make sure mtime is correctly restored
                archive.extract_item(dirs.pop(-1), args.dest)
        # Finalize any directories still pending at end of archive.
        while dirs:
            archive.extract_item(dirs.pop(-1), args.dest)
        return self.exit_code
def do_delete(self, args):
store = self.open_store(args.archive)
keychain = Keychain(args.keychain)
archive = Archive(store, keychain, args.archive.archive)
cache = Cache(store, keychain)
2010-10-15 18:35:49 +00:00
archive.delete(cache)
2010-10-30 11:44:25 +00:00
return self.exit_code
2010-10-15 18:35:49 +00:00
def do_list(self, args):
store = self.open_store(args.src)
keychain = Keychain(args.keychain)
2010-10-15 18:35:49 +00:00
if args.src.archive:
2010-10-30 11:44:25 +00:00
tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 010: '-', 012: 'l', 014: 's'}
archive = Archive(store, keychain, args.src.archive)
for item in archive.get_items():
2010-10-30 11:44:25 +00:00
type = tmap.get(item['mode'] / 4096, '?')
mode = format_file_mode(item['mode'])
size = item.get('size', 0)
mtime = format_time(datetime.fromtimestamp(item['mtime']))
print '%s%s %-6s %-6s %8d %s %s' % (type, mode, item['user'],
item['group'], size, mtime, item['path'])
2010-10-15 18:35:49 +00:00
else:
for archive in Archive.list_archives(store, keychain):
2010-10-27 19:01:57 +00:00
print '%(name)-20s %(time)s' % archive.metadata
2010-10-30 11:44:25 +00:00
return self.exit_code
2010-10-15 18:35:49 +00:00
def do_verify(self, args):
store = self.open_store(args.archive)
keychain = Keychain(args.keychain)
archive = Archive(store, keychain, args.archive.archive)
for item in archive.get_items():
2010-10-30 11:44:25 +00:00
if stat.S_ISREG(item['mode']) and not 'source' in item:
2010-11-15 21:18:47 +00:00
self.print_verbose('%s ...', item['path'].decode('utf-8'), newline=False)
2010-10-30 11:44:25 +00:00
if archive.verify_file(item):
self.print_verbose('OK')
else:
self.print_verbose('ERROR')
self.print_error('%s: verification failed' % item['path'])
return self.exit_code
2010-10-15 18:35:49 +00:00
def do_info(self, args):
store = self.open_store(args.archive)
keychain = Keychain(args.keychain)
archive = Archive(store, keychain, args.archive.archive)
cache = Cache(store, keychain)
2010-10-20 19:08:46 +00:00
osize, csize, usize = archive.stats(cache)
2010-10-25 17:57:54 +00:00
print 'Name:', archive.metadata['name']
print 'Hostname:', archive.metadata['hostname']
print 'Username:', archive.metadata['username']
print 'Time:', archive.metadata['time']
print 'Command line:', ' '.join(archive.metadata['cmdline'])
2010-10-27 17:30:21 +00:00
print 'Original size:', format_file_size(osize)
print 'Compressed size:', format_file_size(csize)
print 'Unique data:', format_file_size(usize)
2010-10-30 11:44:25 +00:00
return self.exit_code
2010-04-18 20:34:21 +00:00
2010-10-31 20:55:09 +00:00
    def do_init_keychain(self, args):
        """Generate a fresh keychain file at args.keychain.

        Returns Keychain.generate's status as the exit code.
        """
        return Keychain.generate(args.keychain)
    def do_export_restricted(self, args):
        """Write a restricted copy of the keychain to args.output.

        NOTE(review): 'restricted' semantics are defined by
        Keychain.restrict (presumably reduced capabilities) -- see
        keychain.py.
        """
        keychain = Keychain(args.keychain)
        keychain.restrict(args.output)
        return self.exit_code
    def do_keychain_chpass(self, args):
        """Change the keychain passphrase; returns chpass's status code."""
        return Keychain(args.keychain).chpass()
def run(self, args=None):
2011-01-04 22:00:39 +00:00
dot_path = os.path.join(os.path.expanduser('~'), '.darc')
if not os.path.exists(dot_path):
os.mkdir(dot_path)
2010-10-25 18:22:20 +00:00
default_keychain = os.path.join(os.path.expanduser('~'),
2010-10-27 18:12:40 +00:00
'.darc', 'keychain')
parser = argparse.ArgumentParser(description='DARC - Deduplicating Archiver')
2010-10-25 18:22:20 +00:00
parser.add_argument('-k', '--keychain', dest='keychain', type=str,
default=default_keychain,
help='Keychain to use')
2010-10-15 18:35:49 +00:00
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
default=False,
help='Verbose output')
2010-10-15 18:35:49 +00:00
subparsers = parser.add_subparsers(title='Available subcommands')
2010-10-31 20:55:09 +00:00
subparser = subparsers.add_parser('init-keychain')
subparser.set_defaults(func=self.do_init_keychain)
subparser = subparsers.add_parser('export-restricted')
2010-10-24 20:00:46 +00:00
subparser.add_argument('output', metavar='OUTPUT', type=str,
help='Keychain to create')
2010-10-31 20:55:09 +00:00
subparser.set_defaults(func=self.do_export_restricted)
subparser = subparsers.add_parser('change-password')
2010-10-24 20:13:34 +00:00
subparser.set_defaults(func=self.do_keychain_chpass)
2010-10-26 19:48:43 +00:00
subparser = subparsers.add_parser('init')
subparser.set_defaults(func=self.do_init)
subparser.add_argument('store', metavar='STORE',
type=location_validator(archive=False),
help='Store to initialize')
2010-11-15 21:18:47 +00:00
subparser = subparsers.add_parser('serve')
subparser.set_defaults(func=self.do_serve)
2010-10-15 18:35:49 +00:00
subparser = subparsers.add_parser('create')
subparser.set_defaults(func=self.do_create)
2010-11-02 21:47:39 +00:00
subparser.add_argument('-i', '--include', dest='patterns',
type=IncludePattern, action='append',
help='Include condition')
subparser.add_argument('-e', '--exclude', dest='patterns',
type=ExcludePattern, action='append',
help='Include condition')
2010-10-15 18:35:49 +00:00
subparser.add_argument('archive', metavar='ARCHIVE',
type=location_validator(archive=True),
help='Archive to create')
subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
help='Paths to add to archive')
subparser = subparsers.add_parser('extract')
subparser.set_defaults(func=self.do_extract)
2010-11-02 21:47:39 +00:00
subparser.add_argument('-i', '--include', dest='patterns',
type=IncludePattern, action='append',
help='Include condition')
subparser.add_argument('-e', '--exclude', dest='patterns',
type=ExcludePattern, action='append',
help='Include condition')
2010-10-15 18:35:49 +00:00
subparser.add_argument('archive', metavar='ARCHIVE',
type=location_validator(archive=True),
help='Archive to create')
subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
help='Where to extract files')
subparser = subparsers.add_parser('delete')
subparser.set_defaults(func=self.do_delete)
subparser.add_argument('archive', metavar='ARCHIVE',
type=location_validator(archive=True),
help='Archive to delete')
subparser = subparsers.add_parser('list')
subparser.set_defaults(func=self.do_list)
subparser.add_argument('src', metavar='SRC', type=location_validator(),
help='Store/Archive to list contents of')
subparser= subparsers.add_parser('verify')
subparser.set_defaults(func=self.do_verify)
subparser.add_argument('archive', metavar='ARCHIVE',
type=location_validator(archive=True),
help='Archive to verity integrity of')
subparser= subparsers.add_parser('info')
subparser.set_defaults(func=self.do_info)
subparser.add_argument('archive', metavar='ARCHIVE',
type=location_validator(archive=True),
help='Archive to display information about')
2010-10-16 09:45:36 +00:00
args = parser.parse_args(args)
2010-10-30 11:44:25 +00:00
self.verbose = args.verbose
2010-10-16 09:45:36 +00:00
return args.func(args)
2010-03-06 17:25:35 +00:00
2010-02-20 17:23:46 +00:00
def main():
    """Script entry point: run the archiver and exit with its status."""
    sys.exit(Archiver().run())


if __name__ == '__main__':
    main()