import argparse
from datetime import datetime
import os
import stat
import sys

from .archive import Archive
from .store import Store
from .cache import Cache
from .keychain import Keychain
from .helpers import location_validator, format_file_size, format_time, format_file_mode, walk_dir


class Archiver(object):
|
|
|
|
|
2010-10-30 11:44:25 +00:00
|
|
|
def __init__(self):
|
|
|
|
self.exit_code = 0
|
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def open_store(self, location):
|
2010-10-26 19:25:25 +00:00
|
|
|
return Store(location.path)
|
2010-02-23 20:34:28 +00:00
|
|
|
|
2010-10-30 11:44:25 +00:00
|
|
|
def print_error(self, msg, *args):
|
|
|
|
msg = args and msg % args or msg
|
|
|
|
self.exit_code = 1
|
|
|
|
print >> sys.stderr, msg
|
|
|
|
|
|
|
|
def print_verbose(self, msg, *args, **kw):
|
|
|
|
if self.verbose:
|
|
|
|
msg = args and msg % args or msg
|
|
|
|
if kw.get('newline', True):
|
|
|
|
print msg
|
|
|
|
else:
|
|
|
|
print msg,
|
|
|
|
|
2010-10-26 19:48:43 +00:00
|
|
|
def do_init(self, args):
|
|
|
|
Store(args.store.path, create=True)
|
2010-10-30 11:44:25 +00:00
|
|
|
return self.exit_code
|
2010-10-26 19:48:43 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_create(self, args):
|
2010-10-21 19:21:43 +00:00
|
|
|
store = self.open_store(args.archive)
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
2010-10-30 11:44:25 +00:00
|
|
|
try:
|
2010-10-31 21:21:59 +00:00
|
|
|
Archive(store, keychain, args.archive.archive)
|
2010-10-30 11:44:25 +00:00
|
|
|
except Archive.DoesNotExist:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
self.print_error('Archive already exists')
|
|
|
|
return self.exit_code
|
2010-10-31 21:21:59 +00:00
|
|
|
archive = Archive(store, keychain)
|
|
|
|
cache = Cache(store, keychain)
|
2010-10-30 11:44:25 +00:00
|
|
|
for path in args.paths:
|
2010-10-31 19:12:32 +00:00
|
|
|
for path, st in walk_dir(unicode(path)):
|
2010-11-02 17:33:53 +00:00
|
|
|
self.print_verbose(path)
|
2010-10-30 11:44:25 +00:00
|
|
|
if stat.S_ISDIR(st.st_mode):
|
|
|
|
archive.process_dir(path, st)
|
|
|
|
elif stat.S_ISLNK(st.st_mode):
|
|
|
|
archive.process_symlink(path, st)
|
2010-10-31 19:12:32 +00:00
|
|
|
elif stat.S_ISFIFO(st.st_mode):
|
|
|
|
archive.process_fifo(path, st)
|
2010-10-30 11:44:25 +00:00
|
|
|
elif stat.S_ISREG(st.st_mode):
|
|
|
|
try:
|
|
|
|
archive.process_file(path, st, cache)
|
|
|
|
except IOError, e:
|
|
|
|
self.print_error('%s: %s', path, e)
|
|
|
|
else:
|
|
|
|
self.print_error('Unknown file type: %s', path)
|
|
|
|
archive.save(args.archive.archive)
|
|
|
|
cache.save()
|
|
|
|
return self.exit_code
|
2010-02-20 21:28:46 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_extract(self, args):
|
2010-10-21 19:21:43 +00:00
|
|
|
store = self.open_store(args.archive)
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
|
|
|
archive = Archive(store, keychain, args.archive.archive)
|
2010-10-30 11:44:25 +00:00
|
|
|
archive.get_items()
|
2010-10-31 19:12:32 +00:00
|
|
|
dirs = []
|
2010-10-30 11:44:25 +00:00
|
|
|
for item in archive.items:
|
|
|
|
self.print_verbose(item['path'])
|
|
|
|
archive.extract_item(item, args.dest)
|
2010-10-31 19:12:32 +00:00
|
|
|
if stat.S_ISDIR(item['mode']):
|
|
|
|
dirs.append(item)
|
|
|
|
if dirs and not item['path'].startswith(dirs[-1]['path']):
|
|
|
|
# Extract directories twice to make sure mtime is correctly restored
|
|
|
|
archive.extract_item(dirs.pop(-1), args.dest)
|
|
|
|
while dirs:
|
|
|
|
archive.extract_item(dirs.pop(-1), args.dest)
|
2010-10-30 11:44:25 +00:00
|
|
|
return self.exit_code
|
2010-02-24 22:24:19 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_delete(self, args):
|
2010-10-21 19:21:43 +00:00
|
|
|
store = self.open_store(args.archive)
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
|
|
|
archive = Archive(store, keychain, args.archive.archive)
|
|
|
|
cache = Cache(store, keychain)
|
2010-10-15 18:35:49 +00:00
|
|
|
archive.delete(cache)
|
2010-10-30 11:44:25 +00:00
|
|
|
return self.exit_code
|
2010-02-24 22:24:19 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_list(self, args):
|
2010-10-21 19:21:43 +00:00
|
|
|
store = self.open_store(args.src)
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
2010-10-15 18:35:49 +00:00
|
|
|
if args.src.archive:
|
2010-10-30 11:44:25 +00:00
|
|
|
tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 010: '-', 012: 'l', 014: 's'}
|
2010-10-31 21:21:59 +00:00
|
|
|
archive = Archive(store, keychain, args.src.archive)
|
2010-10-30 11:44:25 +00:00
|
|
|
archive.get_items()
|
|
|
|
for item in archive.items:
|
|
|
|
type = tmap.get(item['mode'] / 4096, '?')
|
|
|
|
mode = format_file_mode(item['mode'])
|
|
|
|
size = item.get('size', 0)
|
|
|
|
mtime = format_time(datetime.fromtimestamp(item['mtime']))
|
|
|
|
print '%s%s %-6s %-6s %8d %s %s' % (type, mode, item['user'],
|
|
|
|
item['group'], size, mtime, item['path'])
|
2010-10-15 18:35:49 +00:00
|
|
|
else:
|
2010-10-31 21:21:59 +00:00
|
|
|
for archive in Archive.list_archives(store, keychain):
|
2010-10-27 19:01:57 +00:00
|
|
|
print '%(name)-20s %(time)s' % archive.metadata
|
2010-10-30 11:44:25 +00:00
|
|
|
return self.exit_code
|
2010-02-24 22:24:19 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_verify(self, args):
|
2010-10-21 19:21:43 +00:00
|
|
|
store = self.open_store(args.archive)
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
|
|
|
archive = Archive(store, keychain, args.archive.archive)
|
2010-10-30 11:44:25 +00:00
|
|
|
archive.get_items()
|
|
|
|
for item in archive.items:
|
|
|
|
if stat.S_ISREG(item['mode']) and not 'source' in item:
|
|
|
|
self.print_verbose('%s ...', item['path'], newline=False)
|
|
|
|
if archive.verify_file(item):
|
|
|
|
self.print_verbose('OK')
|
|
|
|
else:
|
|
|
|
self.print_verbose('ERROR')
|
|
|
|
self.print_error('%s: verification failed' % item['path'])
|
|
|
|
return self.exit_code
|
2010-02-27 22:23:39 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_info(self, args):
|
2010-10-21 19:21:43 +00:00
|
|
|
store = self.open_store(args.archive)
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
|
|
|
archive = Archive(store, keychain, args.archive.archive)
|
|
|
|
cache = Cache(store, keychain)
|
2010-10-20 19:08:46 +00:00
|
|
|
osize, csize, usize = archive.stats(cache)
|
2010-10-25 17:57:54 +00:00
|
|
|
print 'Name:', archive.metadata['name']
|
|
|
|
print 'Hostname:', archive.metadata['hostname']
|
|
|
|
print 'Username:', archive.metadata['username']
|
|
|
|
print 'Time:', archive.metadata['time']
|
|
|
|
print 'Command line:', ' '.join(archive.metadata['cmdline'])
|
|
|
|
print 'Number of Files:', len(archive.items)
|
2010-10-27 17:30:21 +00:00
|
|
|
print 'Original size:', format_file_size(osize)
|
|
|
|
print 'Compressed size:', format_file_size(csize)
|
|
|
|
print 'Unique data:', format_file_size(usize)
|
2010-10-30 11:44:25 +00:00
|
|
|
return self.exit_code
|
2010-04-18 20:34:21 +00:00
|
|
|
|
2010-10-31 20:55:09 +00:00
|
|
|
def do_init_keychain(self, args):
|
2010-10-31 21:21:59 +00:00
|
|
|
return Keychain.generate(args.keychain)
|
2010-10-24 20:00:46 +00:00
|
|
|
|
2010-10-31 20:55:09 +00:00
|
|
|
def do_export_restricted(self, args):
|
2010-10-31 21:21:59 +00:00
|
|
|
keychain = Keychain(args.keychain)
|
2010-10-31 20:55:09 +00:00
|
|
|
keychain.restrict(args.output)
|
|
|
|
return self.exit_code
|
2010-10-23 21:01:12 +00:00
|
|
|
|
2010-10-24 20:13:34 +00:00
|
|
|
def do_keychain_chpass(self, args):
|
2010-10-31 21:21:59 +00:00
|
|
|
return Keychain(args.keychain).chpass()
|
2010-10-24 20:13:34 +00:00
|
|
|
|
2010-10-16 09:45:36 +00:00
|
|
|
def run(self, args=None):
|
2010-10-25 18:22:20 +00:00
|
|
|
default_keychain = os.path.join(os.path.expanduser('~'),
|
2010-10-27 18:12:40 +00:00
|
|
|
'.darc', 'keychain')
|
|
|
|
parser = argparse.ArgumentParser(description='DARC - Deduplicating Archiver')
|
2010-10-25 18:22:20 +00:00
|
|
|
parser.add_argument('-k', '--keychain', dest='keychain', type=str,
|
|
|
|
default=default_keychain,
|
|
|
|
help='Keychain to use')
|
2010-10-15 18:35:49 +00:00
|
|
|
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
|
|
|
|
default=False,
|
|
|
|
help='Verbose output')
|
|
|
|
|
2010-10-23 21:01:12 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
subparsers = parser.add_subparsers(title='Available subcommands')
|
2010-10-31 20:55:09 +00:00
|
|
|
subparser = subparsers.add_parser('init-keychain')
|
|
|
|
subparser.set_defaults(func=self.do_init_keychain)
|
|
|
|
subparser = subparsers.add_parser('export-restricted')
|
2010-10-24 20:00:46 +00:00
|
|
|
subparser.add_argument('output', metavar='OUTPUT', type=str,
|
|
|
|
help='Keychain to create')
|
2010-10-31 20:55:09 +00:00
|
|
|
subparser.set_defaults(func=self.do_export_restricted)
|
|
|
|
subparser = subparsers.add_parser('change-password')
|
2010-10-24 20:13:34 +00:00
|
|
|
subparser.set_defaults(func=self.do_keychain_chpass)
|
2010-10-23 21:01:12 +00:00
|
|
|
|
2010-10-26 19:48:43 +00:00
|
|
|
subparser = subparsers.add_parser('init')
|
|
|
|
subparser.set_defaults(func=self.do_init)
|
|
|
|
subparser.add_argument('store', metavar='STORE',
|
|
|
|
type=location_validator(archive=False),
|
|
|
|
help='Store to initialize')
|
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser = subparsers.add_parser('create')
|
|
|
|
subparser.set_defaults(func=self.do_create)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
|
|
|
help='Archive to create')
|
|
|
|
subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
|
|
|
|
help='Paths to add to archive')
|
|
|
|
|
|
|
|
subparser = subparsers.add_parser('extract')
|
|
|
|
subparser.set_defaults(func=self.do_extract)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
|
|
|
help='Archive to create')
|
|
|
|
subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
|
|
|
|
help='Where to extract files')
|
|
|
|
|
|
|
|
subparser = subparsers.add_parser('delete')
|
|
|
|
subparser.set_defaults(func=self.do_delete)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
|
|
|
help='Archive to delete')
|
|
|
|
|
|
|
|
subparser = subparsers.add_parser('list')
|
|
|
|
subparser.set_defaults(func=self.do_list)
|
|
|
|
subparser.add_argument('src', metavar='SRC', type=location_validator(),
|
|
|
|
help='Store/Archive to list contents of')
|
|
|
|
|
|
|
|
subparser= subparsers.add_parser('verify')
|
|
|
|
subparser.set_defaults(func=self.do_verify)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
|
|
|
help='Archive to verity integrity of')
|
|
|
|
|
|
|
|
subparser= subparsers.add_parser('info')
|
|
|
|
subparser.set_defaults(func=self.do_info)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
|
|
|
help='Archive to display information about')
|
|
|
|
|
2010-10-16 09:45:36 +00:00
|
|
|
args = parser.parse_args(args)
|
2010-10-30 11:44:25 +00:00
|
|
|
self.verbose = args.verbose
|
2010-10-16 09:45:36 +00:00
|
|
|
return args.func(args)


def main():
    """Script entry point: run the archiver and exit with its status code."""
    sys.exit(Archiver().run())
|
2010-02-20 17:23:46 +00:00
|
|
|
|
|
|
|
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()