import argparse
from binascii import hexlify
from datetime import datetime
from operator import attrgetter
import functools
import io
import os
import stat
import sys
import textwrap

from attic import __version__
from attic.archive import Archive, ArchiveChecker
from attic.repository import Repository
from attic.cache import Cache
from attic.key import key_creator
from attic.helpers import Error, location_validator, format_time, \
    format_file_mode, ExcludePattern, exclude_path, adjust_patterns, to_localtime, \
    get_cache_dir, get_keys_dir, format_timedelta, prune_within, prune_split, \
    Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
    is_cachedir, bigint_to_int
from attic.remote import RepositoryServer, RemoteRepository


class Archiver:

    def __init__(self):
        self.exit_code = 0

    def open_repository(self, location, create=False, exclusive=False):
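        # ssh:// locations go through RemoteRepository (which talks to an
        # "attic serve" process on the remote side); everything else is opened
        # as a local Repository directory.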
        if location.proto == 'ssh':
            repository = RemoteRepository(location, create=create)
        else:
            repository = Repository(location.path, create=create, exclusive=exclusive)
        repository._location = location
        return repository

    def print_error(self, msg, *args):
        msg = args and msg % args or msg
        self.exit_code = 1
        print('attic: ' + msg, file=sys.stderr)

    def print_verbose(self, msg, *args, **kw):
        if self.verbose:
            msg = args and msg % args or msg
            if kw.get('newline', True):
                print(msg)
            else:
                print(msg, end=' ')

    def do_serve(self, args):
        """Start Attic in server mode. This command is usually not used manually.
        """
        return RepositoryServer(restrict_to_paths=args.restrict_to_paths).serve()

    def do_init(self, args):
        """Initialize an empty repository"""
        print('Initializing repository at "%s"' % args.repository.orig)
        repository = self.open_repository(args.repository, create=True, exclusive=True)
        key = key_creator(repository, args)
        manifest = Manifest(key, repository)
        manifest.key = key
        manifest.write()
        repository.commit()
        return self.exit_code

    def do_check(self, args):
        """Check repository consistency"""
        repository = self.open_repository(args.repository, exclusive=args.repair)
        if args.repair:
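            # --repair is potentially destructive, so ask for explicit confirmation
            # unless ATTIC_CHECK_I_KNOW_WHAT_I_AM_DOING is set in the environment.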
            while not os.environ.get('ATTIC_CHECK_I_KNOW_WHAT_I_AM_DOING'):
                self.print_error("""Warning: 'check --repair' is an experimental feature that might result
in data loss.

Type "Yes I am sure" if you understand this and want to continue.\n""")
                if input('Do you want to continue? ') == 'Yes I am sure':
                    break
        if not args.archives_only:
            print('Starting repository check...')
            if repository.check(repair=args.repair):
                print('Repository check complete, no problems found.')
            else:
                return 1
        if not args.repo_only and not ArchiveChecker().check(repository, repair=args.repair):
            return 1
        return 0

    def do_change_passphrase(self, args):
        """Change repository key file passphrase"""
        repository = self.open_repository(args.repository)
        manifest, key = Manifest.load(repository)
        key.change_passphrase()
        return 0

    def do_create(self, args):
        """Create new archive"""
        t0 = datetime.now()
        repository = self.open_repository(args.archive, exclusive=True)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                          create=True, checkpoint_interval=args.checkpoint_interval,
                          numeric_owner=args.numeric_owner)
        # Add Attic cache dir to inode_skip list
        skip_inodes = set()
        try:
            st = os.stat(get_cache_dir())
            skip_inodes.add((st.st_ino, st.st_dev))
        except IOError:
            pass
        # Add local repository dir to inode_skip list
        if not args.archive.host:
            try:
                st = os.stat(args.archive.path)
                skip_inodes.add((st.st_ino, st.st_dev))
            except IOError:
                pass
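        # Walk each requested root; _process() recurses into directories and
        # applies the exclude patterns and the optional one-file-system restriction.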
        for path in args.paths:
            path = os.path.normpath(path)
            if args.dontcross:
                try:
                    restrict_dev = os.lstat(path).st_dev
                except OSError as e:
                    self.print_error('%s: %s', path, e)
                    continue
            else:
                restrict_dev = None
            self._process(archive, cache, args.excludes, args.exclude_caches, skip_inodes, path, restrict_dev)
        archive.save()
        if args.stats:
            t = datetime.now()
            diff = t - t0
            print('-' * 78)
            print('Archive name: %s' % args.archive.archive)
            print('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
            print('Start time: %s' % t0.strftime('%c'))
            print('End time: %s' % t.strftime('%c'))
            print('Duration: %s' % format_timedelta(diff))
            print('Number of files: %d' % archive.stats.nfiles)
            archive.stats.print_('This archive:', cache)
            print('-' * 78)
        return self.exit_code

    def _process(self, archive, cache, excludes, exclude_caches, skip_inodes, path, restrict_dev):
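        # Recursively archive `path`: skip excluded or already-seen paths, stay on
        # the starting filesystem if requested, then dispatch on the file type
        # (regular file, directory, symlink, fifo, block/char device).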
        if exclude_path(path, excludes):
            return
        try:
            st = os.lstat(path)
        except OSError as e:
            self.print_error('%s: %s', path, e)
            return
        if (st.st_ino, st.st_dev) in skip_inodes:
            return
        # Entering a new filesystem?
        if restrict_dev and st.st_dev != restrict_dev:
            return
        # Ignore unix sockets
        if stat.S_ISSOCK(st.st_mode):
            return
        self.print_verbose(remove_surrogates(path))
        if stat.S_ISREG(st.st_mode):
            try:
                archive.process_file(path, st, cache)
            except IOError as e:
                self.print_error('%s: %s', path, e)
        elif stat.S_ISDIR(st.st_mode):
            if exclude_caches and is_cachedir(path):
                return
            archive.process_item(path, st)
            try:
                entries = os.listdir(path)
            except OSError as e:
                self.print_error('%s: %s', path, e)
            else:
                for filename in sorted(entries):
                    self._process(archive, cache, excludes, exclude_caches, skip_inodes,
                                  os.path.join(path, filename), restrict_dev)
        elif stat.S_ISLNK(st.st_mode):
            archive.process_symlink(path, st)
        elif stat.S_ISFIFO(st.st_mode):
            archive.process_item(path, st)
        elif stat.S_ISCHR(st.st_mode) or stat.S_ISBLK(st.st_mode):
            archive.process_dev(path, st)
        else:
            self.print_error('Unknown file type: %s', path)

    def do_extract(self, args):
        """Extract archive contents"""
        # be restrictive when restoring files, restore permissions later
        os.umask(0o077)
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        archive = Archive(repository, key, manifest, args.archive.archive,
                          numeric_owner=args.numeric_owner)
        patterns = adjust_patterns(args.paths, args.excludes)
        dry_run = args.dry_run
        strip_components = args.strip_components
        dirs = []
        for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
            orig_path = item[b'path']
            if strip_components:
                item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
                if not item[b'path']:
                    continue
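            # Directory metadata is restored last: directories are kept on the
            # `dirs` stack (extracted with restore_attrs=False below) and finalized
            # only once extraction has moved past their contents, so restored
            # timestamps and permissions are not clobbered by later writes.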
            if not args.dry_run:
                while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                    archive.extract_item(dirs.pop(-1))
            self.print_verbose(remove_surrogates(orig_path))
            try:
                if dry_run:
                    archive.extract_item(item, dry_run=True)
                else:
                    if stat.S_ISDIR(item[b'mode']):
                        dirs.append(item)
                        archive.extract_item(item, restore_attrs=False)
                    else:
                        archive.extract_item(item)
            except IOError as e:
                self.print_error('%s: %s', remove_surrogates(orig_path), e)

        if not args.dry_run:
            while dirs:
                archive.extract_item(dirs.pop(-1))
        return self.exit_code

    def do_delete(self, args):
        """Delete an existing archive"""
        repository = self.open_repository(args.archive, exclusive=True)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
        stats = Statistics()
        archive.delete(stats)
        manifest.write()
        repository.commit()
        cache.commit()
        if args.stats:
            stats.print_('Deleted data:', cache)
        return self.exit_code

    def do_mount(self, args):
        """Mount archive or an entire repository as a FUSE filesystem"""
        try:
            from attic.fuse import AtticOperations
        except ImportError:
            self.print_error('the "llfuse" module is required to use this feature')
            return self.exit_code

        if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
            self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
            return self.exit_code

        repository = self.open_repository(args.src)
        manifest, key = Manifest.load(repository)
        if args.src.archive:
            archive = Archive(repository, key, manifest, args.src.archive)
        else:
            archive = None
        operations = AtticOperations(key, repository, manifest, archive)
        self.print_verbose("Mounting filesystem")
        try:
            operations.mount(args.mountpoint, args.options, args.foreground)
        except RuntimeError:
            # Relevant error message already printed to stderr by fuse
            self.exit_code = 1
        return self.exit_code

    def do_list(self, args):
        """List archive or repository contents"""
        repository = self.open_repository(args.src)
        manifest, key = Manifest.load(repository)
        if args.src.archive:
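            # item[b'mode'] // 4096 isolates the file-type bits of st_mode
            # (i.e. mode >> 12); tmap translates them into ls-style type characters.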
            tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 0o10: '-', 0o12: 'l', 0o14: 's'}
            archive = Archive(repository, key, manifest, args.src.archive)
            for item in archive.iter_items():
                type = tmap.get(item[b'mode'] // 4096, '?')
                mode = format_file_mode(item[b'mode'])
                size = 0
                if type == '-':
                    try:
                        size = sum(size for _, size, _ in item[b'chunks'])
                    except KeyError:
                        pass
                mtime = format_time(datetime.fromtimestamp(bigint_to_int(item[b'mtime']) / 1e9))
                if b'source' in item:
                    if type == 'l':
                        extra = ' -> %s' % item[b'source']
                    else:
                        type = 'h'
                        extra = ' link to %s' % item[b'source']
                else:
                    extra = ''
                print('%s%s %-6s %-6s %8d %s %s%s' % (type, mode, item[b'user'] or item[b'uid'],
                                                      item[b'group'] or item[b'gid'], size, mtime,
                                                      remove_surrogates(item[b'path']), extra))
        else:
            for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
                print(format_archive(archive))
        return self.exit_code

    def do_info(self, args):
        """Show archive details such as disk space used"""
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
        stats = archive.calc_stats(cache)
        print('Name:', archive.name)
        print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Hostname:', archive.metadata[b'hostname'])
        print('Username:', archive.metadata[b'username'])
        print('Time: %s' % to_localtime(archive.ts).strftime('%c'))
        print('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
        print('Number of files: %d' % stats.nfiles)
        stats.print_('This archive:', cache)
        return self.exit_code

    def do_prune(self, args):
        """Prune repository archives according to specified rules"""
        repository = self.open_repository(args.repository, exclusive=True)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archives = list(sorted(Archive.list_archives(repository, key, manifest, cache),
                               key=attrgetter('ts'), reverse=True))
        if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0 and args.within is None:
            self.print_error('At least one of the "within", "hourly", "daily", "weekly", "monthly" or "yearly" '
                             'settings must be specified')
            return 1
        if args.prefix:
            archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
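        # Each rule keeps the newest archive per time bucket (hour, day, week, ...).
        # Archives already selected by an earlier rule are passed in as `keep` so
        # they do not count towards the later rule's quota.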
        keep = []
        if args.within:
            keep += prune_within(archives, args.within)
        if args.hourly:
            keep += prune_split(archives, '%Y-%m-%d %H', args.hourly, keep)
        if args.daily:
            keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
        if args.weekly:
            keep += prune_split(archives, '%G-%V', args.weekly, keep)
        if args.monthly:
            keep += prune_split(archives, '%Y-%m', args.monthly, keep)
        if args.yearly:
            keep += prune_split(archives, '%Y', args.yearly, keep)

        keep.sort(key=attrgetter('ts'), reverse=True)
        to_delete = [a for a in archives if a not in keep]
        stats = Statistics()
        for archive in keep:
            self.print_verbose('Keeping archive: %s' % format_archive(archive))
        for archive in to_delete:
            if args.dry_run:
                self.print_verbose('Would prune: %s' % format_archive(archive))
            else:
                self.print_verbose('Pruning archive: %s' % format_archive(archive))
                archive.delete(stats)
        if to_delete and not args.dry_run:
            manifest.write()
            repository.commit()
            cache.commit()
        if args.stats:
            stats.print_('Deleted data:', cache)
        return self.exit_code

    helptext = {}
    helptext['patterns'] = '''
        Exclude patterns use a variant of shell pattern syntax, with '*' matching any
        number of characters, '?' matching any single character, '[...]' matching any
        single character specified, including ranges, and '[!...]' matching any
        character not specified. For the purpose of these patterns, the path
        separator ('\\' for Windows and '/' on other systems) is not treated
        specially. For a path to match a pattern, it must completely match from
        start to end, or must match from the start to just before a path separator.
        Except for the root path, paths will never end in the path separator when
        matching is attempted. Thus, if a given pattern ends in a path separator, a
        '*' is appended before matching is attempted. Patterns with wildcards should
        be quoted to protect them from shell expansion.

        Examples:

        # Exclude '/home/user/file.o' but not '/home/user/file.odt':
        $ attic create -e '*.o' repo.attic /

        # Exclude '/home/user/junk' and '/home/user/subdir/junk' but
        # not '/home/user/importantjunk' or '/etc/junk':
        $ attic create -e '/home/*/junk' repo.attic /

        # Exclude the contents of '/home/user/cache' but not the directory itself:
        $ attic create -e /home/user/cache/ repo.attic /

        # The file '/home/user/cache/important' is *not* backed up:
        $ attic create -e /home/user/cache/ repo.attic / /home/user/cache/important
        '''

    def do_help(self, parser, commands, args):
        if not args.topic:
            parser.print_help()
        elif args.topic in self.helptext:
            print(self.helptext[args.topic])
        elif args.topic in commands:
            if args.epilog_only:
                print(commands[args.topic].epilog)
            elif args.usage_only:
                commands[args.topic].epilog = None
                commands[args.topic].print_help()
            else:
                commands[args.topic].print_help()
        else:
            parser.error('No help available on %s' % (args.topic,))
        return self.exit_code

    def preprocess_args(self, args):
        deprecations = [
            ('--hourly', '--keep-hourly', 'Warning: "--hourly" has been deprecated. Use "--keep-hourly" instead.'),
            ('--daily', '--keep-daily', 'Warning: "--daily" has been deprecated. Use "--keep-daily" instead.'),
            ('--weekly', '--keep-weekly', 'Warning: "--weekly" has been deprecated. Use "--keep-weekly" instead.'),
            ('--monthly', '--keep-monthly', 'Warning: "--monthly" has been deprecated. Use "--keep-monthly" instead.'),
            ('--yearly', '--keep-yearly', 'Warning: "--yearly" has been deprecated. Use "--keep-yearly" instead.')
        ]
        if args and args[0] == 'verify':
            print('Warning: "attic verify" has been deprecated. Use "attic extract --dry-run" instead.')
            args = ['extract', '--dry-run'] + args[1:]
        for i, arg in enumerate(args[:]):
            for old_name, new_name, warning in deprecations:
                if arg.startswith(old_name):
                    args[i] = arg.replace(old_name, new_name)
                    print(warning)
        return args

    def run(self, args=None):
        check_extension_modules()
        keys_dir = get_keys_dir()
        if not os.path.exists(keys_dir):
            os.makedirs(keys_dir)
            os.chmod(keys_dir, stat.S_IRWXU)
        cache_dir = get_cache_dir()
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
            os.chmod(cache_dir, stat.S_IRWXU)
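            # Tag the freshly created cache directory so that other backup and
            # archiving tools honouring the CACHEDIR.TAG convention will skip it.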
            with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
                fd.write(textwrap.dedent("""
                    Signature: 8a477f597d28d172789f06886806bc55
                    # This file is a cache directory tag created by Attic.
                    # For information about cache directory tags, see:
                    # http://www.brynosaurus.com/cachedir/
                    """).lstrip())
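
        # Options shared by every subcommand (currently just -v/--verbose) live on
        # a separate parent parser and are attached via parents=[common_parser].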
        common_parser = argparse.ArgumentParser(add_help=False)
        common_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                                   default=False,
                                   help='verbose output')

        # We can't use argparse for "serve" since we don't want it to show up in "Available commands"
        if args:
            args = self.preprocess_args(args)

        parser = argparse.ArgumentParser(description='Attic %s - Deduplicated Backups' % __version__)
        subparsers = parser.add_subparsers(title='Available commands')

        subparser = subparsers.add_parser('serve', parents=[common_parser],
                                          description=self.do_serve.__doc__)
        subparser.set_defaults(func=self.do_serve)
        subparser.add_argument('--restrict-to-path', dest='restrict_to_paths', action='append',
                               metavar='PATH', help='restrict repository access to PATH')

        init_epilog = textwrap.dedent("""
        This command initializes an empty repository. A repository is a filesystem
        directory containing the deduplicated data from zero or more archives.
        Encryption can be enabled at repository init time.
        """)
        subparser = subparsers.add_parser('init', parents=[common_parser],
                                          description=self.do_init.__doc__, epilog=init_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_init)
        subparser.add_argument('repository', metavar='REPOSITORY',
                               type=location_validator(archive=False),
                               help='repository to create')
        subparser.add_argument('-e', '--encryption', dest='encryption',
                               choices=('none', 'passphrase', 'keyfile'), default='none',
                               help='select encryption method')

        check_epilog = textwrap.dedent("""
        The check command verifies the consistency of a repository and the corresponding
        archives. The underlying repository data files are first checked to detect bit rot
        and other types of damage. After that the consistency and correctness of the archive
        metadata is verified.

        The archive metadata checks can be time consuming and require access to the key
        file and/or passphrase if encryption is enabled. These checks can be skipped using
        the --repository-only option.
        """)
        subparser = subparsers.add_parser('check', parents=[common_parser],
                                          description=self.do_check.__doc__,
                                          epilog=check_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_check)
        subparser.add_argument('repository', metavar='REPOSITORY',
                               type=location_validator(archive=False),
                               help='repository to check consistency of')
        subparser.add_argument('--repository-only', dest='repo_only', action='store_true',
                               default=False,
                               help='only perform repository checks')
        subparser.add_argument('--archives-only', dest='archives_only', action='store_true',
                               default=False,
                               help='only perform archives checks')
        subparser.add_argument('--repair', dest='repair', action='store_true',
                               default=False,
                               help='attempt to repair any inconsistencies found')

        change_passphrase_epilog = textwrap.dedent("""
        The key files used for repository encryption are optionally passphrase
        protected. This command can be used to change this passphrase.
        """)
        subparser = subparsers.add_parser('change-passphrase', parents=[common_parser],
                                          description=self.do_change_passphrase.__doc__,
                                          epilog=change_passphrase_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_change_passphrase)
        subparser.add_argument('repository', metavar='REPOSITORY',
                               type=location_validator(archive=False))

        create_epilog = textwrap.dedent("""
        This command creates a backup archive containing all files found while recursively
        traversing all paths specified. The archive will consume almost no disk space for
        files or parts of files that have already been stored in other archives.

        See "attic help patterns" for more help on exclude patterns.
        """)

        subparser = subparsers.add_parser('create', parents=[common_parser],
                                          description=self.do_create.__doc__,
                                          epilog=create_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_create)
        subparser.add_argument('-s', '--stats', dest='stats',
                               action='store_true', default=False,
                               help='print statistics for the created archive')
        subparser.add_argument('-e', '--exclude', dest='excludes',
                               type=ExcludePattern, action='append',
                               metavar="PATTERN", help='exclude paths matching PATTERN')
        subparser.add_argument('--exclude-from', dest='exclude_files',
                               type=argparse.FileType('r'), action='append',
                               metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
        subparser.add_argument('--exclude-caches', dest='exclude_caches',
                               action='store_true', default=False,
                               help='exclude directories that contain a CACHEDIR.TAG file (http://www.brynosaurus.com/cachedir/spec.html)')
        subparser.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
                               type=int, default=300, metavar='SECONDS',
                               help='write checkpoint every SECONDS seconds (Default: 300)')
        subparser.add_argument('--do-not-cross-mountpoints', dest='dontcross',
                               action='store_true', default=False,
                               help='do not cross mount points')
        subparser.add_argument('--numeric-owner', dest='numeric_owner',
                               action='store_true', default=False,
                               help='only store numeric user and group identifiers')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to create')
        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                               help='paths to archive')

        extract_epilog = textwrap.dedent("""
        This command extracts the contents of an archive. By default the entire
        archive is extracted but a subset of files and directories can be selected
        by passing a list of ``PATHs`` as arguments. The file selection can further
        be restricted by using the ``--exclude`` option.

        See "attic help patterns" for more help on exclude patterns.
        """)
        subparser = subparsers.add_parser('extract', parents=[common_parser],
                                          description=self.do_extract.__doc__,
                                          epilog=extract_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_extract)
        subparser.add_argument('-n', '--dry-run', dest='dry_run',
                               default=False, action='store_true',
                               help='do not actually change any files')
        subparser.add_argument('-e', '--exclude', dest='excludes',
                               type=ExcludePattern, action='append',
                               metavar="PATTERN", help='exclude paths matching PATTERN')
        subparser.add_argument('--exclude-from', dest='exclude_files',
                               type=argparse.FileType('r'), action='append',
                               metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
        subparser.add_argument('--numeric-owner', dest='numeric_owner',
                               action='store_true', default=False,
                               help='only obey numeric user and group identifiers')
        subparser.add_argument('--strip-components', dest='strip_components',
                               type=int, default=0, metavar='NUMBER',
                               help='Remove the specified number of leading path elements. Pathnames with fewer elements will be silently skipped.')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to extract')
        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
                               help='paths to extract')

        delete_epilog = textwrap.dedent("""
        This command deletes an archive from the repository. Any disk space not
        shared with any other existing archive is also reclaimed.
        """)
        subparser = subparsers.add_parser('delete', parents=[common_parser],
                                          description=self.do_delete.__doc__,
                                          epilog=delete_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_delete)
        subparser.add_argument('-s', '--stats', dest='stats',
                               action='store_true', default=False,
                               help='print statistics for the deleted archive')
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to delete')

        list_epilog = textwrap.dedent("""
        This command lists the contents of a repository or an archive.
        """)
        subparser = subparsers.add_parser('list', parents=[common_parser],
                                          description=self.do_list.__doc__,
                                          epilog=list_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_list)
        subparser.add_argument('src', metavar='REPOSITORY_OR_ARCHIVE', type=location_validator(),
                               help='repository/archive to list contents of')

        mount_epilog = textwrap.dedent("""
        This command mounts an archive as a FUSE filesystem. This can be useful for
        browsing an archive or restoring individual files. Unless the ``--foreground``
        option is given the command will run in the background until the filesystem
        is ``umounted``.
        """)
        subparser = subparsers.add_parser('mount', parents=[common_parser],
                                          description=self.do_mount.__doc__,
                                          epilog=mount_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_mount)
        subparser.add_argument('src', metavar='REPOSITORY_OR_ARCHIVE', type=location_validator(),
                               help='repository/archive to mount')
        subparser.add_argument('mountpoint', metavar='MOUNTPOINT', type=str,
                               help='where to mount filesystem')
        subparser.add_argument('-f', '--foreground', dest='foreground',
                               action='store_true', default=False,
                               help='stay in foreground, do not daemonize')
        subparser.add_argument('-o', dest='options', type=str,
                               help='Extra mount options')

        info_epilog = textwrap.dedent("""
        This command displays some detailed information about the specified archive.
        """)
        subparser = subparsers.add_parser('info', parents=[common_parser],
                                          description=self.do_info.__doc__,
                                          epilog=info_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_info)
        subparser.add_argument('archive', metavar='ARCHIVE',
                               type=location_validator(archive=True),
                               help='archive to display information about')

        prune_epilog = textwrap.dedent("""
        The prune command prunes a repository by deleting archives not matching
        any of the specified retention options. This command is normally used by
        automated backup scripts wanting to keep a certain number of historic backups.

        As an example, "-d 7" means to keep the latest backup on each day for 7 days.
        Days without backups do not count towards the total.
        The rules are applied from hourly to yearly, and backups selected by previous
        rules do not count towards those of later rules. The time that each backup
        completes is used for pruning purposes. Dates and times are interpreted in
        the local timezone, and weeks go from Monday to Sunday. Specifying a
        negative number of archives to keep means that there is no limit.

        The "--keep-within" option takes an argument of the form "<int><char>",
        where char is "H", "d", "w", "m", "y". For example, "--keep-within 2d" means
        to keep all archives that were created within the past 48 hours.
        "1m" is taken to mean "31d". The archives kept with this option do not
        count towards the totals specified by any other options.

        If a prefix is set with -p, then only archives that start with the prefix are
        considered for deletion and only those archives count towards the totals
        specified by the rules.
        """)
        subparser = subparsers.add_parser('prune', parents=[common_parser],
                                          description=self.do_prune.__doc__,
                                          epilog=prune_epilog,
                                          formatter_class=argparse.RawDescriptionHelpFormatter)
        subparser.set_defaults(func=self.do_prune)
        subparser.add_argument('-n', '--dry-run', dest='dry_run',
                               default=False, action='store_true',
                               help='do not change repository')
        subparser.add_argument('-s', '--stats', dest='stats',
                               action='store_true', default=False,
                               help='print statistics for the deleted archive')
        subparser.add_argument('--keep-within', dest='within', type=str, metavar='WITHIN',
                               help='keep all archives within this time interval')
        subparser.add_argument('-H', '--keep-hourly', dest='hourly', type=int, default=0,
                               help='number of hourly archives to keep')
        subparser.add_argument('-d', '--keep-daily', dest='daily', type=int, default=0,
                               help='number of daily archives to keep')
        subparser.add_argument('-w', '--keep-weekly', dest='weekly', type=int, default=0,
                               help='number of weekly archives to keep')
        subparser.add_argument('-m', '--keep-monthly', dest='monthly', type=int, default=0,
                               help='number of monthly archives to keep')
        subparser.add_argument('-y', '--keep-yearly', dest='yearly', type=int, default=0,
                               help='number of yearly archives to keep')
        subparser.add_argument('-p', '--prefix', dest='prefix', type=str,
                               help='only consider archive names starting with this prefix')
        subparser.add_argument('repository', metavar='REPOSITORY',
                               type=location_validator(archive=False),
                               help='repository to prune')

        subparser = subparsers.add_parser('help', parents=[common_parser],
                                          description='Extra help')
        subparser.add_argument('--epilog-only', dest='epilog_only',
                               action='store_true', default=False)
        subparser.add_argument('--usage-only', dest='usage_only',
                               action='store_true', default=False)
        subparser.set_defaults(func=functools.partial(self.do_help, parser, subparsers.choices))
        subparser.add_argument('topic', metavar='TOPIC', type=str, nargs='?',
                               help='additional help on TOPIC')
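
        # With no command line arguments, fall back to '-h' so that the usage
        # summary is printed instead of failing on a missing subcommand.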
        args = parser.parse_args(args or ['-h'])
        self.verbose = args.verbose
        update_excludes(args)
        return args.func(args)


def main():
    # Make sure stdout and stderr have errors='replace' to avoid unicode
    # issues when print()-ing unicode file names
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, sys.stdout.encoding, 'replace', line_buffering=True)
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, sys.stderr.encoding, 'replace', line_buffering=True)
    archiver = Archiver()
    try:
        exit_code = archiver.run(sys.argv[1:])
    except Error as e:
        archiver.print_error(e.get_message())
        exit_code = e.exit_code
    except KeyboardInterrupt:
        archiver.print_error('Error: Keyboard interrupt')
        exit_code = 1
    else:
        if exit_code:
            archiver.print_error('Exiting with failure status due to previous errors')
    sys.exit(exit_code)


if __name__ == '__main__':
    main()