2015-09-12 22:58:57 +00:00
|
|
|
from .support import argparse # see support/__init__.py docstring
|
|
|
|
# DEPRECATED - remove after requiring py 3.4
|
|
|
|
|
2013-06-03 11:45:48 +00:00
|
|
|
from binascii import hexlify
|
2011-08-12 06:49:01 +00:00
|
|
|
from datetime import datetime
|
2011-06-16 19:55:54 +00:00
|
|
|
from operator import attrgetter
|
2014-03-21 21:12:15 +00:00
|
|
|
import functools
|
2015-05-14 14:46:44 +00:00
|
|
|
import inspect
|
2014-02-25 11:33:23 +00:00
|
|
|
import io
|
2010-10-25 18:22:20 +00:00
|
|
|
import os
|
2015-05-14 14:46:44 +00:00
|
|
|
import signal
|
2010-10-30 11:44:25 +00:00
|
|
|
import stat
|
2010-10-16 09:45:36 +00:00
|
|
|
import sys
|
2014-02-26 22:13:48 +00:00
|
|
|
import textwrap
|
2015-04-01 21:12:06 +00:00
|
|
|
import traceback
|
2010-02-23 21:12:22 +00:00
|
|
|
|
2015-05-22 17:21:41 +00:00
|
|
|
from . import __version__
|
|
|
|
from .helpers import Error, location_validator, format_time, format_file_size, \
|
2015-09-19 16:16:47 +00:00
|
|
|
format_file_mode, ExcludePattern, IncludePattern, exclude_path, adjust_patterns, to_localtime, timestamp, \
|
2014-02-08 20:37:27 +00:00
|
|
|
get_cache_dir, get_keys_dir, format_timedelta, prune_within, prune_split, \
|
2014-04-30 21:27:04 +00:00
|
|
|
Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
|
2015-10-08 19:34:44 +00:00
|
|
|
is_cachedir, bigint_to_int, ChunkerParams, CompressionSpec, have_cython
|
2015-10-09 16:44:31 +00:00
|
|
|
from .logger import create_logger, setup_logging
|
|
|
|
# Module-level logger for this module (configured later via setup_logging).
logger = create_logger()

# The compiled extension modules are only importable when the Cython-built
# binaries are present (e.g. not while building docs); guard the imports.
if have_cython():
    from .compress import Compressor, COMPR_BUFFER
    from .upgrader import AtticRepositoryUpgrader
    from .repository import Repository
    from .cache import Cache
    from .key import key_creator
    from .archive import Archive, ArchiveChecker, CHUNKER_PARAMS
    from .remote import RepositoryServer, RemoteRepository

# os.lchflags only exists on some platforms (BSD/OS X); used to honour
# the UF_NODUMP flag during backup.
has_lchflags = hasattr(os, 'lchflags')
|
2011-10-29 15:01:07 +00:00
|
|
|
|
|
|
|
|
2013-06-26 19:20:31 +00:00
|
|
|
class Archiver:
|
2010-03-15 20:23:34 +00:00
|
|
|
|
2010-10-30 11:44:25 +00:00
|
|
|
    def __init__(self):
        """Initialize the archiver with a success (0) exit code."""
        # exit_code is mutated by print_error() and the do_* command methods,
        # then returned to the caller / shell.
        self.exit_code = 0
|
|
|
|
|
2014-05-31 13:39:51 +00:00
|
|
|
def open_repository(self, location, create=False, exclusive=False):
|
2010-11-17 20:28:13 +00:00
|
|
|
if location.proto == 'ssh':
|
2013-06-20 10:44:58 +00:00
|
|
|
repository = RemoteRepository(location, create=create)
|
2010-11-15 21:18:47 +00:00
|
|
|
else:
|
2014-05-31 13:39:51 +00:00
|
|
|
repository = Repository(location.path, create=create, exclusive=exclusive)
|
2013-06-20 10:44:58 +00:00
|
|
|
repository._location = location
|
|
|
|
return repository
|
2010-02-23 20:34:28 +00:00
|
|
|
|
2010-10-30 11:44:25 +00:00
|
|
|
def print_error(self, msg, *args):
|
|
|
|
msg = args and msg % args or msg
|
|
|
|
self.exit_code = 1
|
2015-10-02 14:58:08 +00:00
|
|
|
logger.error('borg: ' + msg)
|
2010-10-30 11:44:25 +00:00
|
|
|
|
|
|
|
def print_verbose(self, msg, *args, **kw):
|
convert most print() calls to logging
the logging level varies: most is logging.info(), in some place
logging.warning() or logging.error() are used when the condition is
clearly an error or warning. in other cases, we keep using print, but
force writing to sys.stderr, unless we interact with the user.
there were 77 calls to print before this commit, now there are 7, most
of which in the archiver module, which interacts directly with the
user. in one case there, we still use print() only because logging is
not setup properly yet during argument parsing.
it could be argued that commands like info or list should use print
directly, but we have converted them anyways, without ill effects on
the unit tests
unit tests still use print() in some places
this switches all informational output to stderr, which should help
with, if not fix jborg/attic#312 directly
2015-10-01 17:41:42 +00:00
|
|
|
msg = args and msg % args or msg
|
2015-10-02 14:58:08 +00:00
|
|
|
logger.info(msg)
|
2010-10-30 11:44:25 +00:00
|
|
|
|
2014-03-24 20:28:59 +00:00
|
|
|
    def do_serve(self, args):
        """Start in server mode. This command is usually not used manually.
        """
        # Serve requests on stdin/stdout; restrict_to_paths limits which
        # repository paths the client may access.
        return RepositoryServer(restrict_to_paths=args.restrict_to_paths).serve()
|
2010-11-15 21:18:47 +00:00
|
|
|
|
2011-07-30 19:13:48 +00:00
|
|
|
    def do_init(self, args):
        """Initialize an empty repository"""
        logger.info('Initializing repository at "%s"' % args.repository.orig)
        repository = self.open_repository(args.repository, create=True, exclusive=True)
        # Create the encryption key (type chosen from args), then an empty
        # manifest, and persist both before committing the repository.
        key = key_creator(repository, args)
        manifest = Manifest(key, repository)
        manifest.key = key
        manifest.write()
        repository.commit()
        # Instantiate the cache once so its directory structure is set up;
        # no "unencrypted repo" warning on a freshly created repository.
        Cache(repository, key, manifest, warn_if_unencrypted=False)
        return self.exit_code
|
2011-07-30 19:13:48 +00:00
|
|
|
|
2014-02-04 22:49:10 +00:00
|
|
|
    def do_check(self, args):
        """Check repository consistency"""
        # --repair mutates the repository, so it needs an exclusive lock.
        repository = self.open_repository(args.repository, exclusive=args.repair)
        if args.repair:
            # Require explicit confirmation unless the override env var is set.
            while not os.environ.get('BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
                self.print_error("""Warning: 'check --repair' is an experimental feature that might result
in data loss.

Type "Yes I am sure" if you understand this and want to continue.\n""")
                if input('Do you want to continue? ') == 'Yes I am sure':
                    break
        # Low-level repository (segment) check, unless only archives requested.
        if not args.archives_only:
            logger.warning('Starting repository check...')
            if repository.check(repair=args.repair):
                logger.info('Repository check complete, no problems found.')
            else:
                return 1
        # Higher-level archive metadata check, unless only the repo requested.
        if not args.repo_only and not ArchiveChecker().check(
                repository, repair=args.repair, archive=args.repository.archive, last=args.last):
            return 1
        return 0
|
2014-02-04 22:49:10 +00:00
|
|
|
|
2012-12-04 22:02:10 +00:00
|
|
|
    def do_change_passphrase(self, args):
        """Change repository key file passphrase"""
        repository = self.open_repository(args.repository)
        # Loading the manifest also loads/unlocks the key for this repository.
        manifest, key = Manifest.load(repository)
        key.change_passphrase()
        return 0
|
2011-10-27 20:17:47 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
    def do_create(self, args):
        """Create new archive"""
        dry_run = args.dry_run
        t0 = datetime.now()
        if not dry_run:
            repository = self.open_repository(args.archive, exclusive=True)
            manifest, key = Manifest.load(repository)
            # Configure the compressor from the command line spec.
            compr_args = dict(buffer=COMPR_BUFFER)
            compr_args.update(args.compression)
            key.compressor = Compressor(**compr_args)
            cache = Cache(repository, key, manifest, do_files=args.cache_files)
            archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
                              create=True, checkpoint_interval=args.checkpoint_interval,
                              numeric_owner=args.numeric_owner, progress=args.progress,
                              chunker_params=args.chunker_params, start=t0)
        else:
            # Dry run: nothing is written, so no repository/cache is opened.
            archive = cache = None
        # Add cache dir to inode_skip list
        skip_inodes = set()
        try:
            st = os.stat(get_cache_dir())
            skip_inodes.add((st.st_ino, st.st_dev))
        except IOError:
            pass
        # Add local repository dir to inode_skip list
        if not args.archive.host:
            try:
                st = os.stat(args.archive.path)
                skip_inodes.add((st.st_ino, st.st_dev))
            except IOError:
                pass
        for path in args.paths:
            if path == '-':  # stdin
                # Special case: back up data read from stdin as file 'stdin'.
                path = 'stdin'
                if not dry_run:
                    try:
                        status = archive.process_stdin(path, cache)
                    except IOError as e:
                        self.print_error('%s: %s', path, e)
                else:
                    status = '-'
                self.print_verbose("%1s %s", status, path)
                continue
            path = os.path.normpath(path)
            if args.dontcross:
                # Record the device of the starting path so recursion can
                # refuse to cross filesystem boundaries.
                try:
                    restrict_dev = os.lstat(path).st_dev
                except OSError as e:
                    self.print_error('%s: %s', path, e)
                    continue
            else:
                restrict_dev = None
            self._process(archive, cache, args.excludes, args.exclude_caches, skip_inodes, path, restrict_dev,
                          read_special=args.read_special, dry_run=dry_run)
        if not dry_run:
            archive.save(timestamp=args.timestamp)
            if args.progress:
                archive.stats.show_progress(final=True)
            if args.stats:
                archive.end = datetime.now()
                print('-' * 78)
                print(str(archive))
                print(archive.stats.print_('This archive:', cache))
                print('-' * 78)
        return self.exit_code
|
2010-02-20 21:28:46 +00:00
|
|
|
|
2015-09-05 22:29:46 +00:00
|
|
|
    def _process(self, archive, cache, excludes, exclude_caches, skip_inodes, path, restrict_dev,
                 read_special=False, dry_run=False):
        """Recursively add *path* (file, dir, symlink, device, ...) to *archive*.

        Filters by exclude patterns, skipped inodes, and (optionally) a single
        device (*restrict_dev*); dispatches on the file type from lstat.
        In dry_run mode nothing is stored, only status output is produced.
        """
        if exclude_path(path, excludes):
            return
        try:
            st = os.lstat(path)
        except OSError as e:
            self.print_error('%s: %s', path, e)
            return
        if (st.st_ino, st.st_dev) in skip_inodes:
            return
        # Entering a new filesystem?
        if restrict_dev and st.st_dev != restrict_dev:
            return
        status = None
        # Ignore if nodump flag is set
        if has_lchflags and (st.st_flags & stat.UF_NODUMP):
            return
        # Regular files are always stored; with read_special, any non-directory
        # (e.g. a block device) is read and stored like a regular file.
        if (stat.S_ISREG(st.st_mode) or
            read_special and not stat.S_ISDIR(st.st_mode)):
            if not dry_run:
                try:
                    status = archive.process_file(path, st, cache)
                except IOError as e:
                    self.print_error('%s: %s', path, e)
        elif stat.S_ISDIR(st.st_mode):
            # Skip directories marked with a CACHEDIR.TAG if requested.
            if exclude_caches and is_cachedir(path):
                return
            if not dry_run:
                status = archive.process_dir(path, st)
            try:
                entries = os.listdir(path)
            except OSError as e:
                self.print_error('%s: %s', path, e)
            else:
                # Recurse in sorted order for deterministic archive content.
                for filename in sorted(entries):
                    entry_path = os.path.normpath(os.path.join(path, filename))
                    self._process(archive, cache, excludes, exclude_caches, skip_inodes,
                                  entry_path, restrict_dev, read_special=read_special,
                                  dry_run=dry_run)
        elif stat.S_ISLNK(st.st_mode):
            if not dry_run:
                status = archive.process_symlink(path, st)
        elif stat.S_ISFIFO(st.st_mode):
            if not dry_run:
                status = archive.process_fifo(path, st)
        elif stat.S_ISCHR(st.st_mode) or stat.S_ISBLK(st.st_mode):
            if not dry_run:
                status = archive.process_dev(path, st)
        elif stat.S_ISSOCK(st.st_mode):
            # Ignore unix sockets
            return
        else:
            self.print_error('Unknown file type: %s', path)
            return
        # Status output
        # A lowercase character means a file type other than a regular file,
        # borg usually just stores them. E.g. (d)irectory.
        # Hardlinks to already seen content are indicated by (h).
        # A uppercase character means a regular file that was (A)dded,
        # (M)odified or was (U)nchanged.
        # Note: A/M/U is relative to the "files" cache, not to the repo.
        # This would be an issue if the files cache is not used.
        if status is None:
            if not dry_run:
                status = '?'  # need to add a status code somewhere
            else:
                status = '-'  # dry run, item was not backed up
        # output ALL the stuff - it can be easily filtered using grep.
        # even stuff considered unchanged might be interesting.
        self.print_verbose("%1s %s", status, remove_surrogates(path))
|
2010-11-23 11:50:09 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
    def do_extract(self, args):
        """Extract archive contents"""
        # be restrictive when restoring files, restore permissions later
        if sys.getfilesystemencoding() == 'ascii':
            logger.warning('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        archive = Archive(repository, key, manifest, args.archive.archive,
                          numeric_owner=args.numeric_owner)
        patterns = adjust_patterns(args.paths, args.excludes)
        dry_run = args.dry_run
        stdout = args.stdout
        sparse = args.sparse
        strip_components = args.strip_components
        # Stack of extracted-but-not-finalized directories; their attributes
        # (permissions, times) are restored only after their contents exist.
        dirs = []
        for item in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns), preload=True):
            orig_path = item[b'path']
            if strip_components:
                # Drop the leading path components; skip items whose whole
                # path was stripped away.
                item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
                if not item[b'path']:
                    continue
            if not args.dry_run:
                # Leaving a directory's subtree: finalize its attributes now.
                while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
                    archive.extract_item(dirs.pop(-1), stdout=stdout)
            self.print_verbose(remove_surrogates(orig_path))
            try:
                if dry_run:
                    archive.extract_item(item, dry_run=True)
                else:
                    if stat.S_ISDIR(item[b'mode']):
                        # Defer attribute restoration until the dir is complete.
                        dirs.append(item)
                        archive.extract_item(item, restore_attrs=False)
                    else:
                        archive.extract_item(item, stdout=stdout, sparse=sparse)
            except IOError as e:
                self.print_error('%s: %s', remove_surrogates(orig_path), e)

        if not args.dry_run:
            # Finalize any directories still pending attribute restoration.
            while dirs:
                archive.extract_item(dirs.pop(-1))
        # Warn about include patterns that matched nothing at all.
        for pattern in (patterns or []):
            if isinstance(pattern, IncludePattern) and pattern.match_count == 0:
                self.print_error("Warning: Include pattern '%s' never matched.", pattern)
        return self.exit_code
|
2010-02-24 22:24:19 +00:00
|
|
|
|
2015-03-24 06:11:00 +00:00
|
|
|
    def do_rename(self, args):
        """Rename an existing archive"""
        # Renaming rewrites archive metadata, so lock the repo exclusively.
        repository = self.open_repository(args.archive, exclusive=True)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
        archive.rename(args.name)
        # Persist the change in manifest, repository and cache.
        manifest.write()
        repository.commit()
        cache.commit()
        return self.exit_code
|
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
    def do_delete(self, args):
        """Delete an existing repository or archive"""
        repository = self.open_repository(args.target, exclusive=True)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest, do_files=args.cache_files)
        if args.target.archive:
            # An archive name was given: delete just that archive.
            archive = Archive(repository, key, manifest, args.target.archive, cache=cache)
            stats = Statistics()
            archive.delete(stats)
            manifest.write()
            repository.commit()
            cache.commit()
            if args.stats:
                logger.info(stats.print_('Deleted data:', cache))
        else:
            # No archive name: delete the whole repository (and its cache).
            if not args.cache_only:
                print("You requested to completely DELETE the repository *including* all archives it contains:", file=sys.stderr)
                for archive_info in manifest.list_archive_infos(sort_by='ts'):
                    print(format_archive(archive_info), file=sys.stderr)
                # The env var skips the interactive confirmation (for tests/automation).
                if not os.environ.get('BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
                    print("""Type "YES" if you understand this and want to continue.\n""", file=sys.stderr)
                    # XXX: prompt may end up on stdout, but we'll assume that input() does the right thing
                    if input('Do you want to continue? ') != 'YES':
                        self.exit_code = 1
                        return self.exit_code
                repository.destroy()
                logger.info("Repository deleted.")
            cache.destroy()
            logger.info("Cache deleted.")
        return self.exit_code
|
2010-02-24 22:24:19 +00:00
|
|
|
|
2013-07-21 22:41:06 +00:00
|
|
|
    def do_mount(self, args):
        """Mount archive or an entire repository as a FUSE fileystem"""
        # FUSE support is optional; import lazily and fail gracefully.
        try:
            from .fuse import FuseOperations
        except ImportError as e:
            self.print_error('loading fuse support failed [ImportError: %s]' % str(e))
            return self.exit_code

        if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
            self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
            return self.exit_code

        repository = self.open_repository(args.src)
        manifest, key = Manifest.load(repository)
        # With an archive name, mount just that archive; otherwise mount the
        # whole repository (archive=None).
        if args.src.archive:
            archive = Archive(repository, key, manifest, args.src.archive)
        else:
            archive = None
        operations = FuseOperations(key, repository, manifest, archive)
        self.print_verbose("Mounting filesystem")
        try:
            operations.mount(args.mountpoint, args.options, args.foreground)
        except RuntimeError:
            # Relevant error message already printed to stderr by fuse
            self.exit_code = 1
        return self.exit_code
|
2013-07-21 22:41:06 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
def do_list(self, args):
|
2014-04-06 20:47:22 +00:00
|
|
|
"""List archive or repository contents"""
|
2013-06-20 10:44:58 +00:00
|
|
|
repository = self.open_repository(args.src)
|
|
|
|
manifest, key = Manifest.load(repository)
|
2010-10-15 18:35:49 +00:00
|
|
|
if args.src.archive:
|
2013-06-20 10:44:58 +00:00
|
|
|
archive = Archive(repository, key, manifest, args.src.archive)
|
2015-08-15 18:52:14 +00:00
|
|
|
if args.short:
|
|
|
|
for item in archive.iter_items():
|
2015-10-02 15:13:01 +00:00
|
|
|
print(remove_surrogates(item[b'path']))
|
2015-08-15 18:52:14 +00:00
|
|
|
else:
|
|
|
|
tmap = {1: 'p', 2: 'c', 4: 'd', 6: 'b', 0o10: '-', 0o12: 'l', 0o14: 's'}
|
|
|
|
for item in archive.iter_items():
|
|
|
|
type = tmap.get(item[b'mode'] // 4096, '?')
|
|
|
|
mode = format_file_mode(item[b'mode'])
|
|
|
|
size = 0
|
|
|
|
if type == '-':
|
|
|
|
try:
|
|
|
|
size = sum(size for _, size, _ in item[b'chunks'])
|
|
|
|
except KeyError:
|
|
|
|
pass
|
2012-10-17 09:40:23 +00:00
|
|
|
try:
|
2015-08-15 18:52:14 +00:00
|
|
|
mtime = datetime.fromtimestamp(bigint_to_int(item[b'mtime']) / 1e9)
|
|
|
|
except ValueError:
|
|
|
|
# likely a broken mtime and datetime did not want to go beyond year 9999
|
|
|
|
mtime = datetime(9999, 12, 31, 23, 59, 59)
|
|
|
|
if b'source' in item:
|
|
|
|
if type == 'l':
|
|
|
|
extra = ' -> %s' % item[b'source']
|
|
|
|
else:
|
|
|
|
type = 'h'
|
|
|
|
extra = ' link to %s' % item[b'source']
|
2012-10-17 09:40:23 +00:00
|
|
|
else:
|
2015-08-15 18:52:14 +00:00
|
|
|
extra = ''
|
2015-10-01 18:20:29 +00:00
|
|
|
print('%s%s %-6s %-6s %8d %s %s%s' % (
|
2015-08-15 18:52:14 +00:00
|
|
|
type, mode, item[b'user'] or item[b'uid'],
|
|
|
|
item[b'group'] or item[b'gid'], size, format_time(mtime),
|
2015-10-02 15:13:01 +00:00
|
|
|
remove_surrogates(item[b'path']), extra))
|
2010-10-15 18:35:49 +00:00
|
|
|
else:
|
2015-05-26 00:04:41 +00:00
|
|
|
for archive_info in manifest.list_archive_infos(sort_by='ts'):
|
2015-10-02 15:13:01 +00:00
|
|
|
print(format_archive(archive_info))
|
2010-10-30 11:44:25 +00:00
|
|
|
return self.exit_code
|
2010-02-24 22:24:19 +00:00
|
|
|
|
2010-10-15 18:35:49 +00:00
|
|
|
    def do_info(self, args):
        """Show archive details such as disk space used"""
        repository = self.open_repository(args.archive)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest, do_files=args.cache_files)
        archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
        # Walk the archive to compute file count and size statistics.
        stats = archive.calc_stats(cache)
        print('Name:', archive.name)
        print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
        print('Hostname:', archive.metadata[b'hostname'])
        print('Username:', archive.metadata[b'username'])
        print('Time: %s' % to_localtime(archive.ts).strftime('%c'))
        print('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
        print('Number of files: %d' % stats.nfiles)
        print(stats.print_('This archive:', cache))
        return self.exit_code
|
2010-04-18 20:34:21 +00:00
|
|
|
|
2011-11-22 20:47:17 +00:00
|
|
|
    def do_prune(self, args):
        """Prune repository archives according to specified rules"""
        repository = self.open_repository(args.repository, exclusive=True)
        manifest, key = Manifest.load(repository)
        cache = Cache(repository, key, manifest, do_files=args.cache_files)
        archives = manifest.list_archive_infos(sort_by='ts', reverse=True)  # just a ArchiveInfo list
        # Refuse to run with no retention rule at all - that would delete everything.
        if args.hourly + args.daily + args.weekly + args.monthly + args.yearly == 0 and args.within is None:
            self.print_error('At least one of the "within", "keep-hourly", "keep-daily", "keep-weekly", '
                             '"keep-monthly" or "keep-yearly" settings must be specified')
            return 1
        if args.prefix:
            archives = [archive for archive in archives if archive.name.startswith(args.prefix)]
        # Collect the set of archives to keep; each prune_split() call is
        # passed the already-kept list so the rules don't double-count.
        keep = []
        if args.within:
            keep += prune_within(archives, args.within)
        if args.hourly:
            keep += prune_split(archives, '%Y-%m-%d %H', args.hourly, keep)
        if args.daily:
            keep += prune_split(archives, '%Y-%m-%d', args.daily, keep)
        if args.weekly:
            # %G-%V is the ISO year/week pair (consistent around year ends).
            keep += prune_split(archives, '%G-%V', args.weekly, keep)
        if args.monthly:
            keep += prune_split(archives, '%Y-%m', args.monthly, keep)
        if args.yearly:
            keep += prune_split(archives, '%Y', args.yearly, keep)

        keep.sort(key=attrgetter('ts'), reverse=True)
        to_delete = [a for a in archives if a not in keep]
        stats = Statistics()
        for archive in keep:
            self.print_verbose('Keeping archive: %s' % format_archive(archive))
        for archive in to_delete:
            if args.dry_run:
                self.print_verbose('Would prune: %s' % format_archive(archive))
            else:
                self.print_verbose('Pruning archive: %s' % format_archive(archive))
                Archive(repository, key, manifest, archive.name, cache).delete(stats)
        # Only commit when something was actually deleted.
        if to_delete and not args.dry_run:
            manifest.write()
            repository.commit()
            cache.commit()
        if args.stats:
            logger.info(stats.print_('Deleted data:', cache))
        return self.exit_code
|
|
|
|
|
2015-10-03 16:36:52 +00:00
|
|
|
def do_upgrade(self, args):
|
|
|
|
"""upgrade a repository from a previous version"""
|
|
|
|
# XXX: currently only upgrades from Attic repositories, but may
|
|
|
|
# eventually be extended to deal with major upgrades for borg
|
|
|
|
# itself.
|
|
|
|
#
|
|
|
|
# in this case, it should auto-detect the current repository
|
|
|
|
# format and fire up necessary upgrade mechanism. this remains
|
|
|
|
# to be implemented.
|
|
|
|
|
|
|
|
# XXX: should auto-detect if it is an attic repository here
|
|
|
|
repo = AtticRepositoryUpgrader(args.repository.path, create=False)
|
2015-10-01 03:50:46 +00:00
|
|
|
try:
|
do not upgrade repositories in place by default
instead, we perform the equivalent of `cp -al` on the repository to
keep a backup, and then rewrite the files, breaking the hardlinks as
necessary.
it has to be confirmed that the rest of Borg will also break hardlinks
when operating on files in the repository. if Borg operates in place
on any files of the repository, it could jeoperdize the backup, so
this needs to be verified. I believe that most files are written to a
temporary file and moved into place, however, so the backup should be
safe.
the rationale behind the backup copy is that we want to be extra
careful with user's data by default. the old behavior is retained
through the `--inplace`/`-i` commandline flag. plus, this way we don't
need to tell users to go through extra steps (`cp -a`, in particular)
before running the command.
also, it can take a long time to do the copy of the attic repository
we wish to work on. since `cp -a` doesn't provide progress
information, the new default behavior provides a nicer user experience
of giving an overall impression of the upgrade progress, while
retaining compatibility with Attic by default (in a separate
repository, of course).
this makes the upgrade command much less scary to use and hopefully
will convert drones to the borg collective.
the only place where the default inplace behavior is retained is in
the header_replace() function, to avoid breaking the cache conversion
code and to keep API stability and semantic coherence ("replace" by
defaults means in place).
2015-10-15 22:02:24 +00:00
|
|
|
repo.upgrade(args.dry_run, inplace=args.inplace)
|
2015-10-01 12:46:30 +00:00
|
|
|
except NotImplementedError as e:
|
2015-10-01 03:50:46 +00:00
|
|
|
print("warning: %s" % e)
|
|
|
|
return self.exit_code
|
|
|
|
|
2014-02-08 14:44:31 +00:00
|
|
|
    # Free-form help topics shown by "borg help <topic>" (see do_help()).
    helptext = {}
    helptext['patterns'] = '''
        Exclude patterns use a variant of shell pattern syntax, with '*' matching any
        number of characters, '?' matching any single character, '[...]' matching any
        single character specified, including ranges, and '[!...]' matching any
        character not specified. For the purpose of these patterns, the path
        separator ('\\' for Windows and '/' on other systems) is not treated
        specially. For a path to match a pattern, it must completely match from
        start to end, or must match from the start to just before a path separator.
        Except for the root path, paths will never end in the path separator when
        matching is attempted. Thus, if a given pattern ends in a path separator, a
        '*' is appended before matching is attempted. Patterns with wildcards should
        be quoted to protect them from shell expansion.

        Examples:

        # Exclude '/home/user/file.o' but not '/home/user/file.odt':
        $ borg create -e '*.o' backup /

        # Exclude '/home/user/junk' and '/home/user/subdir/junk' but
        # not '/home/user/importantjunk' or '/etc/junk':
        $ borg create -e '/home/*/junk' backup /

        # Exclude the contents of '/home/user/cache' but not the directory itself:
        $ borg create -e /home/user/cache/ backup /

        # The file '/home/user/cache/important' is *not* backed up:
        $ borg create -e /home/user/cache/ backup / /home/user/cache/important
        '''
|
|
|
|
|
2014-03-21 21:12:15 +00:00
|
|
|
def do_help(self, parser, commands, args):
    """Show help: general (no topic), a named help topic, or a subcommand's help.

    :param parser: the top-level ArgumentParser (used for general help / errors)
    :param commands: mapping of command name -> its sub-ArgumentParser
    :param args: parsed namespace; reads .topic, .epilog_only, .usage_only
    :return: self.exit_code
    """
    topic = args.topic
    if not topic:
        # no topic given: show the overall usage
        parser.print_help()
    elif topic in self.helptext:
        # free-form help topics (e.g. "patterns") take precedence over commands
        print(self.helptext[topic])
    elif topic in commands:
        subparser = commands[topic]
        if args.epilog_only:
            print(subparser.epilog)
        else:
            if args.usage_only:
                # suppress the epilog so only usage/options are printed
                subparser.epilog = None
            subparser.print_help()
    else:
        parser.error('No help available on %s' % (topic,))
    return self.exit_code
|
2014-02-08 14:44:31 +00:00
|
|
|
|
2014-02-19 21:46:15 +00:00
|
|
|
def preprocess_args(self, args):
    """Rewrite deprecated commands/options in the raw argv-style list.

    Emits a deprecation warning for each rewrite. The list is modified in
    place for option renames (callers holding a reference see the change);
    the deprecated "verify" command is mapped to "extract --dry-run".

    :param args: list of command line arguments (without the program name)
    :return: the (possibly rewritten) argument list
    """
    deprecations = [
        ('--hourly', '--keep-hourly', 'Warning: "--hourly" has been deprecated. Use "--keep-hourly" instead.'),
        ('--daily', '--keep-daily', 'Warning: "--daily" has been deprecated. Use "--keep-daily" instead.'),
        ('--weekly', '--keep-weekly', 'Warning: "--weekly" has been deprecated. Use "--keep-weekly" instead.'),
        ('--monthly', '--keep-monthly', 'Warning: "--monthly" has been deprecated. Use "--keep-monthly" instead.'),
        ('--yearly', '--keep-yearly', 'Warning: "--yearly" has been deprecated. Use "--keep-yearly" instead.')
    ]
    if args and args[0] == 'verify':
        print('Warning: "borg verify" has been deprecated. Use "borg extract --dry-run" instead.')
        args = ['extract', '--dry-run'] + args[1:]
    # iterate over a snapshot so in-place rewrites don't disturb the walk
    for position, original in enumerate(list(args)):
        for old_name, new_name, warning in deprecations:
            if original.startswith(old_name):
                args[position] = original.replace(old_name, new_name)
                print(warning)
    return args
|
|
|
|
|
2015-10-08 01:07:12 +00:00
|
|
|
def build_parser(self, args=None, prog=None):
|
|
|
|
common_parser = argparse.ArgumentParser(add_help=False, prog=prog)
|
2015-10-01 18:20:29 +00:00
|
|
|
common_parser.add_argument('-v', '--verbose', dest='verbose', action='count',
|
|
|
|
help='verbose output, defaults to warnings only')
|
2015-08-08 18:50:21 +00:00
|
|
|
common_parser.add_argument('--no-files-cache', dest='cache_files', action='store_false',
|
|
|
|
help='do not load/update the file metadata cache used to detect unchanged files')
|
2015-10-05 22:43:54 +00:00
|
|
|
common_parser.add_argument('--umask', dest='umask', type=lambda s: int(s, 8), default=RemoteRepository.umask, metavar='M',
|
|
|
|
help='set umask to M (local and remote, default: %(default)s)')
|
|
|
|
common_parser.add_argument('--remote-path', dest='remote_path', default=RemoteRepository.remote_path, metavar='PATH',
|
|
|
|
help='set remote path to executable (default: "%(default)s")')
|
2010-10-15 18:35:49 +00:00
|
|
|
|
2015-10-08 01:07:12 +00:00
|
|
|
parser = argparse.ArgumentParser(prog=prog, description='Borg %s - Deduplicated Backups' % __version__)
|
2013-08-04 11:43:35 +00:00
|
|
|
subparsers = parser.add_subparsers(title='Available commands')
|
2010-10-23 21:01:12 +00:00
|
|
|
|
2015-06-11 20:18:12 +00:00
|
|
|
serve_epilog = textwrap.dedent("""
|
|
|
|
This command starts a repository server process. This command is usually not used manually.
|
|
|
|
""")
|
2014-03-24 20:28:59 +00:00
|
|
|
subparser = subparsers.add_parser('serve', parents=[common_parser],
|
2015-06-11 20:18:12 +00:00
|
|
|
description=self.do_serve.__doc__, epilog=serve_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2014-03-24 20:28:59 +00:00
|
|
|
subparser.set_defaults(func=self.do_serve)
|
|
|
|
subparser.add_argument('--restrict-to-path', dest='restrict_to_paths', action='append',
|
|
|
|
metavar='PATH', help='restrict repository access to PATH')
|
2014-04-06 13:16:25 +00:00
|
|
|
init_epilog = textwrap.dedent("""
|
|
|
|
This command initializes an empty repository. A repository is a filesystem
|
|
|
|
directory containing the deduplicated data from zero or more archives.
|
|
|
|
Encryption can be enabled at repository init time.
|
2015-08-09 11:47:36 +00:00
|
|
|
Please note that the 'passphrase' encryption mode is DEPRECATED (instead of it,
|
|
|
|
consider using 'repokey').
|
2014-04-06 13:16:25 +00:00
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('init', parents=[common_parser],
|
2014-04-06 13:16:25 +00:00
|
|
|
description=self.do_init.__doc__, epilog=init_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2011-07-30 19:13:48 +00:00
|
|
|
subparser.set_defaults(func=self.do_init)
|
2015-09-06 18:19:28 +00:00
|
|
|
subparser.add_argument('repository', metavar='REPOSITORY', nargs='?', default='',
|
2011-07-30 19:13:48 +00:00
|
|
|
type=location_validator(archive=False),
|
2013-07-05 10:32:56 +00:00
|
|
|
help='repository to create')
|
2013-08-10 11:02:20 +00:00
|
|
|
subparser.add_argument('-e', '--encryption', dest='encryption',
|
2015-08-09 11:47:36 +00:00
|
|
|
choices=('none', 'keyfile', 'repokey', 'passphrase'), default='none',
|
|
|
|
help='select encryption key mode')
|
2011-07-30 19:13:48 +00:00
|
|
|
|
2014-02-26 22:13:48 +00:00
|
|
|
check_epilog = textwrap.dedent("""
|
2015-08-08 22:36:17 +00:00
|
|
|
The check command verifies the consistency of a repository and the corresponding archives.
|
|
|
|
|
|
|
|
First, the underlying repository data files are checked:
|
2015-08-29 02:00:22 +00:00
|
|
|
|
2015-08-08 22:36:17 +00:00
|
|
|
- For all segments the segment magic (header) is checked
|
|
|
|
- For all objects stored in the segments, all metadata (e.g. crc and size) and
|
|
|
|
all data is read. The read data is checked by size and CRC. Bit rot and other
|
|
|
|
types of accidental damage can be detected this way.
|
|
|
|
- If we are in repair mode and a integrity error is detected for a segment,
|
|
|
|
we try to recover as many objects from the segment as possible.
|
|
|
|
- In repair mode, it makes sure that the index is consistent with the data
|
|
|
|
stored in the segments.
|
|
|
|
- If you use a remote repo server via ssh:, the repo check is executed on the
|
|
|
|
repo server without causing significant network traffic.
|
|
|
|
- The repository check can be skipped using the --archives-only option.
|
|
|
|
|
|
|
|
Second, the consistency and correctness of the archive metadata is verified:
|
2015-08-29 02:00:22 +00:00
|
|
|
|
2015-08-08 22:36:17 +00:00
|
|
|
- Is the repo manifest present? If not, it is rebuilt from archive metadata
|
2015-08-09 10:52:39 +00:00
|
|
|
chunks (this requires reading and decrypting of all metadata and data).
|
2015-08-08 22:36:17 +00:00
|
|
|
- Check if archive metadata chunk is present. if not, remove archive from
|
|
|
|
manifest.
|
|
|
|
- For all files (items) in the archive, for all chunks referenced by these
|
|
|
|
files, check if chunk is present (if not and we are in repair mode, replace
|
2015-08-09 10:52:39 +00:00
|
|
|
it with a same-size chunk of zeros). This requires reading of archive and
|
|
|
|
file metadata, but not data.
|
2015-08-08 22:36:17 +00:00
|
|
|
- If we are in repair mode and we checked all the archives: delete orphaned
|
2015-08-09 10:52:39 +00:00
|
|
|
chunks from the repo.
|
2015-08-08 22:36:17 +00:00
|
|
|
- if you use a remote repo server via ssh:, the archive check is executed on
|
|
|
|
the client machine (because if encryption is enabled, the checks will require
|
|
|
|
decryption and this is always done client-side, because key access will be
|
2015-08-09 10:52:39 +00:00
|
|
|
required).
|
2015-08-08 22:36:17 +00:00
|
|
|
- The archive checks can be time consuming, they can be skipped using the
|
|
|
|
--repository-only option.
|
2014-02-26 22:13:48 +00:00
|
|
|
""")
|
2014-02-04 22:49:10 +00:00
|
|
|
subparser = subparsers.add_parser('check', parents=[common_parser],
|
|
|
|
description=self.do_check.__doc__,
|
2014-02-26 22:13:48 +00:00
|
|
|
epilog=check_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2014-02-04 22:49:10 +00:00
|
|
|
subparser.set_defaults(func=self.do_check)
|
2015-09-06 18:19:28 +00:00
|
|
|
subparser.add_argument('repository', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
|
2015-08-08 20:11:40 +00:00
|
|
|
type=location_validator(),
|
|
|
|
help='repository or archive to check consistency of')
|
2014-03-04 20:56:37 +00:00
|
|
|
subparser.add_argument('--repository-only', dest='repo_only', action='store_true',
|
|
|
|
default=False,
|
|
|
|
help='only perform repository checks')
|
|
|
|
subparser.add_argument('--archives-only', dest='archives_only', action='store_true',
|
|
|
|
default=False,
|
|
|
|
help='only perform archives checks')
|
2014-02-08 23:17:32 +00:00
|
|
|
subparser.add_argument('--repair', dest='repair', action='store_true',
|
|
|
|
default=False,
|
2014-02-26 22:13:48 +00:00
|
|
|
help='attempt to repair any inconsistencies found')
|
2015-03-11 02:04:12 +00:00
|
|
|
subparser.add_argument('--last', dest='last',
|
|
|
|
type=int, default=None, metavar='N',
|
|
|
|
help='only check last N archives (Default: all)')
|
2014-02-04 22:49:10 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
change_passphrase_epilog = textwrap.dedent("""
|
|
|
|
The key files used for repository encryption are optionally passphrase
|
|
|
|
protected. This command can be used to change this passphrase.
|
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('change-passphrase', parents=[common_parser],
|
2014-04-06 13:16:25 +00:00
|
|
|
description=self.do_change_passphrase.__doc__,
|
|
|
|
epilog=change_passphrase_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2012-12-04 22:02:10 +00:00
|
|
|
subparser.set_defaults(func=self.do_change_passphrase)
|
2015-09-06 18:19:28 +00:00
|
|
|
subparser.add_argument('repository', metavar='REPOSITORY', nargs='?', default='',
|
2013-07-31 18:51:01 +00:00
|
|
|
type=location_validator(archive=False))
|
2011-10-27 20:17:47 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
create_epilog = textwrap.dedent("""
|
|
|
|
This command creates a backup archive containing all files found while recursively
|
|
|
|
traversing all paths specified. The archive will consume almost no disk space for
|
|
|
|
files or parts of files that have already been stored in other archives.
|
|
|
|
|
2015-06-28 12:02:38 +00:00
|
|
|
See the output of the "borg help patterns" command for more help on exclude patterns.
|
2014-04-06 13:16:25 +00:00
|
|
|
""")
|
2014-02-08 14:44:31 +00:00
|
|
|
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('create', parents=[common_parser],
|
2014-02-08 14:44:31 +00:00
|
|
|
description=self.do_create.__doc__,
|
2014-04-06 13:16:25 +00:00
|
|
|
epilog=create_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.set_defaults(func=self.do_create)
|
2011-08-07 15:10:21 +00:00
|
|
|
subparser.add_argument('-s', '--stats', dest='stats',
|
|
|
|
action='store_true', default=False,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='print statistics for the created archive')
|
2015-03-24 03:24:54 +00:00
|
|
|
subparser.add_argument('-p', '--progress', dest='progress',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='print progress while creating the archive')
|
2013-06-30 20:32:27 +00:00
|
|
|
subparser.add_argument('-e', '--exclude', dest='excludes',
|
2010-11-02 21:47:39 +00:00
|
|
|
type=ExcludePattern, action='append',
|
2013-07-05 10:32:56 +00:00
|
|
|
metavar="PATTERN", help='exclude paths matching PATTERN')
|
2014-02-08 17:44:48 +00:00
|
|
|
subparser.add_argument('--exclude-from', dest='exclude_files',
|
|
|
|
type=argparse.FileType('r'), action='append',
|
|
|
|
metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
|
2014-04-30 21:27:04 +00:00
|
|
|
subparser.add_argument('--exclude-caches', dest='exclude_caches',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='exclude directories that contain a CACHEDIR.TAG file (http://www.brynosaurus.com/cachedir/spec.html)')
|
2011-09-10 15:19:02 +00:00
|
|
|
subparser.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
|
|
|
|
type=int, default=300, metavar='SECONDS',
|
2013-07-31 18:51:01 +00:00
|
|
|
help='write checkpoint every SECONDS seconds (Default: 300)')
|
2012-02-04 16:32:46 +00:00
|
|
|
subparser.add_argument('--do-not-cross-mountpoints', dest='dontcross',
|
|
|
|
action='store_true', default=False,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='do not cross mount points')
|
2012-02-29 22:59:17 +00:00
|
|
|
subparser.add_argument('--numeric-owner', dest='numeric_owner',
|
|
|
|
action='store_true', default=False,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='only store numeric user and group identifiers')
|
2015-04-18 19:36:10 +00:00
|
|
|
subparser.add_argument('--timestamp', dest='timestamp',
|
|
|
|
type=timestamp, default=None,
|
|
|
|
metavar='yyyy-mm-ddThh:mm:ss',
|
|
|
|
help='manually specify the archive creation date/time (UTC). '
|
|
|
|
'alternatively, give a reference file/directory.')
|
2015-06-19 23:20:46 +00:00
|
|
|
subparser.add_argument('--chunker-params', dest='chunker_params',
|
|
|
|
type=ChunkerParams, default=CHUNKER_PARAMS,
|
2015-06-20 23:46:41 +00:00
|
|
|
metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
|
|
|
|
help='specify the chunker parameters. default: %d,%d,%d,%d' % CHUNKER_PARAMS)
|
2015-06-25 20:16:23 +00:00
|
|
|
subparser.add_argument('-C', '--compression', dest='compression',
|
2015-08-14 21:00:04 +00:00
|
|
|
type=CompressionSpec, default=dict(name='none'), metavar='COMPRESSION',
|
|
|
|
help='select compression algorithm (and level): '
|
|
|
|
'none == no compression (default), '
|
|
|
|
'lz4 == lz4, '
|
2015-08-02 16:10:30 +00:00
|
|
|
'zlib == zlib (default level 6), '
|
|
|
|
'zlib,0 .. zlib,9 == zlib (with level 0..9), '
|
2015-08-14 21:00:04 +00:00
|
|
|
'lzma == lzma (default level 6), '
|
2015-08-02 22:31:33 +00:00
|
|
|
'lzma,0 .. lzma,9 == lzma (with level 0..9).')
|
2015-09-05 22:29:46 +00:00
|
|
|
subparser.add_argument('--read-special', dest='read_special',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='open and read special files as if they were regular files')
|
2015-09-08 01:12:45 +00:00
|
|
|
subparser.add_argument('-n', '--dry-run', dest='dry_run',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='do not create a backup archive')
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
2015-09-19 14:09:20 +00:00
|
|
|
help='name of archive to create (must be also a valid directory name)')
|
2013-06-30 20:32:27 +00:00
|
|
|
subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='paths to archive')
|
2010-10-15 18:35:49 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
extract_epilog = textwrap.dedent("""
|
|
|
|
This command extracts the contents of an archive. By default the entire
|
|
|
|
archive is extracted but a subset of files and directories can be selected
|
|
|
|
by passing a list of ``PATHs`` as arguments. The file selection can further
|
|
|
|
be restricted by using the ``--exclude`` option.
|
2014-02-08 14:44:31 +00:00
|
|
|
|
2015-06-28 12:02:38 +00:00
|
|
|
See the output of the "borg help patterns" command for more help on exclude patterns.
|
2014-04-06 13:16:25 +00:00
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('extract', parents=[common_parser],
|
2014-02-08 14:44:31 +00:00
|
|
|
description=self.do_extract.__doc__,
|
2014-04-06 13:16:25 +00:00
|
|
|
epilog=extract_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.set_defaults(func=self.do_extract)
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('-n', '--dry-run', dest='dry_run',
|
2014-02-18 20:33:06 +00:00
|
|
|
default=False, action='store_true',
|
|
|
|
help='do not actually change any files')
|
2013-06-30 20:32:27 +00:00
|
|
|
subparser.add_argument('-e', '--exclude', dest='excludes',
|
2010-11-02 21:47:39 +00:00
|
|
|
type=ExcludePattern, action='append',
|
2013-07-05 10:32:56 +00:00
|
|
|
metavar="PATTERN", help='exclude paths matching PATTERN')
|
2014-02-08 17:44:48 +00:00
|
|
|
subparser.add_argument('--exclude-from', dest='exclude_files',
|
|
|
|
type=argparse.FileType('r'), action='append',
|
|
|
|
metavar='EXCLUDEFILE', help='read exclude patterns from EXCLUDEFILE, one per line')
|
2012-02-29 22:59:17 +00:00
|
|
|
subparser.add_argument('--numeric-owner', dest='numeric_owner',
|
|
|
|
action='store_true', default=False,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='only obey numeric user and group identifiers')
|
2014-08-02 20:15:21 +00:00
|
|
|
subparser.add_argument('--strip-components', dest='strip_components',
|
|
|
|
type=int, default=0, metavar='NUMBER',
|
|
|
|
help='Remove the specified number of leading path elements. Pathnames with fewer elements will be silently skipped.')
|
2015-03-01 04:07:29 +00:00
|
|
|
subparser.add_argument('--stdout', dest='stdout',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='write all extracted data to stdout')
|
2015-04-17 20:28:40 +00:00
|
|
|
subparser.add_argument('--sparse', dest='sparse',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='create holes in output sparse file from all-zero chunks')
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
2013-07-05 10:32:56 +00:00
|
|
|
help='archive to extract')
|
2013-06-30 20:32:27 +00:00
|
|
|
subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='paths to extract')
|
2010-10-15 18:35:49 +00:00
|
|
|
|
2015-03-24 06:11:00 +00:00
|
|
|
rename_epilog = textwrap.dedent("""
|
|
|
|
This command renames an archive in the repository.
|
|
|
|
""")
|
|
|
|
subparser = subparsers.add_parser('rename', parents=[common_parser],
|
|
|
|
description=self.do_rename.__doc__,
|
|
|
|
epilog=rename_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
|
|
|
subparser.set_defaults(func=self.do_rename)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
|
|
|
help='archive to rename')
|
|
|
|
subparser.add_argument('name', metavar='NEWNAME', type=str,
|
|
|
|
help='the new archive name to use')
|
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
delete_epilog = textwrap.dedent("""
|
2015-03-09 15:02:06 +00:00
|
|
|
This command deletes an archive from the repository or the complete repository.
|
|
|
|
Disk space is reclaimed accordingly. If you delete the complete repository, the
|
|
|
|
local cache for it (if any) is also deleted.
|
2014-04-06 13:16:25 +00:00
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('delete', parents=[common_parser],
|
2014-04-06 13:16:25 +00:00
|
|
|
description=self.do_delete.__doc__,
|
|
|
|
epilog=delete_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.set_defaults(func=self.do_delete)
|
2014-03-19 21:32:07 +00:00
|
|
|
subparser.add_argument('-s', '--stats', dest='stats',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='print statistics for the deleted archive')
|
2015-10-03 17:29:45 +00:00
|
|
|
subparser.add_argument('-c', '--cache-only', dest='cache_only',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='delete only the local cache for the given repository')
|
2015-09-06 18:19:28 +00:00
|
|
|
subparser.add_argument('target', metavar='TARGET', nargs='?', default='',
|
2015-03-09 15:02:06 +00:00
|
|
|
type=location_validator(),
|
|
|
|
help='archive or repository to delete')
|
2010-10-15 18:35:49 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
list_epilog = textwrap.dedent("""
|
|
|
|
This command lists the contents of a repository or an archive.
|
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('list', parents=[common_parser],
|
2014-04-06 13:16:25 +00:00
|
|
|
description=self.do_list.__doc__,
|
|
|
|
epilog=list_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.set_defaults(func=self.do_list)
|
2015-08-15 18:52:14 +00:00
|
|
|
subparser.add_argument('--short', dest='short',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='only print file/directory names, nothing else')
|
2015-09-06 18:19:28 +00:00
|
|
|
subparser.add_argument('src', metavar='REPOSITORY_OR_ARCHIVE', nargs='?', default='',
|
2015-09-06 16:18:24 +00:00
|
|
|
type=location_validator(),
|
2013-07-24 11:23:51 +00:00
|
|
|
help='repository/archive to list contents of')
|
2014-04-06 13:16:25 +00:00
|
|
|
mount_epilog = textwrap.dedent("""
|
|
|
|
This command mounts an archive as a FUSE filesystem. This can be useful for
|
|
|
|
browsing an archive or restoring individual files. Unless the ``--foreground``
|
|
|
|
option is given the command will run in the background until the filesystem
|
|
|
|
is ``umounted``.
|
|
|
|
""")
|
2013-07-21 22:41:06 +00:00
|
|
|
subparser = subparsers.add_parser('mount', parents=[common_parser],
|
2014-04-06 13:16:25 +00:00
|
|
|
description=self.do_mount.__doc__,
|
|
|
|
epilog=mount_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2013-07-21 22:41:06 +00:00
|
|
|
subparser.set_defaults(func=self.do_mount)
|
2014-03-26 20:47:01 +00:00
|
|
|
subparser.add_argument('src', metavar='REPOSITORY_OR_ARCHIVE', type=location_validator(),
|
|
|
|
help='repository/archive to mount')
|
2013-07-24 11:23:51 +00:00
|
|
|
subparser.add_argument('mountpoint', metavar='MOUNTPOINT', type=str,
|
2013-07-24 11:05:47 +00:00
|
|
|
help='where to mount filesystem')
|
2013-07-24 11:23:51 +00:00
|
|
|
subparser.add_argument('-f', '--foreground', dest='foreground',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='stay in foreground, do not daemonize')
|
2013-07-27 12:44:12 +00:00
|
|
|
subparser.add_argument('-o', dest='options', type=str,
|
|
|
|
help='Extra mount options')
|
2013-07-21 22:41:06 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
info_epilog = textwrap.dedent("""
|
|
|
|
This command displays some detailed information about the specified archive.
|
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('info', parents=[common_parser],
|
2014-04-06 13:16:25 +00:00
|
|
|
description=self.do_info.__doc__,
|
|
|
|
epilog=info_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2010-10-15 18:35:49 +00:00
|
|
|
subparser.set_defaults(func=self.do_info)
|
|
|
|
subparser.add_argument('archive', metavar='ARCHIVE',
|
|
|
|
type=location_validator(archive=True),
|
2013-07-05 10:32:56 +00:00
|
|
|
help='archive to display information about')
|
2010-10-15 18:35:49 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
prune_epilog = textwrap.dedent("""
|
|
|
|
The prune command prunes a repository by deleting archives not matching
|
|
|
|
any of the specified retention options. This command is normally used by
|
|
|
|
automated backup scripts wanting to keep a certain number of historic backups.
|
|
|
|
|
|
|
|
As an example, "-d 7" means to keep the latest backup on each day for 7 days.
|
|
|
|
Days without backups do not count towards the total.
|
|
|
|
The rules are applied from hourly to yearly, and backups selected by previous
|
|
|
|
rules do not count towards those of later rules. The time that each backup
|
|
|
|
completes is used for pruning purposes. Dates and times are interpreted in
|
2014-02-04 01:11:47 +00:00
|
|
|
the local timezone, and weeks go from Monday to Sunday. Specifying a
|
2014-02-08 20:37:27 +00:00
|
|
|
negative number of archives to keep means that there is no limit.
|
2014-04-06 13:16:25 +00:00
|
|
|
|
2014-02-25 00:32:18 +00:00
|
|
|
The "--keep-within" option takes an argument of the form "<int><char>",
|
|
|
|
where char is "H", "d", "w", "m", "y". For example, "--keep-within 2d" means
|
2014-02-08 20:37:27 +00:00
|
|
|
to keep all archives that were created within the past 48 hours.
|
|
|
|
"1m" is taken to mean "31d". The archives kept with this option do not
|
2014-04-06 13:16:25 +00:00
|
|
|
count towards the totals specified by any other options.
|
2014-02-04 01:11:47 +00:00
|
|
|
|
2014-04-06 13:16:25 +00:00
|
|
|
If a prefix is set with -p, then only archives that start with the prefix are
|
|
|
|
considered for deletion and only those archives count towards the totals
|
|
|
|
specified by the rules.
|
2015-09-19 14:58:02 +00:00
|
|
|
Otherwise, *all* archives in the repository are candidates for deletion!
|
2014-04-06 13:16:25 +00:00
|
|
|
""")
|
2013-07-05 10:32:56 +00:00
|
|
|
subparser = subparsers.add_parser('prune', parents=[common_parser],
|
2014-02-04 01:11:47 +00:00
|
|
|
description=self.do_prune.__doc__,
|
2014-04-06 13:16:25 +00:00
|
|
|
epilog=prune_epilog,
|
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2011-11-22 20:47:17 +00:00
|
|
|
subparser.set_defaults(func=self.do_prune)
|
2014-02-20 02:33:05 +00:00
|
|
|
subparser.add_argument('-n', '--dry-run', dest='dry_run',
|
|
|
|
default=False, action='store_true',
|
|
|
|
help='do not change repository')
|
2014-03-19 21:32:07 +00:00
|
|
|
subparser.add_argument('-s', '--stats', dest='stats',
|
|
|
|
action='store_true', default=False,
|
|
|
|
help='print statistics for the deleted archive')
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('--keep-within', dest='within', type=str, metavar='WITHIN',
|
2014-02-08 20:37:27 +00:00
|
|
|
help='keep all archives within this time interval')
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('-H', '--keep-hourly', dest='hourly', type=int, default=0,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='number of hourly archives to keep')
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('-d', '--keep-daily', dest='daily', type=int, default=0,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='number of daily archives to keep')
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('-w', '--keep-weekly', dest='weekly', type=int, default=0,
|
2014-02-09 21:15:49 +00:00
|
|
|
help='number of weekly archives to keep')
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('-m', '--keep-monthly', dest='monthly', type=int, default=0,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='number of monthly archives to keep')
|
2014-02-19 21:46:15 +00:00
|
|
|
subparser.add_argument('-y', '--keep-yearly', dest='yearly', type=int, default=0,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='number of yearly archives to keep')
|
2011-08-07 12:04:14 +00:00
|
|
|
subparser.add_argument('-p', '--prefix', dest='prefix', type=str,
|
2013-07-05 10:32:56 +00:00
|
|
|
help='only consider archive names starting with this prefix')
|
2015-09-06 18:19:28 +00:00
|
|
|
subparser.add_argument('repository', metavar='REPOSITORY', nargs='?', default='',
|
2011-08-06 21:33:06 +00:00
|
|
|
type=location_validator(archive=False),
|
2013-07-05 10:32:56 +00:00
|
|
|
help='repository to prune')
|
2014-02-08 14:44:31 +00:00
|
|
|
|
2015-10-03 16:36:52 +00:00
|
|
|
upgrade_epilog = textwrap.dedent("""
|
do not upgrade repositories in place by default
instead, we perform the equivalent of `cp -al` on the repository to
keep a backup, and then rewrite the files, breaking the hardlinks as
necessary.
it has to be confirmed that the rest of Borg will also break hardlinks
when operating on files in the repository. if Borg operates in place
on any files of the repository, it could jeoperdize the backup, so
this needs to be verified. I believe that most files are written to a
temporary file and moved into place, however, so the backup should be
safe.
the rationale behind the backup copy is that we want to be extra
careful with user's data by default. the old behavior is retained
through the `--inplace`/`-i` commandline flag. plus, this way we don't
need to tell users to go through extra steps (`cp -a`, in particular)
before running the command.
also, it can take a long time to do the copy of the attic repository
we wish to work on. since `cp -a` doesn't provide progress
information, the new default behavior provides a nicer user experience
of giving an overall impression of the upgrade progress, while
retaining compatibility with Attic by default (in a separate
repository, of course).
this makes the upgrade command much less scary to use and hopefully
will convert drones to the borg collective.
the only place where the default inplace behavior is retained is in
the header_replace() function, to avoid breaking the cache conversion
code and to keep API stability and semantic coherence ("replace" by
defaults means in place).
2015-10-15 22:02:24 +00:00
|
|
|
upgrade an existing Borg repository. this currently
|
2015-10-03 16:36:52 +00:00
|
|
|
only support converting an Attic repository, but may
|
|
|
|
eventually be extended to cover major Borg upgrades as well.
|
2015-10-01 03:50:46 +00:00
|
|
|
|
2015-10-01 12:36:20 +00:00
|
|
|
it will change the magic strings in the repository's segments
|
|
|
|
to match the new Borg magic strings. the keyfiles found in
|
2015-10-01 03:50:46 +00:00
|
|
|
$ATTIC_KEYS_DIR or ~/.attic/keys/ will also be converted and
|
|
|
|
copied to $BORG_KEYS_DIR or ~/.borg/keys.
|
|
|
|
|
2015-10-02 14:12:13 +00:00
|
|
|
the cache files are converted, from $ATTIC_CACHE_DIR or
|
|
|
|
~/.cache/attic to $BORG_CACHE_DIR or ~/.cache/borg, but the
|
|
|
|
cache layout between Borg and Attic changed, so it is possible
|
|
|
|
the first backup after the conversion takes longer than expected
|
|
|
|
due to the cache resync.
|
2015-10-01 03:50:46 +00:00
|
|
|
|
2015-10-03 16:46:23 +00:00
|
|
|
upgrade should be able to resume if interrupted, although it
|
|
|
|
will still iterate over all segments. if you want to start
|
|
|
|
from scratch, use `borg delete` over the copied repository to
|
|
|
|
make sure the cache files are also removed:
|
|
|
|
|
|
|
|
borg delete borg
|
|
|
|
|
do not upgrade repositories in place by default
instead, we perform the equivalent of `cp -al` on the repository to
keep a backup, and then rewrite the files, breaking the hardlinks as
necessary.
it has to be confirmed that the rest of Borg will also break hardlinks
when operating on files in the repository. if Borg operates in place
on any files of the repository, it could jeopardize the backup, so
this needs to be verified. I believe that most files are written to a
temporary file and moved into place, however, so the backup should be
safe.
the rationale behind the backup copy is that we want to be extra
careful with user's data by default. the old behavior is retained
through the `--inplace`/`-i` commandline flag. plus, this way we don't
need to tell users to go through extra steps (`cp -a`, in particular)
before running the command.
also, it can take a long time to do the copy of the attic repository
we wish to work on. since `cp -a` doesn't provide progress
information, the new default behavior provides a nicer user experience
of giving an overall impression of the upgrade progress, while
retaining compatibility with Attic by default (in a separate
repository, of course).
this makes the upgrade command much less scary to use and hopefully
will convert drones to the borg collective.
the only place where the default inplace behavior is retained is in
the header_replace() function, to avoid breaking the cache conversion
code and to keep API stability and semantic coherence ("replace" by
defaults means in place).
2015-10-15 22:02:24 +00:00
|
|
|
unless ``--inplace`` is specified, the upgrade process first
|
|
|
|
creates a backup copy of the repository, in
|
|
|
|
REPOSITORY.upgrade-DATETIME, using hardlinks. this takes
|
|
|
|
longer than in place upgrades, but is much safer and gives
|
|
|
|
progress information (as opposed to ``cp -al``). once you are
|
|
|
|
satisfied with the conversion, you can safely destroy the
|
|
|
|
backup copy.
|
|
|
|
|
|
|
|
WARNING: running the upgrade in place will make the current
|
|
|
|
copy unuseable with older version, with no way of going back
|
|
|
|
to previous versions. this can PERMANENTLY DAMAGE YOUR
|
|
|
|
REPOSITORY! Attic CAN NOT READ BORG REPOSITORIES, as the
|
|
|
|
magic strings have changed. you have been warned.""")
|
2015-10-03 16:36:52 +00:00
|
|
|
subparser = subparsers.add_parser('upgrade', parents=[common_parser],
|
|
|
|
description=self.do_upgrade.__doc__,
|
|
|
|
epilog=upgrade_epilog,
|
2015-10-01 03:50:46 +00:00
|
|
|
formatter_class=argparse.RawDescriptionHelpFormatter)
|
2015-10-03 16:36:52 +00:00
|
|
|
subparser.set_defaults(func=self.do_upgrade)
|
2015-10-01 03:50:46 +00:00
|
|
|
subparser.add_argument('-n', '--dry-run', dest='dry_run',
|
|
|
|
default=False, action='store_true',
|
|
|
|
help='do not change repository')
|
do not upgrade repositories in place by default
instead, we perform the equivalent of `cp -al` on the repository to
keep a backup, and then rewrite the files, breaking the hardlinks as
necessary.
it has to be confirmed that the rest of Borg will also break hardlinks
when operating on files in the repository. if Borg operates in place
on any files of the repository, it could jeopardize the backup, so
this needs to be verified. I believe that most files are written to a
temporary file and moved into place, however, so the backup should be
safe.
the rationale behind the backup copy is that we want to be extra
careful with user's data by default. the old behavior is retained
through the `--inplace`/`-i` commandline flag. plus, this way we don't
need to tell users to go through extra steps (`cp -a`, in particular)
before running the command.
also, it can take a long time to do the copy of the attic repository
we wish to work on. since `cp -a` doesn't provide progress
information, the new default behavior provides a nicer user experience
of giving an overall impression of the upgrade progress, while
retaining compatibility with Attic by default (in a separate
repository, of course).
this makes the upgrade command much less scary to use and hopefully
will convert drones to the borg collective.
the only place where the default inplace behavior is retained is in
the header_replace() function, to avoid breaking the cache conversion
code and to keep API stability and semantic coherence ("replace" by
defaults means in place).
2015-10-15 22:02:24 +00:00
|
|
|
subparser.add_argument('-i', '--inplace', dest='inplace',
|
|
|
|
default=False, action='store_true',
|
|
|
|
help="""rewrite repository in-place, with no chance of going back to older
|
|
|
|
versions of the repository.""")
|
2015-10-01 03:50:46 +00:00
|
|
|
subparser.add_argument('repository', metavar='REPOSITORY', nargs='?', default='',
|
|
|
|
type=location_validator(archive=False),
|
2015-10-03 16:36:52 +00:00
|
|
|
help='path to the repository to be upgraded')
|
2015-10-01 03:50:46 +00:00
|
|
|
|
2014-02-08 14:44:31 +00:00
|
|
|
subparser = subparsers.add_parser('help', parents=[common_parser],
|
|
|
|
description='Extra help')
|
2014-04-06 13:16:25 +00:00
|
|
|
subparser.add_argument('--epilog-only', dest='epilog_only',
|
|
|
|
action='store_true', default=False)
|
|
|
|
subparser.add_argument('--usage-only', dest='usage_only',
|
|
|
|
action='store_true', default=False)
|
|
|
|
subparser.set_defaults(func=functools.partial(self.do_help, parser, subparsers.choices))
|
2014-03-21 21:12:15 +00:00
|
|
|
subparser.add_argument('topic', metavar='TOPIC', type=str, nargs='?',
|
2014-02-08 14:44:31 +00:00
|
|
|
help='additional help on TOPIC')
|
2015-10-08 01:07:12 +00:00
|
|
|
return parser
|
|
|
|
|
|
|
|
def run(self, args=None):
    """Entry point for one CLI invocation: prepare config dirs, parse args, dispatch.

    :param args: commandline argument list (``sys.argv[1:]`` style); when empty,
                 ``['-h']`` is substituted so a bare invocation prints help.
    :returns: the exit code returned by the selected subcommand function.
    """
    check_extension_modules()
    # Ensure the keys directory exists and is readable/writable by the owner only.
    keys_dir = get_keys_dir()
    if not os.path.exists(keys_dir):
        os.makedirs(keys_dir)
        os.chmod(keys_dir, stat.S_IRWXU)
    # Same for the cache directory; when we create it ourselves, also drop a
    # CACHEDIR.TAG marker so backup tools honoring the cachedir spec skip it.
    # NOTE(review): reconstructed from annotated output — the tag file appears to be
    # written only when the cache dir was just created; confirm nesting upstream.
    cache_dir = get_cache_dir()
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
        os.chmod(cache_dir, stat.S_IRWXU)
        with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
            fd.write(textwrap.dedent("""
                Signature: 8a477f597d28d172789f06886806bc55
                # This file is a cache directory tag created by Borg.
                # For information about cache directory tags, see:
                # http://www.brynosaurus.com/cachedir/
                """).lstrip())
    # We can't use argparse for "serve" since we don't want it to show up in "Available commands"
    if args:
        args = self.preprocess_args(args)
    parser = self.build_parser(args)
    args = parser.parse_args(args or ['-h'])
    setup_logging(args)
    # Apply process-wide settings taken from the parsed arguments.
    os.umask(args.umask)
    RemoteRepository.remote_path = args.remote_path
    RemoteRepository.umask = args.umask
    update_excludes(args)
    # Dispatch to the subcommand handler bound via set_defaults(func=...).
    return args.func(args)
|
2010-03-06 17:25:35 +00:00
|
|
|
|
2011-10-29 15:01:07 +00:00
|
|
|
|
2015-08-12 02:09:36 +00:00
|
|
|
def sig_info_handler(signum, stack):  # pragma: no cover
    """search the stack for infos about the currently processed file and print them"""
    for frame_info in inspect.getouterframes(stack):
        func_name = frame_info[3]
        local_vars = frame_info[0].f_locals
        # a create operation is in progress: report path and position/size
        if func_name in ('process_file', '_process', ):
            current_path = local_vars['path']
            try:
                position = local_vars['fd'].tell()
                file_size = local_vars['st'].st_size
            except Exception:
                # fd/st may not exist (yet) in this frame; fall back to zeros
                position, file_size = 0, 0
            logger.warning("{0} {1}/{2}".format(current_path, format_file_size(position), format_file_size(file_size)))
            break
        # an extract operation is in progress: total size is unknown here
        if func_name in ('extract_item', ):
            current_path = local_vars['item'][b'path']
            try:
                position = local_vars['fd'].tell()
            except Exception:
                position = 0
            logger.warning("{0} {1}/???".format(current_path, format_file_size(position)))
            break
|
|
|
|
|
|
|
|
|
2015-08-12 02:09:36 +00:00
|
|
|
def setup_signal_handlers():  # pragma: no cover
    """install sig_info_handler on the platform's "info" signals, where available"""
    # SIGUSR1: kill -USR1 pid (everywhere); SIGINFO: kill -INFO pid or ctrl-t (BSD/macOS)
    available = [getattr(signal, name)
                 for name in ('SIGUSR1', 'SIGINFO')
                 if hasattr(signal, name)]
    for sig in available:
        signal.signal(sig, sig_info_handler)
|
|
|
|
|
|
|
|
|
2015-08-12 02:09:36 +00:00
|
|
|
def main():  # pragma: no cover
    """Process entry point: wrap std streams, run the Archiver, translate errors to exit codes."""
    # Make sure stdout and stderr have errors='replace') to avoid unicode
    # issues when print()-ing unicode file names
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, sys.stdout.encoding, 'replace', line_buffering=True)
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, sys.stderr.encoding, 'replace', line_buffering=True)
    setup_signal_handlers()
    archiver = Archiver()
    try:
        exit_code = archiver.run(sys.argv[1:])
    except Error as e:
        # application-level errors carry their own exit code
        archiver.print_error(e.get_message() + "\n%s" % traceback.format_exc())
        exit_code = e.exit_code
    except RemoteRepository.RPCError as e:
        archiver.print_error('Error: Remote Exception.\n%s' % str(e))
        exit_code = 1
    except Exception:
        # catch-all for unexpected local failures; report full traceback
        archiver.print_error('Error: Local Exception.\n%s' % traceback.format_exc())
        exit_code = 1
    except KeyboardInterrupt:
        # reachable despite the broad handler above: KeyboardInterrupt derives
        # from BaseException, not Exception
        archiver.print_error('Error: Keyboard interrupt.\n%s' % traceback.format_exc())
        exit_code = 1
    if exit_code:
        archiver.print_error('Exiting with failure status due to previous errors')
    sys.exit(exit_code)
|
2010-02-20 17:23:46 +00:00
|
|
|
|
|
|
|
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
|