Implement --exclude-if-present
Add a new --exclude-if-present command-line flag to ``borg create``. If specified, directories containing the given tag file will be excluded from the backup. The flag can be repeated to check for more than a single tag file; tag files are matched by name only, regardless of their contents. This is taken from an attic PR (and adapted for borg):

    commit 3462a9ca90388dc5d8b4fa4218a32769676b3623
    Author: Yuri D'Elia <yuri.delia@eurac.edu>
    Date:   Sun Dec 7 19:15:17 2014 +0100
parent 4a2e4ec683
commit a6a8a4ebd9
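For example (illustrative repository path and tag name, not from the patch): ``borg create --exclude-if-present .nobackup /path/to/repo::archive ~`` skips every directory that contains a file named ``.nobackup``, and giving the flag several times checks for several tag names. In the diff below, hunks are grouped by file; the path labels (borg/archiver.py, borg/helpers.py, borg/testsuite/archiver.py) are inferred from the hunk contents, since the original page does not show them.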
borg/archiver.py:

@@ -20,7 +20,7 @@ from .helpers import Error, location_validator, format_time, format_file_size, \
     format_file_mode, ExcludePattern, IncludePattern, exclude_path, adjust_patterns, to_localtime, timestamp, \
     get_cache_dir, get_keys_dir, prune_within, prune_split, unhexlify, \
     Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
-    is_cachedir, bigint_to_int, ChunkerParams, CompressionSpec, have_cython, is_slow_msgpack, yes, \
+    dir_is_tagged, bigint_to_int, ChunkerParams, CompressionSpec, have_cython, is_slow_msgpack, yes, \
     EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
 from .logger import create_logger, setup_logging
 logger = create_logger()
@@ -166,8 +166,8 @@ class Archiver:
                     continue
             else:
                 restrict_dev = None
-            self._process(archive, cache, args.excludes, args.exclude_caches, skip_inodes, path, restrict_dev,
-                          read_special=args.read_special, dry_run=dry_run)
+            self._process(archive, cache, args.excludes, args.exclude_caches, args.exclude_if_present,
+                          skip_inodes, path, restrict_dev, read_special=args.read_special, dry_run=dry_run)
         if not dry_run:
             archive.save(timestamp=args.timestamp)
         if args.progress:
@@ -182,8 +182,8 @@ class Archiver:
             print('-' * 78)
         return self.exit_code

-    def _process(self, archive, cache, excludes, exclude_caches, skip_inodes, path, restrict_dev,
-                 read_special=False, dry_run=False):
+    def _process(self, archive, cache, excludes, exclude_caches, exclude_if_present,
+                 skip_inodes, path, restrict_dev, read_special=False, dry_run=False):
         if exclude_path(path, excludes):
             return
         try:
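``_process`` gains the new ``exclude_if_present`` parameter just before ``skip_inodes``, mirroring the updated call in the hunk above; the recursive call inside the directory branch is adjusted the same way further down.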
@@ -209,7 +209,7 @@ class Archiver:
                 status = 'E'
                 self.print_warning('%s: %s', path, e)
         elif stat.S_ISDIR(st.st_mode):
-            if exclude_caches and is_cachedir(path):
+            if dir_is_tagged(path, exclude_caches, exclude_if_present):
                 return
             if not dry_run:
                 status = archive.process_dir(path, st)
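The inline ``exclude_caches and is_cachedir(path)`` test is replaced by a single ``dir_is_tagged()`` call that covers both exclusion mechanisms, CACHEDIR.TAG detection and user-specified tag files, so the directory branch needs only one early return.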
@@ -221,8 +221,8 @@ class Archiver:
             else:
                 for filename in sorted(entries):
                     entry_path = os.path.normpath(os.path.join(path, filename))
-                    self._process(archive, cache, excludes, exclude_caches, skip_inodes,
-                                  entry_path, restrict_dev, read_special=read_special,
+                    self._process(archive, cache, excludes, exclude_caches, exclude_if_present,
+                                  skip_inodes, entry_path, restrict_dev, read_special=read_special,
                                   dry_run=dry_run)
         elif stat.S_ISLNK(st.st_mode):
             if not dry_run:
@@ -785,6 +785,9 @@ class Archiver:
         subparser.add_argument('--exclude-caches', dest='exclude_caches',
                                action='store_true', default=False,
                                help='exclude directories that contain a CACHEDIR.TAG file (http://www.brynosaurus.com/cachedir/spec.html)')
+        subparser.add_argument('--exclude-if-present', dest='exclude_if_present',
+                               metavar='FILENAME', action='append', type=str,
+                               help='exclude directories that contain the specified file')
         subparser.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
                                type=int, default=300, metavar='SECONDS',
                                help='write checkpoint every SECONDS seconds (Default: 300)')
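A side note on the argparse wiring: ``action='append'`` is what makes the flag repeatable. A minimal standalone sketch (not borg code; only the flag name is taken from the patch) shows that the destination stays ``None`` when the flag is never given, which is why ``dir_is_tagged()`` below guards with ``is not None``:

    import argparse

    parser = argparse.ArgumentParser()
    # each occurrence appends one more filename to a list;
    # if the flag is absent, the attribute keeps its default, None
    parser.add_argument('--exclude-if-present', dest='exclude_if_present',
                        metavar='FILENAME', action='append', type=str)

    args = parser.parse_args(['--exclude-if-present', '.NOBACKUP',
                              '--exclude-if-present', '00-NOBACKUP'])
    print(args.exclude_if_present)                   # ['.NOBACKUP', '00-NOBACKUP']
    print(parser.parse_args([]).exclude_if_present)  # None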
borg/helpers.py:

@@ -428,7 +428,7 @@ def CompressionSpec(s):
         raise ValueError


-def is_cachedir(path):
+def dir_is_cachedir(path):
     """Determines whether the specified path is a cache directory (and
     therefore should potentially be excluded from the backup) according to
     the CACHEDIR.TAG protocol
@@ -448,6 +448,20 @@ def is_cachedir(path):
     return False


+def dir_is_tagged(path, exclude_caches, exclude_if_present):
+    """Determines whether the specified path is excluded by being a cache
+    directory or containing the user-specified tag file.
+    """
+    if exclude_caches and dir_is_cachedir(path):
+        return True
+    if exclude_if_present is not None:
+        for tag in exclude_if_present:
+            tag_path = os.path.join(path, tag)
+            if os.path.isfile(tag_path):
+                return True
+    return False
+
+
 def format_time(t):
     """Format datetime suitable for fixed length list output
     """
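To make the new helper concrete, here is a small self-contained demo (an illustrative sketch: it copies only the tag-file branch of ``dir_is_tagged()`` and drops the ``exclude_caches`` branch; the directory names are invented). A directory counts as tagged only when one of the given names exists in it as a regular file; an entry of the same name that is itself a directory does not match, because of the ``os.path.isfile()`` test:

    import os
    import tempfile

    def dir_is_tagged(path, exclude_if_present):
        # simplified copy of the new helper, without the CACHEDIR.TAG branch
        if exclude_if_present is not None:
            for tag in exclude_if_present:
                if os.path.isfile(os.path.join(path, tag)):
                    return True
        return False

    with tempfile.TemporaryDirectory() as root:
        tagged = os.path.join(root, 'tagged')
        os.makedirs(tagged)
        open(os.path.join(tagged, '.NOBACKUP'), 'w').close()   # tag is a regular file
        untagged = os.path.join(root, 'untagged')
        os.makedirs(os.path.join(untagged, '.NOBACKUP'))       # same name, but a directory
        print(dir_is_tagged(tagged, ['.NOBACKUP']))    # True
        print(dir_is_tagged(untagged, ['.NOBACKUP']))  # False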
borg/testsuite/archiver.py:

@@ -499,6 +499,17 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.assert_equal(sorted(os.listdir('output/input')), ['cache2', 'file1'])
         self.assert_equal(sorted(os.listdir('output/input/cache2')), ['CACHEDIR.TAG'])

+    def test_exclude_tagged(self):
+        self.cmd('init', self.repository_location)
+        self.create_regular_file('file1', size=1024 * 80)
+        self.create_regular_file('tagged1/.NOBACKUP')
+        self.create_regular_file('tagged2/00-NOBACKUP')
+        self.create_regular_file('tagged3/.NOBACKUP/file2')
+        self.cmd('create', '--exclude-if-present', '.NOBACKUP', '--exclude-if-present', '00-NOBACKUP', self.repository_location + '::test', 'input')
+        with changedir('output'):
+            self.cmd('extract', self.repository_location + '::test')
+        self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'tagged3'])
+
     def test_path_normalization(self):
         self.cmd('init', self.repository_location)
         self.create_regular_file('dir1/dir2/file', size=1024 * 80)
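The final assertion is the interesting one: ``tagged1`` and ``tagged2`` are excluded because each contains a matching tag file, while ``tagged3`` survives even though it contains a ``.NOBACKUP`` entry. There, ``.NOBACKUP`` is a directory (it holds ``file2``), so the ``os.path.isfile()`` check in ``dir_is_tagged()`` does not treat it as a tag.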