borg create: add --ignore-inode option

This is mainly meant for use with network file systems such as sshfs and
possibly CIFS, which may not provide stable inode numbers.
This commit is contained in:
Marian Beermann 2016-03-15 15:38:55 +01:00
parent 648761fc03
commit 57c2d03e3e
3 changed files with 16 additions and 5 deletions

View File

@ -517,7 +517,7 @@ Number of files: {0.stats.nfiles}'''.format(
self.add_item(item)
return 'i' # stdin
def process_file(self, path, st, cache):
def process_file(self, path, st, cache, ignore_inode=False):
status = None
safe_path = make_path_safe(path)
# Is it a hard link?
@ -533,7 +533,7 @@ Number of files: {0.stats.nfiles}'''.format(
self.hard_links[st.st_ino, st.st_dev] = safe_path
path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
first_run = not cache.files
ids = cache.file_known_and_unchanged(path_hash, st)
ids = cache.file_known_and_unchanged(path_hash, st, ignore_inode)
if first_run:
logger.info('processing files')
chunks = None

View File

@ -201,6 +201,7 @@ class Archiver:
self.output_filter = args.output_filter
self.output_list = args.output_list
self.ignore_inode = args.ignore_inode
dry_run = args.dry_run
t0 = datetime.utcnow()
if not dry_run:
@ -242,7 +243,7 @@ class Archiver:
if stat.S_ISREG(st.st_mode) or read_special and not stat.S_ISDIR(st.st_mode):
if not dry_run:
try:
status = archive.process_file(path, st, cache)
status = archive.process_file(path, st, cache, self.ignore_inode)
except OSError as e:
status = 'E'
self.print_warning('%s: %s', path, e)
@ -1018,6 +1019,9 @@ class Archiver:
type=ChunkerParams, default=CHUNKER_PARAMS,
metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
help='specify the chunker parameters. default: %d,%d,%d,%d' % CHUNKER_PARAMS)
subparser.add_argument('--ignore-inode', dest='ignore_inode',
action='store_true', default=False,
help='ignore inode data in the file metadata cache used to detect unchanged files.')
subparser.add_argument('-C', '--compression', dest='compression',
type=CompressionSpec, default=dict(name='none'), metavar='COMPRESSION',
help='select compression algorithm (and level): '

View File

@ -39,6 +39,12 @@ class Cache:
def __init__(self, repository, key, manifest, path=None, sync=True, do_files=False, warn_if_unencrypted=True,
lock_wait=None):
"""
:param do_files: use file metadata cache
:param warn_if_unencrypted: print warning if accessing unknown unencrypted repository
:param lock_wait: timeout for lock acquisition (None: return immediately if lock unavailable)
:param sync: do :meth:`.sync`
"""
self.lock = None
self.timestamp = None
self.lock = None
@ -394,7 +400,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
self.chunks[id] = (count - 1, size, csize)
stats.update(-size, -csize, False)
def file_known_and_unchanged(self, path_hash, st):
def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
if not (self.do_files and stat.S_ISREG(st.st_mode)):
return None
if self.files is None:
@ -403,7 +409,8 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
if not entry:
return None
entry = msgpack.unpackb(entry)
if entry[2] == st.st_size and bigint_to_int(entry[3]) == st.st_mtime_ns and entry[1] == st.st_ino:
if (entry[2] == st.st_size and bigint_to_int(entry[3]) == st.st_mtime_ns and
(ignore_inode or entry[1] == st.st_ino)):
# reset entry age
entry[0] = 0
self.files[path_hash] = msgpack.packb(entry)