diff --git a/borg/archive.py b/borg/archive.py
index 791caaa7f..f47e60b8b 100644
--- a/borg/archive.py
+++ b/borg/archive.py
@@ -517,7 +517,7 @@ Number of files: {0.stats.nfiles}'''.format(
         self.add_item(item)
         return 'i'  # stdin
 
-    def process_file(self, path, st, cache):
+    def process_file(self, path, st, cache, ignore_inode=False):
         status = None
         safe_path = make_path_safe(path)
         # Is it a hard link?
@@ -533,7 +533,7 @@ Number of files: {0.stats.nfiles}'''.format(
             self.hard_links[st.st_ino, st.st_dev] = safe_path
         path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
         first_run = not cache.files
-        ids = cache.file_known_and_unchanged(path_hash, st)
+        ids = cache.file_known_and_unchanged(path_hash, st, ignore_inode)
         if first_run:
             logger.info('processing files')
         chunks = None
diff --git a/borg/archiver.py b/borg/archiver.py
index aa6056a68..c4505e673 100644
--- a/borg/archiver.py
+++ b/borg/archiver.py
@@ -201,6 +201,7 @@ class Archiver:
         self.output_filter = args.output_filter
         self.output_list = args.output_list
+        self.ignore_inode = args.ignore_inode
         dry_run = args.dry_run
         t0 = datetime.utcnow()
         if not dry_run:
@@ -242,7 +243,7 @@ class Archiver:
         if stat.S_ISREG(st.st_mode) or read_special and not stat.S_ISDIR(st.st_mode):
             if not dry_run:
                 try:
-                    status = archive.process_file(path, st, cache)
+                    status = archive.process_file(path, st, cache, self.ignore_inode)
                 except OSError as e:
                     status = 'E'
                     self.print_warning('%s: %s', path, e)
@@ -1018,6 +1019,9 @@ class Archiver:
                                type=ChunkerParams, default=CHUNKER_PARAMS,
                                metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
                                help='specify the chunker parameters. default: %d,%d,%d,%d' % CHUNKER_PARAMS)
+        subparser.add_argument('--ignore-inode', dest='ignore_inode',
+                               action='store_true', default=False,
+                               help='ignore inode data in the file metadata cache used to detect unchanged files.')
         subparser.add_argument('-C', '--compression', dest='compression',
                                type=CompressionSpec, default=dict(name='none'), metavar='COMPRESSION',
                                help='select compression algorithm (and level): '
diff --git a/borg/cache.py b/borg/cache.py
index c46fa6be8..4fc8f0177 100644
--- a/borg/cache.py
+++ b/borg/cache.py
@@ -39,6 +39,12 @@ class Cache:
 
     def __init__(self, repository, key, manifest, path=None, sync=True, do_files=False, warn_if_unencrypted=True,
                  lock_wait=None):
+        """
+        :param do_files: use file metadata cache
+        :param warn_if_unencrypted: print warning if accessing unknown unencrypted repository
+        :param lock_wait: timeout for lock acquisition (None: return immediately if lock unavailable)
+        :param sync: do :meth:`.sync`
+        """
         self.lock = None
         self.timestamp = None
         self.lock = None
@@ -394,7 +400,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
             self.chunks[id] = (count - 1, size, csize)
             stats.update(-size, -csize, False)
 
-    def file_known_and_unchanged(self, path_hash, st):
+    def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
         if not (self.do_files and stat.S_ISREG(st.st_mode)):
             return None
         if self.files is None:
@@ -403,7 +409,8 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         if not entry:
             return None
         entry = msgpack.unpackb(entry)
-        if entry[2] == st.st_size and bigint_to_int(entry[3]) == st.st_mtime_ns and entry[1] == st.st_ino:
+        if (entry[2] == st.st_size and bigint_to_int(entry[3]) == st.st_mtime_ns and
+                (ignore_inode or entry[1] == st.st_ino)):
             # reset entry age
             entry[0] = 0
             self.files[path_hash] = msgpack.packb(entry)
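
Note: the sketch below (hypothetical names, not code from this patch) illustrates what the new ignore_inode flag changes in Cache.file_known_and_unchanged: a cached entry still has to match on size and mtime_ns, but the inode comparison is skipped when the flag is set. This is typically wanted on filesystems that do not guarantee stable inode numbers across mounts (e.g. some network filesystems), where the inode check would otherwise invalidate the files cache on every run. On the command line the behaviour is enabled via borg create --ignore-inode.

    import os
    from collections import namedtuple

    # Hypothetical stand-in for a files-cache entry (age, inode, size, mtime_ns, chunk ids).
    CacheEntry = namedtuple('CacheEntry', 'age inode size mtime_ns chunk_ids')

    def is_unchanged(entry, st, ignore_inode=False):
        # size and mtime must always match; the inode only when ignore_inode is False
        return (entry.size == st.st_size and
                entry.mtime_ns == st.st_mtime_ns and
                (ignore_inode or entry.inode == st.st_ino))

    if __name__ == '__main__':
        st = os.stat(__file__)
        # simulate a cache entry whose inode went stale (e.g. after a remount)
        stale = CacheEntry(age=0, inode=st.st_ino + 1, size=st.st_size,
                           mtime_ns=st.st_mtime_ns, chunk_ids=[])
        print(is_unchanged(stale, st))                     # False: inode differs
        print(is_unchanged(stale, st, ignore_inode=True))  # True: inode ignored

Keeping ignore_inode as a keyword argument with a False default preserves the existing behaviour for every caller that does not pass it.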