Merge pull request #852 from enkore/feature/1.0ignore-inode

1.0-maint, borg create: add --ignore-inode option [to fix sshfs performance]
commit 305273d053
Author: TW
Date:   2016-04-06 23:40:57 +02:00
3 changed files with 16 additions and 5 deletions

borg/archive.py

@@ -517,7 +517,7 @@ Number of files: {0.stats.nfiles}'''.format(
         self.add_item(item)
         return 'i'  # stdin
 
-    def process_file(self, path, st, cache):
+    def process_file(self, path, st, cache, ignore_inode=False):
         status = None
         safe_path = make_path_safe(path)
         # Is it a hard link?
@@ -533,7 +533,7 @@ Number of files: {0.stats.nfiles}'''.format(
             self.hard_links[st.st_ino, st.st_dev] = safe_path
         path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
         first_run = not cache.files
-        ids = cache.file_known_and_unchanged(path_hash, st)
+        ids = cache.file_known_and_unchanged(path_hash, st, ignore_inode)
         if first_run:
             logger.debug('Processing files ...')
         chunks = None

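The hunks above show the files-cache pattern this change hooks into: stat the file, ask the cache whether it is known and unchanged, and only read and chunk it on a miss. Below is a minimal sketch of that pattern, assuming a plain dict keyed by path and an invented (size, mtime_ns, inode, chunk_ids) entry layout; none of this is the commit's actual code:

    import os
    import stat

    def file_known_and_unchanged(cache, path, st, ignore_inode=False):
        # Return the stored chunk ids if the file looks unchanged, else None.
        entry = cache.get(path)
        if entry is None:
            return None
        size, mtime_ns, inode, chunk_ids = entry
        if size == st.st_size and mtime_ns == st.st_mtime_ns and \
                (ignore_inode or inode == st.st_ino):
            return chunk_ids  # looks unchanged: reuse the known chunks
        return None

    def process_file(cache, path, ignore_inode=False):
        st = os.stat(path)
        if not stat.S_ISREG(st.st_mode):
            return None
        ids = file_known_and_unchanged(cache, path, st, ignore_inode)
        if ids is not None:
            return ids  # cache hit: no file I/O beyond the stat()
        with open(path, 'rb') as f:  # cache miss: read and (re-)chunk everything
            data = f.read()
        ids = [hash(data)]  # stand-in for real content-defined chunking
        cache[path] = (st.st_size, st.st_mtime_ns, st.st_ino, ids)
        return ids

Defaulting ignore_inode to False at every layer keeps the existing strict check unless the new flag is passed explicitly.
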
borg/archiver.py

@@ -231,6 +231,7 @@ class Archiver:
         self.output_filter = args.output_filter
         self.output_list = args.output_list
+        self.ignore_inode = args.ignore_inode
         dry_run = args.dry_run
         t0 = datetime.utcnow()
         if not dry_run:
@@ -270,7 +271,7 @@
         if stat.S_ISREG(st.st_mode) or read_special and not stat.S_ISDIR(st.st_mode):
             if not dry_run:
                 try:
-                    status = archive.process_file(path, st, cache)
+                    status = archive.process_file(path, st, cache, self.ignore_inode)
                 except OSError as e:
                     status = 'E'
                     self.print_warning('%s: %s', path, e)
@@ -984,6 +985,12 @@
         traversing all paths specified. The archive will consume almost no disk space for
         files or parts of files that have already been stored in other archives.
+
+        To speed up pulling backups over sshfs and similar network file systems which do
+        not provide correct inode information, the --ignore-inode flag can be used. This
+        potentially decreases the reliability of change detection, while avoiding always
+        reading all files on these file systems.
+
         See the output of the "borg help patterns" command for more help on exclude patterns.
         """)
@@ -1039,6 +1046,9 @@
                                type=ChunkerParams, default=CHUNKER_PARAMS,
                                metavar='CHUNK_MIN_EXP,CHUNK_MAX_EXP,HASH_MASK_BITS,HASH_WINDOW_SIZE',
                                help='specify the chunker parameters. default: %d,%d,%d,%d' % CHUNKER_PARAMS)
+        subparser.add_argument('--ignore-inode', dest='ignore_inode',
+                               action='store_true', default=False,
+                               help='ignore inode data in the file metadata cache used to detect unchanged files.')
         subparser.add_argument('-C', '--compression', dest='compression',
                                type=CompressionSpec, default=dict(name='none'), metavar='COMPRESSION',
                                help='select compression algorithm (and level): '

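The new help text exists because sshfs synthesizes inode numbers and does not guarantee they stay stable across mounts, so the inode stored in the files cache can differ on the next run even though the file itself is unchanged. A toy simulation of that spurious-miss scenario (FakeStat and all the numbers are invented for illustration):

    from collections import namedtuple

    FakeStat = namedtuple('FakeStat', 'st_size st_mtime_ns st_ino')

    # Cache entry recorded on the previous run: (size, mtime_ns, inode).
    entry = (1024, 1_000_000_000, 42)

    # Same unchanged file seen after remounting sshfs: the synthesized
    # inode changed from 42 to 7777, while size and mtime still match.
    st = FakeStat(st_size=1024, st_mtime_ns=1_000_000_000, st_ino=7777)

    def unchanged(entry, st, ignore_inode):
        size, mtime_ns, inode = entry
        return (size == st.st_size and mtime_ns == st.st_mtime_ns and
                (ignore_inode or inode == st.st_ino))

    print(unchanged(entry, st, ignore_inode=False))  # False: spurious miss, full re-read
    print(unchanged(entry, st, ignore_inode=True))   # True: hit, file is not re-read

The trade-off stated in the help text follows directly: with --ignore-inode a file replaced in place with identical size and mtime could go undetected, but unchanged files on such mounts are no longer read in full on every backup.
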
borg/cache.py

@@ -394,7 +394,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         self.chunks[id] = (count - 1, size, csize)
         stats.update(-size, -csize, False)
 
-    def file_known_and_unchanged(self, path_hash, st):
+    def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
         if not (self.do_files and stat.S_ISREG(st.st_mode)):
             return None
         if self.files is None:
@@ -403,7 +403,8 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         if not entry:
             return None
         entry = msgpack.unpackb(entry)
-        if entry[2] == st.st_size and bigint_to_int(entry[3]) == st.st_mtime_ns and entry[1] == st.st_ino:
+        if (entry[2] == st.st_size and bigint_to_int(entry[3]) == st.st_mtime_ns and
+                (ignore_inode or entry[1] == st.st_ino)):
             # reset entry age
             entry[0] = 0
             self.files[path_hash] = msgpack.packb(entry)
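
From the indices used above, entry[0] is an age counter (reset on a hit), entry[1] the inode, entry[2] the size, and entry[3] the mtime stored as a bigint. Since Python's `or` short-circuits, passing ignore_inode=True means entry[1] == st.st_ino is never even evaluated. A standalone sketch of the round-trip, assuming the msgpack package is installed and skipping the bigint conversion:

    import msgpack

    # Assumed entry layout, following the indices in the diff:
    # [age, inode, size, mtime_ns]
    packed = msgpack.packb([3, 42, 1024, 1_000_000_000])

    st_size, st_mtime_ns, st_ino = 1024, 1_000_000_000, 7777  # inode changed
    ignore_inode = True

    entry = msgpack.unpackb(packed)
    if (entry[2] == st_size and entry[3] == st_mtime_ns and
            (ignore_inode or entry[1] == st_ino)):  # short-circuit skips the inode test
        entry[0] = 0  # reset the entry age, as the real code does
        packed = msgpack.packb(entry)
        print('known and unchanged')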