
Merge pull request #5433 from luke-jr/files_cache_suffix

Allow appending a suffix to the files cache filename via the BORG_FILES_CACHE_SUFFIX env var
Commit ac77dd491d by TW, 2020-11-16 18:51:40 +01:00, committed via GitHub.
2 changed files with 21 additions and 8 deletions

Documentation (environment variables section):

@@ -56,6 +56,10 @@ General:
 BORG_REMOTE_PATH
     When set, use the given path as borg executable on the remote (defaults to "borg" if unset).
     Using ``--remote-path PATH`` commandline option overrides the environment variable.
+BORG_FILES_CACHE_SUFFIX
+    When set to a value at least one character long, instructs borg to use a specifically named
+    (based on the suffix) alternative files cache. This can be used to avoid loading and saving
+    cache entries for backup sources other than the current sources.
 BORG_FILES_CACHE_TTL
     When set to a numeric value, this determines the maximum "time to live" for the files cache
     entries (default: 20). The files cache is used to quickly determine whether a file is unchanged.
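
For illustration (not part of this change; the repo, archive names, and paths are invented, and BORG_REPO is assumed to be set), a wrapper script could give each backup job its own files cache by exporting a different suffix per job:

    import os
    import subprocess

    # Run two backup jobs with disjoint source sets; each gets its own files
    # cache, so one job's runs cannot evict the other's cache entries.
    for suffix, sources in [('home', ['/home']), ('etc', ['/etc'])]:
        env = dict(os.environ, BORG_FILES_CACHE_SUFFIX=suffix)
        subprocess.run(['borg', 'create', '::{}-{{now}}'.format(suffix), *sources],
                       env=env, check=True)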

src/borg/cache.py:

@@ -226,6 +226,11 @@ def cache_dir(repository, path=None):
     return path or os.path.join(get_cache_dir(), repository.id_str)
 
 
+def files_cache_name():
+    suffix = os.environ.get('BORG_FILES_CACHE_SUFFIX', '')
+    return 'files.' + suffix if suffix else 'files'
+
+
 class CacheConfig:
     def __init__(self, repository, path=None, lock_wait=None):
         self.repository = repository
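
Note that the conditional expression in the new helper binds looser than +, so it parses as ('files.' + suffix) if suffix else 'files'. A standalone sketch of the resulting names (values are illustrative):

    import os

    def files_cache_name():
        # same logic as the helper added above
        suffix = os.environ.get('BORG_FILES_CACHE_SUFFIX', '')
        return 'files.' + suffix if suffix else 'files'

    os.environ.pop('BORG_FILES_CACHE_SUFFIX', None)
    assert files_cache_name() == 'files'           # unset/empty: historical name
    os.environ['BORG_FILES_CACHE_SUFFIX'] = 'home'
    assert files_cache_name() == 'files.home'      # suffix appended after a dot
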
@@ -497,7 +502,7 @@ class LocalCache(CacheStatsMixin):
         self.cache_config.create()
         ChunkIndex().write(os.path.join(self.path, 'chunks'))
         os.makedirs(os.path.join(self.path, 'chunks.archive.d'))
-        with SaveFile(os.path.join(self.path, 'files'), binary=True):
+        with SaveFile(os.path.join(self.path, files_cache_name()), binary=True):
             pass  # empty file
 
     def _do_open(self):
@@ -527,8 +532,8 @@ class LocalCache(CacheStatsMixin):
         logger.debug('Reading files cache ...')
         msg = None
         try:
-            with IntegrityCheckedFile(path=os.path.join(self.path, 'files'), write=False,
-                                      integrity_data=self.cache_config.integrity.get('files')) as fd:
+            with IntegrityCheckedFile(path=os.path.join(self.path, files_cache_name()), write=False,
+                                      integrity_data=self.cache_config.integrity.get(files_cache_name())) as fd:
                 u = msgpack.Unpacker(use_list=True)
                 while True:
                     data = fd.read(64 * 1024)
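
The reading loop feeds the unpacker in 64 KiB chunks rather than loading the whole cache at once. A self-contained sketch of that streaming pattern (entries are made up; borg's real records are FileCacheEntry tuples keyed by a path hash):

    import io
    import msgpack

    # Simulate a files cache: a stream of msgpack-serialized records.
    buf = io.BytesIO()
    for record in [(b'hash1', [0, 123]), (b'hash2', [1, 456])]:
        buf.write(msgpack.packb(record))
    buf.seek(0)

    u = msgpack.Unpacker(use_list=True)
    while True:
        data = buf.read(64 * 1024)
        if not data:
            break
        u.feed(data)
        for path_hash, entry in u:
            print(path_hash, entry)
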
@@ -562,7 +567,11 @@ class LocalCache(CacheStatsMixin):
         pi.output('Initializing cache transaction: Reading chunks')
         shutil.copy(os.path.join(self.path, 'chunks'), txn_dir)
         pi.output('Initializing cache transaction: Reading files')
-        shutil.copy(os.path.join(self.path, 'files'), txn_dir)
+        try:
+            shutil.copy(os.path.join(self.path, files_cache_name()), txn_dir)
+        except FileNotFoundError:
+            with SaveFile(os.path.join(txn_dir, files_cache_name()), binary=True):
+                pass  # empty file
         os.rename(os.path.join(self.path, 'txn.tmp'),
                   os.path.join(self.path, 'txn.active'))
         self.txn_active = True
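
The try/except added here matters for the suffix feature: the first run with a fresh suffix has no files.<suffix> yet, so the transaction setup seeds an empty cache file instead of failing. The same copy-or-create pattern in isolation (throwaway temp dirs; a plain open() stands in for borg's SaveFile):

    import os
    import shutil
    import tempfile

    src_dir = tempfile.mkdtemp()
    txn_dir = tempfile.mkdtemp()
    name = 'files.home'  # hypothetical suffixed cache name

    try:
        shutil.copy(os.path.join(src_dir, name), txn_dir)
    except FileNotFoundError:
        # first run with this suffix: start from an empty cache file
        with open(os.path.join(txn_dir, name), 'wb'):
            pass
    assert os.path.getsize(os.path.join(txn_dir, name)) == 0
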
@@ -581,7 +590,7 @@ class LocalCache(CacheStatsMixin):
                 self._newest_cmtime = 2 ** 63 - 1  # nanoseconds, good until y2262
             ttl = int(os.environ.get('BORG_FILES_CACHE_TTL', 20))
             pi.output('Saving files cache')
-            with IntegrityCheckedFile(path=os.path.join(self.path, 'files'), write=True) as fd:
+            with IntegrityCheckedFile(path=os.path.join(self.path, files_cache_name()), write=True) as fd:
                 for path_hash, item in self.files.items():
                     # Only keep files seen in this backup that are older than newest cmtime seen in this backup -
                     # this is to avoid issues with filesystem snapshots and cmtime granularity.
@@ -590,7 +599,7 @@ class LocalCache(CacheStatsMixin):
                     if entry.age == 0 and bigint_to_int(entry.cmtime) < self._newest_cmtime or \
                        entry.age > 0 and entry.age < ttl:
                         msgpack.pack((path_hash, entry), fd)
-            self.cache_config.integrity['files'] = fd.integrity_data
+            self.cache_config.integrity[files_cache_name()] = fd.integrity_data
             pi.output('Saving chunks cache')
             with IntegrityCheckedFile(path=os.path.join(self.path, 'chunks'), write=True) as fd:
                 self.chunks.write(fd)
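
The keep/expire condition above mixes and/or across a line continuation and is easy to misread. Restated as a predicate (field and parameter names here are mine, not borg's):

    def keep_entry(age, cmtime_ns, newest_cmtime_ns, ttl):
        # Seen in this backup (age == 0): keep only entries strictly older than
        # the newest change time observed, avoiding cmtime-granularity races.
        if age == 0:
            return cmtime_ns < newest_cmtime_ns
        # Not seen for `age` backups: keep while still younger than the TTL.
        return age < ttl

    assert keep_entry(age=0, cmtime_ns=100, newest_cmtime_ns=200, ttl=20)       # kept
    assert not keep_entry(age=0, cmtime_ns=200, newest_cmtime_ns=200, ttl=20)   # too new
    assert not keep_entry(age=20, cmtime_ns=100, newest_cmtime_ns=200, ttl=20)  # expired
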
@@ -614,7 +623,7 @@ class LocalCache(CacheStatsMixin):
         if os.path.exists(txn_dir):
             shutil.copy(os.path.join(txn_dir, 'config'), self.path)
             shutil.copy(os.path.join(txn_dir, 'chunks'), self.path)
-            shutil.copy(os.path.join(txn_dir, 'files'), self.path)
+            shutil.copy(os.path.join(txn_dir, files_cache_name()), self.path)
             os.rename(txn_dir, os.path.join(self.path, 'txn.tmp'))
         if os.path.exists(os.path.join(self.path, 'txn.tmp')):
             shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
@@ -881,7 +890,7 @@ class LocalCache(CacheStatsMixin):
         shutil.rmtree(os.path.join(self.path, 'chunks.archive.d'))
         os.makedirs(os.path.join(self.path, 'chunks.archive.d'))
         self.chunks = ChunkIndex()
-        with SaveFile(os.path.join(self.path, 'files'), binary=True):
+        with SaveFile(os.path.join(self.path, files_cache_name()), binary=True):
             pass  # empty file
         self.cache_config.manifest_id = ''
         self.cache_config._config.set('cache', 'manifest', '')