Merge pull request #8281 from Aztorius/no-cache-option

Add BORG_USE_CHUNKS_ARCHIVE env var, fixes #8280
TW 2024-07-13 22:34:20 +02:00 committed by GitHub
commit 246727f12d
GPG Key ID: B5690EEEBB952194
4 changed files with 23 additions and 16 deletions

View File

@@ -720,18 +720,8 @@ will make the subsequent rebuilds faster (because it needs to transfer less data
 from the repository). While being faster, the cache needs quite some disk space,
 which might be unwanted.
 
-There is a temporary (but maybe long lived) hack to avoid using lots of disk
-space for chunks.archive.d (see :issue:`235` for details):
-
-::
-
-    # this assumes you are working with the same user as the backup.
-    cd ~/.cache/borg/$(borg config id)
-    rm -rf chunks.archive.d ; touch chunks.archive.d
-
-This deletes all the cached archive chunk indexes and replaces the directory
-that kept them with a file, so borg won't be able to store anything "in" there
-in future.
+You can disable the cached archive chunk indexes by setting the environment
+variable ``BORG_USE_CHUNKS_ARCHIVE`` to ``no``.
 
 This has some pros and cons, though:
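In practice, the new variable is read from the process environment, so it can be set per invocation or per shell session. A minimal sketch of possible usage (the actual ``borg create`` arguments are left out here, since they depend on your borg version and repository setup)::

    # one-off: skip building chunks.archive.d for this run only
    BORG_USE_CHUNKS_ARCHIVE=no borg create ...

    # or: disable it for every borg command started from this shell
    export BORG_USE_CHUNKS_ARCHIVE=no

With the variable set to ``no``, cache resyncs have to re-read archive chunk indexes from the repository and are therefore slower, but the ``chunks.archive.d`` directory stays empty.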

View File

@@ -67,6 +67,9 @@ General:
         When set to a numeric value, this determines the maximum "time to live" for the files cache
         entries (default: 20). The files cache is used to determine quickly whether a file is unchanged.
         The FAQ explains this more detailed in: :ref:`always_chunking`
+    BORG_USE_CHUNKS_ARCHIVE
+        When set to no (default: yes), the ``chunks.archive.d`` folder will not be used. This reduces
+        disk space usage but slows down cache resyncs.
     BORG_SHOW_SYSINFO
         When set to no (default: yes), system information (like OS, Python version, ...) in
         exceptions is not shown.
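Taken together, the variables documented above can be set once in the environment of a backup script; the values below are only illustrative, not recommendations::

    export BORG_FILES_CACHE_TTL=30       # numeric "time to live" for files cache entries (default: 20)
    export BORG_USE_CHUNKS_ARCHIVE=no    # skip chunks.archive.d: less disk space, slower cache resyncs
    export BORG_SHOW_SYSINFO=no          # omit OS/Python details from exception output (default: yes)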

View File

@@ -521,6 +521,7 @@ class LocalCache(CacheStatsMixin):
         self.cache_mode = cache_mode
         self.timestamp = None
         self.txn_active = False
+        self.do_cache = os.environ.get("BORG_USE_CHUNKS_ARCHIVE", "yes").lower() in ["yes", "1", "true"]
 
         self.path = cache_dir(self.repository, path)
         self.security_manager = SecurityManager(self.repository)
@@ -910,10 +911,6 @@
         self.begin_txn()
         with cache_if_remote(self.repository, decrypted_cache=self.repo_objs) as decrypted_repository:
-            # TEMPORARY HACK:
-            # to avoid archive index caching, create a FILE named ~/.cache/borg/REPOID/chunks.archive.d -
-            # this is only recommended if you have a fast, low latency connection to your repo (e.g. if repo is local).
-            self.do_cache = os.path.isdir(archive_path)
             self.chunks = create_master_idx(self.chunks)
 
     def check_cache_compatibility(self):

View File

@@ -319,6 +319,23 @@ def test_check_cache(archivers, request):
     check_cache(archiver)
+
+
+def test_env_use_chunks_archive(archivers, request, monkeypatch):
+    archiver = request.getfixturevalue(archivers)
+    create_test_files(archiver.input_path)
+    monkeypatch.setenv("BORG_USE_CHUNKS_ARCHIVE", "no")
+    cmd(archiver, "rcreate", RK_ENCRYPTION)
+    repository_id = bin_to_hex(_extract_repository_id(archiver.repository_path))
+    cache_path = os.path.join(archiver.cache_path, repository_id)
+    cmd(archiver, "create", "test", "input")
+    assert os.path.exists(cache_path)
+    assert os.path.exists(os.path.join(cache_path, "chunks.archive.d"))
+    assert len(os.listdir(os.path.join(cache_path, "chunks.archive.d"))) == 0
+    cmd(archiver, "rdelete", "--cache-only")
+    monkeypatch.setenv("BORG_USE_CHUNKS_ARCHIVE", "yes")
+    cmd(archiver, "create", "test2", "input")
+    assert len(os.listdir(os.path.join(cache_path, "chunks.archive.d"))) > 0
 
 
 # Begin Remote Tests
 def test_remote_repo_restrict_to_path(remote_archiver):
     original_location, repo_path = remote_archiver.repository_location, remote_archiver.repository_path
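One way to exercise just the new test above in a development checkout (assuming pytest and the borg test requirements are installed; the selection expression is only a suggestion)::

    # run only the new env var test
    python -m pytest -k test_env_use_chunks_archive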