Merge branch 'master' into enhancements/issue-6656

TW 2023-04-08 17:40:42 +02:00 committed by GitHub
commit 0301451bd4
45 changed files with 800 additions and 120 deletions


@ -65,8 +65,8 @@ jobs:
python-version: '3.11'
toxenv: py311-fuse3
- os: macos-12
python-version: '3.9'
toxenv: py39-none # note: no fuse testing, due to #6099, see also #6196.
python-version: '3.11'
toxenv: py311-none # note: no fuse testing, due to #6099, see also #6196.
env:
# Configure pkg-config to use OpenSSL from Homebrew
@ -74,7 +74,7 @@ jobs:
TOXENV: ${{ matrix.toxenv }}
runs-on: ${{ matrix.os }}
timeout-minutes: 40
timeout-minutes: 60
steps:
- uses: actions/checkout@v3


@ -52,7 +52,7 @@ jobs:
sudo apt-get install -y libssl-dev libacl1-dev libxxhash-dev liblz4-dev libzstd-dev
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@ -66,4 +66,4 @@ jobs:
pip3 install -r requirements.d/development.txt
pip3 install -e .
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2

.pre-commit-config.yaml (new file)

@ -0,0 +1,10 @@
repos:
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 6.0.0
hooks:
- id: flake8
files: '(src|scripts|conftest.py)'


@ -1,4 +1,4 @@
Copyright (C) 2015-2022 The Borg Collective (see AUTHORS file)
Copyright (C) 2015-2023 The Borg Collective (see AUTHORS file)
Copyright (C) 2010-2014 Jonas Borgström <jonas@borgstrom.se>
All rights reserved.

Vagrantfile

@ -15,7 +15,8 @@ def packages_debianoid(user)
apt-get -y -qq update
apt-get -y -qq dist-upgrade
# for building borgbackup and dependencies:
apt install -y libssl-dev libacl1-dev libxxhash-dev liblz4-dev libzstd-dev pkg-config
apt install -y pkg-config
apt install -y libssl-dev libacl1-dev libxxhash-dev liblz4-dev libzstd-dev || true
apt install -y libfuse-dev fuse || true
apt install -y libfuse3-dev fuse3 || true
apt install -y locales || true
@ -136,9 +137,95 @@ def packages_openindiana
EOF
end
# Build and install borg dependencies from source
def install_source_dependencies(user)
return <<-EOF
set -e -o pipefail
# Install in /usr/local
export PREFIX=/usr/local
# Make PKG_CONFIG_PATH explicit, even if /usr/local/lib/pkgconfig is enabled by default
export PKG_CONFIG_PATH=${PREFIX}/lib/pkgconfig
echo 'export PKG_CONFIG_PATH="'${PKG_CONFIG_PATH}'"' >> ~#{user}/.bash_profile
# All source packages integrate with pkg-config, remove any previous overrides
sed -i '/BORG_.*_PREFIX/d' ~#{user}/.bash_profile
# Setup pyenv to pick up the custom openssl version (python >= 3.9 requires openssl >= 1.1.1)
echo 'export PYTHON_CONFIGURE_OPTS="--with-openssl='"${PREFIX}"' --with-openssl-rpath=auto"' >> ~#{user}/.bash_profile
echo 'export LDFLAGS=-Wl,-rpath,'"${PREFIX}"'/lib' >> ~#{user}/.bash_profile
# Silence git advice about detached HEAD (triggered by cloning tags below)
git config --global advice.detachedHead false
# libattr
VERSION_LIBATTR=2.5.1
curl -s -L https://download.savannah.nongnu.org/releases/attr/attr-${VERSION_LIBATTR}.tar.gz | tar xvz --strip-components=1 --one-top-level=attr -f - -C ${PREFIX}/src
cd ${PREFIX}/src/attr
./configure --prefix=${PREFIX}
make -j$(nproc) install
# libacl
VERSION_LIBACL=2.3.1
curl -s -L https://download.savannah.nongnu.org/releases/acl/acl-${VERSION_LIBACL}.tar.gz | tar xvz --strip-components=1 --one-top-level=acl -f - -C ${PREFIX}/src
cd ${PREFIX}/src/acl
./configure --prefix=${PREFIX}
make -j$(nproc) install
# liblz4
VERSION_LIBLZ4=1.9.4
git -C ${PREFIX}/src clone --depth 1 --branch v${VERSION_LIBLZ4} https://github.com/lz4/lz4.git
cd ${PREFIX}/src/lz4
make -j$(nproc) install PREFIX=${PREFIX}
# libzstd
VERSION_LIBZSTD=1.5.4
git -C ${PREFIX}/src clone --depth 1 --branch v${VERSION_LIBZSTD} https://github.com/facebook/zstd.git
cd ${PREFIX}/src/zstd
make -j$(nproc) install PREFIX=${PREFIX}
# xxHash
VERSION_LIBXXHASH=0.8.1
git -C ${PREFIX}/src clone --depth 1 --branch v${VERSION_LIBXXHASH} https://github.com/Cyan4973/xxHash.git
cd ${PREFIX}/src/xxHash
make -j$(nproc) install PREFIX=${PREFIX}
# openssl
VERSION_OPENSSL=1_1_1t
git -C ${PREFIX}/src clone --depth 1 --branch OpenSSL_${VERSION_OPENSSL} https://github.com/openssl/openssl.git
cd ${PREFIX}/src/openssl
./config --prefix=${PREFIX} --openssldir=${PREFIX}/lib/ssl
make -j$(nproc)
make -j$(nproc) install
# libfuse3 requires ninja
VERSION_NINJA=1.11.1
git -C ${PREFIX}/src clone --depth 1 --branch v${VERSION_NINJA} https://github.com/ninja-build/ninja.git
cd ${PREFIX}/src/ninja
python3 configure.py --bootstrap
install --mode=755 --target-directory=${PREFIX}/bin ninja
# libfuse3 requires meson >= 0.50; python3.5 support is dropped in meson >= 0.57
VERSION_MESON=0.56.2
git -C ${PREFIX}/src clone --depth 1 --branch ${VERSION_MESON} https://github.com/mesonbuild/meson.git
ln -s ${PREFIX}/src/meson/meson.py ${PREFIX}/bin/meson
# libfuse3
VERSION_LIBFUSE=3.14.0
git -C ${PREFIX}/src clone --depth 1 --branch fuse-${VERSION_LIBFUSE} https://github.com/libfuse/libfuse.git
cd ${PREFIX}/src/libfuse
mkdir build; cd build
meson setup --prefix ${PREFIX} --libdir ${PREFIX}/lib ..
ninja
ninja install
EOF
end
def install_pyenv(boxname)
return <<-EOF
echo 'export PYTHON_CONFIGURE_OPTS="--enable-shared"' >> ~/.bash_profile
echo 'export PYTHON_CONFIGURE_OPTS="${PYTHON_CONFIGURE_OPTS} --enable-shared"' >> ~/.bash_profile
echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.bash_profile
echo 'export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.bash_profile
. ~/.bash_profile
@ -160,6 +247,7 @@ end
def install_pythons(boxname)
return <<-EOF
. ~/.bash_profile
echo "PYTHON_CONFIGURE_OPTS: ${PYTHON_CONFIGURE_OPTS}"
pyenv install 3.11.2 # tests, binary build
pyenv install 3.10.1 # tests
pyenv install 3.9.1 # tests
@ -330,6 +418,23 @@ Vagrant.configure(2) do |config|
b.vm.provision "run tests", :type => :shell, :privileged => false, :inline => run_tests("buster64", ".*none.*")
end
config.vm.define "stretch64" do |b|
b.vm.box = "debian/stretch64"
b.vm.provider :virtualbox do |v|
v.memory = 1024 + $wmem
end
b.vm.provision "fs init", :type => :shell, :inline => fs_init("vagrant")
b.vm.provision "packages debianoid", :type => :shell, :inline => packages_debianoid("vagrant")
b.vm.provision "install source dependencies", :type => :shell, :privileged => true, :inline => install_source_dependencies("vagrant")
b.vm.provision "install pyenv", :type => :shell, :privileged => false, :inline => install_pyenv("stretch64")
b.vm.provision "install pythons", :type => :shell, :privileged => false, :inline => install_pythons("stretch64")
b.vm.provision "build env", :type => :shell, :privileged => false, :inline => build_pyenv_venv("stretch64")
b.vm.provision "install borg", :type => :shell, :privileged => false, :inline => install_borg("llfuse")
b.vm.provision "install pyinstaller", :type => :shell, :privileged => false, :inline => install_pyinstaller()
b.vm.provision "build binary with pyinstaller", :type => :shell, :privileged => false, :inline => build_binary_with_pyinstaller("stretch64")
b.vm.provision "run tests", :type => :shell, :privileged => false, :inline => run_tests("stretch64", ".*none.*")
end
config.vm.define "freebsd64" do |b|
b.vm.box = "generic/freebsd13"
b.vm.provider :virtualbox do |v|


@ -12,8 +12,8 @@ This section provides information about security and corruption issues.
Change Log 2.x
==============
Version 2.0.0b5 (2023-02-27)
----------------------------
Version 2.0.0b6 (not released yet)
----------------------------------
Please note:
@ -108,7 +108,59 @@ Compatibility notes:
Option --filter=... might need an update, if you filter for the status chars
that were changed.
- borg is now more strict and disallows giving some options multiple times when
that makes no sense ("Highlander" options, see #6269). This might make scripts
fail now that somehow "worked" before (but maybe did not work as intended due to
the contradicting options).
New features:
- diff: include changes in ctime and mtime, #7248
- diff: sort JSON output alphabetically
- diff --content-only: option added to ignore metadata changes
- import-tar --ignore-zeros: new option to support importing concatenated tars, #7432
- debug id-hash / parse-obj / format-obj: new debug commands, #7406
Fixes:
- do not retry on permission errors (pointless)
- transfer: verify chunks we get using assert_id, #7383
- fix config/cache dir compatibility issues, #7445
- xattrs: fix namespace processing on FreeBSD, #6997
- ProgressIndicatorPercent: fix space computation for wide chars, #3027
- delete: remove --cache-only option, #7440.
for deleting the cache only, use: borg rdelete --cache-only
- borg debug get-obj/put-obj: fixed chunk id
Other changes:
- allow msgpack 1.0.5 also
- clarify platformdirs requirements, #7393.
3.0.0 is only required for macOS due to breaking changes.
2.6.0 was the last breaking change for Linux/UNIX.
- mount: improve mountpoint error msgs, see #7496
- more Highlander options, #6269
- Windows: simplify building (just use pip)
- docs:
- add installation instructions for Windows
- improve --one-file-system help and docs (macOS APFS), #5618 #4876
- BORG_KEY_FILE: clarify docs, #7444
- installation: add link to OS dependencies, #7356
- update FAQ about locale/unicode issues, #6999
- improve mount options rendering, #7359
- make timestamps in manual pages reproducible.
- CI / tests / vagrant:
- added pre-commit for linting purposes, #7476
- resolved mode bug and added sleep clause for darwin systems, #7470
- "auto" compressor tests: do not assume zlib is better than lz4, #7363
- add stretch64 VM with deps built from source
- misc. other CI / test fixes and updates
Version 2.0.0b5 (2023-02-27)
----------------------------
New features:


@ -179,6 +179,13 @@ virtual env and run::
pip install -r requirements.d/development.txt
This project uses pre-commit to format and lint code before it is committed. Although
pre-commit is installed when running the command above, its hooks have to be
installed separately. Run this command to install the pre-commit hooks::
pre-commit install
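After that, the hooks run automatically on ``git commit``. To check the whole tree
without committing (e.g. before pushing), the hooks can also be run manually::

    pre-commit run --all-files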
Running the tests
-----------------


@ -104,6 +104,14 @@ General:
caused EROFS. You will need this to make archives from volume shadow copies
in WSL1 (Windows Subsystem for Linux 1).
Output formatting:
BORG_LIST_FORMAT
Gives the default value for ``borg list --format=X``.
BORG_RLIST_FORMAT
Gives the default value for ``borg rlist --format=X``.
BORG_PRUNE_FORMAT
Gives the default value for ``borg prune --format=X``.
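A minimal sketch of the resolution order these variables follow, mirroring the
``borg list`` code changed further below (an explicit ``--format`` wins, then the
environment variable, then the built-in default):

    import os

    DEFAULT = "{mode} {user:6} {group:6} {size:8} {mtime} {path}{extra}{NL}"

    def resolve_list_format(args_format=None, args_short=False):
        # --format beats BORG_LIST_FORMAT beats the built-in default
        if args_format is not None:
            return args_format
        if args_short:
            return "{path}{NL}"
        return os.environ.get("BORG_LIST_FORMAT", DEFAULT)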
Some automatic "answerers" (if set, they automatically answer confirmation questions):
BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=no (or =yes)
For "Warning: Attempting to access a previously unknown unencrypted repository"
@ -145,7 +153,16 @@ Directories and files:
Defaults to ``$BORG_CONFIG_DIR/keys``.
This directory contains keys for encrypted repositories.
BORG_KEY_FILE
When set, use the given filename as repository key file.
When set, use the given path as repository key file. Please note that this is only
for rather special applications that externally fully manage the key files:
- this setting only applies to the keyfile modes (not to the repokey modes).
- using a full, absolute path to the key file is recommended.
- all directories in the given path must exist.
- this setting forces borg to use the key file at the given location.
- the key file must either exist (for most commands) or will be created (``borg rcreate``).
- you need to give a different path for different repositories.
- you need to point to the correct key file matching the repository the command will operate on.
TMPDIR
This is where temporary files are stored (might need a lot of temporary space for some
operations), see tempfile_ for details.


@ -10,3 +10,4 @@ pytest-cov
pytest-benchmark
Cython
twine
pre-commit


@ -885,7 +885,7 @@ Duration: {0.duration}
item_size = item.size
if item_size != item_chunks_size:
raise BackupError(
"Size inconsistency detected: size {}, chunks size {}".format(item_size, item_chunks_size)
f"Size inconsistency detected: size {item_size}, chunks size {item_chunks_size}"
)
if has_damaged_chunks:
raise BackupError("File has damaged (all-zero) chunks. Try running borg check --repair.")


@ -21,6 +21,7 @@ try:
logger = create_logger()
from ._common import Highlander
from .. import __version__
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE
@ -219,7 +220,15 @@ class Archiver(
def add_argument(*args, **kwargs):
if "dest" in kwargs:
kwargs.setdefault("action", "store")
assert kwargs["action"] in ("help", "store_const", "store_true", "store_false", "store", "append")
assert kwargs["action"] in (
Highlander,
"help",
"store_const",
"store_true",
"store_false",
"store",
"append",
)
is_append = kwargs["action"] == "append"
if is_append:
self.append_options.add(kwargs["dest"])
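``Highlander`` (imported from ``_common``) is an ``argparse`` action that makes an
option error out when it is given more than once. A minimal sketch of the idea, not
necessarily the project's exact implementation:

    import argparse

    class Highlander(argparse.Action):
        # reject an option that is given more than once ("there can be only one")
        def __call__(self, parser, namespace, values, option_string=None):
            if getattr(namespace, self.dest + "_seen", False):
                raise argparse.ArgumentError(self, "There can be only one.")
            setattr(namespace, self.dest + "_seen", True)
            setattr(namespace, self.dest, values)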
@ -432,7 +441,6 @@ class Archiver(
"""turn on INFO level logging for args that imply that they will produce output"""
# map of option name to name of logger for that option
option_logger = {
"output_list": "borg.output.list",
"show_version": "borg.output.show-version",
"show_rc": "borg.output.show-rc",
"stats": "borg.output.stats",
@ -442,6 +450,10 @@ class Archiver(
option_set = args.get(option, False)
logging.getLogger(logger_name).setLevel("INFO" if option_set else "WARN")
# special-case --list / --list-kept / --list-pruned as they all work on same logger
options = [args.get(name, False) for name in ("output_list", "list_kept", "list_pruned")]
logging.getLogger("borg.output.list").setLevel("INFO" if any(options) else "WARN")
def _setup_topic_debugging(self, args):
"""Turn on DEBUG level logging for specified --debug-topics."""
for topic in args.debug_topics:


@ -339,6 +339,7 @@ def define_exclude_and_patterns(add_option, *, tag_files=False, strip_components
dest="strip_components",
type=int,
default=0,
action=Highlander,
help="Remove the specified number of leading path elements. "
"Paths with fewer elements will be silently skipped.",
)
@ -372,6 +373,7 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True, ol
dest="sort_by",
type=SortBySpec,
default=sort_by_default,
action=Highlander,
help="Comma-separated list of sorting keys; valid keys are: {}; default is: {}".format(
", ".join(AI_HUMAN_SORT_KEYS), sort_by_default
),
@ -383,16 +385,18 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True, ol
"--first",
metavar="N",
dest="first",
default=0,
type=positive_int_validator,
default=0,
action=Highlander,
help="consider first N archives after other filters were applied",
)
group.add_argument(
"--last",
metavar="N",
dest="last",
default=0,
type=positive_int_validator,
default=0,
action=Highlander,
help="consider last N archives after other filters were applied",
)
@ -401,15 +405,17 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True, ol
group.add_argument(
"--oldest",
metavar="TIMESPAN",
type=relative_time_marker_validator,
dest="oldest",
type=relative_time_marker_validator,
action=Highlander,
help="consider archives between the oldest archive's timestamp and (oldest + TIMESPAN), e.g. 7d or 12m.",
)
group.add_argument(
"--newest",
metavar="TIMESPAN",
type=relative_time_marker_validator,
dest="newest",
type=relative_time_marker_validator,
action=Highlander,
help="consider archives between the newest archive's timestamp and (newest - TIMESPAN), e.g. 7d or 12m.",
)
@ -418,15 +424,17 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True, ol
group.add_argument(
"--older",
metavar="TIMESPAN",
type=relative_time_marker_validator,
dest="older",
type=relative_time_marker_validator,
action=Highlander,
help="consider archives older than (now - TIMESPAN), e.g. 7d oder 12m.",
)
group.add_argument(
"--newer",
metavar="TIMESPAN",
type=relative_time_marker_validator,
dest="newer",
type=relative_time_marker_validator,
action=Highlander,
help="consider archives newer than (now - TIMESPAN), e.g. 7d or 12m.",
)
@ -500,6 +508,7 @@ def define_common_options(add_common_option):
dest="lock_wait",
type=int,
default=int(os.environ.get("BORG_LOCK_WAIT", 1)),
action=Highlander,
help="wait at most SECONDS for acquiring a repository/cache lock (default: %(default)d).",
)
add_common_option(
@ -517,12 +526,14 @@ def define_common_options(add_common_option):
dest="umask",
type=lambda s: int(s, 8),
default=UMASK_DEFAULT,
action=Highlander,
help="set umask to M (local only, default: %(default)04o)",
)
add_common_option(
"--remote-path",
metavar="PATH",
dest="remote_path",
action=Highlander,
help='use PATH as borg executable on the remote (default: "borg")',
)
add_common_option(
@ -530,6 +541,7 @@ def define_common_options(add_common_option):
metavar="RATE",
dest="upload_ratelimit",
type=int,
action=Highlander,
help="set network upload rate limit in kiByte/s (default: 0=unlimited)",
)
add_common_option(
@ -537,6 +549,7 @@ def define_common_options(add_common_option):
metavar="UPLOAD_BUFFER",
dest="upload_buffer",
type=int,
action=Highlander,
help="set network upload buffer size in MiB. (default: 0=no buffer)",
)
add_common_option(
@ -544,6 +557,7 @@ def define_common_options(add_common_option):
metavar="FILE",
dest="debug_profile",
default=None,
action=Highlander,
help="Write execution profile in Borg format into FILE. For local use a Python-"
'compatible file can be generated by suffixing FILE with ".pyprof".',
)
@ -551,6 +565,7 @@ def define_common_options(add_common_option):
"--rsh",
metavar="RSH",
dest="rsh",
action=Highlander,
help="Use this command to connect to the 'borg serve' process (default: 'ssh')",
)
add_common_option(
@ -560,6 +575,7 @@ def define_common_options(add_common_option):
dest="location",
type=location_validator(other=False),
default=Location(other=False),
action=Highlander,
help="repository to use",
)
@ -575,7 +591,7 @@ def build_filter(matcher, strip_components):
if strip_components:
def item_filter(item):
matched = matcher.match(item.path) and os.sep.join(item.path.split(os.sep)[strip_components:])
matched = matcher.match(item.path) and len(item.path.split(os.sep)) > strip_components
return matched
else:


@ -2,7 +2,7 @@ import argparse
from contextlib import contextmanager
import functools
import os
import shutil
import tempfile
import time
from ..constants import * # NOQA
@ -60,9 +60,7 @@ class BenchmarkMixIn:
@contextmanager
def test_files(path, count, size, random):
try:
path = os.path.join(path, "borg-test-data")
os.makedirs(path)
with tempfile.TemporaryDirectory(prefix="borg-test-data-", dir=path) as path:
z_buff = None if random else memoryview(zeros)[:size] if size <= len(zeros) else b"\0" * size
for i in range(count):
fname = os.path.join(path, "file_%d" % i)
@ -70,8 +68,6 @@ class BenchmarkMixIn:
with SyncFile(fname, binary=True) as fd: # used for posix_fadvise's sake
fd.write(data)
yield path
finally:
shutil.rmtree(path)
if "_BORG_BENCHMARK_CRUD_TEST" in os.environ:
tests = [("Z-TEST", 1, 1, False), ("R-TEST", 1, 1, True)]


@ -1,5 +1,5 @@
import argparse
from ._common import with_repository
from ._common import with_repository, Highlander
from ..archive import ArchiveChecker
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
@ -176,6 +176,7 @@ class CheckMixIn:
dest="max_duration",
type=int,
default=0,
action=Highlander,
help="do only a partial repo check for max. SECONDS seconds (Default: unlimited)",
)
define_archive_filters_group(subparser)


@ -1,6 +1,6 @@
import argparse
from ._common import with_repository
from ._common import with_repository, Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..manifest import Manifest
@ -61,5 +61,6 @@ class CompactMixIn:
dest="threshold",
type=int,
default=10,
action=Highlander,
help="set minimum threshold for saved space in PERCENT (Default: 10)",
)


@ -132,6 +132,9 @@ class CreateMixIn:
return self.exit_code
else:
for path in args.paths:
if path == "": # issue #5637
self.print_warning("An empty string was given as PATH, ignoring.")
continue
if path == "-": # stdin
path = args.stdin_name
mode = args.stdin_mode
@ -635,13 +638,13 @@ class CreateMixIn:
The ``-x`` or ``--one-file-system`` option excludes directories, that are mountpoints (and everything in them).
It detects mountpoints by comparing the device number from the output of ``stat()`` of the directory and its
parent directory. Specifically, it excludes directories for which ``stat()`` reports a device number different
from the device number of their parent. Be aware that in Linux (and possibly elsewhere) there are directories
with device number different from their parent, which the kernel does not consider a mountpoint and also the
other way around. Examples are bind mounts (possibly same device number, but always a mountpoint) and ALL
subvolumes of a btrfs (different device number from parent but not necessarily a mountpoint). Therefore when
using ``--one-file-system``, one should make doubly sure that the backup works as intended especially when using
btrfs. This is even more important, if the btrfs layout was created by someone else, e.g. a distribution
installer.
from the device number of their parent.
In general: be aware that there are directories with a device number different from their parent's,
which the kernel does not consider mountpoints, and vice versa.
Linux examples for this are bind mounts (possibly same device number, but always a mountpoint) and ALL
subvolumes of a btrfs (different device number from parent but not necessarily a mountpoint).
macOS examples are the apfs mounts of a typical macOS installation.
Therefore, when using ``--one-file-system``, you should double-check that the backup works as intended.
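A minimal sketch of the device-number heuristic described above (standard library
only; the caveats about btrfs subvolumes and bind mounts apply to it as well):

    import os

    def looks_like_mountpoint(path):
        # a directory whose device number differs from its parent's
        # is treated as a mountpoint by this heuristic
        parent = os.path.dirname(os.path.abspath(path))
        return os.stat(path).st_dev != os.stat(parent).st_dev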
.. _list_item_flags:
@ -763,6 +766,7 @@ class CreateMixIn:
metavar="NAME",
dest="stdin_name",
default="stdin",
action=Highlander,
help="use NAME in archive for stdin data (default: %(default)r)",
)
subparser.add_argument(
@ -770,6 +774,7 @@ class CreateMixIn:
metavar="USER",
dest="stdin_user",
default=None,
action=Highlander,
help="set user USER in archive for stdin data (default: do not store user/uid)",
)
subparser.add_argument(
@ -777,6 +782,7 @@ class CreateMixIn:
metavar="GROUP",
dest="stdin_group",
default=None,
action=Highlander,
help="set group GROUP in archive for stdin data (default: do not store group/gid)",
)
subparser.add_argument(
@ -785,6 +791,7 @@ class CreateMixIn:
dest="stdin_mode",
type=lambda s: int(s, 8),
default=STDIN_MODE_DEFAULT,
action=Highlander,
help="set mode to M in archive for stdin data (default: %(default)04o)",
)
subparser.add_argument(
@ -804,6 +811,7 @@ class CreateMixIn:
)
subparser.add_argument(
"--paths-delimiter",
action=Highlander,
metavar="DELIM",
help="set path delimiter for ``--paths-from-stdin`` and ``--paths-from-command`` (default: \\n) ",
)
@ -819,7 +827,8 @@ class CreateMixIn:
"--one-file-system",
dest="one_file_system",
action="store_true",
help="stay in the same file system and do not store mount points of other file systems. This might behave different from your expectations, see the docs.",
help="stay in the same file system and do not store mount points of other file systems - "
"this might behave different from your expectations, see the description below.",
)
fs_group.add_argument(
"--numeric-ids",
@ -877,6 +886,7 @@ class CreateMixIn:
dest="comment",
type=comment_validator,
default="",
action=Highlander,
help="add a comment text to the archive",
)
archive_group.add_argument(
@ -885,6 +895,7 @@ class CreateMixIn:
dest="timestamp",
type=timestamp,
default=None,
action=Highlander,
help="manually specify the archive creation date/time (yyyy-mm-ddThh:mm:ss[(+|-)HH:MM] format, "
"(+|-)HH:MM is the UTC offset, default: local time zone). Alternatively, give a reference file/directory.",
)
@ -895,6 +906,7 @@ class CreateMixIn:
dest="checkpoint_interval",
type=int,
default=1800,
action=Highlander,
help="write checkpoint every SECONDS seconds (Default: 1800)",
)
archive_group.add_argument(
@ -903,6 +915,7 @@ class CreateMixIn:
dest="checkpoint_volume",
type=int,
default=0,
action=Highlander,
help="write checkpoint every BYTES bytes (Default: 0, meaning no volume based checkpointing)",
)
archive_group.add_argument(
@ -922,6 +935,7 @@ class CreateMixIn:
dest="compression",
type=CompressionSpec,
default=CompressionSpec("lz4"),
action=Highlander,
help="select compression algorithm, see the output of the " '"borg help compression" command for details.',
)


@ -5,6 +5,7 @@ import json
import textwrap
from ..archive import Archive
from ..compress import CompressionSpec
from ..constants import * # NOQA
from ..helpers import msgpack
from ..helpers import sysinfo
@ -17,7 +18,7 @@ from ..platform import get_process_id
from ..repository import Repository, LIST_SCAN_LIMIT, TAG_PUT, TAG_DELETE, TAG_COMMIT
from ..repoobj import RepoObj
from ._common import with_repository
from ._common import with_repository, Highlander
from ._common import process_epilog
@ -243,7 +244,7 @@ class DebugMixIn:
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print("object id %s is invalid [%s]." % (hex_id, str(err)))
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
try:
data = repository.get(id)
@ -265,6 +266,61 @@ class DebugMixIn:
print(id.hex())
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_parse_obj(self, args, repository, manifest):
"""parse borg object file into meta dict and data (decrypting, decompressing)"""
# get the object from id
hex_id = args.id
try:
id = unhexlify(hex_id)
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
with open(args.object_path, "rb") as f:
cdata = f.read()
repo_objs = manifest.repo_objs
meta, data = repo_objs.parse(id=id, cdata=cdata)
with open(args.json_path, "w") as f:
json.dump(meta, f)
with open(args.binary_path, "wb") as f:
f.write(data)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_format_obj(self, args, repository, manifest):
"""format file and metadata into borg object file"""
# get the object from id
hex_id = args.id
try:
id = unhexlify(hex_id)
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
with open(args.binary_path, "rb") as f:
data = f.read()
with open(args.json_path) as f:
meta = json.load(f)
repo_objs = manifest.repo_objs
data_encrypted = repo_objs.format(id=id, meta=meta, data=data)
with open(args.object_path, "wb") as f:
f.write(data_encrypted)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_put_obj(self, args, repository):
"""put file contents into the repository"""
@ -276,7 +332,7 @@ class DebugMixIn:
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print("object id %s is invalid [%s]." % (hex_id, str(err)))
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
repository.put(id, data)
print("object %s put." % hex_id)
@ -465,16 +521,18 @@ class DebugMixIn:
"--segment",
metavar="SEG",
dest="segment",
default=None,
type=positive_int_validator,
default=None,
action=Highlander,
help="used together with --ghost: limit processing to given segment.",
)
subparser.add_argument(
"--offset",
metavar="OFFS",
dest="offset",
default=None,
type=positive_int_validator,
default=None,
action=Highlander,
help="used together with --ghost: limit processing to given offset.",
)
@ -497,6 +555,7 @@ class DebugMixIn:
"wanted",
metavar="WANTED",
type=str,
action=Highlander,
help="term to search the repo for, either 0x1234abcd hex term or a string",
)
debug_id_hash_epilog = process_epilog(
@ -518,6 +577,73 @@ class DebugMixIn:
"path", metavar="PATH", type=str, help="content for which the id-hash shall get computed"
)
# parse_obj
debug_parse_obj_epilog = process_epilog(
"""
This command parses the object file into metadata (as JSON) and uncompressed data.
"""
)
subparser = debug_parsers.add_parser(
"parse-obj",
parents=[common_parser],
add_help=False,
description=self.do_debug_parse_obj.__doc__,
epilog=debug_parse_obj_epilog,
formatter_class=argparse.RawDescriptionHelpFormatter,
help="parse borg object file into meta dict and data",
)
subparser.set_defaults(func=self.do_debug_parse_obj)
subparser.add_argument("id", metavar="ID", type=str, help="hex object ID to get from the repo")
subparser.add_argument(
"object_path", metavar="OBJECT_PATH", type=str, help="path of the object file to parse data from"
)
subparser.add_argument(
"binary_path", metavar="BINARY_PATH", type=str, help="path of the file to write uncompressed data into"
)
subparser.add_argument(
"json_path", metavar="JSON_PATH", type=str, help="path of the json file to write metadata into"
)
# format_obj
debug_format_obj_epilog = process_epilog(
"""
This command formats the file and metadata into an object file.
"""
)
subparser = debug_parsers.add_parser(
"format-obj",
parents=[common_parser],
add_help=False,
description=self.do_debug_format_obj.__doc__,
epilog=debug_format_obj_epilog,
formatter_class=argparse.RawDescriptionHelpFormatter,
help="format file and metadata into borg objectfile",
)
subparser.set_defaults(func=self.do_debug_format_obj)
subparser.add_argument("id", metavar="ID", type=str, help="hex object ID to get from the repo")
subparser.add_argument(
"binary_path", metavar="BINARY_PATH", type=str, help="path of the file to convert into objectfile"
)
subparser.add_argument(
"json_path", metavar="JSON_PATH", type=str, help="path of the json file to read metadata from"
)
subparser.add_argument(
"-C",
"--compression",
metavar="COMPRESSION",
dest="compression",
type=CompressionSpec,
default=CompressionSpec("lz4"),
action=Highlander,
help="select compression algorithm, see the output of the " '"borg help compression" command for details.',
)
subparser.add_argument(
"object_path",
metavar="OBJECT_PATH",
type=str,
help="path of the objectfile to write compressed encrypted data into",
)
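# A hypothetical round-trip with these debug commands, as exercised by the
# new test added at the end of this commit:
#
#   borg debug id-hash input/plain.bin                       -> prints ID
#   borg debug format-obj ID input/plain.bin input/meta.json output/data.bin --compression=zstd,2
#   borg debug put-obj ID output/data.bin
#   borg debug get-obj ID output/object.bin
#   borg debug parse-obj ID output/object.bin output/plain.bin output/meta.json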
debug_get_obj_epilog = process_epilog(
"""
This command gets an object from the repository.


@ -1,7 +1,7 @@
import argparse
import logging
from ._common import with_repository
from ._common import with_repository, Highlander
from ..archive import Archive, Statistics
from ..cache import Cache
from ..constants import * # NOQA
@ -156,6 +156,7 @@ class DeleteMixIn:
dest="checkpoint_interval",
type=int,
default=1800,
action=Highlander,
help="write checkpoint every SECONDS seconds (Default: 1800)",
)
define_archive_filters_group(subparser)


@ -1,8 +1,9 @@
import argparse
import os
import textwrap
import sys
from ._common import with_repository, build_matcher
from ._common import with_repository, build_matcher, Highlander
from ..archive import Archive
from ..cache import Cache
from ..constants import * # NOQA
@ -24,7 +25,7 @@ class ListMixIn:
elif args.short:
format = "{path}{NL}"
else:
format = "{mode} {user:6} {group:6} {size:8} {mtime} {path}{extra}{NL}"
format = os.environ.get("BORG_LIST_FORMAT", "{mode} {user:6} {group:6} {size:8} {mtime} {path}{extra}{NL}")
def _list_inner(cache):
archive = Archive(manifest, args.name, cache=cache)
@ -105,6 +106,7 @@ class ListMixIn:
"--format",
metavar="FORMAT",
dest="format",
action=Highlander,
help="specify format for file listing "
'(default: "{mode} {user:6} {group:6} {size:8} {mtime} {path}{extra}{NL}")',
)


@ -24,8 +24,12 @@ class MountMixIn:
self.print_error("borg mount not available: no FUSE support, BORG_FUSE_IMPL=%s." % BORG_FUSE_IMPL)
return self.exit_code
if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
self.print_error("%s: Mountpoint must be a writable directory" % args.mountpoint)
if not os.path.isdir(args.mountpoint):
self.print_error(f"{args.mountpoint}: Mountpoint must be an **existing directory**")
return self.exit_code
if not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
self.print_error(f"{args.mountpoint}: Mountpoint must be a **writable** directory")
return self.exit_code
return self._do_mount(args)


@ -3,13 +3,14 @@ from collections import OrderedDict
from datetime import datetime, timezone, timedelta
import logging
from operator import attrgetter
import os
import re
from ._common import with_repository
from ._common import with_repository, Highlander
from ..archive import Archive, Statistics
from ..cache import Cache
from ..constants import * # NOQA
from ..helpers import format_archive, interval, sig_int, log_multi, ProgressIndicatorPercent
from ..helpers import ArchiveFormatter, interval, sig_int, log_multi, ProgressIndicatorPercent
from ..manifest import Manifest
from ..logger import create_logger
@ -82,6 +83,14 @@ class PruneMixIn:
'"keep-weekly", "keep-monthly", "keep-yearly" or "keep-all" settings must be specified.'
)
return self.exit_code
if args.format is not None:
format = args.format
elif args.short:
format = "{archive}"
else:
format = os.environ.get("BORG_PRUNE_FORMAT", "{archive:<36} {time} [{id}]")
formatter = ArchiveFormatter(format, repository, manifest, manifest.key, json=False, iec=args.iec)
checkpoint_re = r"\.checkpoint(\.\d+)?"
archives_checkpoints = manifest.archives.list(
match=args.match_archives,
@ -155,10 +164,12 @@ class PruneMixIn:
log_message = "Keeping archive (rule: {rule} #{num}):".format(
rule=kept_because[archive.id][0], num=kept_because[archive.id][1]
)
if args.output_list:
list_logger.info(
"{message:<40} {archive}".format(message=log_message, archive=format_archive(archive))
)
if (
args.output_list
or (args.list_pruned and archive in to_delete)
or (args.list_kept and archive not in to_delete)
):
list_logger.info(f"{log_message:<40} {formatter.format_item(archive)}")
pi.finish()
if sig_int:
# Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
@ -229,6 +240,10 @@ class PruneMixIn:
deleted - the "Deleted data" deduplicated size there is most interesting as
that is how much your repository will shrink.
Please note that the "All archives" stats refer to the state after pruning.
You can influence how the ``--list`` output is formatted by using the ``--short``
option (less wide output) or by giving a custom format using ``--format`` (see
the ``borg rlist`` description for more details about the format string).
"""
)
subparser = subparsers.add_parser(
@ -254,11 +269,26 @@ class PruneMixIn:
subparser.add_argument(
"--list", dest="output_list", action="store_true", help="output verbose list of archives it keeps/prunes"
)
subparser.add_argument("--short", dest="short", action="store_true", help="use a less wide archive part format")
subparser.add_argument(
"--list-pruned", dest="list_pruned", action="store_true", help="output verbose list of archives it prunes"
)
subparser.add_argument(
"--list-kept", dest="list_kept", action="store_true", help="output verbose list of archives it keeps"
)
subparser.add_argument(
"--format",
metavar="FORMAT",
dest="format",
action=Highlander,
help="specify format for the archive part " '(default: "{archive:<36} {time} [{id}]")',
)
subparser.add_argument(
"--keep-within",
metavar="INTERVAL",
dest="within",
type=interval,
action=Highlander,
help="keep all archives within this time interval",
)
subparser.add_argument(
@ -267,6 +297,7 @@ class PruneMixIn:
dest="secondly",
type=int,
default=0,
action=Highlander,
help="number of secondly archives to keep",
)
subparser.add_argument(
@ -277,22 +308,57 @@ class PruneMixIn:
help="keep all archives (alias of --keep-last=<infinite>)",
)
subparser.add_argument(
"--keep-minutely", dest="minutely", type=int, default=0, help="number of minutely archives to keep"
"--keep-minutely",
dest="minutely",
type=int,
default=0,
action=Highlander,
help="number of minutely archives to keep",
)
subparser.add_argument(
"-H", "--keep-hourly", dest="hourly", type=int, default=0, help="number of hourly archives to keep"
"-H",
"--keep-hourly",
dest="hourly",
type=int,
default=0,
action=Highlander,
help="number of hourly archives to keep",
)
subparser.add_argument(
"-d", "--keep-daily", dest="daily", type=int, default=0, help="number of daily archives to keep"
"-d",
"--keep-daily",
dest="daily",
type=int,
default=0,
action=Highlander,
help="number of daily archives to keep",
)
subparser.add_argument(
"-w", "--keep-weekly", dest="weekly", type=int, default=0, help="number of weekly archives to keep"
"-w",
"--keep-weekly",
dest="weekly",
type=int,
default=0,
action=Highlander,
help="number of weekly archives to keep",
)
subparser.add_argument(
"-m", "--keep-monthly", dest="monthly", type=int, default=0, help="number of monthly archives to keep"
"-m",
"--keep-monthly",
dest="monthly",
type=int,
default=0,
action=Highlander,
help="number of monthly archives to keep",
)
subparser.add_argument(
"-y", "--keep-yearly", dest="yearly", type=int, default=0, help="number of yearly archives to keep"
"-y",
"--keep-yearly",
dest="yearly",
type=int,
default=0,
action=Highlander,
help="number of yearly archives to keep",
)
define_archive_filters_group(subparser, sort_by=False, first_last=False)
subparser.add_argument(
@ -302,5 +368,6 @@ class PruneMixIn:
dest="checkpoint_interval",
type=int,
default=1800,
action=Highlander,
help="write checkpoint every SECONDS seconds (Default: 1800)",
)


@ -1,7 +1,7 @@
import argparse
from collections import defaultdict
from ._common import with_repository
from ._common import with_repository, Highlander
from ..constants import * # NOQA
from ..compress import CompressionSpec, ObfuscateSize, Auto, COMPRESSOR_TABLE
from ..helpers import sig_int, ProgressIndicatorPercent
@ -230,6 +230,7 @@ class RCompressMixIn:
dest="compression",
type=CompressionSpec,
default=CompressionSpec("lz4"),
action=Highlander,
help="select compression algorithm, see the output of the " '"borg help compression" command for details.',
)
@ -242,5 +243,6 @@ class RCompressMixIn:
dest="checkpoint_interval",
type=int,
default=1800,
action=Highlander,
help="write checkpoint every SECONDS seconds (Default: 1800)",
)


@ -1,6 +1,6 @@
import argparse
from ._common import with_repository, with_other_repository
from ._common import with_repository, with_other_repository, Highlander
from ..cache import Cache
from ..constants import * # NOQA
from ..crypto.key import key_creator, key_argument_names, tam_required_file
@ -184,6 +184,7 @@ class RCreateMixIn:
dest="other_location",
type=location_validator(other=True),
default=Location(other=True),
action=Highlander,
help="reuse the key material from the other repository",
)
subparser.add_argument(
@ -193,6 +194,7 @@ class RCreateMixIn:
dest="encryption",
required=True,
choices=key_argument_names(),
action=Highlander,
help="select encryption key mode **(required)**",
)
subparser.add_argument(
@ -210,6 +212,7 @@ class RCreateMixIn:
dest="storage_quota",
default=None,
type=parse_storage_quota,
action=Highlander,
help="Set storage quota of the new repository (e.g. 5G, 1.5T). Default: no quota.",
)
subparser.add_argument(


@ -149,6 +149,7 @@ class RecreateMixIn:
metavar="TARGET",
default=None,
type=archivename_validator,
action=Highlander,
help="create a new archive with the name ARCHIVE, do not replace existing archive "
"(only applies for a single archive)",
)
@ -158,6 +159,7 @@ class RecreateMixIn:
dest="checkpoint_interval",
type=int,
default=1800,
action=Highlander,
metavar="SECONDS",
help="write checkpoint every SECONDS seconds (Default: 1800)",
)
@ -167,6 +169,7 @@ class RecreateMixIn:
dest="checkpoint_volume",
type=int,
default=0,
action=Highlander,
help="write checkpoint every BYTES bytes (Default: 0, meaning no volume based checkpointing)",
)
archive_group.add_argument(
@ -175,6 +178,7 @@ class RecreateMixIn:
dest="comment",
type=comment_validator,
default=None,
action=Highlander,
help="add a comment text to the archive",
)
archive_group.add_argument(
@ -183,6 +187,7 @@ class RecreateMixIn:
dest="timestamp",
type=timestamp,
default=None,
action=Highlander,
help="manually specify the archive creation date/time (yyyy-mm-ddThh:mm:ss[(+|-)HH:MM] format, "
"(+|-)HH:MM is the UTC offset, default: local time zone). Alternatively, give a reference file/directory.",
)
@ -193,6 +198,7 @@ class RecreateMixIn:
dest="compression",
type=CompressionSpec,
default=CompressionSpec("lz4"),
action=Highlander,
help="select compression algorithm, see the output of the " '"borg help compression" command for details.',
)
archive_group.add_argument(
@ -203,6 +209,7 @@ class RecreateMixIn:
default="never",
const="if-different",
choices=("never", "if-different", "always"),
action=Highlander,
help="recompress data chunks according to `MODE` and ``--compression``. "
"Possible modes are "
"`if-different`: recompress if current compression is with a different "
@ -217,9 +224,9 @@ class RecreateMixIn:
"--chunker-params",
metavar="PARAMS",
dest="chunker_params",
action=Highlander,
type=ChunkerParams,
default=None,
action=Highlander,
help="rechunk using given chunker parameters (ALGO, CHUNK_MIN_EXP, CHUNK_MAX_EXP, "
"HASH_MASK_BITS, HASH_WINDOW_SIZE) or `default` to use the chunker defaults. "
"default: do not rechunk",


@ -1,8 +1,9 @@
import argparse
import os
import textwrap
import sys
from ._common import with_repository
from ._common import with_repository, Highlander
from ..constants import * # NOQA
from ..helpers import BaseFormatter, ArchiveFormatter, json_print, basic_json_data
from ..manifest import Manifest
@ -21,7 +22,7 @@ class RListMixIn:
elif args.short:
format = "{archive}{NL}"
else:
format = "{archive:<36} {time} [{id}]{NL}"
format = os.environ.get("BORG_RLIST_FORMAT", "{archive:<36} {time} [{id}]{NL}")
formatter = ArchiveFormatter(format, repository, manifest, manifest.key, json=args.json, iec=args.iec)
output_data = []
@ -106,6 +107,7 @@ class RListMixIn:
"--format",
metavar="FORMAT",
dest="format",
action=Highlander,
help="specify format for archive listing " '(default: "{archive:<36} {time} [{id}]{NL}")',
)
subparser.add_argument(


@ -1,5 +1,6 @@
import argparse
from ._common import Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..helpers import parse_storage_quota
@ -76,6 +77,7 @@ class ServeMixIn:
dest="storage_quota",
type=parse_storage_quota,
default=None,
action=Highlander,
help="Override storage quota of the repository (e.g. 5G, 1.5T). "
"When a new repository is initialized, sets the storage quota on the new "
"repository as well. Default: no quota.",


@ -289,7 +289,7 @@ class TarMixIn:
file_status_printer=self.print_file_status,
)
tar = tarfile.open(fileobj=tarstream, mode="r|")
tar = tarfile.open(fileobj=tarstream, mode="r|", ignore_zeros=args.ignore_zeros)
while True:
tarinfo = tar.next()
@ -390,7 +390,11 @@ class TarMixIn:
)
subparser.set_defaults(func=self.do_export_tar)
subparser.add_argument(
"--tar-filter", dest="tar_filter", default="auto", help="filter program to pipe data through"
"--tar-filter",
dest="tar_filter",
default="auto",
action=Highlander,
help="filter program to pipe data through",
)
subparser.add_argument(
"--list", dest="output_list", action="store_true", help="output verbose list of items (files, dirs, ...)"
@ -401,6 +405,7 @@ class TarMixIn:
dest="tar_format",
default="GNU",
choices=("BORG", "PAX", "GNU"),
action=Highlander,
help="select tar format: BORG, PAX or GNU",
)
subparser.add_argument("name", metavar="NAME", type=archivename_validator, help="specify the archive name")
@ -445,6 +450,9 @@ class TarMixIn:
- UNIX V7 tar
- SunOS tar with extended attributes
To import multiple tarballs into a single archive, they can be simply
concatenated (e.g. using "cat") into a single file, and imported with an
``--ignore-zeros`` option to skip through the stop markers between them.
"""
)
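# A minimal sketch of the stdlib behavior that --ignore-zeros maps onto:
# tarfile's ignore_zeros flag reads through the zero-filled end-of-archive
# markers that a plain `cat` leaves between concatenated tars.
#
#   import tarfile
#   with tarfile.open("combined.tar", ignore_zeros=True) as tar:
#       for member in tar:
#           print(member.name)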
subparser = subparsers.add_parser(
@ -487,6 +495,12 @@ class TarMixIn:
help="only display items with the given status characters",
)
subparser.add_argument("--json", action="store_true", help="output stats as JSON (implies --stats)")
subparser.add_argument(
"--ignore-zeros",
dest="ignore_zeros",
action="store_true",
help="ignore zero-filled blocks in the input tarball",
)
archive_group = subparser.add_argument_group("Archive options")
archive_group.add_argument(
@ -495,6 +509,7 @@ class TarMixIn:
dest="comment",
type=comment_validator,
default="",
action=Highlander,
help="add a comment text to the archive",
)
archive_group.add_argument(
@ -502,6 +517,7 @@ class TarMixIn:
dest="timestamp",
type=timestamp,
default=None,
action=Highlander,
metavar="TIMESTAMP",
help="manually specify the archive creation date/time (yyyy-mm-ddThh:mm:ss[(+|-)HH:MM] format, "
"(+|-)HH:MM is the UTC offset, default: local time zone). Alternatively, give a reference file/directory.",
@ -512,6 +528,7 @@ class TarMixIn:
dest="checkpoint_interval",
type=int,
default=1800,
action=Highlander,
metavar="SECONDS",
help="write checkpoint every SECONDS seconds (Default: 1800)",
)
@ -521,14 +538,15 @@ class TarMixIn:
dest="checkpoint_volume",
type=int,
default=0,
action=Highlander,
help="write checkpoint every BYTES bytes (Default: 0, meaning no volume based checkpointing)",
)
archive_group.add_argument(
"--chunker-params",
dest="chunker_params",
action=Highlander,
type=ChunkerParams,
default=CHUNKER_PARAMS,
action=Highlander,
metavar="PARAMS",
help="specify the chunker parameters (ALGO, CHUNK_MIN_EXP, CHUNK_MAX_EXP, "
"HASH_MASK_BITS, HASH_WINDOW_SIZE). default: %s,%d,%d,%d,%d" % CHUNKER_PARAMS,
@ -540,6 +558,7 @@ class TarMixIn:
dest="compression",
type=CompressionSpec,
default=CompressionSpec("lz4"),
action=Highlander,
help="select compression algorithm, see the output of the " '"borg help compression" command for details.',
)


@ -1,6 +1,6 @@
import argparse
from ._common import with_repository, with_other_repository
from ._common import with_repository, with_other_repository, Highlander
from ..archive import Archive
from ..constants import * # NOQA
from ..crypto.key import uses_same_id_hash, uses_same_chunker_secret
@ -106,8 +106,11 @@ class TransferMixIn:
if refcount == 0: # target repo does not yet have this chunk
if not dry_run:
cdata = other_repository.get(chunk_id)
# keep compressed payload same, avoid decompression / recompression
meta, data = other_manifest.repo_objs.parse(chunk_id, cdata, decompress=False)
# keep compressed payload same, verify via assert_id (that will
# decompress, but avoid needing to compress it again):
meta, data = other_manifest.repo_objs.parse(
chunk_id, cdata, decompress=True, want_compressed=True
)
meta, data = upgrader.upgrade_compressed_chunk(meta, data)
chunk_entry = cache.add_chunk(
chunk_id,
@ -207,6 +210,7 @@ class TransferMixIn:
dest="other_location",
type=location_validator(other=True),
default=Location(other=True),
action=Highlander,
help="transfer archives from the other repository",
)
subparser.add_argument(
@ -215,6 +219,7 @@ class TransferMixIn:
dest="upgrader",
type=str,
default="NoOp",
action=Highlander,
help="use the upgrader to convert transferred data (default: no conversion)",
)
define_archive_filters_group(subparser)


@ -562,14 +562,14 @@ class FuseOperations(llfuse.Operations, FuseBackend):
@async_wrapper
def statfs(self, ctx=None):
stat_ = llfuse.StatvfsData()
stat_.f_bsize = 512
stat_.f_frsize = 512
stat_.f_blocks = 0
stat_.f_bfree = 0
stat_.f_bavail = 0
stat_.f_files = 0
stat_.f_ffree = 0
stat_.f_favail = 0
stat_.f_bsize = 512 # Filesystem block size
stat_.f_frsize = 512 # Fragment size
stat_.f_blocks = 0 # Size of fs in f_frsize units
stat_.f_bfree = 0 # Number of free blocks
stat_.f_bavail = 0 # Number of free blocks for unprivileged users
stat_.f_files = 0 # Number of inodes
stat_.f_ffree = 0 # Number of free inodes
stat_.f_favail = 0 # Number of free inodes for unprivileged users
stat_.f_namemax = 255 # == NAME_MAX (depends on archive source OS / FS)
return stat_


@ -106,7 +106,7 @@ def get_cache_dir(*, legacy=False):
cache_dir = os.environ.get("BORG_CACHE_DIR", os.path.join(cache_home, "borg"))
else:
cache_dir = os.environ.get(
"BORG_CACHE_DIR", join_base_dir(".cache", legacy=legacy) or platformdirs.user_cache_dir("borg")
"BORG_CACHE_DIR", join_base_dir(".cache", "borg", legacy=legacy) or platformdirs.user_cache_dir("borg")
)
# Create path if it doesn't exist yet
@ -143,7 +143,7 @@ def get_config_dir(*, legacy=False):
config_dir = os.environ.get("BORG_CONFIG_DIR", os.path.join(config_home, "borg"))
else:
config_dir = os.environ.get(
"BORG_CONFIG_DIR", join_base_dir(".config", legacy=legacy) or platformdirs.user_config_dir("borg")
"BORG_CONFIG_DIR", join_base_dir(".config", "borg", legacy=legacy) or platformdirs.user_config_dir("borg")
)
# Create path if it doesn't exist yet


@ -672,11 +672,14 @@ class ArchiveFormatter(BaseFormatter):
"id": "internal ID of the archive",
"hostname": "hostname of host on which this archive was created",
"username": "username of user who created this archive",
"size": "size of this archive (data plus metadata, not considering compression and deduplication)",
"nfiles": "count of files in this archive",
}
KEY_GROUPS = (
("archive", "name", "comment", "id"),
("start", "time", "end", "command_line"),
("hostname", "username"),
("size", "nfiles"),
)
@classmethod
@ -722,10 +725,12 @@ class ArchiveFormatter(BaseFormatter):
self.format = partial_format(format, static_keys)
self.format_keys = {f[1] for f in Formatter().parse(format)}
self.call_keys = {
"hostname": partial(self.get_meta, "hostname"),
"username": partial(self.get_meta, "username"),
"comment": partial(self.get_meta, "comment"),
"command_line": partial(self.get_meta, "command_line"),
"hostname": partial(self.get_meta, "hostname", ""),
"username": partial(self.get_meta, "username", ""),
"comment": partial(self.get_meta, "comment", ""),
"command_line": partial(self.get_meta, "command_line", ""),
"size": partial(self.get_meta, "size", 0),
"nfiles": partial(self.get_meta, "nfiles", 0),
"end": self.get_ts_end,
}
self.used_call_keys = set(self.call_keys) & self.format_keys
@ -771,8 +776,8 @@ class ArchiveFormatter(BaseFormatter):
self._archive = Archive(self.manifest, self.name, iec=self.iec)
return self._archive
def get_meta(self, key):
return self.archive.metadata.get(key, "")
def get_meta(self, key, default=None):
return self.archive.metadata.get(key, default)
def get_ts_end(self):
return self.format_time(self.archive.ts_end)
@ -1033,7 +1038,7 @@ def ellipsis_truncate(msg, space):
# if there is very little space, just show ...
return "..." + " " * (space - ellipsis_width)
if space < ellipsis_width + msg_width:
return "{}...{}".format(swidth_slice(msg, space // 2 - ellipsis_width), swidth_slice(msg, -space // 2))
return f"{swidth_slice(msg, space // 2 - ellipsis_width)}...{swidth_slice(msg, -space // 2)}"
return msg + " " * (space - msg_width)


@ -56,7 +56,7 @@ class Passphrase(str):
# passcommand is a system command (not inside pyinstaller env)
env = prepare_subprocess_env(system=True)
try:
passphrase = subprocess.check_output(shlex.split(passcommand), universal_newlines=True, env=env)
passphrase = subprocess.check_output(shlex.split(passcommand), text=True, env=env)
except (subprocess.CalledProcessError, FileNotFoundError) as e:
raise PasscommandFailure(e)
return cls(passphrase.rstrip("\n"))


@ -144,10 +144,12 @@ class ProgressIndicatorPercent(ProgressIndicatorBase):
# truncate the last argument, if no space is available
if info is not None:
if not self.json:
from ..platform import swidth # avoid circular import
# no need to truncate if we're not outputting to a terminal
terminal_space = get_terminal_size(fallback=(-1, -1))[0]
if terminal_space != -1:
space = terminal_space - len(self.msg % tuple([pct] + info[:-1] + [""]))
space = terminal_space - swidth(self.msg % tuple([pct] + info[:-1] + [""]))
info[-1] = ellipsis_truncate(info[-1], space)
return self.output(self.msg % tuple([pct] + info), justify=False, info=info)
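The swidth change matters for wide characters: ``len()`` counts code points, while a
terminal renders e.g. CJK characters two cells wide. A small illustration, using the
third-party ``wcwidth`` package as a stand-in for borg's platform ``swidth``:

    from wcwidth import wcswidth  # assumption: behaves like borg's swidth here

    msg = "进度"            # two CJK characters
    print(len(msg))        # 2 code points
    print(wcswidth(msg))   # 4 terminal cells -> the space actually consumed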


@ -5,7 +5,7 @@ import re
from collections import abc, namedtuple
from datetime import datetime, timedelta, timezone
from operator import attrgetter
from typing import Sequence, FrozenSet
from collections.abc import Sequence
from .logger import create_logger
@ -211,7 +211,7 @@ class Manifest:
NO_OPERATION_CHECK: Sequence[Operation] = tuple()
SUPPORTED_REPO_FEATURES: FrozenSet[str] = frozenset([])
SUPPORTED_REPO_FEATURES: frozenset[str] = frozenset([])
MANIFEST_ID = b"\0" * 32


@ -70,7 +70,20 @@ class RepoObj:
meta = msgpack.unpackb(meta_packed)
return meta
def parse(self, id: bytes, cdata: bytes, decompress: bool = True) -> tuple[dict, bytes]:
def parse(
self, id: bytes, cdata: bytes, decompress: bool = True, want_compressed: bool = False
) -> tuple[dict, bytes]:
"""
Parse a repo object into metadata and data (decrypt it, maybe decompress, maybe verify if the chunk plaintext
corresponds to the chunk id via assert_id()).
Tweaking options (default is usually fine):
- decompress=True, want_compressed=False: slow, verifying. returns decompressed data (default).
- decompress=True, want_compressed=True: slow, verifying. returns compressed data (caller wants to reuse it).
- decompress=False, want_compressed=True: quick, not verifying. returns compressed data (caller wants to reuse).
- decompress=False, want_compressed=False: invalid
"""
assert not (not decompress and not want_compressed), "invalid parameter combination!"
assert isinstance(id, bytes)
assert isinstance(cdata, bytes)
obj = memoryview(cdata)
@ -81,24 +94,26 @@ class RepoObj:
meta_encrypted = obj[offs : offs + len_meta_encrypted]
offs += len_meta_encrypted
meta_packed = self.key.decrypt(id, meta_encrypted)
meta = msgpack.unpackb(meta_packed)
meta_compressed = msgpack.unpackb(meta_packed) # means: before adding more metadata in decompress block
data_encrypted = obj[offs:]
data_compressed = self.key.decrypt(id, data_encrypted)
data_compressed = self.key.decrypt(id, data_encrypted) # does not include the type/level bytes
if decompress:
ctype = meta["ctype"]
clevel = meta["clevel"]
csize = meta["csize"] # always the overall size
ctype = meta_compressed["ctype"]
clevel = meta_compressed["clevel"]
csize = meta_compressed["csize"] # always the overall size
assert csize == len(data_compressed)
psize = meta.get("psize", csize) # obfuscation: psize (payload size) is potentially less than csize.
psize = meta_compressed.get(
"psize", csize
) # obfuscation: psize (payload size) is potentially less than csize.
assert psize <= csize
compr_hdr = bytes((ctype, clevel))
compressor_cls, compression_level = Compressor.detect(compr_hdr)
compressor = compressor_cls(level=compression_level)
meta, data = compressor.decompress(meta, data_compressed[:psize])
meta, data = compressor.decompress(dict(meta_compressed), data_compressed[:psize])
self.key.assert_id(id, data)
else:
data = data_compressed # does not include the type/level bytes
return meta, data
meta, data = None, None
return meta_compressed if want_compressed else meta, data_compressed if want_compressed else data
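# A usage sketch for the combinations documented above (repo_objs, chunk_id
# and cdata are placeholders):
#
#   meta, data = repo_objs.parse(chunk_id, cdata)  # verify + decompress (default)
#   meta, data = repo_objs.parse(chunk_id, cdata, decompress=True, want_compressed=True)
#   # -> verifying, returns compressed payload for reuse (as done by borg transfer)
#   meta, data = repo_objs.parse(chunk_id, cdata, decompress=False, want_compressed=True)
#   # -> quick, no verification, returns compressed payload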
class RepoObj1: # legacy
@ -140,19 +155,22 @@ class RepoObj1: # legacy
def parse_meta(self, id: bytes, cdata: bytes) -> dict:
raise NotImplementedError("parse_meta is not available for RepoObj1")
def parse(self, id: bytes, cdata: bytes, decompress: bool = True) -> tuple[dict, bytes]:
def parse(
self, id: bytes, cdata: bytes, decompress: bool = True, want_compressed: bool = False
) -> tuple[dict, bytes]:
assert not (not decompress and not want_compressed), "invalid parameter combination!"
assert isinstance(id, bytes)
assert isinstance(cdata, bytes)
data_compressed = self.key.decrypt(id, cdata)
compressor_cls, compression_level = Compressor.detect(data_compressed[:2])
compressor = compressor_cls(level=compression_level, legacy_mode=True)
meta_compressed = {}
meta_compressed["ctype"] = compressor.ID
meta_compressed["clevel"] = compressor.level
meta_compressed["csize"] = len(data_compressed)
if decompress:
meta, data = compressor.decompress(None, data_compressed)
self.key.assert_id(id, data)
else:
meta = {}
meta["ctype"] = compressor.ID
meta["clevel"] = compressor.level
meta["csize"] = len(data_compressed)
data = data_compressed
return meta, data
meta, data = None, None
return meta_compressed if want_compressed else meta, data_compressed if want_compressed else data

View File

@ -1693,7 +1693,7 @@ class LoggedIO:
size, tag, key, data = self._read(fd, header, segment, offset, (TAG_PUT2, TAG_PUT), read_data=read_data)
if id != key:
raise IntegrityError(
"Invalid segment entry header, is not for wanted id [segment {}, offset {}]".format(segment, offset)
f"Invalid segment entry header, is not for wanted id [segment {segment}, offset {offset}]"
)
data_size_from_header = size - header_size(tag)
if expected_size is not None and expected_size != data_size_from_header:
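The surrounding checks reject entries whose header belongs to a different key or whose size disagrees with the header. As a standalone sketch (the IntegrityError stand-in and the function name are invented for illustration; in borg, data_size_from_header comes from size - header_size(tag)):

class IntegrityError(Exception):
    """stand-in for borg's IntegrityError, just for this sketch"""

def check_segment_entry(wanted_id, key, data_size_from_header, segment, offset, expected_size=None):
    # the entry read from the segment must belong to the id we asked for ...
    if wanted_id != key:
        raise IntegrityError(
            f"Invalid segment entry header, is not for wanted id [segment {segment}, offset {offset}]"
        )
    # ... and, if the caller knows the size, the header must agree with it
    if expected_size is not None and expected_size != data_size_from_header:
        raise IntegrityError(
            f"size mismatch [segment {segment}, offset {offset}]: "
            f"expected {expected_size}, got {data_size_from_header}"
        )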

View File

@ -18,6 +18,7 @@ import unittest
from ..xattr import get_all
from ..platform import get_flags
from ..platformflags import is_win32
from ..helpers import umount
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
from .. import platform
@ -66,6 +67,14 @@ def unopened_tempfile():
yield os.path.join(tempdir, "file")
def is_root():
"""return True if running with high privileges, like as root"""
if is_win32:
return False # TODO
else:
return os.getuid() == 0
@functools.lru_cache
def are_symlinks_supported():
with unopened_tempfile() as filepath:

View File

@ -25,6 +25,7 @@ from .. import (
is_utime_fully_supported,
is_birthtime_fully_supported,
same_ts_ns,
is_root,
)
from . import (
ArchiverTestCaseBase,
@ -219,6 +220,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
assert "input/file2" in out
assert "input/file3" in out
@pytest.mark.skipif(is_root(), reason="test must not be run as (fake)root")
def test_create_no_permission_file(self):
file_path = os.path.join(self.input_path, "file")
self.create_regular_file(file_path + "1", size=1000)
@ -228,6 +230,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
if is_win32:
subprocess.run(["icacls.exe", file_path + "2", "/deny", "everyone:(R)"])
else:
# note: this will NOT take away read permissions for root
os.chmod(file_path + "2", 0o000)
self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
flist = "".join(f"input/file{n}\n" for n in range(1, 4))

View File

@ -6,6 +6,7 @@ import unittest
from ...constants import * # NOQA
from .. import changedir
from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
from ..compress import Compressor
class ArchiverTestCase(ArchiverTestCaseBase):
@ -63,6 +64,64 @@ class ArchiverTestCase(ArchiverTestCaseBase):
output = self.cmd(f"--repo={self.repository_location}", "debug", "delete-obj", "invalid")
assert "is invalid" in output
def test_debug_id_hash_format_put_get_parse_obj(self):
"""Test format-obj and parse-obj commands"""
self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
data = b"some data" * 100
meta_dict = {"some": "property"}
meta = json.dumps(meta_dict).encode()
self.create_regular_file("plain.bin", contents=data)
self.create_regular_file("meta.json", contents=meta)
output = self.cmd(f"--repo={self.repository_location}", "debug", "id-hash", "input/plain.bin")
id_hash = output.strip()
output = self.cmd(
f"--repo={self.repository_location}",
"debug",
"format-obj",
id_hash,
"input/plain.bin",
"input/meta.json",
"output/data.bin",
"--compression=zstd,2",
)
output = self.cmd(f"--repo={self.repository_location}", "debug", "put-obj", id_hash, "output/data.bin")
assert id_hash in output
output = self.cmd(f"--repo={self.repository_location}", "debug", "get-obj", id_hash, "output/object.bin")
assert id_hash in output
output = self.cmd(
f"--repo={self.repository_location}",
"debug",
"parse-obj",
id_hash,
"output/object.bin",
"output/plain.bin",
"output/meta.json",
)
with open("output/plain.bin", "rb") as f:
data_read = f.read()
assert data == data_read
with open("output/meta.json") as f:
meta_read = json.load(f)
for key, value in meta_dict.items():
assert meta_read.get(key) == value
assert meta_read.get("size") == len(data_read)
c = Compressor(name="zstd", level=2)
_, data_compressed = c.compress(meta_dict, data=data)
assert meta_read.get("csize") == len(data_compressed)
assert meta_read.get("ctype") == c.compressor.ID
assert meta_read.get("clevel") == c.compressor.level
def test_debug_dump_manifest(self):
self.create_regular_file("file1", size=1024 * 80)
self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)

View File

@ -6,7 +6,7 @@ import unittest
from ...constants import * # NOQA
from .. import are_symlinks_supported, are_hardlinks_supported
from ..platform import is_win32
from ..platform import is_win32, is_darwin
from . import ArchiverTestCaseBase, RemoteArchiverTestCaseBase, ArchiverTestCaseBinaryBase, RK_ENCRYPTION, BORG_EXES
@ -247,12 +247,16 @@ class ArchiverTestCase(ArchiverTestCaseBase):
if is_win32:
# Sleeping for 15s because Windows doesn't refresh ctime if file is deleted and recreated within 15 seconds.
time.sleep(15)
elif is_darwin:
time.sleep(1) # HFS has a 1s timestamp granularity
self.create_regular_file("test_file", size=15)
self.cmd(f"--repo={self.repository_location}", "create", "archive2", "input")
output = self.cmd(f"--repo={self.repository_location}", "diff", "archive1", "archive2")
self.assert_in("mtime", output)
self.assert_in("ctime", output) # Should show up on windows as well since it is a new file.
os.chmod("input/test_file", 777)
if is_darwin:
time.sleep(1) # HFS has a 1s timestamp granularity
os.chmod("input/test_file", 0o777)
self.cmd(f"--repo={self.repository_location}", "create", "archive3", "input")
output = self.cmd(f"--repo={self.repository_location}", "diff", "archive2", "archive3")
self.assert_not_in("mtime", output)

View File

@ -22,6 +22,5 @@ class TestBuildFilter:
matcher = PatternMatcher(fallback=True)
filter = build_filter(matcher, strip_components=1)
assert not filter(Item(path="shallow"))
assert not filter(Item(path="shallow/")) # can this even happen? paths are normalized...
assert filter(Item(path="deep enough/file"))
assert filter(Item(path="something/dir/file"))

View File

@ -40,6 +40,19 @@ class ArchiverTestCase(ArchiverTestCaseBase):
self.assert_in("test-1 comment 1" + os.linesep, output_3)
self.assert_in("test-2 comment 2" + os.linesep, output_3)
def test_size_nfiles(self):
self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
self.create_regular_file("file1", size=123000)
self.create_regular_file("file2", size=456)
self.cmd(f"--repo={self.repository_location}", "create", "test", "input/file1", "input/file2")
output = self.cmd(f"--repo={self.repository_location}", "list", "test")
print(output)
output = self.cmd(f"--repo={self.repository_location}", "rlist", "--format", "{name} {nfiles} {size}")
o_t = output.split()
assert o_t[0] == "test"
assert int(o_t[1]) == 2
assert 123456 <= int(o_t[2]) < 123999 # there is some metadata overhead
def test_date_matching(self):
self.cmd(f"--repo={self.repository_location}", "rcreate", RK_ENCRYPTION)
earliest_ts = "2022-11-20T23:59:59"

View File

@ -143,6 +143,57 @@ class ArchiverTestCase(ArchiverTestCaseBase):
self.cmd(f"--repo={self.repository_location}", "extract", "dst")
self.assert_dirs_equal("input", "output/input", ignore_ns=True, ignore_xattrs=True)
@requires_gnutar
def test_import_concatenated_tar_with_ignore_zeros(self):
self.create_test_files(create_hardlinks=False) # hardlinks become separate files
os.unlink("input/flagfile")
with changedir("input"):
subprocess.check_call(["tar", "cf", "file1.tar", "file1"])
subprocess.check_call(["tar", "cf", "the_rest.tar", "--exclude", "file1*", "."])
with open("concatenated.tar", "wb") as concatenated:
with open("file1.tar", "rb") as file1:
concatenated.write(file1.read())
# Clean up for assert_dirs_equal.
os.unlink("file1.tar")
with open("the_rest.tar", "rb") as the_rest:
concatenated.write(the_rest.read())
# Clean up for assert_dirs_equal.
os.unlink("the_rest.tar")
self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
self.cmd(f"--repo={self.repository_location}", "import-tar", "--ignore-zeros", "dst", "input/concatenated.tar")
# Clean up for assert_dirs_equal.
os.unlink("input/concatenated.tar")
with changedir(self.output_path):
self.cmd(f"--repo={self.repository_location}", "extract", "dst")
self.assert_dirs_equal("input", "output", ignore_ns=True, ignore_xattrs=True)
@requires_gnutar
def test_import_concatenated_tar_without_ignore_zeros(self):
self.create_test_files(create_hardlinks=False) # hardlinks become separate files
os.unlink("input/flagfile")
with changedir("input"):
subprocess.check_call(["tar", "cf", "file1.tar", "file1"])
subprocess.check_call(["tar", "cf", "the_rest.tar", "--exclude", "file1*", "."])
with open("concatenated.tar", "wb") as concatenated:
with open("file1.tar", "rb") as file1:
concatenated.write(file1.read())
with open("the_rest.tar", "rb") as the_rest:
concatenated.write(the_rest.read())
os.unlink("the_rest.tar")
self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")
self.cmd(f"--repo={self.repository_location}", "import-tar", "dst", "input/concatenated.tar")
with changedir(self.output_path):
self.cmd(f"--repo={self.repository_location}", "extract", "dst")
# Negative test -- assert that only file1 has been extracted and the_rest has been ignored,
# because reading stopped at the zero-filled end-of-archive marker.
self.assert_equal(os.listdir("output"), ["file1"])
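The tar format ends each archive with zero-filled blocks, so a plain reader stops at the end of the first concatenated member; --ignore-zeros tells import-tar to skip those markers and keep reading. The same distinction exists in Python's tarfile module (paths as created by the tests above):

import tarfile

# default: reading stops at the zero-filled blocks ending the first archive
with tarfile.open("input/concatenated.tar") as tf:
    print(tf.getnames())  # members of file1.tar only

# ignore_zeros=True skips the zero markers, analogous to import-tar --ignore-zeros
with tarfile.open("input/concatenated.tar", ignore_zeros=True) as tf:
    print(tf.getnames())  # members of both concatenated archives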
def test_roundtrip_pax_borg(self):
self.create_test_files()
self.cmd(f"--repo={self.repository_location}", "rcreate", "--encryption=none")

View File

@ -45,7 +45,7 @@ from ..helpers import eval_escapes
from ..helpers import safe_unlink
from ..helpers import text_to_json, binary_to_json
from ..helpers.passphrase import Passphrase, PasswordRetriesExceeded
from ..platform import is_cygwin, is_win32, is_darwin
from ..platform import is_cygwin, is_win32, is_darwin, swidth
from . import BaseTestCase, FakeInputs, are_hardlinks_supported
@ -635,19 +635,20 @@ def test_get_config_dir(monkeypatch):
def test_get_config_dir_compat(monkeypatch):
"""test that it works the same for legacy and for non-legacy implementation"""
monkeypatch.delenv("BORG_CONFIG_DIR", raising=False)
monkeypatch.delenv("BORG_BASE_DIR", raising=False)
monkeypatch.delenv("XDG_CONFIG_HOME", raising=False)
if not is_darwin and not is_win32:
monkeypatch.delenv("BORG_CONFIG_DIR", raising=False)
monkeypatch.delenv("XDG_CONFIG_HOME", raising=False)
# fails on macOS: assert '/Users/tw/Library/Application Support/borg' == '/Users/tw/.config/borg'
# fails on win32 MSYS2 (but we do not need legacy compat there).
assert get_config_dir(legacy=False) == get_config_dir(legacy=True)
if not is_darwin and not is_win32:
monkeypatch.setenv("XDG_CONFIG_HOME", "/var/tmp/.config1")
# fails on macOS: assert '/Users/tw/Library/Application Support/borg' == '/var/tmp/.config1/borg'
monkeypatch.setenv("XDG_CONFIG_HOME", "/var/tmp/xdg.config.d")
# fails on macOS: assert '/Users/tw/Library/Application Support/borg' == '/var/tmp/xdg.config.d'
# fails on win32 MSYS2 (but we do not need legacy compat there).
assert get_config_dir(legacy=False) == get_config_dir(legacy=True)
monkeypatch.setenv("BORG_CONFIG_DIR", "/var/tmp/.config2")
monkeypatch.setenv("BORG_BASE_DIR", "/var/tmp/base")
assert get_config_dir(legacy=False) == get_config_dir(legacy=True)
monkeypatch.setenv("BORG_CONFIG_DIR", "/var/tmp/borg.config.d")
assert get_config_dir(legacy=False) == get_config_dir(legacy=True)
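The test exercises the precedence of the environment overrides. Roughly, and only for Linux-style default paths (the BORG_BASE_DIR layout shown is an assumption of this sketch; macOS and Windows non-legacy defaults differ, hence the is_darwin/is_win32 guards):

import os

def get_config_dir_sketch():
    # simplified precedence as exercised by the test above, Linux-style paths only
    if "BORG_CONFIG_DIR" in os.environ:  # explicit override wins
        return os.environ["BORG_CONFIG_DIR"]
    if "BORG_BASE_DIR" in os.environ:  # relocate everything under a base dir
        return os.path.join(os.environ["BORG_BASE_DIR"], ".config", "borg")
    xdg = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
    return os.path.join(xdg, "borg")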
@ -675,6 +676,25 @@ def test_get_cache_dir(monkeypatch):
assert get_cache_dir() == "/var/tmp"
def test_get_cache_dir_compat(monkeypatch):
"""test that it works the same for legacy and for non-legacy implementation"""
monkeypatch.delenv("BORG_CACHE_DIR", raising=False)
monkeypatch.delenv("BORG_BASE_DIR", raising=False)
monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
if not is_darwin and not is_win32:
# fails on macOS: assert '/Users/tw/Library/Caches/borg' == '/Users/tw/.cache/borg'
# fails on win32 MSYS2 (but we do not need legacy compat there).
assert get_cache_dir(legacy=False) == get_cache_dir(legacy=True)
# fails on macOS: assert '/Users/tw/Library/Caches/borg' == '/var/tmp/xdg.cache.d'
# fails on win32 MSYS2 (but we do not need legacy compat there).
monkeypatch.setenv("XDG_CACHE_HOME", "/var/tmp/xdg.cache.d")
assert get_cache_dir(legacy=False) == get_cache_dir(legacy=True)
monkeypatch.setenv("BORG_BASE_DIR", "/var/tmp/base")
assert get_cache_dir(legacy=False) == get_cache_dir(legacy=True)
monkeypatch.setenv("BORG_CACHE_DIR", "/var/tmp/borg.cache.d")
assert get_cache_dir(legacy=False) == get_cache_dir(legacy=True)
def test_get_keys_dir(monkeypatch):
"""test that get_keys_dir respects environment"""
monkeypatch.delenv("BORG_BASE_DIR", raising=False)
@ -997,6 +1017,29 @@ def test_progress_percentage_sameline(capfd, monkeypatch):
assert err == " " * 4 + "\r"
@pytest.mark.skipif(is_win32, reason="no working swidth() implementation on this platform")
def test_progress_percentage_widechars(capfd, monkeypatch):
st = "スター・トレック" # "startrek" :-)
assert swidth(st) == 16
path = "/カーク船長です。" # "Captain Kirk"
assert swidth(path) == 17
spaces = " " * 4 # to avoid usage of "..."
width = len("100%") + 1 + swidth(st) + 1 + swidth(path) + swidth(spaces)
monkeypatch.setenv("COLUMNS", str(width))
monkeypatch.setenv("LINES", "1")
pi = ProgressIndicatorPercent(100, step=5, start=0, msg=f"%3.0f%% {st} %s")
pi.logger.setLevel("INFO")
pi.show(0, info=[path])
out, err = capfd.readouterr()
assert err == f" 0% {st} {path}{spaces}\r"
pi.show(100, info=[path])
out, err = capfd.readouterr()
assert err == f"100% {st} {path}{spaces}\r"
pi.finish()
out, err = capfd.readouterr()
assert err == " " * width + "\r"
def test_progress_percentage_step(capfd, monkeypatch):
# run the test as if it was in a 4x1 terminal
monkeypatch.setenv("COLUMNS", "4")

View File

@ -68,7 +68,9 @@ def test_borg1_borg2_transition(key):
repo_objs1 = RepoObj1(key)
id = repo_objs1.id_hash(data)
borg1_cdata = repo_objs1.format(id, meta, data)
meta1, compr_data1 = repo_objs1.parse(id, borg1_cdata, decompress=False) # borg transfer avoids (de)compression
meta1, compr_data1 = repo_objs1.parse(
id, borg1_cdata, decompress=True, want_compressed=True
) # avoid re-compression
# in borg 1, we can only get this metadata after decrypting the whole chunk (and we do not have "size" here):
assert meta1["ctype"] == LZ4.ID # default compression
assert meta1["clevel"] == 0xFF # lz4 does not know levels (yet?)