Merge branch 'master' of github.com:borgbackup/borg into docfix-6231

Jeff Turner 2023-07-26 14:32:47 +10:00
commit e00c12519f
33 changed files with 138 additions and 205 deletions

View File

@@ -76,112 +76,33 @@ markers =
[flake8]
# for reference ...
# E121 continuation line under-indented for hanging indent
# E122 continuation line missing indentation or outdented
# E123 closing bracket does not match indentation of opening bracket's line
# E125 continuation line with same indent as next logical line
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# E128 continuation line under-indented for visual indent
# E221 multiple spaces before operator
# E226 missing whitespace around arithmetic operator
# E261 at least two spaces before inline comment
# E265 block comment should start with '# '
# E301 expected 1 blank line
# E305 expected 2 blank lines after class or function definition
# E401 multiple imports on one line
# E402 module level import not at top
# E501 line too long
# E722 do not use bare except
# E731 do not assign a lambda expression, use def
# E741 ambiguous variable name
# F401 import unused
# F403 from ... import * used, unable to detect undefined names
# F405 undefined or defined from star imports
# F811 redef of unused var
# F821 undefined name
# W391 blank line at end of file
# #### Pick either W503, or W504 - latest recommendation from pep8 is to ignore W503
# W503 line break before binary operator
# W504 line break after binary operator
# borg code style guidelines:
# Ignoring E203 due to https://github.com/PyCQA/pycodestyle/issues/373
ignore = E226, W503, E203
ignore = W503, E203, F405, E402
# Code style violation exceptions:
# please note that the values are adjusted so that they do not cause failures
# with existing code. if you want to change them, you should first fix all
# flake8 failures that appear with your change.
per_file_ignores =
docs/conf.py:E121,E126,E265,E305,E401,E402
src/borg/archive.py:E122,E125,E127,E402,E501,F401,F405,W504
src/borg/archiver/__init__.py:E402,E501,E722,E741,F405
src/borg/archiver/_common.py:E501,F405
src/borg/archiver/benchmark_cmd.py:F405
src/borg/archiver/config_cmd.py:F405,E722
src/borg/archiver/create_cmd.py:E501,F405
src/borg/archiver/debug_cmd.py:F405
src/borg/archiver/delete_cmd.py:F405
src/borg/archiver/diff_cmd.py:F405
src/borg/archiver/help_cmd.py:E501,F405
src/borg/archiver/key_cmds.py:F405
src/borg/archiver/prune_cmd.py:F405
src/borg/archiver/rcompress_cmd.py:F405
src/borg/archiver/recreate_cmd.py:F405
src/borg/archiver/rdelete_cmd.py:F405
src/borg/archiver/rlist_cmd.py:E501
src/borg/archiver/tar_cmds.py:F405
src/borg/cache.py:E127,E128,E402,E501,E722,W504
src/borg/fuse.py:E402,E501,E722,W504
src/borg/fuse_impl.py:F811
src/borg/locking.py:E128,E501,E722
src/borg/manifest.py:E128,E402,E501,F405
src/borg/remote.py:E128,E501,F405
src/borg/repository.py:E126,E128,E501,F401,F405,W504
src/borg/upgrader.py:E501
src/borg/xattr.py:E402
src/borg/crypto/key.py:E125,E128,E402,E501,F401,F405,W504
src/borg/crypto/keymanager.py:E126,E128,E501,F401
src/borg/crypto/nonces.py:E128,E501
src/borg/helpers/__init__.py:F401,F405
src/borg/helpers/checks.py:F401
src/borg/helpers/errors.py:F405
src/borg/helpers/fs.py:F405
src/borg/helpers/misc.py:E402,E722,F401,F405
src/borg/helpers/msgpack.py:E127,F405
src/borg/helpers/parseformat.py:E402,E501,E741,F401,F405
src/borg/helpers/process.py:E402,F401,W504
src/borg/helpers/progress.py:E402
src/borg/helpers/shellpattern.py:E501
src/borg/platform/__init__.py:F401,F811
src/borg/platform/base.py:E402
src/borg/testsuite/__init__.py:E501,F401
src/borg/testsuite/archive.py:E128,W504
src/borg/testsuite/archiver/__init__.py:E128,E501,E722,F401,F405,F811
src/borg/testsuite/archiver/debug_cmds.py:E501,F405
src/borg/testsuite/archiver/disk_full.py:F401,F405,F811
src/borg/testsuite/archiver/extract_cmd.py:F405
src/borg/testsuite/archiver/mount_cmds.py:E501,E722
src/borg/testsuite/archiver/prune_cmd.py:F405
src/borg/testsuite/archiver/rcompress_cmd.py:F405
src/borg/testsuite/archiver/recreate_cmd.py:F405
src/borg/testsuite/archiver/return_codes.py:F401,F405,F811
src/borg/testsuite/benchmark.py:F401,F811
src/borg/testsuite/chunker.py:E501,F405
src/borg/testsuite/chunker_pytest.py:F401,F405
src/borg/testsuite/chunker_slow.py:F405
src/borg/testsuite/crypto.py:E126,E501,E741
src/borg/testsuite/file_integrity.py:F401
src/borg/testsuite/hashindex.py:F401
src/borg/testsuite/helpers.py:E126,E127,E128,E501,F401
src/borg/testsuite/key.py:E501,F401
src/borg/testsuite/locking.py:E126,E128,E501,E722,F401
src/borg/testsuite/patterns.py:E123
src/borg/testsuite/platform.py:E128,E501,F401,F811
src/borg/testsuite/repository.py:E128,E501,F401
src/borg/testsuite/shellpattern.py:E123
src/borg/testsuite/upgrader.py:F405
src/borg/archive.py:E501
src/borg/archiver/help_cmd.py:E501
src/borg/cache.py:E501
src/borg/helpers/__init__.py:F401
src/borg/platform/__init__.py:F401
src/borg/testsuite/archiver/disk_full.py:F811
src/borg/testsuite/archiver/return_codes.py:F811
src/borg/testsuite/benchmark.py:F811
src/borg/testsuite/platform.py:F811
max_line_length = 120
exclude = build,dist,.git,.idea,.cache,.tox
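Most of the per-file exceptions above, as well as the F405 entry this commit adds to the global ignore list, come from borg's pervasive star imports. A minimal illustration of the F403/F405 pair as flake8 would report it (module and constant names are hypothetical, not taken from the borg tree):

# demo.py - hypothetical module, shown only to illustrate the two codes
from borg.constants import *    # F403: star import, flake8 cannot see what it defines

def chunk_flags():
    return SOME_CONSTANT        # F405: SOME_CONSTANT may be undefined, or defined from star imports

Ignoring F405 silences only that "may be undefined, or defined from star imports" report; genuinely undefined names are still caught as F821.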

View File

@@ -6,7 +6,7 @@ import sys
import time
from collections import OrderedDict, defaultdict
from contextlib import contextmanager
from datetime import datetime, timedelta
from datetime import timedelta
from functools import partial
from getpass import getuser
from io import BytesIO
@@ -22,7 +22,7 @@ logger = create_logger()
from . import xattr
from .chunker import get_chunker, Chunk
from .cache import ChunkListEntry
from .crypto.key import key_factory, UnsupportedPayloadError, AEADKeyBase
from .crypto.key import key_factory, UnsupportedPayloadError
from .compress import Compressor, CompressionSpec
from .constants import * # NOQA
from .crypto.low_level import IntegrityError as IntegrityErrorBase
@@ -974,8 +974,8 @@ Duration: {0.duration}
if not self.noacls:
acl_set(path, item, self.numeric_ids, fd=fd)
if not self.noxattrs and "xattrs" in item:
# chown removes Linux capabilities, so set the extended attributes at the end, after chown, since they include
# the Linux capabilities in the "security.capability" attribute.
# chown removes Linux capabilities, so set the extended attributes at the end, after chown,
# since they include the Linux capabilities in the "security.capability" attribute.
warning = xattr.set_all(fd or path, item.xattrs, follow_symlinks=False)
if warning:
set_ec(EXIT_WARNING)
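The ordering constraint described in the comment above can be reproduced outside borg. A standalone sketch (Linux only; assumes a hypothetical file that already carries a file capability plus sufficient privileges; not borg code):

import os

path = "/tmp/capdemo"                             # hypothetical file with e.g. cap_net_raw set on it
cap = os.getxattr(path, "security.capability")    # read the capability blob
os.chown(path, os.getuid(), os.getgid())          # chown: the kernel clears security.capability here
os.setxattr(path, "security.capability", cap)     # re-applying the xattr afterwards restores it

This is why extract applies item.xattrs only after chown and acl_set have run.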

View File

@@ -570,8 +570,8 @@ def format_tb(exc):
remote = isinstance(exc, RemoteRepository.RPCError)
if remote:
prefix = "Borg server: "
trace_back = "\n".join(prefix + l for l in exc.exception_full.splitlines())
sys_info = "\n".join(prefix + l for l in exc.sysinfo.splitlines())
trace_back = "\n".join(prefix + line for line in exc.exception_full.splitlines())
sys_info = "\n".join(prefix + line for line in exc.sysinfo.splitlines())
else:
trace_back = traceback.format_exc()
sys_info = sysinfo()

View File

@@ -85,7 +85,8 @@ def with_repository(
:param manifest: load manifest and repo_objs (key), pass them as keyword arguments
:param cache: open cache, pass it as keyword argument (implies manifest)
:param secure: do assert_secure after loading manifest
:param compatibility: mandatory if not create and (manifest or cache), specifies mandatory feature categories to check
:param compatibility: mandatory if not create and (manifest or cache), specifies mandatory
feature categories to check
"""
# Note: with_repository decorator does not have a "key" argument (yet?)
compatibility = compat_check(
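For orientation, a hypothetical command method using the decorator as documented above (imports, names and parameter values are illustrative, not part of this diff):

from borg.archiver._common import with_repository
from borg.manifest import Manifest

class ExampleMixIn:
    @with_repository(cache=True, compatibility=(Manifest.Operation.WRITE,))
    def do_example(self, args, repository, manifest, cache):
        # cache=True implies manifest, so both are injected as keyword arguments next to repository
        ...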

View File

@@ -50,7 +50,7 @@ class ConfigMixIn:
if check_value:
try:
bin_id = unhexlify(value)
except:
except: # noqa
raise ValueError("Invalid value, must be 64 hex digits") from None
if len(bin_id) != 32:
raise ValueError("Invalid value, must be 64 hex digits")

View File

@@ -66,7 +66,7 @@ class RListMixIn:
# Strings are left-aligned, numbers are right-aligned.
# Note: time columns except ``isomtime``, ``isoctime`` and ``isoatime`` cannot be padded.
$ borg rlist --format '{archive:36} {time} [{id}]{NL}' /path/to/repo
ArchiveFoo Thu, 2021-12-09 10:22:28 [0b8e9a312bef3f2f6e2d0fc110c196827786c15eba0188738e81697a7fa3b274]
ArchiveFoo Thu, 2021-12-09 10:22:28 [0b8e9...3b274]
...
The following keys are always available:

View File

@@ -534,7 +534,7 @@ class LocalCache(CacheStatsMixin):
if sync and self.manifest.id != self.cache_config.manifest_id:
self.sync()
self.commit()
except:
except: # noqa
self.close()
raise
@@ -895,8 +895,9 @@ class LocalCache(CacheStatsMixin):
self.begin_txn()
with cache_if_remote(self.repository, decrypted_cache=self.repo_objs) as decrypted_repository:
# TEMPORARY HACK: to avoid archive index caching, create a FILE named ~/.cache/borg/REPOID/chunks.archive.d -
# this is only recommended if you have a fast, low latency connection to your repo (e.g. if repo is local disk)
# TEMPORARY HACK:
# to avoid archive index caching, create a FILE named ~/.cache/borg/REPOID/chunks.archive.d -
# this is only recommended if you have a fast, low latency connection to your repo (e.g. if repo is local).
self.do_cache = os.path.isdir(archive_path)
self.chunks = create_master_idx(self.chunks)
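The hack referenced in that comment can be applied by hand; a sketch assuming the default cache location and a placeholder REPOID (the hex repository id):

import os, shutil

p = os.path.expanduser("~/.cache/borg/REPOID/chunks.archive.d")
shutil.rmtree(p, ignore_errors=True)    # drop the per-archive index cache directory ...
open(p, "wb").close()                   # ... and leave an empty *file* of the same name behind

os.path.isdir() then returns False for that path, so do_cache stays off and archive indexes are not cached.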

View File

@@ -158,7 +158,7 @@ class KeyManager:
def import_paperkey(self, args):
try:
# imported here because it has global side effects
import readline
import readline # noqa
except ImportError:
print("Note: No line editing available due to missing readline support")

View File

@@ -52,7 +52,7 @@ def fuse_main():
if has_pyfuse3:
try:
trio.run(llfuse.main)
except:
except: # noqa
return 1 # TODO return signal number if it was killed by signal
else:
return None

View File

@@ -31,6 +31,6 @@ for FUSE_IMPL in BORG_FUSE_IMPL.split(","):
else:
raise RuntimeError("unknown fuse implementation in BORG_FUSE_IMPL: '%s'" % BORG_FUSE_IMPL)
else:
llfuse = None
llfuse = None # noqa
has_llfuse = False
has_pyfuse3 = False

View File

@@ -1,7 +1,7 @@
import os
from .errors import Error
from ..platformflags import is_win32, is_linux, is_freebsd, is_darwin
from ..platformflags import is_win32
class PythonLibcTooOld(Error):

View File

@@ -13,7 +13,6 @@ logger = create_logger()
from . import msgpack
from .. import __version__ as borg_version
from .. import chunker
def sysinfo():
@@ -35,7 +34,7 @@ def sysinfo():
linux_distribution = None
try:
msgpack_version = ".".join(str(v) for v in msgpack.version)
except:
except: # noqa
msgpack_version = "unknown"
from ..fuse_impl import llfuse, BORG_FUSE_IMPL

View File

@@ -9,7 +9,7 @@ import re
import shlex
import stat
import uuid
from typing import List, Dict, Set, Tuple, ClassVar, Any, TYPE_CHECKING, Literal
from typing import Dict, Set, Tuple, ClassVar, Any, TYPE_CHECKING, Literal
from binascii import hexlify
from collections import Counter, OrderedDict
from datetime import datetime, timezone
@@ -27,7 +27,6 @@ from .time import OutputTimestamp, format_time, safe_timestamp
from .. import __version__ as borg_version
from .. import __version_tuple__ as borg_version_tuple
from ..constants import * # NOQA
from ..platformflags import is_win32
if TYPE_CHECKING:
from ..item import ItemDiff
@@ -374,8 +373,8 @@ def format_archive(archive):
def parse_stringified_list(s):
l = re.split(" *, *", s)
return [item for item in l if item != ""]
items = re.split(" *, *", s)
return [item for item in items if item != ""]
class Location:
@@ -457,7 +456,7 @@ class Location:
(?:file://)? # optional file protocol
(?P<path>
(?:[a-zA-Z]:)? # Drive letter followed by a colon (optional)
(?:[^:]+) # Anything which does not contain a :, at least one character
(?:[^:]+) # Anything which does not contain a :, at least one char
)
""",
re.VERBOSE,

View File

@@ -1,7 +1,6 @@
import contextlib
import os
import os.path
import re
import shlex
import signal
import subprocess
@@ -11,7 +10,7 @@ import traceback
from .. import __version__
from ..platformflags import is_win32, is_linux, is_freebsd, is_darwin
from ..platformflags import is_win32
from ..logger import create_logger
logger = create_logger()

View File

@@ -6,10 +6,9 @@ from queue import LifoQueue
def translate(pat, match_end=r"\Z"):
"""Translate a shell-style pattern to a regular expression.
The pattern may include ``**<sep>`` (<sep> stands for the platform-specific path separator; "/" on POSIX systems) for
matching zero or more directory levels and "*" for matching zero or more arbitrary characters with the exception of
any path separator. Wrap meta-characters in brackets for a literal match (i.e. "[?]" to match the literal character
"?").
The pattern may include ``**<sep>`` (<sep> stands for the platform-specific path separator; "/" on POSIX systems)
for matching zero or more directory levels and "*" for matching zero or more arbitrary characters except any path
separator. Wrap meta-characters in brackets for a literal match (i.e. "[?]" to match the literal character "?").
Using match_end=regex one can give a regular expression that is used to match after the regex that is generated from
the pattern. The default is to match the end of the string.
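A small usage sketch of the documented behaviour (POSIX path separator assumed; the expected results follow from the docstring above, the paths are made up):

import re
from borg.helpers.shellpattern import translate

rx = re.compile(translate("etc/**/*.conf"))
print(bool(rx.match("etc/nginx/nginx.conf")))    # True: "**/" spans directory levels
print(bool(rx.match("etc/resolv.conf")))         # True: "**/" also matches zero levels
print(bool(rx.match("etc/nginx/nginx.conf.d")))  # False: the default match_end anchors at the end of the string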

View File

@@ -157,11 +157,11 @@ class ExclusiveLock:
# should be cleaned up anyway. Try to clean up, but don't crash.
try:
os.unlink(temp_unique_name)
except:
except: # noqa
pass
try:
os.rmdir(temp_path)
except:
except: # noqa
pass
def release(self):
@@ -417,7 +417,7 @@ class Lock:
# restore the roster state as before (undo the roster change):
if remove is not None:
self._roster.modify(remove, ADD)
except:
except: # noqa
# avoid orphan lock when an exception happens here, e.g. Ctrl-C!
self._lock.release()
raise

View File

@@ -164,7 +164,8 @@ class Archives(abc.MutableMapping):
consider_checkpoints = getattr(args, "consider_checkpoints", None)
if name is not None:
raise Error(
"Giving a specific name is incompatible with options --first, --last, -a / --match-archives, and --consider-checkpoints."
"Giving a specific name is incompatible with options --first, --last, "
"-a / --match-archives, and --consider-checkpoints."
)
return self.list(
sort_by=args.sort_by.split(","),

View File

@@ -108,7 +108,7 @@ class UnexpectedRPCDataFormatFromServer(Error):
# All method calls on the remote repository object must be allowlisted in RepositoryServer.rpc_methods and have api
# stubs in RemoteRepository. The @api decorator on these stubs is used to set server version requirements.
#
# Method parameters are identified only by name and never by position. Unknown parameters are ignored by the server side.
# Method parameters are identified only by name and never by position. Unknown parameters are ignored by the server.
# If a new parameter is important and may not be ignored, on the client a parameter specific version requirement needs
# to be added.
# When parameters are removed, they need to be preserved as defaulted parameters on the client stubs so that older
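As a sketch of the convention described in that comment: inside RemoteRepository, a client stub records since which server version a parameter exists and what older servers assumed, and keeps a default so the call still works against them. Method name, parameter and version numbers below are illustrative, not taken from borg:

@api(since=parse_version("1.0.0"), new_param={"since": parse_version("2.0.0b1"), "previously": None})
def some_method(self, key, new_param=None):
    """the real work happens on the server; the stub only fixes the calling convention"""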

View File

@@ -881,13 +881,15 @@ class Repository:
#
# Now we crash. But only segment 2 gets deleted, while segment 1 is still around. Now key 1
# is suddenly undeleted (because the delete in segment 2 is now missing).
# Again, note the requirement here. We delete these in the correct order that this doesn't happen,
# and only if the FS materialization of these deletes is reordered or parts dropped this can happen.
# In this case it doesn't cause outright corruption, 'just' an index count mismatch, which will be
# fixed by borg-check --repair.
# Again, note the requirement here. We delete these in the correct order that this doesn't
# happen, and only if the FS materialization of these deletes is reordered or parts dropped
# this can happen.
# In this case it doesn't cause outright corruption, 'just' an index count mismatch, which
# will be fixed by borg-check --repair.
#
# Note that in this check the index state is the proxy for a "most definitely settled" repository state,
# i.e. the assumption is that *all* operations on segments <= index state are completed and stable.
# Note that in this check the index state is the proxy for a "most definitely settled"
# repository state, i.e. the assumption is that *all* operations on segments <= index state
# are completed and stable.
try:
new_segment, size = self.io.write_delete(key, raise_full=True)
except LoggedIO.SegmentFull:

View File

@@ -19,7 +19,7 @@ try:
except: # noqa
raises = None
from ..fuse_impl import llfuse, has_llfuse, has_pyfuse3
from ..fuse_impl import llfuse, has_llfuse, has_pyfuse3 # NOQA
from .. import platform
from ..platformflags import is_win32

View File

@@ -5,7 +5,7 @@ import time
from ...constants import * # NOQA
from .. import are_symlinks_supported, are_hardlinks_supported
from ..platform import is_win32, is_darwin
from ...platformflags import is_win32, is_darwin
from . import cmd, create_regular_file, RK_ENCRYPTION, assert_line_exists, generate_archiver_tests
pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary") # NOQA

View File

@@ -1,14 +1,16 @@
"""
test_disk_full is very slow and not recommended to be included in daily testing.
for this test, an empty, writable 16MB filesystem mounted on DF_MOUNT is required.
for this test, an empty, writable 700MB filesystem mounted on DF_MOUNT is required.
for speed and other reasons, it is recommended that the underlying block device is
in RAM, not a magnetic or flash disk.
assuming /tmp is a tmpfs (in memory filesystem), one can use this:
dd if=/dev/zero of=/tmp/borg-disk bs=16M count=1
mkfs.ext4 /tmp/borg-disk
assuming /dev/shm is a tmpfs (in memory filesystem), one can use this:
dd if=/dev/zero of=/dev/shm/borg-disk bs=1M count=700
mkfs.ext4 /dev/shm/borg-disk
mkdir /tmp/borg-mount
sudo mount /tmp/borg-disk /tmp/borg-mount
sudo mount /dev/shm/borg-disk /tmp/borg-mount
sudo chown myuser /tmp/borg-mount/
if the directory does not exist, the test will be skipped.
"""
@@ -20,47 +22,46 @@ import shutil
import pytest
from ...constants import * # NOQA
from . import cmd_fixture
from . import cmd_fixture # NOQA
DF_MOUNT = "/tmp/borg-mount"
@pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 16MB fs mounted on %s" % DF_MOUNT)
def test_disk_full(cmd_fixture, monkeypatch):
def make_files(dir, count, size, rnd=True):
shutil.rmtree(dir, ignore_errors=True)
os.mkdir(dir)
if rnd:
count = random.randint(1, count)
if size > 1:
size = random.randint(1, size)
for i in range(count):
fn = os.path.join(dir, "file%03d" % i)
with open(fn, "wb") as f:
data = os.urandom(size)
f.write(data)
def make_files(dir, count, size, rnd=True):
shutil.rmtree(dir, ignore_errors=True)
os.mkdir(dir)
if rnd:
count = random.randint(1, count)
if size > 1:
size = random.randint(1, size)
for i in range(count):
fn = os.path.join(dir, "file%03d" % i)
with open(fn, "wb") as f:
data = os.urandom(size)
f.write(data)
@pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 700MB fs mounted on %s" % DF_MOUNT)
@pytest.mark.parametrize("test_pass", range(10))
def test_disk_full(test_pass, cmd_fixture, monkeypatch):
monkeypatch.setenv("BORG_CHECK_I_KNOW_WHAT_I_AM_DOING", "YES")
mount = DF_MOUNT
assert os.path.exists(mount)
repo = os.path.join(mount, "repo")
input = os.path.join(mount, "input")
reserve = os.path.join(mount, "reserve")
for j in range(100):
shutil.rmtree(repo, ignore_errors=True)
shutil.rmtree(input, ignore_errors=True)
# keep some space and some inodes in reserve that we can free up later:
make_files(reserve, 80, 100000, rnd=False)
rc, out = cmd_fixture(f"--repo={repo}", "rcreate")
if rc != EXIT_SUCCESS:
print("rcreate", rc, out)
assert rc == EXIT_SUCCESS
monkeypatch.setenv("BORG_DELETE_I_KNOW_WHAT_I_AM_DOING", "YES")
repo = os.path.join(DF_MOUNT, "repo")
input = os.path.join(DF_MOUNT, "input")
shutil.rmtree(repo, ignore_errors=True)
shutil.rmtree(input, ignore_errors=True)
rc, out = cmd_fixture(f"--repo={repo}", "rcreate", "--encryption=none")
if rc != EXIT_SUCCESS:
print("rcreate", rc, out)
assert rc == EXIT_SUCCESS
try:
try:
success, i = True, 0
while success:
i += 1
try:
make_files(input, 20, 200000)
# have some randomness here to produce different out of space conditions:
make_files(input, 40, 1000000, rnd=True)
except OSError as err:
if err.errno == errno.ENOSPC:
# already out of space
@@ -74,11 +75,11 @@ def test_disk_full(cmd_fixture, monkeypatch):
finally:
# make sure repo is not locked
shutil.rmtree(os.path.join(repo, "lock.exclusive"), ignore_errors=True)
os.remove(os.path.join(repo, "lock.roster"))
shutil.rmtree(os.path.join(repo, "lock.roster"), ignore_errors=True)
finally:
# now some error happened, likely we are out of disk space.
# free some space such that we can expect borg to be able to work normally:
shutil.rmtree(reserve, ignore_errors=True)
shutil.rmtree(input, ignore_errors=True)
rc, out = cmd_fixture(f"--repo={repo}", "rlist")
if rc != EXIT_SUCCESS:
print("rlist", rc, out)
@@ -86,3 +87,6 @@ def test_disk_full(cmd_fixture, monkeypatch):
if rc != EXIT_SUCCESS:
print("check", rc, out)
assert rc == EXIT_SUCCESS
finally:
# try to free the space allocated for the repo
cmd_fixture(f"--repo={repo}", "rdelete")

View File

@@ -13,7 +13,7 @@ from ...helpers import EXIT_WARNING
from ...helpers import flags_noatime, flags_normal
from .. import changedir, same_ts_ns
from .. import are_symlinks_supported, are_hardlinks_supported, is_utime_fully_supported, is_birthtime_fully_supported
from ..platform import is_darwin, is_win32
from ...platformflags import is_darwin, is_win32
from . import (
RK_ENCRYPTION,
requires_hardlinks,

View File

@@ -283,7 +283,7 @@ def test_migrate_lock_alive(archivers, request):
try:
with open(assert_data_file, "wb") as _out:
pickle.dump(assert_data, _out)
except:
except: # noqa
pass
try:
return migrate_lock(self, old_id, new_id)
@@ -297,7 +297,7 @@ def test_migrate_lock_alive(archivers, request):
try:
with open(assert_data_file, "wb") as _out:
pickle.dump(assert_data, _out)
except:
except: # noqa
pass
wrapper.num_calls = 0

View File

@@ -1,5 +1,5 @@
from ...constants import * # NOQA
from . import cmd_fixture, changedir
from . import cmd_fixture, changedir # NOQA
def test_return_codes(cmd_fixture, tmpdir):

View File

@@ -10,7 +10,7 @@ import os
import pytest
from .archiver import changedir, cmd_fixture
from .archiver import changedir, cmd_fixture # NOQA
from .item import Item
from ..constants import zeros

View File

@@ -202,9 +202,9 @@ class CryptoTestCase(BaseTestCase):
ikm = b"\x0b" * 22
salt = bytes.fromhex("000102030405060708090a0b0c")
info = bytes.fromhex("f0f1f2f3f4f5f6f7f8f9")
l = 42
length = 42
okm = hkdf_hmac_sha512(ikm, salt, info, l)
okm = hkdf_hmac_sha512(ikm, salt, info, length)
assert okm == bytes.fromhex(
"832390086cda71fb47625bb5ceb168e4c8e26a1a16ed34d9fc7fe92c1481579338da362cb8d9f925d7cb"
)
@@ -222,9 +222,9 @@
"b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7"
"d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff"
)
l = 82
length = 82
okm = hkdf_hmac_sha512(ikm, salt, info, l)
okm = hkdf_hmac_sha512(ikm, salt, info, length)
assert okm == bytes.fromhex(
"ce6c97192805b346e6161e821ed165673b84f400a2b514b2fe23d84cd189ddf1b695b48cbd1c838844"
"1137b3ce28f16aa64ba33ba466b24df6cfcb021ecff235f6a2056ce3af1de44d572097a8505d9e7a93"
@@ -234,9 +234,9 @@
ikm = bytes.fromhex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b")
salt = None
info = b""
l = 42
length = 42
okm = hkdf_hmac_sha512(ikm, salt, info, l)
okm = hkdf_hmac_sha512(ikm, salt, info, length)
assert okm == bytes.fromhex(
"f5fa02b18298a72a8c23898a8703472c6eb179dc204c03425c970e3b164bf90fff22d04836d0e2343bac"
)
@@ -245,9 +245,9 @@
ikm = bytes.fromhex("0b0b0b0b0b0b0b0b0b0b0b")
salt = bytes.fromhex("000102030405060708090a0b0c")
info = bytes.fromhex("f0f1f2f3f4f5f6f7f8f9")
l = 42
length = 42
okm = hkdf_hmac_sha512(ikm, salt, info, l)
okm = hkdf_hmac_sha512(ikm, salt, info, length)
assert okm == bytes.fromhex(
"7413e8997e020610fbf6823f2ce14bff01875db1ca55f68cfcf3954dc8aff53559bd5e3028b080f7c068"
)
@@ -256,9 +256,9 @@
ikm = bytes.fromhex("0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c")
salt = None
info = b""
l = 42
length = 42
okm = hkdf_hmac_sha512(ikm, salt, info, l)
okm = hkdf_hmac_sha512(ikm, salt, info, length)
assert okm == bytes.fromhex(
"1407d46013d98bc6decefcfee55f0f90b0c7f63d68eb1a80eaf07e953cfc0a3a5240a155d6e4daa965bb"
)
@@ -315,7 +315,8 @@ def test_repo_key_detect_does_not_raise_integrity_error(getpass, monkeypatch):
This is a regression test for a bug I introduced and fixed:
Traceback (most recent call last):
File "/home/user/borg-master/src/borg/testsuite/crypto.py", line 384, in test_repo_key_detect_does_not_raise_integrity_error
File "/home/user/borg-master/src/borg/testsuite/crypto.py", line 384,
in test_repo_key_detect_does_not_raise_integrity_error
RepoKey.detect(repository, manifest_data=None)
File "/home/user/borg-master/src/borg/crypto/key.py", line 402, in detect
if not key.load(target, passphrase):
@@ -335,7 +336,8 @@ def test_repo_key_detect_does_not_raise_integrity_error(getpass, monkeypatch):
1. FlexiKey.decrypt_key_file() is supposed to signal the decryption failure by returning None
2. FlexiKey.detect() relies on that interface - it tries an empty passphrase before prompting the user
3. my initial implementation of decrypt_key_file_argon2() was simply passing through the IntegrityError() from AES256_CTR_BASE.decrypt()
3. my initial implementation of decrypt_key_file_argon2() was simply passing through the IntegrityError()
from AES256_CTR_BASE.decrypt()
"""
repository = MagicMock(id=b"repository_id")
getpass.return_value = "hello, pass phrase"

View File

@@ -1,6 +1,6 @@
import pytest
from ..crypto.file_integrity import IntegrityCheckedFile, DetachedIntegrityCheckedFile, FileIntegrityError
from ..crypto.file_integrity import DetachedIntegrityCheckedFile, FileIntegrityError
class TestReadIntegrityFile:

View File

@@ -8,7 +8,7 @@ import os
import tempfile
import zlib
from ..hashindex import NSIndex, ChunkIndex, ChunkIndexEntry
from ..hashindex import NSIndex, ChunkIndex
from ..crypto.file_integrity import IntegrityCheckedFile, FileIntegrityError
from . import BaseTestCase, unopened_tempfile

View File

@@ -45,7 +45,7 @@ from ..helpers import eval_escapes
from ..helpers import safe_unlink
from ..helpers import text_to_json, binary_to_json
from ..helpers.passphrase import Passphrase, PasswordRetriesExceeded
from ..platform import is_cygwin, is_win32, is_darwin, swidth
from ..platform import is_cygwin, is_win32, is_darwin
from . import BaseTestCase, FakeInputs, are_hardlinks_supported
from . import rejected_dotdot_paths
@@ -178,11 +178,13 @@ class TestLocationWithoutEnv:
)
assert (
repr(Location("ssh://user@[2a02:0001:0002:0003:0004:0005:0006:0007]/some/path"))
== "Location(proto='ssh', user='user', host='2a02:0001:0002:0003:0004:0005:0006:0007', port=None, path='/some/path')"
== "Location(proto='ssh', user='user', "
"host='2a02:0001:0002:0003:0004:0005:0006:0007', port=None, path='/some/path')"
)
assert (
repr(Location("ssh://user@[2a02:0001:0002:0003:0004:0005:0006:0007]:1234/some/path"))
== "Location(proto='ssh', user='user', host='2a02:0001:0002:0003:0004:0005:0006:0007', port=1234, path='/some/path')"
== "Location(proto='ssh', user='user', "
"host='2a02:0001:0002:0003:0004:0005:0006:0007', port=1234, path='/some/path')"
)
def test_socket(self, monkeypatch, keys_dir):

View File

@@ -1,4 +1,3 @@
import re
import tempfile
from binascii import hexlify, unhexlify, a2b_base64
from unittest.mock import MagicMock

View File

@@ -148,28 +148,28 @@ class TestExclusiveLock:
while not timer.timed_out():
cycle += 1
try:
with ExclusiveLock(
lockpath, id=id, timeout=timeout / 20, sleep=-1
): # This timeout is only for not exceeding the given timeout by more than 5%. With sleep<0 it's constantly polling anyway.
# This timeout is only for not exceeding the given timeout by more than 5%.
# With sleep<0 it's constantly polling anyway.
with ExclusiveLock(lockpath, id=id, timeout=timeout / 20, sleep=-1):
lock_owner_count = lock_owner_counter.incr()
print_locked(
"Thread %2d: Acquired the lock. It's my %d. loop cycle. I am the %d. who has the lock concurrently."
% (thread_id, cycle, lock_owner_count)
"Thread %2d: Acquired the lock. It's my %d. loop cycle. "
"I am the %d. who has the lock concurrently." % (thread_id, cycle, lock_owner_count)
)
time.sleep(0.005)
lock_owner_count = lock_owner_counter.decr()
print_locked(
"Thread %2d: Releasing the lock, finishing my %d. loop cycle. Currently, %d colleagues still have the lock."
% (thread_id, cycle, lock_owner_count)
"Thread %2d: Releasing the lock, finishing my %d. loop cycle. "
"Currently, %d colleagues still have the lock." % (thread_id, cycle, lock_owner_count)
)
except LockTimeout:
print_locked("Thread %2d: Got LockTimeout, finishing my %d. loop cycle." % (thread_id, cycle))
except:
except: # noqa
exception_count = exception_counter.incr()
e = format_exc()
print_locked(
"Thread %2d: Exception thrown, finishing my %d. loop cycle. It's the %d. exception seen until now: %s"
% (thread_id, cycle, exception_count, e)
"Thread %2d: Exception thrown, finishing my %d. loop cycle. "
"It's the %d. exception seen until now: %s" % (thread_id, cycle, exception_count, e)
)
print_locked("Thread %2d: Loop timed out--terminating after %d loop cycles." % (thread_id, cycle))

View File

@@ -5,11 +5,11 @@ import sys
import tempfile
import unittest
from ..platformflags import is_win32, is_linux, is_freebsd, is_darwin
from ..platformflags import is_win32
from ..platform import acl_get, acl_set, swidth
from ..platform import get_process_id, process_alive
from . import BaseTestCase, unopened_tempfile
from .locking import free_pid
from .locking import free_pid # NOQA
ACCESS_ACL = """
@@ -186,7 +186,9 @@ class PlatformDarwinTestCase(BaseTestCase):
self.assert_equal(self.get_acl(file.name), {})
self.set_acl(
file.name,
b"!#acl 1\ngroup:ABCDEFAB-CDEF-ABCD-EFAB-CDEF00000000:staff:0:allow:read\nuser:FFFFEEEE-DDDD-CCCC-BBBB-AAAA00000000:root:0:allow:read\n",
b"!#acl 1\n"
b"group:ABCDEFAB-CDEF-ABCD-EFAB-CDEF00000000:staff:0:allow:read\n"
b"user:FFFFEEEE-DDDD-CCCC-BBBB-AAAA00000000:root:0:allow:read\n",
numeric_ids=False,
)
self.assert_in(
@@ -197,7 +199,9 @@
)
self.set_acl(
file2.name,
b"!#acl 1\ngroup:ABCDEFAB-CDEF-ABCD-EFAB-CDEF00000000:staff:0:allow:read\nuser:FFFFEEEE-DDDD-CCCC-BBBB-AAAA00000000:root:0:allow:read\n",
b"!#acl 1\n"
b"group:ABCDEFAB-CDEF-ABCD-EFAB-CDEF00000000:staff:0:allow:read\n"
b"user:FFFFEEEE-DDDD-CCCC-BBBB-AAAA00000000:root:0:allow:read\n",
numeric_ids=True,
)
self.assert_in(