Merge pull request #8098 from ThomasWaldmann/new-rc-master

optional more specific return codes (master)
This commit is contained in:
TW 2024-02-18 13:18:36 +01:00 committed by GitHub
commit ad3f1365f5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
58 changed files with 973 additions and 447 deletions

View File

@ -565,92 +565,169 @@ Message IDs are strings that essentially give a log message or operation a name,
full text, since texts change more frequently. Message IDs are unambiguous and reduce the need to parse
log messages.
Assigned message IDs are:
Assigned message IDs and related error RCs (exit codes) are:
.. See scripts/errorlist.py; this is slightly edited.
Errors
Archive.AlreadyExists
Archive {} already exists
Archive.DoesNotExist
Archive {} does not exist
Archive.IncompatibleFilesystemEncodingError
Failed to encode filename "{}" into file system encoding "{}". Consider configuring the LANG environment variable.
Cache.CacheInitAbortedError
Cache initialization aborted
Cache.EncryptionMethodMismatch
Repository encryption method changed since last access, refusing to continue
Cache.RepositoryAccessAborted
Repository access aborted
Cache.RepositoryIDNotUnique
Cache is newer than repository - do you have multiple, independently updated repos with same ID?
Cache.RepositoryReplay
Cache is newer than repository - this is either an attack or unsafe (multiple repos with same ID)
Buffer.MemoryLimitExceeded
Error rc: 2 traceback: no
Error: {}
ErrorWithTraceback rc: 2 traceback: yes
Error: {}
Buffer.MemoryLimitExceeded rc: 2 traceback: no
Requested buffer size {} is above the limit of {}.
ExtensionModuleError
The Borg binary extension modules do not seem to be installed properly
IntegrityError
Data integrity error: {}
NoManifestError
Repository has no manifest.
PlaceholderError
EfficientCollectionQueue.SizeUnderflow rc: 2 traceback: no
Could not pop_front first {} elements, collection only has {} elements..
RTError rc: 2 traceback: no
Runtime Error: {}
CancelledByUser rc: 3 traceback: no
Cancelled by user.
CommandError rc: 4 traceback: no
Command Error: {}
PlaceholderError rc: 5 traceback: no
Formatting Error: "{}".format({}): {}({})
KeyfileInvalidError
Invalid key file for repository {} found in {}.
KeyfileMismatchError
Mismatch between repository {} and key file {}.
KeyfileNotFoundError
No key file for repository {} found in {}.
PassphraseWrong
passphrase supplied in BORG_PASSPHRASE is incorrect
PasswordRetriesExceeded
exceeded the maximum password retries
RepoKeyNotFoundError
No key entry found in the config of repository {}.
UnsupportedManifestError
InvalidPlaceholder rc: 6 traceback: no
Invalid placeholder "{}" in string: {}
Repository.AlreadyExists rc: 10 traceback: no
A repository already exists at {}.
Repository.CheckNeeded rc: 12 traceback: yes
Inconsistency detected. Please run "borg check {}".
Repository.DoesNotExist rc: 13 traceback: no
Repository {} does not exist.
Repository.InsufficientFreeSpaceError rc: 14 traceback: no
Insufficient free space to complete transaction (required: {}, available: {}).
Repository.InvalidRepository rc: 15 traceback: no
{} is not a valid repository. Check repo config.
Repository.InvalidRepositoryConfig rc: 16 traceback: no
{} does not have a valid configuration. Check repo config [{}].
Repository.ObjectNotFound rc: 17 traceback: yes
Object with key {} not found in repository {}.
Repository.ParentPathDoesNotExist rc: 18 traceback: no
The parent path of the repo directory [{}] does not exist.
Repository.PathAlreadyExists rc: 19 traceback: no
There is already something at {}.
Repository.StorageQuotaExceeded rc: 20 traceback: no
The storage quota ({}) has been exceeded ({}). Try deleting some archives.
MandatoryFeatureUnsupported rc: 25 traceback: no
Unsupported repository feature(s) {}. A newer version of borg is required to access this repository.
NoManifestError rc: 26 traceback: no
Repository has no manifest.
UnsupportedManifestError rc: 27 traceback: no
Unsupported manifest envelope. A newer version is required to access this repository.
UnsupportedPayloadError
Unsupported payload type {}. A newer version is required to access this repository.
NotABorgKeyFile
Archive.AlreadyExists rc: 30 traceback: no
Archive {} already exists
Archive.DoesNotExist rc: 31 traceback: no
Archive {} does not exist
Archive.IncompatibleFilesystemEncodingError rc: 32 traceback: no
Failed to encode filename "{}" into file system encoding "{}". Consider configuring the LANG environment variable.
KeyfileInvalidError rc: 40 traceback: no
Invalid key file for repository {} found in {}.
KeyfileMismatchError rc: 41 traceback: no
Mismatch between repository {} and key file {}.
KeyfileNotFoundError rc: 42 traceback: no
No key file for repository {} found in {}.
NotABorgKeyFile rc: 43 traceback: no
This file is not a borg key backup, aborting.
RepoIdMismatch
RepoKeyNotFoundError rc: 44 traceback: no
No key entry found in the config of repository {}.
RepoIdMismatch rc: 45 traceback: no
This key backup seems to be for a different backup repository, aborting.
UnencryptedRepo
Keymanagement not available for unencrypted repositories.
UnknownKeyType
Keytype {0} is unknown.
LockError
UnencryptedRepo rc: 46 traceback: no
Key management not available for unencrypted repositories.
UnknownKeyType rc: 47 traceback: no
Key type {0} is unknown.
UnsupportedPayloadError rc: 48 traceback: no
Unsupported payload type {}. A newer version is required to access this repository.
UnsupportedKeyFormatError rc: 49 traceback: no
Your borg key is stored in an unsupported format. Try using a newer version of borg.
NoPassphraseFailure rc: 50 traceback: no
can not acquire a passphrase: {}
PasscommandFailure rc: 51 traceback: no
passcommand supplied in BORG_PASSCOMMAND failed: {}
PassphraseWrong rc: 52 traceback: no
passphrase supplied in BORG_PASSPHRASE, by BORG_PASSCOMMAND or via BORG_PASSPHRASE_FD is incorrect.
PasswordRetriesExceeded rc: 53 traceback: no
exceeded the maximum password retries
Cache.CacheInitAbortedError rc: 60 traceback: no
Cache initialization aborted
Cache.EncryptionMethodMismatch rc: 61 traceback: no
Repository encryption method changed since last access, refusing to continue
Cache.RepositoryAccessAborted rc: 62 traceback: no
Repository access aborted
Cache.RepositoryIDNotUnique rc: 63 traceback: no
Cache is newer than repository - do you have multiple, independently updated repos with same ID?
Cache.RepositoryReplay rc: 64 traceback: no
Cache, or information obtained from the security directory is newer than repository - this is either an attack or unsafe (multiple repos with same ID)
LockError rc: 70 traceback: no
Failed to acquire the lock {}.
LockErrorT
LockErrorT rc: 71 traceback: yes
Failed to acquire the lock {}.
ConnectionClosed
LockFailed rc: 72 traceback: yes
Failed to create/acquire the lock {} ({}).
LockTimeout rc: 73 traceback: no
Failed to create/acquire the lock {} (timeout).
NotLocked rc: 74 traceback: yes
Failed to release the lock {} (was not locked).
NotMyLock rc: 75 traceback: yes
Failed to release the lock {} (was/is locked, but not by me).
ConnectionClosed rc: 80 traceback: no
Connection closed by remote host
InvalidRPCMethod
ConnectionClosedWithHint rc: 81 traceback: no
Connection closed by remote host. {}
InvalidRPCMethod rc: 82 traceback: no
RPC method {} is not valid
PathNotAllowed
Repository path not allowed
RemoteRepository.RPCServerOutdated
PathNotAllowed rc: 83 traceback: no
Repository path not allowed: {}
RemoteRepository.RPCServerOutdated rc: 84 traceback: no
Borg server is too old for {}. Required version {}
UnexpectedRPCDataFormatFromClient
UnexpectedRPCDataFormatFromClient rc: 85 traceback: no
Borg {}: Got unexpected RPC data format from client.
UnexpectedRPCDataFormatFromServer
UnexpectedRPCDataFormatFromServer rc: 86 traceback: no
Got unexpected RPC data format from server:
{}
Repository.AlreadyExists
Repository {} already exists.
Repository.CheckNeeded
Inconsistency detected. Please run "borg check {}".
Repository.DoesNotExist
Repository {} does not exist.
Repository.InsufficientFreeSpaceError
Insufficient free space to complete transaction (required: {}, available: {}).
Repository.InvalidRepository
{} is not a valid repository. Check repo config.
Repository.AtticRepository
Attic repository detected. Please run "borg upgrade {}".
Repository.ObjectNotFound
Object with key {} not found in repository {}.
IntegrityError rc: 90 traceback: yes
Data integrity error: {}
FileIntegrityError rc: 91 traceback: yes
File failed integrity check: {}
DecompressionError rc: 92 traceback: yes
Decompression error: {}
Warnings
BorgWarning rc: 1
Warning: {}
BackupWarning rc: 1
{}: {}
FileChangedWarning rc: 100
{}: file changed while we backed it up
IncludePatternNeverMatchedWarning rc: 101
Include pattern '{}' never matched.
BackupError rc: 102
{}: backup error
BackupRaceConditionError rc: 103
{}: file type or inode changed while we backed it up (race condition, skipped file)
BackupOSError rc: 104
{}: {}
BackupPermissionError rc: 105
{}: {}
BackupIOError rc: 106
{}: {}
BackupFileNotFoundError rc: 107
{}: {}
Operations
- cache.begin_transaction

View File

@ -36,6 +36,9 @@ General:
Main usecase for this is to automate fully ``borg change-passphrase``.
BORG_DISPLAY_PASSPHRASE
When set, use the value to answer the "display the passphrase for verification" question when defining a new passphrase for encrypted repositories.
BORG_EXIT_CODES
When set to "modern", the borg process will return more specific exit codes (rc).
Default is "legacy" and returns rc 2 for all errors, 1 for all warnings, 0 for success.
BORG_HOST_ID
Borg usually computes a host id from the FQDN plus the results of ``uuid.getnode()`` (which usually returns
a unique id based on the MAC address of the network interface). Except if that MAC happens to be all-zero - in

View File

@ -7,10 +7,12 @@ Borg can exit with the following return codes (rc):
Return code Meaning
=========== =======
0 success (logged as INFO)
1 warning (operation reached its normal end, but there were warnings --
1 generic warning (operation reached its normal end, but there were warnings --
you should check the log, logged as WARNING)
2 error (like a fatal error, a local or remote exception, the operation
2 generic error (like a fatal error, a local or remote exception, the operation
did not reach its normal end, logged as ERROR)
3..99 specific error (enabled by BORG_EXIT_CODES=modern)
100..127 specific warning (enabled by BORG_EXIT_CODES=modern)
128+N killed by signal N (e.g. 137 == kill -9)
=========== =======

View File

@ -1,14 +1,63 @@
#!/usr/bin/env python3
# this script automatically generates the error list for the docs by
# looking at the "Error" class and its subclasses.
from textwrap import indent
import borg.archiver # noqa: F401 - need import to get Error and ErrorWithTraceback subclasses.
from borg.helpers import Error, ErrorWithTraceback
import borg.archiver # noqa: F401 - need import to get Error subclasses.
from borg.constants import * # NOQA
from borg.helpers import Error, BackupError, BorgWarning
classes = Error.__subclasses__() + ErrorWithTraceback.__subclasses__()
for cls in sorted(classes, key=lambda cls: (cls.__module__, cls.__qualname__)):
if cls is ErrorWithTraceback:
continue
print(" ", cls.__qualname__)
def subclasses(cls):
    """Return the set of all (direct and transitive) subclasses of *cls*.

    *cls* itself is not included in the result.
    """
    direct = cls.__subclasses__()
    # union of the direct subclasses with the recursively collected indirect ones;
    # use a set comprehension instead of set(generator) (flake8-comprehensions C401).
    return set(direct) | {sub for child in direct for sub in subclasses(child)}
# 0, 1, 2 are used for success, generic warning, generic error
# 3..99 are available for specific errors
# 100..127 are available for specific warnings
# 128+ are reserved for signals
free_error_rcs = set(range(EXIT_ERROR_BASE, EXIT_WARNING_BASE)) # 3 .. 99
free_warning_rcs = set(range(EXIT_WARNING_BASE, EXIT_SIGNAL_BASE)) # 100 .. 127
# these classes map to rc 2
generic_error_rc_classes = set()
generic_warning_rc_classes = set()
error_classes = {Error} | subclasses(Error)
for cls in sorted(error_classes, key=lambda cls: (cls.__module__, cls.__qualname__)):
traceback = "yes" if cls.traceback else "no"
rc = cls.exit_mcode
print(" ", cls.__qualname__, "rc:", rc, "traceback:", traceback)
print(indent(cls.__doc__, " " * 8))
if rc in free_error_rcs:
free_error_rcs.remove(rc)
elif rc == 2:
generic_error_rc_classes.add(cls.__qualname__)
else: # rc != 2
# if we did not intentionally map this to the generic error rc, this might be an issue:
print(f"ERROR: {rc} is not a free/available RC, but either duplicate or invalid")
print()
print("free error RCs:", sorted(free_error_rcs))
print("generic errors:", sorted(generic_error_rc_classes))
warning_classes = {BorgWarning} | subclasses(BorgWarning) | {BackupError} | subclasses(BackupError)
for cls in sorted(warning_classes, key=lambda cls: (cls.__module__, cls.__qualname__)):
rc = cls.exit_mcode
print(" ", cls.__qualname__, "rc:", rc)
print(indent(cls.__doc__, " " * 8))
if rc in free_warning_rcs:
free_warning_rcs.remove(rc)
elif rc == 1:
generic_warning_rc_classes.add(cls.__qualname__)
else: # rc != 1
# if we did not intentionally map this to the generic warning rc, this might be an issue:
print(f"ERROR: {rc} is not a free/available RC, but either duplicate or invalid")
print("\n")
print("free warning RCs:", sorted(free_warning_rcs))
print("generic warnings:", sorted(generic_warning_rc_classes))

View File

@ -1,4 +1,5 @@
import base64
import errno
import json
import os
import stat
@ -26,6 +27,8 @@ from .crypto.key import key_factory, UnsupportedPayloadError
from .compress import CompressionSpec
from .constants import * # NOQA
from .crypto.low_level import IntegrityError as IntegrityErrorBase
from .helpers import BackupError, BackupRaceConditionError
from .helpers import BackupOSError, BackupPermissionError, BackupFileNotFoundError, BackupIOError
from .hashindex import ChunkIndex, ChunkIndexEntry, CacheSynchronizer
from .helpers import HardLinkManager
from .helpers import ChunkIteratorFileWrapper, open_item
@ -181,37 +184,6 @@ def is_special(mode):
return stat.S_ISBLK(mode) or stat.S_ISCHR(mode) or stat.S_ISFIFO(mode)
# Per-file failure while accessing a file that is being backed up.
# Callers catch this (together with BackupOSError) to report a warning for the
# affected file and continue with the remaining files, rather than aborting.
class BackupError(Exception):
    """
    Exception raised for non-OSError-based exceptions while accessing backup files.
    """
class BackupOSError(Exception):
    """
    Wrapper for OSError raised while accessing backup files.

    Borg does different kinds of IO, and IO failures have different consequences.
    This wrapper represents failures of input file or extraction IO.
    These are non-critical and are only reported (exit code = 1, warning).

    Any unwrapped IO error is critical and aborts execution (for example repository IO failure).
    """

    def __init__(self, op, os_error):
        # keep both the operation name and the wrapped error, and mirror the
        # OSError attributes so callers can inspect them without unwrapping
        self.op = op
        self.os_error = os_error
        self.errno = os_error.errno
        self.strerror = os_error.strerror
        self.filename = os_error.filename

    def __str__(self):
        # prefix the message with the operation name, if one was given
        return f"{self.op}: {self.os_error}" if self.op else str(self.os_error)
class BackupIO:
op = ""
@ -224,7 +196,14 @@ class BackupIO:
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type and issubclass(exc_type, OSError):
raise BackupOSError(self.op, exc_val) from exc_val
E_MAP = {
errno.EPERM: BackupPermissionError,
errno.EACCES: BackupPermissionError,
errno.ENOENT: BackupFileNotFoundError,
errno.EIO: BackupIOError,
}
e_cls = E_MAP.get(exc_val.errno, BackupOSError)
raise e_cls(self.op, exc_val) from exc_val
backup_io = BackupIO()
@ -259,10 +238,10 @@ def stat_update_check(st_old, st_curr):
# are not duplicate in a short timeframe, this check is redundant and solved by the ino check:
if stat.S_IFMT(st_old.st_mode) != stat.S_IFMT(st_curr.st_mode):
# in this case, we dispatched to wrong handler - abort
raise BackupError("file type changed (race condition), skipping file")
raise BackupRaceConditionError("file type changed (race condition), skipping file")
if st_old.st_ino != st_curr.st_ino:
# in this case, the hardlinks-related code in create_helper has the wrong inode - abort!
raise BackupError("file inode changed (race condition), skipping file")
raise BackupRaceConditionError("file inode changed (race condition), skipping file")
# looks ok, we are still dealing with the same thing - return current stat:
return st_curr
@ -454,15 +433,21 @@ def archive_put_items(chunk_ids, *, repo_objs, cache=None, stats=None, add_refer
class Archive:
class DoesNotExist(Error):
"""Archive {} does not exist"""
class AlreadyExists(Error):
"""Archive {} already exists"""
exit_mcode = 30
class DoesNotExist(Error):
"""Archive {} does not exist"""
exit_mcode = 31
class IncompatibleFilesystemEncodingError(Error):
"""Failed to encode filename "{}" into file system encoding "{}". Consider configuring the LANG environment variable."""
exit_mcode = 32
def __init__(
self,
manifest,

View File

@ -24,8 +24,9 @@ try:
from ._common import Highlander
from .. import __version__
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE
from ..helpers import Error, set_ec
from ..helpers import EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE, classify_ec
from ..helpers import Error, CommandError, get_ec, modern_ec
from ..helpers import add_warning, BorgWarning, BackupWarning
from ..helpers import format_file_size
from ..helpers import remove_surrogates, text_to_json
from ..helpers import DatetimeWrapper, replace_placeholders
@ -123,20 +124,29 @@ class Archiver(
VersionMixIn,
):
def __init__(self, lock_wait=None, prog=None):
self.exit_code = EXIT_SUCCESS
self.lock_wait = lock_wait
self.prog = prog
self.last_checkpoint = time.monotonic()
def print_error(self, msg, *args):
    """Log *msg* (printf-style, formatted with *args*) as an error and set EXIT_ERROR.

    Does not terminate; it only records the error exit code on self.exit_code.
    """
    # use a conditional expression instead of the `args and x or y` idiom,
    # which would wrongly fall back to the unformatted msg if the formatted
    # result were falsy (e.g. an empty string).
    msg = msg % args if args else msg
    self.exit_code = EXIT_ERROR
    logger.error(msg)
def print_warning(self, msg, *args, **kw):
    """Log a warning message and (usually) register it to influence the exit code.

    Keyword args:
      wc: warning (exit) code, default EXIT_WARNING; wc=None does not influence the exit code.
      wt: template style for msg: "percent" (msg % args) or "curly" (msg.format(*args)).
      msgid: optional message id handed to the logger.
    """
    warning_code = kw.get("wc", EXIT_WARNING)  # note: wc=None can be used to not influence exit code
    warning_type = kw.get("wt", "percent")
    assert warning_type in ("percent", "curly")
    warning_msgid = kw.get("msgid")
    if warning_code is not None:
        add_warning(msg, *args, wc=warning_code, wt=warning_type)
    # conditional expressions instead of the `args and x or y` idiom, which would
    # wrongly fall back to the unformatted msg if the formatted result were falsy:
    if warning_type == "percent":
        output = msg % args if args else msg
    else:  # == "curly"
        output = msg.format(*args) if args else msg
    # plain if/else statement instead of a ternary used as an expression statement
    if warning_msgid:
        logger.warning(output, msgid=warning_msgid)
    else:
        logger.warning(output)
def print_warning(self, msg, *args):
msg = args and msg % args or msg
self.exit_code = EXIT_WARNING # we do not terminate here, so it is a warning
logger.warning(msg)
def print_warning_instance(self, warning):
    """Print a BorgWarning instance, using its class docstring as "curly" template."""
    assert isinstance(warning, BorgWarning)
    # a BackupWarning wraps a BackupError instance (args[1]); in that case the
    # message template and msgid come from the wrapped exception's class:
    if isinstance(warning, BackupWarning):
        cls = type(warning.args[1])
    else:
        cls = type(warning)
    self.print_warning(cls.__doc__, *warning.args, wc=warning.exit_code, wt="curly", msgid=cls.__qualname__)
def print_file_status(self, status, path):
# if we get called with status == None, the final file status was already printed
@ -503,7 +513,7 @@ class Archiver(
logger.error("You do not have a supported version of the msgpack python package installed. Terminating.")
logger.error("This should never happen as specific, supported versions are required by our pyproject.toml.")
logger.error("Do not contact borgbackup support about this.")
return set_ec(EXIT_ERROR)
raise Error("unsupported msgpack version")
if is_slow_msgpack():
logger.warning(PURE_PYTHON_MSGPACK_WARNING)
if args.debug_profile:
@ -519,7 +529,7 @@ class Archiver(
variables = dict(locals())
profiler.enable()
try:
return set_ec(func(args))
return get_ec(func(args))
finally:
profiler.disable()
profiler.snapshot_stats()
@ -536,7 +546,9 @@ class Archiver(
# it compatible (see above).
msgpack.pack(profiler.stats, fd, use_bin_type=True)
else:
return set_ec(func(args))
rc = func(args)
assert rc is None
return get_ec(rc)
def sig_info_handler(sig_no, stack): # pragma: no cover
@ -631,7 +643,7 @@ def main(): # pragma: no cover
except argparse.ArgumentTypeError as e:
# we might not have logging setup yet, so get out quickly
print(str(e), file=sys.stderr)
sys.exit(EXIT_ERROR)
sys.exit(CommandError.exit_mcode if modern_ec else EXIT_ERROR)
except Exception:
msg = "Local Exception"
tb = f"{traceback.format_exc()}\n{sysinfo()}"
@ -649,7 +661,7 @@ def main(): # pragma: no cover
tb = format_tb(e)
exit_code = e.exit_code
except RemoteRepository.RPCError as e:
important = e.exception_class not in ("LockTimeout",) and e.traceback
important = e.traceback
msg = e.exception_full if important else e.get_message()
msgid = e.exception_class
tb_log_level = logging.ERROR if important else logging.DEBUG
@ -685,16 +697,19 @@ def main(): # pragma: no cover
if args.show_rc:
rc_logger = logging.getLogger("borg.output.show-rc")
exit_msg = "terminating with %s status, rc %d"
if exit_code == EXIT_SUCCESS:
rc_logger.info(exit_msg % ("success", exit_code))
elif exit_code == EXIT_WARNING:
rc_logger.warning(exit_msg % ("warning", exit_code))
elif exit_code == EXIT_ERROR:
rc_logger.error(exit_msg % ("error", exit_code))
elif exit_code >= EXIT_SIGNAL_BASE:
rc_logger.error(exit_msg % ("signal", exit_code))
else:
try:
ec_class = classify_ec(exit_code)
except ValueError:
rc_logger.error(exit_msg % ("abnormal", exit_code or 666))
else:
if ec_class == "success":
rc_logger.info(exit_msg % (ec_class, exit_code))
elif ec_class == "warning":
rc_logger.warning(exit_msg % (ec_class, exit_code))
elif ec_class == "error":
rc_logger.error(exit_msg % (ec_class, exit_code))
elif ec_class == "signal":
rc_logger.error(exit_msg % (ec_class, exit_code))
sys.exit(exit_code)

View File

@ -9,6 +9,7 @@ from ..constants import * # NOQA
from ..crypto.key import FlexiKey
from ..helpers import format_file_size
from ..helpers import msgpack
from ..helpers import get_reset_ec
from ..item import Item
from ..platform import SyncFile
@ -21,38 +22,55 @@ class BenchmarkMixIn:
compression = "--compression=none"
# measure create perf (without files cache to always have it chunking)
t_start = time.monotonic()
rc = self.do_create(
self.parse_args(
[f"--repo={repo}", "create", compression, "--files-cache=disabled", "borg-benchmark-crud1", path]
rc = get_reset_ec(
self.do_create(
self.parse_args(
[
f"--repo={repo}",
"create",
compression,
"--files-cache=disabled",
"borg-benchmark-crud1",
path,
]
)
)
)
t_end = time.monotonic()
dt_create = t_end - t_start
assert rc == 0
# now build files cache
rc1 = self.do_create(
self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
rc1 = get_reset_ec(
self.do_create(self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path]))
)
rc2 = get_reset_ec(
self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
)
rc2 = self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
assert rc1 == rc2 == 0
# measure a no-change update (archive1 is still present)
t_start = time.monotonic()
rc1 = self.do_create(
self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
rc1 = get_reset_ec(
self.do_create(self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path]))
)
t_end = time.monotonic()
dt_update = t_end - t_start
rc2 = self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
rc2 = get_reset_ec(
self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
)
assert rc1 == rc2 == 0
# measure extraction (dry-run: without writing result to disk)
t_start = time.monotonic()
rc = self.do_extract(self.parse_args([f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
rc = get_reset_ec(
self.do_extract(self.parse_args([f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
)
t_end = time.monotonic()
dt_extract = t_end - t_start
assert rc == 0
# measure archive deletion (of LAST present archive with the data)
t_start = time.monotonic()
rc = self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
rc = get_reset_ec(
self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
)
t_end = time.monotonic()
dt_delete = t_end - t_start
assert rc == 0
@ -93,8 +111,6 @@ class BenchmarkMixIn:
print(fmt % ("U", msg, total_size_MB / dt_update, count, file_size_formatted, content, dt_update))
print(fmt % ("D", msg, total_size_MB / dt_delete, count, file_size_formatted, content, dt_delete))
return 0
def do_benchmark_cpu(self, args):
"""Benchmark CPU bound operations."""
from timeit import timeit

View File

@ -2,7 +2,7 @@ import argparse
from ._common import with_repository, Highlander
from ..archive import ArchiveChecker
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
from ..helpers import set_ec, EXIT_WARNING, CancelledByUser, CommandError
from ..helpers import yes
from ..logger import create_logger
@ -30,25 +30,22 @@ class CheckMixIn:
retry=False,
env_var_override="BORG_CHECK_I_KNOW_WHAT_I_AM_DOING",
):
return EXIT_ERROR
raise CancelledByUser()
if args.repo_only and any((args.verify_data, args.first, args.last, args.match_archives)):
self.print_error(
raise CommandError(
"--repository-only contradicts --first, --last, -a / --match-archives and --verify-data arguments."
)
return EXIT_ERROR
if args.repair and args.max_duration:
self.print_error("--repair does not allow --max-duration argument.")
return EXIT_ERROR
raise CommandError("--repair does not allow --max-duration argument.")
if args.max_duration and not args.repo_only:
# when doing a partial repo check, we can only check crc32 checksums in segment files,
# we can't build a fresh repo index in memory to verify the on-disk index against it.
# thus, we should not do an archives check based on an unknown-quality on-disk repo index.
# also, there is no max_duration support in the archives check code anyway.
self.print_error("--repository-only is required for --max-duration support.")
return EXIT_ERROR
raise CommandError("--repository-only is required for --max-duration support.")
if not args.archives_only:
if not repository.check(repair=args.repair, max_duration=args.max_duration):
return EXIT_WARNING
set_ec(EXIT_WARNING)
if not args.repo_only and not ArchiveChecker().check(
repository,
verify_data=args.verify_data,
@ -62,8 +59,8 @@ class CheckMixIn:
oldest=args.oldest,
newest=args.newest,
):
return EXIT_WARNING
return EXIT_SUCCESS
set_ec(EXIT_WARNING)
return
def build_parser_check(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -2,7 +2,6 @@ import argparse
from ._common import with_repository, Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..manifest import Manifest
from ..logger import create_logger
@ -19,7 +18,6 @@ class CompactMixIn:
repository.put(Manifest.MANIFEST_ID, data)
threshold = args.threshold / 100
repository.commit(compact=True, threshold=threshold)
return EXIT_SUCCESS
def build_parser_compact(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -1,4 +1,3 @@
import sys
import argparse
import configparser
from binascii import unhexlify
@ -6,8 +5,7 @@ from binascii import unhexlify
from ._common import with_repository
from ..cache import Cache, assert_secure
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING
from ..helpers import Error
from ..helpers import Error, CommandError
from ..helpers import Location
from ..helpers import parse_file_size
from ..manifest import Manifest
@ -99,9 +97,7 @@ class ConfigMixIn:
if not args.list:
if args.name is None:
self.print_error("No config key name was provided.")
return self.exit_code
raise CommandError("No config key name was provided.")
try:
section, name = args.name.split(".")
except ValueError:
@ -142,9 +138,7 @@ class ConfigMixIn:
try:
print(config.get(section, name))
except (configparser.NoOptionError, configparser.NoSectionError) as e:
print(e, file=sys.stderr)
return EXIT_WARNING
return EXIT_SUCCESS
raise Error(e)
finally:
if args.cache:
cache.close()

View File

@ -29,6 +29,7 @@ from ..helpers import prepare_subprocess_env
from ..helpers import sig_int, ignore_sigint
from ..helpers import iter_separated
from ..helpers import MakePathSafeAction
from ..helpers import Error, CommandError, BackupWarning, FileChangedWarning
from ..manifest import Manifest
from ..patterns import PatternMatcher
from ..platform import is_win32
@ -79,18 +80,15 @@ class CreateMixIn:
preexec_fn=None if is_win32 else ignore_sigint,
)
except (FileNotFoundError, PermissionError) as e:
self.print_error("Failed to execute command: %s", e)
return self.exit_code
raise CommandError(f"Failed to execute command: {e}")
status = fso.process_pipe(
path=path, cache=cache, fd=proc.stdout, mode=mode, user=user, group=group
)
rc = proc.wait()
if rc != 0:
self.print_error("Command %r exited with status %d", args.paths[0], rc)
return self.exit_code
except BackupOSError as e:
self.print_error("%s: %s", path, e)
return self.exit_code
raise CommandError(f"Command {args.paths[0]!r} exited with status {rc}")
except BackupError as e:
raise Error(f"{path!r}: {e}")
else:
status = "+" # included
self.print_file_status(status, path)
@ -103,8 +101,7 @@ class CreateMixIn:
args.paths, stdout=subprocess.PIPE, env=env, preexec_fn=None if is_win32 else ignore_sigint
)
except (FileNotFoundError, PermissionError) as e:
self.print_error("Failed to execute command: %s", e)
return self.exit_code
raise CommandError(f"Failed to execute command: {e}")
pipe_bin = proc.stdout
else: # args.paths_from_stdin == True
pipe_bin = sys.stdin.buffer
@ -124,19 +121,18 @@ class CreateMixIn:
read_special=args.read_special,
dry_run=dry_run,
)
except (BackupOSError, BackupError) as e:
self.print_warning("%s: %s", path, e)
except BackupError as e:
self.print_warning_instance(BackupWarning(path, e))
status = "E"
if status == "C":
self.print_warning("%s: file changed while we backed it up", path)
self.print_warning_instance(FileChangedWarning(path))
self.print_file_status(status, path)
if not dry_run and status is not None:
fso.stats.files_stats[status] += 1
if args.paths_from_command:
rc = proc.wait()
if rc != 0:
self.print_error("Command %r exited with status %d", args.paths[0], rc)
return self.exit_code
raise CommandError(f"Command {args.paths[0]!r} exited with status {rc}")
else:
for path in args.paths:
if path == "": # issue #5637
@ -152,9 +148,9 @@ class CreateMixIn:
status = fso.process_pipe(
path=path, cache=cache, fd=sys.stdin.buffer, mode=mode, user=user, group=group
)
except BackupOSError as e:
except BackupError as e:
self.print_warning_instance(BackupWarning(path, e))
status = "E"
self.print_warning("%s: %s", path, e)
else:
status = "+" # included
self.print_file_status(status, path)
@ -184,9 +180,9 @@ class CreateMixIn:
# if we get back here, we've finished recursing into <path>,
# we do not ever want to get back in there (even if path is given twice as recursion root)
skip_inodes.add((st.st_ino, st.st_dev))
except (BackupOSError, BackupError) as e:
except BackupError as e:
# this comes from os.stat, self._rec_walk has own exception handler
self.print_warning("%s: %s", path, e)
self.print_warning_instance(BackupWarning(path, e))
continue
if not dry_run:
if args.progress:
@ -197,7 +193,7 @@ class CreateMixIn:
if sig_int:
# do not save the archive if the user ctrl-c-ed - it is valid, but incomplete.
# we already have a checkpoint archive in this case.
self.print_error("Got Ctrl-C / SIGINT.")
raise Error("Got Ctrl-C / SIGINT.")
else:
archive.save(comment=args.comment, timestamp=args.timestamp)
args.stats |= args.json
@ -277,7 +273,6 @@ class CreateMixIn:
create_inner(archive, cache, fso)
else:
create_inner(None, None, None)
return self.exit_code
def _process_any(self, *, path, parent_fd, name, st, fso, cache, read_special, dry_run):
"""
@ -369,7 +364,7 @@ class CreateMixIn:
else:
self.print_warning("Unknown file type: %s", path)
return
except (BackupError, BackupOSError) as err:
except BackupError as err:
if isinstance(err, BackupOSError):
if err.errno in (errno.EPERM, errno.EACCES):
# Do not try again, such errors can not be fixed by retrying.
@ -525,11 +520,11 @@ class CreateMixIn:
dry_run=dry_run,
)
except (BackupOSError, BackupError) as e:
self.print_warning("%s: %s", path, e)
except BackupError as e:
self.print_warning_instance(BackupWarning(path, e))
status = "E"
if status == "C":
self.print_warning("%s: file changed while we backed it up", path)
self.print_warning_instance(FileChangedWarning(path))
if not recurse_excluded_dir:
self.print_file_status(status, path)
if not dry_run and status is not None:

View File

@ -13,6 +13,7 @@ from ..helpers import bin_to_hex, prepare_dump_dict
from ..helpers import dash_open
from ..helpers import StableDict
from ..helpers import positive_int_validator, archivename_validator
from ..helpers import CommandError, RTError
from ..manifest import Manifest
from ..platform import get_process_id
from ..repository import Repository, LIST_SCAN_LIMIT, TAG_PUT, TAG_DELETE, TAG_COMMIT
@ -27,7 +28,6 @@ class DebugMixIn:
"""display system information for debugging / bug reports"""
print(sysinfo())
print("Process ID:", get_process_id())
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_dump_archive_items(self, args, repository, manifest):
@ -41,7 +41,6 @@ class DebugMixIn:
with open(filename, "wb") as fd:
fd.write(data)
print("Done.")
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_dump_archive(self, args, repository, manifest):
@ -96,7 +95,6 @@ class DebugMixIn:
with dash_open(args.path, "w") as fd:
output(fd)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_dump_manifest(self, args, repository, manifest):
@ -108,7 +106,6 @@ class DebugMixIn:
with dash_open(args.path, "w") as fd:
json.dump(meta, fd, indent=4)
return EXIT_SUCCESS
@with_repository(manifest=False)
def do_debug_dump_repo_objs(self, args, repository):
@ -164,7 +161,6 @@ class DebugMixIn:
decrypt_dump(i, id, cdata)
i += 1
print("Done.")
return EXIT_SUCCESS
@with_repository(manifest=False)
def do_debug_search_repo_objs(self, args, repository):
@ -191,8 +187,7 @@ class DebugMixIn:
except (ValueError, UnicodeEncodeError):
wanted = None
if not wanted:
self.print_error("search term needs to be hex:123abc or str:foobar style")
return EXIT_ERROR
raise CommandError("search term needs to be hex:123abc or str:foobar style")
from ..crypto.key import key_factory
@ -234,7 +229,6 @@ class DebugMixIn:
if i % 10000 == 0:
print("%d objects processed." % i)
print("Done.")
return EXIT_SUCCESS
@with_repository(manifest=False)
def do_debug_get_obj(self, args, repository):
@ -245,17 +239,14 @@ class DebugMixIn:
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
raise CommandError(f"object id {hex_id} is invalid [{str(err)}].")
try:
data = repository.get(id)
except Repository.ObjectNotFound:
print("object %s not found." % hex_id)
return EXIT_ERROR
raise RTError("object %s not found." % hex_id)
with open(args.path, "wb") as f:
f.write(data)
print("object %s fetched." % hex_id)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_id_hash(self, args, repository, manifest):
@ -265,7 +256,6 @@ class DebugMixIn:
key = manifest.key
id = key.id_hash(data)
print(id.hex())
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_parse_obj(self, args, repository, manifest):
@ -278,8 +268,7 @@ class DebugMixIn:
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
raise CommandError(f"object id {hex_id} is invalid [{str(err)}].")
with open(args.object_path, "rb") as f:
cdata = f.read()
@ -293,8 +282,6 @@ class DebugMixIn:
with open(args.binary_path, "wb") as f:
f.write(data)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_format_obj(self, args, repository, manifest):
"""format file and metadata into borg object file"""
@ -306,8 +293,7 @@ class DebugMixIn:
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
raise CommandError(f"object id {hex_id} is invalid [{str(err)}].")
with open(args.binary_path, "rb") as f:
data = f.read()
@ -321,7 +307,6 @@ class DebugMixIn:
with open(args.object_path, "wb") as f:
f.write(data_encrypted)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_put_obj(self, args, repository):
@ -334,12 +319,11 @@ class DebugMixIn:
if len(id) != 32: # 256bit
raise ValueError("id must be 256bits or 64 hex digits")
except ValueError as err:
print(f"object id {hex_id} is invalid [{str(err)}].")
return EXIT_ERROR
raise CommandError(f"object id {hex_id} is invalid [{str(err)}].")
repository.put(id, data)
print("object %s put." % hex_id)
repository.commit(compact=False)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_delete_obj(self, args, repository):
@ -360,7 +344,6 @@ class DebugMixIn:
if modified:
repository.commit(compact=False)
print("Done.")
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_refcount_obj(self, args, repository, manifest, cache):
@ -376,7 +359,6 @@ class DebugMixIn:
print("object %s has %d referrers [info from chunks cache]." % (hex_id, refcount))
except KeyError:
print("object %s not found [info from chunks cache]." % hex_id)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_dump_hints(self, args, repository):
@ -394,7 +376,6 @@ class DebugMixIn:
json.dump(hints, fd, indent=4)
finally:
repository.rollback()
return EXIT_SUCCESS
def do_debug_convert_profile(self, args):
"""convert Borg profile to Python profile"""
@ -402,7 +383,6 @@ class DebugMixIn:
with args.output, args.input:
marshal.dump(msgpack.unpack(args.input, use_list=False, raw=False), args.output)
return EXIT_SUCCESS
def build_parser_debug(self, subparsers, common_parser, mid_common_parser):
debug_epilog = process_epilog(

View File

@ -22,13 +22,13 @@ class DeleteMixIn:
manifest = Manifest.load(repository, (Manifest.Operation.DELETE,))
archive_names = tuple(x.name for x in manifest.archives.list_considering(args))
if not archive_names:
return self.exit_code
return
if args.match_archives is None and args.first == 0 and args.last == 0:
self.print_error(
"Aborting: if you really want to delete all archives, please use -a 'sh:*' "
"or just delete the whole repository (might be much faster)."
)
return EXIT_ERROR
return
if args.forced == 2:
deleted = False
@ -37,8 +37,7 @@ class DeleteMixIn:
try:
current_archive = manifest.archives.pop(archive_name)
except KeyError:
self.exit_code = EXIT_WARNING
logger.warning(f"Archive {archive_name} not found ({i}/{len(archive_names)}).")
self.print_warning(f"Archive {archive_name} not found ({i}/{len(archive_names)}).")
else:
deleted = True
if self.output_list:
@ -50,10 +49,10 @@ class DeleteMixIn:
manifest.write()
# note: might crash in compact() after committing the repo
repository.commit(compact=False)
logger.warning('Done. Run "borg check --repair" to clean up the mess.')
self.print_warning('Done. Run "borg check --repair" to clean up the mess.', wc=None)
else:
logger.warning("Aborted.")
return self.exit_code
self.print_warning("Aborted.", wc=None)
return
stats = Statistics(iec=args.iec)
with Cache(repository, manifest, progress=args.progress, lock_wait=self.lock_wait, iec=args.iec) as cache:
@ -73,7 +72,7 @@ class DeleteMixIn:
try:
archive_info = manifest.archives[archive_name]
except KeyError:
logger.warning(msg_not_found.format(archive_name, i, len(archive_names)))
self.print_warning(msg_not_found.format(archive_name, i, len(archive_names)))
else:
if self.output_list:
logger_list.info(msg_delete.format(format_archive(archive_info), i, len(archive_names)))
@ -87,14 +86,12 @@ class DeleteMixIn:
uncommitted_deletes = 0 if checkpointed else (uncommitted_deletes + 1)
if sig_int:
# Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
self.print_error("Got Ctrl-C / SIGINT.")
raise Error("Got Ctrl-C / SIGINT.")
elif uncommitted_deletes > 0:
checkpoint_func()
if args.stats:
log_multi(str(stats), logger=logging.getLogger("borg.output.stats"))
return self.exit_code
def build_parser_delete(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_archive_filters_group

View File

@ -37,7 +37,8 @@ class DiffMixIn:
self.print_warning(
"--chunker-params might be different between archives, diff will be slow.\n"
"If you know for certain that they are the same, pass --same-chunker-params "
"to override this check."
"to override this check.",
wc=None,
)
matcher = build_matcher(args.patterns, args.paths)
@ -74,9 +75,7 @@ class DiffMixIn:
sys.stdout.write(res)
for pattern in matcher.get_unmatched_include_patterns():
self.print_warning("Include pattern '%s' never matched.", pattern)
return self.exit_code
self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
def build_parser_diff(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -6,12 +6,13 @@ import stat
from ._common import with_repository, with_archive
from ._common import build_filter, build_matcher
from ..archive import BackupError, BackupOSError
from ..archive import BackupError
from ..constants import * # NOQA
from ..helpers import archivename_validator, PathSpec
from ..helpers import remove_surrogates
from ..helpers import HardLinkManager
from ..helpers import ProgressIndicatorPercent
from ..helpers import BackupWarning, IncludePatternNeverMatchedWarning
from ..manifest import Manifest
from ..logger import create_logger
@ -64,8 +65,8 @@ class ExtractMixIn:
dir_item = dirs.pop(-1)
try:
archive.extract_item(dir_item, stdout=stdout)
except BackupOSError as e:
self.print_warning("%s: %s", remove_surrogates(dir_item.path), e)
except BackupError as e:
self.print_warning_instance(BackupWarning(remove_surrogates(dir_item.path), e))
if output_list:
logging.getLogger("borg.output.list").info(remove_surrogates(item.path))
try:
@ -79,9 +80,8 @@ class ExtractMixIn:
archive.extract_item(
item, stdout=stdout, sparse=sparse, hlm=hlm, pi=pi, continue_extraction=continue_extraction
)
except (BackupOSError, BackupError) as e:
self.print_warning("%s: %s", remove_surrogates(orig_path), e)
except BackupError as e:
self.print_warning_instance(BackupWarning(remove_surrogates(orig_path), e))
if pi:
pi.finish()
@ -94,14 +94,13 @@ class ExtractMixIn:
dir_item = dirs.pop(-1)
try:
archive.extract_item(dir_item, stdout=stdout)
except BackupOSError as e:
self.print_warning("%s: %s", remove_surrogates(dir_item.path), e)
except BackupError as e:
self.print_warning_instance(BackupWarning(remove_surrogates(dir_item.path), e))
for pattern in matcher.get_unmatched_include_patterns():
self.print_warning("Include pattern '%s' never matched.", pattern)
self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
if pi:
# clear progress output
pi.finish()
return self.exit_code
def build_parser_extract(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -475,12 +475,10 @@ class HelpMixIn:
msg_lines += [" Commands: %s" % ", ".join(sorted(commands.keys()))]
msg_lines += [" Topics: %s" % ", ".join(sorted(self.helptext.keys()))]
parser.error("\n".join(msg_lines))
return self.exit_code
def do_subcommand_help(self, parser, args):
"""display infos about subcommand"""
parser.print_help()
return EXIT_SUCCESS
do_maincommand_help = do_subcommand_help

View File

@ -50,14 +50,11 @@ class InfoMixIn:
.strip()
.format(**info)
)
if self.exit_code:
break
if not args.json and len(archive_names) - i:
print()
if args.json:
json_print(basic_json_data(manifest, cache=cache, extra={"archives": output_data}))
return self.exit_code
def build_parser_info(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_archive_filters_group

View File

@ -6,7 +6,7 @@ from ..constants import * # NOQA
from ..crypto.key import AESOCBRepoKey, CHPORepoKey, Blake2AESOCBRepoKey, Blake2CHPORepoKey
from ..crypto.key import AESOCBKeyfileKey, CHPOKeyfileKey, Blake2AESOCBKeyfileKey, Blake2CHPOKeyfileKey
from ..crypto.keymanager import KeyManager
from ..helpers import PathSpec
from ..helpers import PathSpec, CommandError
from ..manifest import Manifest
from ._common import with_repository
@ -22,22 +22,19 @@ class KeysMixIn:
"""Change repository key file passphrase"""
key = manifest.key
if not hasattr(key, "change_passphrase"):
print("This repository is not encrypted, cannot change the passphrase.")
return EXIT_ERROR
raise CommandError("This repository is not encrypted, cannot change the passphrase.")
key.change_passphrase()
logger.info("Key updated")
if hasattr(key, "find_key"):
# print key location to make backing it up easier
logger.info("Key location: %s", key.find_key())
return EXIT_SUCCESS
@with_repository(exclusive=True, manifest=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
def do_change_location(self, args, repository, manifest, cache):
"""Change repository key location"""
key = manifest.key
if not hasattr(key, "change_passphrase"):
print("This repository is not encrypted, cannot change the key location.")
return EXIT_ERROR
raise CommandError("This repository is not encrypted, cannot change the key location.")
if args.key_mode == "keyfile":
if isinstance(key, AESOCBRepoKey):
@ -50,7 +47,7 @@ class KeysMixIn:
key_new = Blake2CHPOKeyfileKey(repository)
else:
print("Change not needed or not supported.")
return EXIT_WARNING
return
if args.key_mode == "repokey":
if isinstance(key, AESOCBKeyfileKey):
key_new = AESOCBRepoKey(repository)
@ -62,7 +59,7 @@ class KeysMixIn:
key_new = Blake2CHPORepoKey(repository)
else:
print("Change not needed or not supported.")
return EXIT_WARNING
return
for name in ("repository_id", "crypt_key", "id_key", "chunk_seed", "sessionid", "cipher"):
value = getattr(key, name)
@ -91,8 +88,6 @@ class KeysMixIn:
key.remove(key.target) # remove key from current location
logger.info(f"Key moved to {loc}")
return EXIT_SUCCESS
@with_repository(lock=False, exclusive=False, manifest=False, cache=False)
def do_key_export(self, args, repository):
"""Export the repository key for backup"""
@ -109,9 +104,7 @@ class KeysMixIn:
else:
manager.export(args.path)
except IsADirectoryError:
self.print_error(f"'{args.path}' must be a file, not a directory")
return EXIT_ERROR
return EXIT_SUCCESS
raise CommandError(f"'{args.path}' must be a file, not a directory")
@with_repository(lock=False, exclusive=False, manifest=False, cache=False)
def do_key_import(self, args, repository):
@ -119,18 +112,14 @@ class KeysMixIn:
manager = KeyManager(repository)
if args.paper:
if args.path:
self.print_error("with --paper import from file is not supported")
return EXIT_ERROR
raise CommandError("with --paper import from file is not supported")
manager.import_paperkey(args)
else:
if not args.path:
self.print_error("input file to import key from expected")
return EXIT_ERROR
raise CommandError("expected input file to import key from")
if args.path != "-" and not os.path.exists(args.path):
self.print_error("input file does not exist: " + args.path)
return EXIT_ERROR
raise CommandError("input file does not exist: " + args.path)
manager.import_keyfile(args)
return EXIT_SUCCESS
def build_parser_keys(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -40,8 +40,6 @@ class ListMixIn:
else:
_list_inner(cache=None)
return self.exit_code
def build_parser_list(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_exclusion_group

View File

@ -4,7 +4,7 @@ import subprocess
from ._common import with_repository
from ..cache import Cache
from ..constants import * # NOQA
from ..helpers import prepare_subprocess_env
from ..helpers import prepare_subprocess_env, set_ec
from ..manifest import Manifest
from ..logger import create_logger
@ -33,7 +33,8 @@ class LocksMixIn:
env = prepare_subprocess_env(system=True)
try:
# we exit with the return code we get from the subprocess
return subprocess.call([args.command] + args.args, env=env)
rc = subprocess.call([args.command] + args.args, env=env)
set_ec(rc)
finally:
# we need to commit the "no change" operation we did to the manifest
# because it created a new segment file in the repository. if we would
@ -48,7 +49,6 @@ class LocksMixIn:
"""Break the repository lock (e.g. in case it was left by a dead borg."""
repository.break_lock()
Cache.break_lock(repository)
return self.exit_code
def build_parser_locks(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -3,7 +3,7 @@ import os
from ._common import with_repository, Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_ERROR
from ..helpers import RTError
from ..helpers import PathSpec
from ..helpers import umount
from ..manifest import Manifest
@ -22,18 +22,15 @@ class MountMixIn:
from ..fuse_impl import llfuse, BORG_FUSE_IMPL
if llfuse is None:
self.print_error("borg mount not available: no FUSE support, BORG_FUSE_IMPL=%s." % BORG_FUSE_IMPL)
return self.exit_code
raise RTError("borg mount not available: no FUSE support, BORG_FUSE_IMPL=%s." % BORG_FUSE_IMPL)
if not os.path.isdir(args.mountpoint):
self.print_error(f"{args.mountpoint}: Mountpoint must be an **existing directory**")
return self.exit_code
raise RTError(f"{args.mountpoint}: Mountpoint must be an **existing directory**")
if not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
self.print_error(f"{args.mountpoint}: Mountpoint must be a **writable** directory")
return self.exit_code
raise RTError(f"{args.mountpoint}: Mountpoint must be a **writable** directory")
return self._do_mount(args)
self._do_mount(args)
@with_repository(compatibility=(Manifest.Operation.READ,))
def _do_mount(self, args, repository, manifest):
@ -46,12 +43,11 @@ class MountMixIn:
operations.mount(args.mountpoint, args.options, args.foreground)
except RuntimeError:
# Relevant error message already printed to stderr by FUSE
self.exit_code = EXIT_ERROR
return self.exit_code
raise RTError("FUSE mount failed")
def do_umount(self, args):
"""un-mount the FUSE filesystem"""
return umount(args.mountpoint)
umount(args.mountpoint)
def build_parser_mount_umount(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -10,7 +10,7 @@ from ._common import with_repository, Highlander
from ..archive import Archive, Statistics
from ..cache import Cache
from ..constants import * # NOQA
from ..helpers import ArchiveFormatter, interval, sig_int, log_multi, ProgressIndicatorPercent
from ..helpers import ArchiveFormatter, interval, sig_int, log_multi, ProgressIndicatorPercent, CommandError, Error
from ..manifest import Manifest
from ..logger import create_logger
@ -77,12 +77,12 @@ class PruneMixIn:
if not any(
(args.secondly, args.minutely, args.hourly, args.daily, args.weekly, args.monthly, args.yearly, args.within)
):
self.print_error(
raise CommandError(
'At least one of the "keep-within", "keep-last", '
'"keep-secondly", "keep-minutely", "keep-hourly", "keep-daily", '
'"keep-weekly", "keep-monthly" or "keep-yearly" settings must be specified.'
)
return self.exit_code
if args.format is not None:
format = args.format
elif args.short:
@ -173,12 +173,11 @@ class PruneMixIn:
pi.finish()
if sig_int:
# Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
self.print_error("Got Ctrl-C / SIGINT.")
raise Error("Got Ctrl-C / SIGINT.")
elif uncommitted_deletes > 0:
checkpoint_func()
if args.stats:
log_multi(str(stats), logger=logging.getLogger("borg.output.stats"))
return self.exit_code
def build_parser_prune(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -178,8 +178,6 @@ class RCompressMixIn:
print(f"Kept as is: {stats_process['kept_count']}")
print(f"Total: {stats_process['recompressed_count'] + stats_process['kept_count']}")
return self.exit_code
def build_parser_rcompress(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -4,7 +4,7 @@ from ._common import with_repository, with_other_repository, Highlander
from ..cache import Cache
from ..constants import * # NOQA
from ..crypto.key import key_creator, key_argument_names
from ..helpers import EXIT_WARNING
from ..helpers import CancelledByUser
from ..helpers import location_validator, Location
from ..helpers import parse_storage_quota
from ..manifest import Manifest
@ -28,7 +28,7 @@ class RCreateMixIn:
key = key_creator(repository, args, other_key=other_key)
except (EOFError, KeyboardInterrupt):
repository.destroy()
return EXIT_WARNING
raise CancelledByUser()
manifest = Manifest(key, repository)
manifest.key = key
manifest.write()
@ -51,7 +51,6 @@ class RCreateMixIn:
" borg key export -r REPOSITORY --qr-html encrypted-key-backup.html\n"
"2. Write down the borg key passphrase and store it at safe place.\n"
)
return self.exit_code
def build_parser_rcreate(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -3,7 +3,7 @@ import argparse
from ._common import with_repository
from ..cache import Cache, SecurityManager
from ..constants import * # NOQA
from ..helpers import EXIT_ERROR
from ..helpers import CancelledByUser
from ..helpers import format_archive
from ..helpers import bin_to_hex
from ..helpers import yes
@ -72,8 +72,7 @@ class RDeleteMixIn:
retry=False,
env_var_override="BORG_DELETE_I_KNOW_WHAT_I_AM_DOING",
):
self.exit_code = EXIT_ERROR
return self.exit_code
raise CancelledByUser()
if not dry_run:
repository.destroy()
logger.info("Repository deleted.")
@ -87,7 +86,6 @@ class RDeleteMixIn:
logger.info("Cache deleted.")
else:
logger.info("Would delete cache.")
return self.exit_code
def build_parser_rdelete(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -5,7 +5,7 @@ from ._common import build_matcher
from ..archive import ArchiveRecreater
from ..constants import * # NOQA
from ..compress import CompressionSpec
from ..helpers import archivename_validator, comment_validator, PathSpec, ChunkerParams
from ..helpers import archivename_validator, comment_validator, PathSpec, ChunkerParams, CommandError
from ..helpers import timestamp
from ..manifest import Manifest
@ -42,8 +42,7 @@ class RecreateMixIn:
archive_names = tuple(archive.name for archive in manifest.archives.list_considering(args))
if args.target is not None and len(archive_names) != 1:
self.print_error("--target: Need to specify single archive")
return self.exit_code
raise CommandError("--target: Need to specify single archive")
for name in archive_names:
if recreater.is_temporary_archive(name):
continue
@ -54,7 +53,6 @@ class RecreateMixIn:
manifest.write()
repository.commit(compact=False)
cache.commit()
return self.exit_code
def build_parser_recreate(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -19,7 +19,6 @@ class RenameMixIn:
manifest.write()
repository.commit(compact=False)
cache.commit()
return self.exit_code
def build_parser_rename(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -72,7 +72,6 @@ class RInfoMixIn:
print(output)
print(str(cache))
return self.exit_code
def build_parser_rinfo(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -36,8 +36,6 @@ class RListMixIn:
if args.json:
json_print(basic_json_data(manifest, extra={"archives": output_data}))
return self.exit_code
def build_parser_rlist(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_archive_filters_group

View File

@ -2,7 +2,6 @@ import argparse
from ._common import Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..helpers import parse_storage_quota
from ..remote import RepositoryServer
@ -21,7 +20,6 @@ class ServeMixIn:
storage_quota=args.storage_quota,
use_socket=args.use_socket,
).serve()
return EXIT_SUCCESS
def build_parser_serve(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -86,8 +86,6 @@ class TarMixIn:
with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=False) as _stream:
self._export_tar(args, archive, _stream)
return self.exit_code
def _export_tar(self, args, archive, tarstream):
matcher = build_matcher(args.patterns, args.paths)
@ -240,8 +238,7 @@ class TarMixIn:
tar.close()
for pattern in matcher.get_unmatched_include_patterns():
self.print_warning("Include pattern '%s' never matched.", pattern)
return self.exit_code
self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
@with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.WRITE,))
def do_import_tar(self, args, repository, manifest, cache):
@ -257,8 +254,6 @@ class TarMixIn:
with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=True) as _stream:
self._import_tar(args, repository, manifest, manifest.key, cache, _stream)
return self.exit_code
def _import_tar(self, args, repository, manifest, key, cache, tarstream):
t0 = archive_ts_now()
t0_monotonic = time.monotonic()

View File

@ -5,7 +5,7 @@ from ..archive import Archive
from ..compress import CompressionSpec
from ..constants import * # NOQA
from ..crypto.key import uses_same_id_hash, uses_same_chunker_secret
from ..helpers import EXIT_SUCCESS, EXIT_ERROR, Error
from ..helpers import Error
from ..helpers import location_validator, Location, archivename_validator, comment_validator
from ..helpers import format_file_size
from ..manifest import Manifest
@ -23,22 +23,20 @@ class TransferMixIn:
key = manifest.key
other_key = other_manifest.key
if not uses_same_id_hash(other_key, key):
self.print_error(
raise Error(
"You must keep the same ID hash ([HMAC-]SHA256 or BLAKE2b) or deduplication will break. "
"Use a related repository!"
)
return EXIT_ERROR
if not uses_same_chunker_secret(other_key, key):
self.print_error(
raise Error(
"You must use the same chunker secret or deduplication will break. " "Use a related repository!"
)
return EXIT_ERROR
dry_run = args.dry_run
args.consider_checkpoints = True
archive_names = tuple(x.name for x in other_manifest.archives.list_considering(args))
if not archive_names:
return EXIT_SUCCESS
return
an_errors = []
for archive_name in archive_names:
@ -47,10 +45,8 @@ class TransferMixIn:
except argparse.ArgumentTypeError as err:
an_errors.append(str(err))
if an_errors:
self.print_error("Invalid archive names detected, please rename them before transfer:")
for err_msg in an_errors:
self.print_error(err_msg)
return EXIT_ERROR
an_errors.insert(0, "Invalid archive names detected, please rename them before transfer:")
raise Error("\n".join(an_errors))
ac_errors = []
for archive_name in archive_names:
@ -58,20 +54,17 @@ class TransferMixIn:
try:
comment_validator(archive.metadata.get("comment", ""))
except argparse.ArgumentTypeError as err:
ac_errors.append((archive_name, str(err)))
ac_errors.append(f"{archive_name}: {err}")
if ac_errors:
self.print_error("Invalid archive comments detected, please fix them before transfer:")
for archive_name, err_msg in ac_errors:
self.print_error(f"{archive_name}: {err_msg}")
return EXIT_ERROR
ac_errors.insert(0, "Invalid archive comments detected, please fix them before transfer:")
raise Error("\n".join(ac_errors))
from .. import upgrade as upgrade_mod
try:
UpgraderCls = getattr(upgrade_mod, f"Upgrader{args.upgrader}")
except AttributeError:
self.print_error(f"No such upgrader: {args.upgrader}")
return EXIT_ERROR
raise Error(f"No such upgrader: {args.upgrader}")
if UpgraderCls is not upgrade_mod.UpgraderFrom12To20 and other_manifest.repository.version == 1:
raise Error("To transfer from a borg 1.x repo, you need to use: --upgrader=From12To20")
@ -177,7 +170,6 @@ class TransferMixIn:
f"transfer_size: {format_file_size(transfer_size)} "
f"present_size: {format_file_size(present_size)}"
)
return EXIT_SUCCESS
def build_parser_transfer(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -2,7 +2,6 @@ import argparse
from .. import __version__
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..remote import RemoteRepository
from ..logger import create_logger
@ -22,7 +21,6 @@ class VersionMixIn:
else:
server_version = client_version
print(f"{format_version(client_version)} / {format_version(server_version)}")
return EXIT_SUCCESS
def build_parser_version(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -365,20 +365,30 @@ class CacheConfig:
class Cache:
"""Client Side cache"""
class RepositoryIDNotUnique(Error):
"""Cache is newer than repository - do you have multiple, independently updated repos with same ID?"""
class RepositoryReplay(Error):
"""Cache, or information obtained from the security directory is newer than repository - this is either an attack or unsafe (multiple repos with same ID)"""
class CacheInitAbortedError(Error):
"""Cache initialization aborted"""
exit_mcode = 60
class EncryptionMethodMismatch(Error):
"""Repository encryption method changed since last access, refusing to continue"""
exit_mcode = 61
class RepositoryAccessAborted(Error):
"""Repository access aborted"""
class EncryptionMethodMismatch(Error):
"""Repository encryption method changed since last access, refusing to continue"""
exit_mcode = 62
class RepositoryIDNotUnique(Error):
"""Cache is newer than repository - do you have multiple, independently updated repos with same ID?"""
exit_mcode = 63
class RepositoryReplay(Error):
"""Cache, or information obtained from the security directory is newer than repository - this is either an attack or unsafe (multiple repos with same ID)"""
exit_mcode = 64
@staticmethod
def break_lock(repository, path=None):

View File

@ -114,10 +114,11 @@ FILES_CACHE_MODE_UI_DEFAULT = "ctime,size,inode" # default for "borg create" co
FILES_CACHE_MODE_DISABLED = "d" # most borg commands do not use the files cache at all (disable)
# return codes returned by borg command
# when borg is killed by signal N, rc = 128 + N
EXIT_SUCCESS = 0 # everything done, no problems
EXIT_WARNING = 1 # reached normal end of operation, but there were issues
EXIT_ERROR = 2 # terminated abruptly, did not reach end of operation
EXIT_WARNING = 1 # reached normal end of operation, but there were issues (generic warning)
EXIT_ERROR = 2 # terminated abruptly, did not reach end of operation (generic error)
EXIT_ERROR_BASE = 3 # specific error codes are 3..99 (enabled by BORG_EXIT_CODES=modern)
EXIT_WARNING_BASE = 100 # specific warning codes are 100..127 (enabled by BORG_EXIT_CODES=modern)
EXIT_SIGNAL_BASE = 128 # terminated due to signal, rc = 128 + sig_no
ISO_FORMAT_NO_USECS = "%Y-%m-%dT%H:%M:%S"

View File

@ -124,6 +124,8 @@ SUPPORTED_ALGORITHMS = {
class FileIntegrityError(IntegrityError):
"""File failed integrity check: {}"""
exit_mcode = 91
class IntegrityCheckedFile(FileLikeWrapper):
def __init__(self, path, write, filename=None, override_fd=None, integrity_data=None):

View File

@ -38,30 +38,44 @@ AUTHENTICATED_NO_KEY = "authenticated_no_key" in workarounds
class UnsupportedPayloadError(Error):
"""Unsupported payload type {}. A newer version is required to access this repository."""
exit_mcode = 48
class UnsupportedManifestError(Error):
"""Unsupported manifest envelope. A newer version is required to access this repository."""
exit_mcode = 27
class KeyfileNotFoundError(Error):
"""No key file for repository {} found in {}."""
exit_mcode = 42
class KeyfileInvalidError(Error):
"""Invalid key file for repository {} found in {}."""
exit_mcode = 40
class KeyfileMismatchError(Error):
"""Mismatch between repository {} and key file {}."""
exit_mcode = 41
class RepoKeyNotFoundError(Error):
"""No key entry found in the config of repository {}."""
exit_mcode = 44
class UnsupportedKeyFormatError(Error):
"""Your borg key is stored in an unsupported format. Try using a newer version of borg."""
exit_mcode = 49
def key_creator(repository, args, *, other_key=None):
for key in AVAILABLE_KEY_TYPES:

View File

@ -13,20 +13,28 @@ from ..repoobj import RepoObj
from .key import CHPOKeyfileKey, RepoKeyNotFoundError, KeyBlobStorage, identify_key
class UnencryptedRepo(Error):
"""Keymanagement not available for unencrypted repositories."""
class NotABorgKeyFile(Error):
"""This file is not a borg key backup, aborting."""
class UnknownKeyType(Error):
"""Keytype {0} is unknown."""
exit_mcode = 43
class RepoIdMismatch(Error):
"""This key backup seems to be for a different backup repository, aborting."""
exit_mcode = 45
class NotABorgKeyFile(Error):
"""This file is not a borg key backup, aborting."""
class UnencryptedRepo(Error):
"""Key management not available for unencrypted repositories."""
exit_mcode = 46
class UnknownKeyType(Error):
"""Key type {0} is unknown."""
exit_mcode = 47
def sha256_truncated(data, num):

View File

@ -6,11 +6,17 @@ Code used to be in borg/helpers.py but was split into the modules in this
package, which are imported into here for compatibility.
"""
import os
from typing import List
from collections import namedtuple
from ..constants import * # NOQA
from .checks import check_extension_modules, check_python
from .datastruct import StableDict, Buffer, EfficientCollectionQueue
from .errors import Error, ErrorWithTraceback, IntegrityError, DecompressionError
from .errors import Error, ErrorWithTraceback, IntegrityError, DecompressionError, CancelledByUser, CommandError
from .errors import RTError, modern_ec
from .errors import BorgWarning, FileChangedWarning, BackupWarning, IncludePatternNeverMatchedWarning
from .errors import BackupError, BackupOSError, BackupRaceConditionError
from .errors import BackupPermissionError, BackupIOError, BackupFileNotFoundError
from .fs import ensure_dir, join_base_dir, get_socket_filename
from .fs import get_security_dir, get_keys_dir, get_base_dir, get_cache_dir, get_config_dir, get_runtime_dir
from .fs import dir_is_tagged, dir_is_cachedir, remove_dotdot_prefixes, make_path_safe, scandir_inorder
@ -43,28 +49,126 @@ from .yes_no import yes, TRUISH, FALSISH, DEFAULTISH
from .msgpack import is_slow_msgpack, is_supported_msgpack, get_limited_unpacker
from . import msgpack
from ..logger import create_logger
logger = create_logger()
# generic mechanism to enable users to invoke workarounds by setting the
# BORG_WORKAROUNDS environment variable to a list of comma-separated strings.
# see the docs for a list of known workaround strings.
workarounds = tuple(os.environ.get("BORG_WORKAROUNDS", "").split(","))
# element data type for warnings_list:
warning_info = namedtuple("warning_info", "wc,msg,args,wt")
"""
The global warnings_list variable is used to collect warning_info elements while borg is running.
"""
_warnings_list: List[warning_info] = []
def add_warning(msg, *args, **kwargs):
    """Record a warning in the global warnings list (to influence the final rc later).

    Keyword args:
        wc: warning exit code (int), defaults to EXIT_WARNING.
        wt: message template kind, "percent" or "curly", defaults to "percent".
    """
    global _warnings_list
    wc = kwargs.get("wc", EXIT_WARNING)
    assert isinstance(wc, int)
    wt = kwargs.get("wt", "percent")
    assert wt in ("percent", "curly")
    _warnings_list.append(warning_info(wc, msg, args, wt))
"""
The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
warning or error occurred during their operation. This is different from archiver.exit_code, which is only accessible
from the archiver object.
Note: keep this in helpers/__init__.py as the code expects to be able to assign to helpers.exit_code.
warning or error occurred during their operation.
"""
exit_code = EXIT_SUCCESS
_exit_code = EXIT_SUCCESS
def classify_ec(ec):
    """Classify an exit code into "signal", "error", "warning" or "success".

    Raises TypeError for non-int values and ValueError for ints outside
    the defined rc ranges (0..255).
    """
    if not isinstance(ec, int):
        raise TypeError("ec must be of type int")
    # check the ranges from highest severity downwards; each class covers
    # its generic legacy rc and/or its modern, more specific rc range.
    if EXIT_SIGNAL_BASE <= ec <= 255:
        return "signal"
    if ec == EXIT_ERROR or EXIT_ERROR_BASE <= ec < EXIT_WARNING_BASE:
        return "error"
    if ec == EXIT_WARNING or EXIT_WARNING_BASE <= ec < EXIT_SIGNAL_BASE:
        return "warning"
    if ec == EXIT_SUCCESS:
        return "success"
    raise ValueError(f"invalid error code: {ec}")
def max_ec(ec1, ec2):
    """return the more severe error code of ec1 and ec2"""
    # note: usually, there can be only 1 error-class ec, the other ec is then either success or warning.
    cls1 = classify_ec(ec1)
    cls2 = classify_ec(ec2)
    # walk the classes from most to least severe; on a tie, ec1 wins.
    for severity in ("signal", "error", "warning"):
        if cls1 == severity:
            return ec1
        if cls2 == severity:
            return ec2
    assert ec1 == ec2 == EXIT_SUCCESS
    return EXIT_SUCCESS
def set_ec(ec):
    """
    ec: exit code to set
    Sets the exit code of the program to ec IF ec is more severe than the current exit code.
    """
    # note: the pre-refactor lines (`global exit_code` / `exit_code = max(...)` /
    # `return exit_code`) were stale residue; they returned early and updated a
    # removed global, so the new `_exit_code` state was never reached.
    global _exit_code
    _exit_code = max_ec(_exit_code, ec)
def init_ec_warnings(ec=EXIT_SUCCESS, warnings=None):
    """
    (Re-)Init the globals for the exit code and the warnings list.
    """
    global _exit_code, _warnings_list
    _exit_code = ec
    if warnings is None:
        warnings = []
    assert isinstance(warnings, list)
    _warnings_list = warnings
def get_ec(ec=None):
    """
    compute the final return code of the borg process
    """
    global _exit_code, _warnings_list
    if ec is not None:
        set_ec(ec)
    rc_class = classify_ec(_exit_code)
    if rc_class in ("signal", "error", "warning"):
        # an explicit signal/error/warning rc was set - it wins.
        return _exit_code
    assert rc_class == "success"
    if not _warnings_list:
        # nothing collected in the warnings list, keep the success rc.
        return _exit_code
    # some warning(s) were collected: be specific if they all share one rc,
    # otherwise fall back to the generic warning rc (user must check the logs).
    collected = sorted({w_info.wc for w_info in _warnings_list})
    logger.debug(f"rcs: {collected!r}")
    return collected[0] if len(collected) == 1 else EXIT_WARNING
def get_reset_ec(ec=None):
    """Like get_ec, but re-initialize ec/warnings afterwards."""
    final_rc = get_ec(ec)
    init_ec_warnings()
    return final_rc

View File

@ -1,38 +1,31 @@
import os
from .errors import Error
from .errors import RTError
from ..platformflags import is_win32
class PythonLibcTooOld(Error):
"""FATAL: this Python was compiled for a too old (g)libc and misses required functionality."""
def check_python():
if is_win32:
required_funcs = {os.stat}
else:
required_funcs = {os.stat, os.utime, os.chown}
if not os.supports_follow_symlinks.issuperset(required_funcs):
raise PythonLibcTooOld
class ExtensionModuleError(Error):
"""The Borg binary extension modules do not seem to be properly installed."""
raise RTError("""FATAL: this Python was compiled for a too old (g)libc and misses required functionality.""")
def check_extension_modules():
from .. import platform, compress, crypto, item, chunker, hashindex
msg = """The Borg binary extension modules do not seem to be properly installed."""
if hashindex.API_VERSION != "1.2_01":
raise ExtensionModuleError
raise RTError(msg)
if chunker.API_VERSION != "1.2_01":
raise ExtensionModuleError
raise RTError(msg)
if compress.API_VERSION != "1.2_02":
raise ExtensionModuleError
raise RTError(msg)
if crypto.low_level.API_VERSION != "1.3_01":
raise ExtensionModuleError
raise RTError(msg)
if item.API_VERSION != "1.2_01":
raise ExtensionModuleError
raise RTError(msg)
if platform.API_VERSION != platform.OS_API_VERSION or platform.API_VERSION != "1.2_05":
raise ExtensionModuleError
raise RTError(msg)

View File

@ -1,17 +1,23 @@
import os
from ..constants import * # NOQA
from ..crypto.low_level import IntegrityError as IntegrityErrorBase
class Error(Exception):
"""Error: {}"""
modern_ec = os.environ.get("BORG_EXIT_CODES", "legacy") == "modern"
class ErrorBase(Exception):
"""ErrorBase: {}"""
# Error base class
# if we raise such an Error and it is only caught by the uppermost
# exception handler (that exits short after with the given exit_code),
# it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
exit_code = EXIT_ERROR
# it is always a (fatal and abrupt) error, never just a warning.
exit_mcode = EXIT_ERROR # modern, more specific exit code (defaults to EXIT_ERROR)
# show a traceback?
traceback = False
@ -24,6 +30,16 @@ class Error(Exception):
__str__ = get_message
@property
def exit_code(self):
# legacy: borg used to always use rc 2 (EXIT_ERROR) for all errors.
# modern: users can opt in to more specific return codes, using BORG_EXIT_CODES:
return self.exit_mcode if modern_ec else EXIT_ERROR
class Error(ErrorBase):
"""Error: {}"""
class ErrorWithTraceback(Error):
"""Error: {}"""
@ -35,6 +51,134 @@ class ErrorWithTraceback(Error):
class IntegrityError(ErrorWithTraceback, IntegrityErrorBase):
"""Data integrity error: {}"""
exit_mcode = 90
class DecompressionError(IntegrityError):
"""Decompression error: {}"""
exit_mcode = 92
class CancelledByUser(Error):
"""Cancelled by user."""
exit_mcode = 3
class RTError(Error):
"""Runtime Error: {}"""
class CommandError(Error):
"""Command Error: {}"""
exit_mcode = 4
class BorgWarning:
    """Warning: {}"""

    # Warning base class
    # please note that this class and its subclasses are NOT exceptions, we do not raise them.
    # so this is just to have inheritance, inspectability and the exit_code property.
    exit_mcode = EXIT_WARNING  # modern, more specific exit code (defaults to EXIT_WARNING)

    def __init__(self, *args):
        # positional args are interpolated into the class docstring (the message template)
        self.args = args

    def get_message(self):
        # the class docstring serves as the message template, formatted with self.args
        return type(self).__doc__.format(*self.args)

    __str__ = get_message

    @property
    def exit_code(self):
        # legacy: borg used to always use rc 1 (EXIT_WARNING) for all warnings.
        # modern: users can opt in to more specific return codes, using BORG_EXIT_CODES:
        return self.exit_mcode if modern_ec else EXIT_WARNING
class FileChangedWarning(BorgWarning):
"""{}: file changed while we backed it up"""
exit_mcode = 100
class IncludePatternNeverMatchedWarning(BorgWarning):
"""Include pattern '{}' never matched."""
exit_mcode = 101
class BackupWarning(BorgWarning):
"""{}: {}"""
# this is to wrap a caught BackupError exception, so it can be given to print_warning_instance
@property
def exit_code(self):
if not modern_ec:
return EXIT_WARNING
exc = self.args[1]
assert isinstance(exc, BackupError)
return exc.exit_mcode
class BackupError(ErrorBase):
"""{}: backup error"""
# Exception raised for non-OSError-based exceptions while accessing backup files.
exit_mcode = 102
class BackupRaceConditionError(BackupError):
"""{}: file type or inode changed while we backed it up (race condition, skipped file)"""
# Exception raised when encountering a critical race condition while trying to back up a file.
exit_mcode = 103
class BackupOSError(BackupError):
    """{}: {}"""

    # Wrapper for OSError raised while accessing backup files.
    #
    # Borg does different kinds of IO, and IO failures have different consequences.
    # This wrapper represents failures of input file or extraction IO.
    # These are non-critical and are only reported (warnings).
    #
    # Any unwrapped IO error is critical and aborts execution (for example repository IO failure).
    exit_mcode = 104

    def __init__(self, op, os_error):
        # op: short description of the operation that failed (may be falsy)
        self.op = op
        self.os_error = os_error
        # mirror the wrapped OSError's attributes for convenient access by callers
        self.errno = os_error.errno
        self.strerror = os_error.strerror
        self.filename = os_error.filename

    def __str__(self):
        # prefix the OS error with the failed operation, if one was given
        if self.op:
            return f"{self.op}: {self.os_error}"
        else:
            return str(self.os_error)
class BackupPermissionError(BackupOSError):
"""{}: {}"""
exit_mcode = 105
class BackupIOError(BackupOSError):
"""{}: {}"""
exit_mcode = 106
class BackupFileNotFoundError(BackupOSError):
"""{}: {}"""
exit_mcode = 107

View File

@ -519,11 +519,14 @@ def os_stat(*, path=None, parent_fd=None, name=None, follow_symlinks=False):
def umount(mountpoint):
    """Unmount a FUSE filesystem at *mountpoint*, recording the tool's rc via set_ec.

    Tries fusermount first; if that binary is missing (e.g. non-Linux),
    falls back to the generic umount tool.
    """
    from . import set_ec

    env = prepare_subprocess_env(system=True)
    try:
        # note: the stale pre-refactor `return subprocess.call(...)` lines returned
        # early here, so set_ec(rc) below was never reached - they were removed.
        rc = subprocess.call(["fusermount", "-u", mountpoint], env=env)
    except FileNotFoundError:
        rc = subprocess.call(["umount", mountpoint], env=env)
    set_ec(rc)
# below is a slightly modified tempfile.mkstemp that has an additional mode parameter.

View File

@ -224,10 +224,14 @@ class DatetimeWrapper:
class PlaceholderError(Error):
"""Formatting Error: "{}".format({}): {}({})"""
exit_mcode = 5
class InvalidPlaceholder(PlaceholderError):
"""Invalid placeholder "{}" in string: {}"""
exit_mcode = 6
def format_line(format, data):
for _, key, _, conversion in Formatter().parse(format):

View File

@ -17,18 +17,26 @@ logger = create_logger()
class NoPassphraseFailure(Error):
"""can not acquire a passphrase: {}"""
class PassphraseWrong(Error):
"""passphrase supplied in BORG_PASSPHRASE, by BORG_PASSCOMMAND or via BORG_PASSPHRASE_FD is incorrect."""
exit_mcode = 50
class PasscommandFailure(Error):
"""passcommand supplied in BORG_PASSCOMMAND failed: {}"""
exit_mcode = 51
class PassphraseWrong(Error):
"""passphrase supplied in BORG_PASSPHRASE, by BORG_PASSCOMMAND or via BORG_PASSPHRASE_FD is incorrect."""
exit_mcode = 52
class PasswordRetriesExceeded(Error):
"""exceeded the maximum password retries"""
exit_mcode = 53
class Passphrase(str):
@classmethod

View File

@ -71,26 +71,38 @@ class TimeoutTimer:
class LockError(Error):
"""Failed to acquire the lock {}."""
exit_mcode = 70
class LockErrorT(ErrorWithTraceback):
"""Failed to acquire the lock {}."""
class LockTimeout(LockError):
"""Failed to create/acquire the lock {} (timeout)."""
exit_mcode = 71
class LockFailed(LockErrorT):
"""Failed to create/acquire the lock {} ({})."""
exit_mcode = 72
class LockTimeout(LockError):
"""Failed to create/acquire the lock {} (timeout)."""
exit_mcode = 73
class NotLocked(LockErrorT):
"""Failed to release the lock {} (was not locked)."""
exit_mcode = 74
class NotMyLock(LockErrorT):
"""Failed to release the lock {} (was/is locked, but not by me)."""
exit_mcode = 75
class ExclusiveLock:
"""An exclusive Lock based on mkdir fs operation being atomic.

View File

@ -18,12 +18,16 @@ from .patterns import get_regex_from_pattern
from .repoobj import RepoObj
class MandatoryFeatureUnsupported(Error):
"""Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""
exit_mcode = 25
class NoManifestError(Error):
"""Repository has no manifest."""
class MandatoryFeatureUnsupported(Error):
"""Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""
exit_mcode = 26
ArchiveInfo = namedtuple("ArchiveInfo", "name id ts")

View File

@ -30,6 +30,7 @@ from .helpers import format_file_size
from .helpers import safe_unlink
from .helpers import prepare_subprocess_env, ignore_sigint
from .helpers import get_socket_filename
from .locking import LockTimeout, NotLocked, NotMyLock, LockFailed
from .logger import create_logger, borg_serve_log_queue
from .helpers import msgpack
from .repository import Repository
@ -69,26 +70,38 @@ def os_write(fd, data):
class ConnectionClosed(Error):
"""Connection closed by remote host"""
exit_mcode = 80
class ConnectionClosedWithHint(ConnectionClosed):
"""Connection closed by remote host. {}"""
exit_mcode = 81
class PathNotAllowed(Error):
"""Repository path not allowed: {}"""
exit_mcode = 83
class InvalidRPCMethod(Error):
"""RPC method {} is not valid"""
exit_mcode = 82
class UnexpectedRPCDataFormatFromClient(Error):
"""Borg {}: Got unexpected RPC data format from client."""
exit_mcode = 85
class UnexpectedRPCDataFormatFromServer(Error):
"""Got unexpected RPC data format from server:\n{}"""
exit_mcode = 86
def __init__(self, data):
try:
data = data.decode()[:128]
@ -513,6 +526,8 @@ class RemoteRepository:
class RPCServerOutdated(Error):
"""Borg server is too old for {}. Required version {}"""
exit_mcode = 84
@property
def method(self):
return self.args[0]
@ -767,6 +782,14 @@ class RemoteRepository:
raise Repository.ObjectNotFound(args[0], self.location.processed)
elif error == "InvalidRPCMethod":
raise InvalidRPCMethod(args[0])
elif error == "LockTimeout":
raise LockTimeout(args[0])
elif error == "LockFailed":
raise LockFailed(args[0], args[1])
elif error == "NotLocked":
raise NotLocked(args[0])
elif error == "NotMyLock":
raise NotMyLock(args[0])
else:
raise self.RPCError(unpacked)

View File

@ -134,41 +134,61 @@ class Repository:
will still get rid of them.
"""
class DoesNotExist(Error):
"""Repository {} does not exist."""
class AlreadyExists(Error):
"""A repository already exists at {}."""
class PathAlreadyExists(Error):
"""There is already something at {}."""
class ParentPathDoesNotExist(Error):
"""The parent path of the repo directory [{}] does not exist."""
class InvalidRepository(Error):
"""{} is not a valid repository. Check repo config."""
class InvalidRepositoryConfig(Error):
"""{} does not have a valid configuration. Check repo config [{}]."""
exit_mcode = 10
class CheckNeeded(ErrorWithTraceback):
"""Inconsistency detected. Please run "borg check {}"."""
exit_mcode = 12
class DoesNotExist(Error):
"""Repository {} does not exist."""
exit_mcode = 13
class InsufficientFreeSpaceError(Error):
"""Insufficient free space to complete transaction (required: {}, available: {})."""
exit_mcode = 14
class InvalidRepository(Error):
"""{} is not a valid repository. Check repo config."""
exit_mcode = 15
class InvalidRepositoryConfig(Error):
"""{} does not have a valid configuration. Check repo config [{}]."""
exit_mcode = 16
class ObjectNotFound(ErrorWithTraceback):
"""Object with key {} not found in repository {}."""
exit_mcode = 17
def __init__(self, id, repo):
if isinstance(id, bytes):
id = bin_to_hex(id)
super().__init__(id, repo)
class InsufficientFreeSpaceError(Error):
"""Insufficient free space to complete transaction (required: {}, available: {})."""
class ParentPathDoesNotExist(Error):
"""The parent path of the repo directory [{}] does not exist."""
exit_mcode = 18
class PathAlreadyExists(Error):
"""There is already something at {}."""
exit_mcode = 19
class StorageQuotaExceeded(Error):
"""The storage quota ({}) has been exceeded ({}). Try deleting some archives."""
exit_mcode = 20
def __init__(
self,
path,

View File

@ -15,7 +15,7 @@ from io import BytesIO, StringIO
import pytest
from ... import xattr, helpers, platform
from ... import xattr, platform
from ...archive import Archive
from ...archiver import Archiver, PURE_PYTHON_MSGPACK_WARNING
from ...cache import Cache
@ -23,6 +23,7 @@ from ...constants import * # NOQA
from ...helpers import Location, umount
from ...helpers import EXIT_SUCCESS
from ...helpers import bin_to_hex
from ...helpers import init_ec_warnings
from ...logger import flush_logging
from ...manifest import Manifest
from ...platform import get_flags
@ -76,8 +77,7 @@ def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b"", binary_outpu
if archiver is None:
archiver = Archiver()
archiver.prerun_checks = lambda *args: None
archiver.exit_code = EXIT_SUCCESS
helpers.exit_code = EXIT_SUCCESS
init_ec_warnings()
try:
args = archiver.parse_args(list(args))
# argparse parsing may raise SystemExit when the command line is bad or

View File

@ -1,7 +1,9 @@
import os
import pytest
from ...constants import * # NOQA
from . import RK_ENCRYPTION, create_test_files, cmd, generate_archiver_tests
from ...helpers import CommandError, Error
pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,binary") # NOQA
@ -21,8 +23,13 @@ def test_config(archivers, request):
assert "id" in output
assert "last_segment_checked" not in output
output = cmd(archiver, "config", "last_segment_checked", exit_code=1)
assert "No option " in output
if archiver.FORK_DEFAULT:
output = cmd(archiver, "config", "last_segment_checked", exit_code=2)
assert "No option " in output
else:
with pytest.raises(Error):
cmd(archiver, "config", "last_segment_checked")
cmd(archiver, "config", "last_segment_checked", "123")
output = cmd(archiver, "config", "last_segment_checked")
assert output == "123" + os.linesep
@ -37,8 +44,20 @@ def test_config(archivers, request):
output = cmd(archiver, "config", cfg_key)
assert output == cfg_value + os.linesep
cmd(archiver, "config", "--delete", cfg_key)
cmd(archiver, "config", cfg_key, exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "config", cfg_key, exit_code=2)
else:
with pytest.raises(Error):
cmd(archiver, "config", cfg_key)
cmd(archiver, "config", "--list", "--delete", exit_code=2)
cmd(archiver, "config", exit_code=2)
cmd(archiver, "config", "invalid-option", exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "config", exit_code=2)
else:
with pytest.raises(CommandError):
cmd(archiver, "config")
if archiver.FORK_DEFAULT:
cmd(archiver, "config", "invalid-option", exit_code=2)
else:
with pytest.raises(Error):
cmd(archiver, "config", "invalid-option")

View File

@ -7,7 +7,7 @@ import pytest
from ...constants import * # NOQA
from ...crypto.file_integrity import FileIntegrityError
from ...helpers import bin_to_hex
from ...helpers import bin_to_hex, Error
from . import cmd, create_src_archive, create_test_files, RK_ENCRYPTION
@ -22,7 +22,11 @@ def test_check_corrupted_repository(archiver):
fd.seek(100)
fd.write(b"XXXX")
cmd(archiver, "check", exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "check", exit_code=1)
else:
with pytest.raises(Error):
cmd(archiver, "check")
def corrupt_archiver(archiver):

View File

@ -16,6 +16,7 @@ from ...constants import * # NOQA
from ...manifest import Manifest
from ...platform import is_cygwin, is_win32, is_darwin
from ...repository import Repository
from ...helpers import CommandError
from .. import has_lchflags
from .. import changedir
from .. import (
@ -360,8 +361,12 @@ def test_create_content_from_command(archivers, request):
def test_create_content_from_command_with_failed_command(archivers, request):
archiver = request.getfixturevalue(archivers)
cmd(archiver, "rcreate", RK_ENCRYPTION)
output = cmd(archiver, "create", "--content-from-command", "test", "--", "sh", "-c", "exit 73;", exit_code=2)
assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
if archiver.FORK_DEFAULT:
output = cmd(archiver, "create", "--content-from-command", "test", "--", "sh", "-c", "exit 73;", exit_code=2)
assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
else:
with pytest.raises(CommandError):
cmd(archiver, "create", "--content-from-command", "test", "--", "sh", "-c", "exit 73;")
archive_list = json.loads(cmd(archiver, "rlist", "--json"))
assert archive_list["archives"] == []
@ -408,8 +413,12 @@ def test_create_paths_from_command(archivers, request):
def test_create_paths_from_command_with_failed_command(archivers, request):
archiver = request.getfixturevalue(archivers)
cmd(archiver, "rcreate", RK_ENCRYPTION)
output = cmd(archiver, "create", "--paths-from-command", "test", "--", "sh", "-c", "exit 73;", exit_code=2)
assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
if archiver.FORK_DEFAULT:
output = cmd(archiver, "create", "--paths-from-command", "test", "--", "sh", "-c", "exit 73;", exit_code=2)
assert output.endswith("Command 'sh' exited with status 73" + os.linesep)
else:
with pytest.raises(CommandError):
cmd(archiver, "create", "--paths-from-command", "test", "--", "sh", "-c", "exit 73;")
archive_list = json.loads(cmd(archiver, "rlist", "--json"))
assert archive_list["archives"] == []

View File

@ -220,8 +220,7 @@ def test_basic_functionality(archivers, request):
output = cmd(archiver, "diff", "test0", "test1a")
do_asserts(output, True)
# We expect exit_code=1 due to the chunker params warning
output = cmd(archiver, "diff", "test0", "test1b", "--content-only", exit_code=1)
output = cmd(archiver, "diff", "test0", "test1b", "--content-only")
do_asserts(output, False, content_only=True)
output = cmd(archiver, "diff", "test0", "test1a", "--json-lines")

View File

@ -6,7 +6,7 @@ import pytest
from ...constants import * # NOQA
from ...crypto.key import AESOCBRepoKey, AESOCBKeyfileKey, CHPOKeyfileKey, Passphrase
from ...crypto.keymanager import RepoIdMismatch, NotABorgKeyFile
from ...helpers import EXIT_ERROR
from ...helpers import EXIT_ERROR, CommandError
from ...helpers import bin_to_hex
from ...helpers import msgpack
from ...repository import Repository
@ -170,7 +170,11 @@ def test_key_export_directory(archivers, request):
export_directory = archiver.output_path + "/exported"
os.mkdir(export_directory)
cmd(archiver, "rcreate", RK_ENCRYPTION)
cmd(archiver, "key", "export", export_directory, exit_code=EXIT_ERROR)
if archiver.FORK_DEFAULT:
cmd(archiver, "key", "export", export_directory, exit_code=EXIT_ERROR)
else:
with pytest.raises(CommandError):
cmd(archiver, "key", "export", export_directory)
def test_key_export_qr_directory(archivers, request):
@ -178,14 +182,22 @@ def test_key_export_qr_directory(archivers, request):
export_directory = archiver.output_path + "/exported"
os.mkdir(export_directory)
cmd(archiver, "rcreate", RK_ENCRYPTION)
cmd(archiver, "key", "export", "--qr-html", export_directory, exit_code=EXIT_ERROR)
if archiver.FORK_DEFAULT:
cmd(archiver, "key", "export", "--qr-html", export_directory, exit_code=EXIT_ERROR)
else:
with pytest.raises(CommandError):
cmd(archiver, "key", "export", "--qr-html", export_directory)
def test_key_import_errors(archivers, request):
archiver = request.getfixturevalue(archivers)
export_file = archiver.output_path + "/exported"
cmd(archiver, "rcreate", KF_ENCRYPTION)
cmd(archiver, "key", "import", export_file, exit_code=EXIT_ERROR)
if archiver.FORK_DEFAULT:
cmd(archiver, "key", "import", export_file, exit_code=EXIT_ERROR)
else:
with pytest.raises(CommandError):
cmd(archiver, "key", "import", export_file)
with open(export_file, "w") as fd:
fd.write("something not a key\n")

View File

@ -3,7 +3,7 @@ from unittest.mock import patch
import pytest
from ...helpers.errors import Error
from ...helpers.errors import Error, CancelledByUser
from ...constants import * # NOQA
from ...crypto.key import FlexiKey
from ...repository import Repository
@ -37,7 +37,12 @@ def test_rcreate_interrupt(archivers, request):
raise EOFError
with patch.object(FlexiKey, "create", raise_eof):
cmd(archiver, "rcreate", RK_ENCRYPTION, exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "rcreate", RK_ENCRYPTION, exit_code=2)
else:
with pytest.raises(CancelledByUser):
cmd(archiver, "rcreate", RK_ENCRYPTION)
assert not os.path.exists(archiver.repository_location)

View File

@ -1,6 +1,9 @@
import os
import pytest
from ...constants import * # NOQA
from ...helpers import CancelledByUser
from . import create_regular_file, cmd, generate_archiver_tests, RK_ENCRYPTION
pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary") # NOQA
@ -14,7 +17,11 @@ def test_delete_repo(archivers, request):
cmd(archiver, "create", "test", "input")
cmd(archiver, "create", "test.2", "input")
os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "no"
cmd(archiver, "rdelete", exit_code=2)
if archiver.FORK_DEFAULT:
cmd(archiver, "rdelete", exit_code=2)
else:
with pytest.raises(CancelledByUser):
cmd(archiver, "rdelete")
assert os.path.exists(archiver.repository_path)
os.environ["BORG_DELETE_I_KNOW_WHAT_I_AM_DOING"] = "YES"
cmd(archiver, "rdelete")

View File

@ -5,6 +5,7 @@ from datetime import datetime
import pytest
from ...constants import * # NOQA
from ...helpers import CommandError
from .. import changedir, are_hardlinks_supported
from . import (
_create_test_caches,
@ -82,8 +83,12 @@ def test_recreate_hardlinked_tags(archivers, request): # test for issue #4911
def test_recreate_target_rc(archivers, request):
archiver = request.getfixturevalue(archivers)
cmd(archiver, "rcreate", RK_ENCRYPTION)
output = cmd(archiver, "recreate", "--target=asdf", exit_code=2)
assert "Need to specify single archive" in output
if archiver.FORK_DEFAULT:
output = cmd(archiver, "recreate", "--target=asdf", exit_code=2)
assert "Need to specify single archive" in output
else:
with pytest.raises(CommandError):
cmd(archiver, "recreate", "--target=asdf")
def test_recreate_target(archivers, request):

View File

@ -13,7 +13,7 @@ import pytest
from ..archiver.prune_cmd import prune_within, prune_split
from .. import platform
from ..constants import MAX_DATA_SIZE
from ..constants import * # NOQA
from ..helpers import Location
from ..helpers import Buffer
from ..helpers import (
@ -44,6 +44,7 @@ from ..helpers import iter_separated
from ..helpers import eval_escapes
from ..helpers import safe_unlink
from ..helpers import text_to_json, binary_to_json
from ..helpers import classify_ec, max_ec
from ..helpers.passphrase import Passphrase, PasswordRetriesExceeded
from ..platform import is_cygwin, is_win32, is_darwin
from . import FakeInputs, are_hardlinks_supported
@ -1408,3 +1409,63 @@ class TestPassphrase:
def test_passphrase_repr(self):
assert "secret" not in repr(Passphrase("secret"))
@pytest.mark.parametrize(
    "ec_range,ec_class",
    (
        # inclusive range start, exclusive range end
        ((0, 1), "success"),
        ((1, 2), "warning"),
        ((2, 3), "error"),
        ((EXIT_ERROR_BASE, EXIT_WARNING_BASE), "error"),
        ((EXIT_WARNING_BASE, EXIT_SIGNAL_BASE), "warning"),
        ((EXIT_SIGNAL_BASE, 256), "signal"),
    ),
)
def test_classify_ec(ec_range, ec_class):
    # every rc in the [start, end) range must map to the expected class.
    # note: the original used a bare `classify_ec(ec) == ec_class` comparison whose
    # result was discarded, so the test could never fail - `assert` was missing.
    for ec in range(*ec_range):
        assert classify_ec(ec) == ec_class
def test_ec_invalid():
    # int rcs outside the defined 0..255 ranges are rejected
    with pytest.raises(ValueError):
        classify_ec(666)
    with pytest.raises(ValueError):
        classify_ec(-1)
    # non-int rcs are rejected with a TypeError
    with pytest.raises(TypeError):
        classify_ec(None)
@pytest.mark.parametrize(
    "ec1,ec2,ec_max",
    (
        # same for modern / legacy
        (EXIT_SUCCESS, EXIT_SUCCESS, EXIT_SUCCESS),
        (EXIT_SUCCESS, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
        # legacy exit codes
        (EXIT_SUCCESS, EXIT_WARNING, EXIT_WARNING),
        (EXIT_SUCCESS, EXIT_ERROR, EXIT_ERROR),
        (EXIT_WARNING, EXIT_SUCCESS, EXIT_WARNING),
        (EXIT_WARNING, EXIT_WARNING, EXIT_WARNING),
        (EXIT_WARNING, EXIT_ERROR, EXIT_ERROR),
        (EXIT_WARNING, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
        (EXIT_ERROR, EXIT_SUCCESS, EXIT_ERROR),
        (EXIT_ERROR, EXIT_WARNING, EXIT_ERROR),
        (EXIT_ERROR, EXIT_ERROR, EXIT_ERROR),
        (EXIT_ERROR, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
        # some modern codes
        (EXIT_SUCCESS, EXIT_WARNING_BASE, EXIT_WARNING_BASE),
        (EXIT_SUCCESS, EXIT_ERROR_BASE, EXIT_ERROR_BASE),
        (EXIT_WARNING_BASE, EXIT_SUCCESS, EXIT_WARNING_BASE),
        (EXIT_WARNING_BASE + 1, EXIT_WARNING_BASE + 2, EXIT_WARNING_BASE + 1),
        (EXIT_WARNING_BASE, EXIT_ERROR_BASE, EXIT_ERROR_BASE),
        (EXIT_WARNING_BASE, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
        (EXIT_ERROR_BASE, EXIT_SUCCESS, EXIT_ERROR_BASE),
        (EXIT_ERROR_BASE, EXIT_WARNING_BASE, EXIT_ERROR_BASE),
        (EXIT_ERROR_BASE + 1, EXIT_ERROR_BASE + 2, EXIT_ERROR_BASE + 1),
        (EXIT_ERROR_BASE, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
    ),
)
def test_max_ec(ec1, ec2, ec_max):
    # max_ec returns the more severe rc; on equal severity the first argument wins
    # (see the `+ 1` / `+ 2` rows above, which pin that tie-breaking order).
    assert max_ec(ec1, ec2) == ec_max