do not return the rc from Archiver methods

returning the rc is not needed, and getting rid of it makes
the code / behaviour simpler to understand:

if a fatal error is detected, we raise an exception.
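
for illustration, a minimal sketch of this convention (ExampleMixIn /
do_example and the check are made up; CommandError is the helper actually
used in the diffs below):

    from borg.helpers import CommandError

    class ExampleMixIn:
        def do_example(self, args):
            """hypothetical command: raise instead of returning EXIT_ERROR"""
            if not args.paths:
                # fatal: abort via an exception, no rc is returned
                raise CommandError("no paths given, aborting")
            for path in args.paths:
                ...  # normal processing
            # nothing returned -- the method implicitly returns None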

if we encounter something warning-worthy, we emit and collect the warning.
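
again only a sketch (the loop and the bookkeeping are invented;
print_warning is the Archiver helper seen in the diffs below): a warning
bumps the eventual exit code, the method still returns None.

    class ExampleMixIn:
        def do_example(self, args):
            for pattern in args.patterns:
                if pattern not in self.matched:  # hypothetical bookkeeping
                    # emit the warning and collect it for the final exit code
                    self.print_warning("Include pattern '%s' never matched.", pattern)
            # again, no rc is returned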

in a few cases, we call set_ec directly to set the
exit code as needed, e.g. when passing it through
from a subprocess.
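
a rough sketch of that passthrough pattern (run_and_record is a made-up
wrapper; set_ec is the helper imported in the locks / fuse hunks below):

    import subprocess
    from borg.helpers import set_ec

    def run_and_record(cmd, env=None):
        # hypothetical wrapper: run a command and pass its rc through
        rc = subprocess.call(cmd, env=env)
        set_ec(rc)  # record the subprocess rc as the program exit code
        # nothing is returned to the caller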

also:
- get rid of Archiver.exit_code
- assert that the return value of archiver methods is None
- fix a print_warning call to use the correct formatting method
Thomas Waldmann 2023-12-06 00:14:46 +01:00
parent 5caf747011
commit abe6545853
GPG Key ID: 243ACFA951F78E01
34 changed files with 91 additions and 125 deletions

View File

@ -24,7 +24,7 @@ try:
from ._common import Highlander
from .. import __version__
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE, classify_ec
from ..helpers import EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE, classify_ec
from ..helpers import Error, CommandError, get_ec, modern_ec
from ..helpers import add_warning, BorgWarning, BackupWarning
from ..helpers import format_file_size
@ -124,7 +124,6 @@ class Archiver(
VersionMixIn,
):
def __init__(self, lock_wait=None, prog=None):
self.exit_code = EXIT_SUCCESS
self.lock_wait = lock_wait
self.prog = prog
self.last_checkpoint = time.monotonic()
@ -547,7 +546,9 @@ class Archiver(
# it compatible (see above).
msgpack.pack(profiler.stats, fd, use_bin_type=True)
else:
return get_ec(func(args))
rc = func(args)
assert rc is None
return get_ec(rc)
def sig_info_handler(sig_no, stack): # pragma: no cover

View File

@ -9,6 +9,7 @@ from ..constants import * # NOQA
from ..crypto.key import FlexiKey
from ..helpers import format_file_size
from ..helpers import msgpack
from ..helpers import get_ec
from ..item import Item
from ..platform import SyncFile
@ -21,38 +22,49 @@ class BenchmarkMixIn:
compression = "--compression=none"
# measure create perf (without files cache to always have it chunking)
t_start = time.monotonic()
rc = self.do_create(
self.parse_args(
[f"--repo={repo}", "create", compression, "--files-cache=disabled", "borg-benchmark-crud1", path]
rc = get_ec(
self.do_create(
self.parse_args(
[
f"--repo={repo}",
"create",
compression,
"--files-cache=disabled",
"borg-benchmark-crud1",
path,
]
)
)
)
t_end = time.monotonic()
dt_create = t_end - t_start
assert rc == 0
# now build files cache
rc1 = self.do_create(
self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path])
rc1 = get_ec(
self.do_create(self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud2", path]))
)
rc2 = self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"]))
rc2 = get_ec(self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud2"])))
assert rc1 == rc2 == 0
# measure a no-change update (archive1 is still present)
t_start = time.monotonic()
rc1 = self.do_create(
self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path])
rc1 = get_ec(
self.do_create(self.parse_args([f"--repo={repo}", "create", compression, "borg-benchmark-crud3", path]))
)
t_end = time.monotonic()
dt_update = t_end - t_start
rc2 = self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"]))
rc2 = get_ec(self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud3"])))
assert rc1 == rc2 == 0
# measure extraction (dry-run: without writing result to disk)
t_start = time.monotonic()
rc = self.do_extract(self.parse_args([f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
rc = get_ec(
self.do_extract(self.parse_args([f"--repo={repo}", "extract", "borg-benchmark-crud1", "--dry-run"]))
)
t_end = time.monotonic()
dt_extract = t_end - t_start
assert rc == 0
# measure archive deletion (of LAST present archive with the data)
t_start = time.monotonic()
rc = self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"]))
rc = get_ec(self.do_delete(self.parse_args([f"--repo={repo}", "delete", "-a", "borg-benchmark-crud1"])))
t_end = time.monotonic()
dt_delete = t_end - t_start
assert rc == 0
@ -93,8 +105,6 @@ class BenchmarkMixIn:
print(fmt % ("U", msg, total_size_MB / dt_update, count, file_size_formatted, content, dt_update))
print(fmt % ("D", msg, total_size_MB / dt_delete, count, file_size_formatted, content, dt_delete))
return 0
def do_benchmark_cpu(self, args):
"""Benchmark CPU bound operations."""
from timeit import timeit

View File

@ -2,7 +2,7 @@ import argparse
from ._common import with_repository, Highlander
from ..archive import ArchiveChecker
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING, CancelledByUser, CommandError
from ..helpers import set_ec, EXIT_WARNING, CancelledByUser, CommandError
from ..helpers import yes
from ..logger import create_logger
@ -45,7 +45,7 @@ class CheckMixIn:
raise CommandError("--repository-only is required for --max-duration support.")
if not args.archives_only:
if not repository.check(repair=args.repair, max_duration=args.max_duration):
return EXIT_WARNING
set_ec(EXIT_WARNING)
if not args.repo_only and not ArchiveChecker().check(
repository,
verify_data=args.verify_data,
@ -59,8 +59,8 @@ class CheckMixIn:
oldest=args.oldest,
newest=args.newest,
):
return EXIT_WARNING
return EXIT_SUCCESS
set_ec(EXIT_WARNING)
return
def build_parser_check(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -2,7 +2,6 @@ import argparse
from ._common import with_repository, Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..manifest import Manifest
from ..logger import create_logger
@ -19,7 +18,6 @@ class CompactMixIn:
repository.put(Manifest.MANIFEST_ID, data)
threshold = args.threshold / 100
repository.commit(compact=True, threshold=threshold)
return EXIT_SUCCESS
def build_parser_compact(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -1,4 +1,3 @@
import sys
import argparse
import configparser
from binascii import unhexlify
@ -6,7 +5,6 @@ from binascii import unhexlify
from ._common import with_repository
from ..cache import Cache, assert_secure
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS, EXIT_WARNING
from ..helpers import Error, CommandError
from ..helpers import Location
from ..helpers import parse_file_size
@ -140,9 +138,7 @@ class ConfigMixIn:
try:
print(config.get(section, name))
except (configparser.NoOptionError, configparser.NoSectionError) as e:
print(e, file=sys.stderr)
return EXIT_WARNING
return EXIT_SUCCESS
raise Error(e)
finally:
if args.cache:
cache.close()

View File

@ -273,7 +273,6 @@ class CreateMixIn:
create_inner(archive, cache, fso)
else:
create_inner(None, None, None)
return self.exit_code
def _process_any(self, *, path, parent_fd, name, st, fso, cache, read_special, dry_run):
"""

View File

@ -28,7 +28,6 @@ class DebugMixIn:
"""display system information for debugging / bug reports"""
print(sysinfo())
print("Process ID:", get_process_id())
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_dump_archive_items(self, args, repository, manifest):
@ -42,7 +41,6 @@ class DebugMixIn:
with open(filename, "wb") as fd:
fd.write(data)
print("Done.")
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_dump_archive(self, args, repository, manifest):
@ -97,7 +95,6 @@ class DebugMixIn:
with dash_open(args.path, "w") as fd:
output(fd)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_dump_manifest(self, args, repository, manifest):
@ -109,7 +106,6 @@ class DebugMixIn:
with dash_open(args.path, "w") as fd:
json.dump(meta, fd, indent=4)
return EXIT_SUCCESS
@with_repository(manifest=False)
def do_debug_dump_repo_objs(self, args, repository):
@ -165,7 +161,6 @@ class DebugMixIn:
decrypt_dump(i, id, cdata)
i += 1
print("Done.")
return EXIT_SUCCESS
@with_repository(manifest=False)
def do_debug_search_repo_objs(self, args, repository):
@ -234,7 +229,6 @@ class DebugMixIn:
if i % 10000 == 0:
print("%d objects processed." % i)
print("Done.")
return EXIT_SUCCESS
@with_repository(manifest=False)
def do_debug_get_obj(self, args, repository):
@ -253,7 +247,6 @@ class DebugMixIn:
with open(args.path, "wb") as f:
f.write(data)
print("object %s fetched." % hex_id)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_id_hash(self, args, repository, manifest):
@ -263,7 +256,6 @@ class DebugMixIn:
key = manifest.key
id = key.id_hash(data)
print(id.hex())
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_parse_obj(self, args, repository, manifest):
@ -290,8 +282,6 @@ class DebugMixIn:
with open(args.binary_path, "wb") as f:
f.write(data)
return EXIT_SUCCESS
@with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_format_obj(self, args, repository, manifest):
"""format file and metadata into borg object file"""
@ -317,7 +307,6 @@ class DebugMixIn:
with open(args.object_path, "wb") as f:
f.write(data_encrypted)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_put_obj(self, args, repository):
@ -335,7 +324,6 @@ class DebugMixIn:
repository.put(id, data)
print("object %s put." % hex_id)
repository.commit(compact=False)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_delete_obj(self, args, repository):
@ -356,7 +344,6 @@ class DebugMixIn:
if modified:
repository.commit(compact=False)
print("Done.")
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
def do_debug_refcount_obj(self, args, repository, manifest, cache):
@ -372,7 +359,6 @@ class DebugMixIn:
print("object %s has %d referrers [info from chunks cache]." % (hex_id, refcount))
except KeyError:
print("object %s not found [info from chunks cache]." % hex_id)
return EXIT_SUCCESS
@with_repository(manifest=False, exclusive=True)
def do_debug_dump_hints(self, args, repository):
@ -390,7 +376,6 @@ class DebugMixIn:
json.dump(hints, fd, indent=4)
finally:
repository.rollback()
return EXIT_SUCCESS
def do_debug_convert_profile(self, args):
"""convert Borg profile to Python profile"""
@ -398,7 +383,6 @@ class DebugMixIn:
with args.output, args.input:
marshal.dump(msgpack.unpack(args.input, use_list=False, raw=False), args.output)
return EXIT_SUCCESS
def build_parser_debug(self, subparsers, common_parser, mid_common_parser):
debug_epilog = process_epilog(

View File

@ -22,13 +22,13 @@ class DeleteMixIn:
manifest = Manifest.load(repository, (Manifest.Operation.DELETE,))
archive_names = tuple(x.name for x in manifest.archives.list_considering(args))
if not archive_names:
return self.exit_code
return
if args.match_archives is None and args.first == 0 and args.last == 0:
self.print_error(
"Aborting: if you really want to delete all archives, please use -a 'sh:*' "
"or just delete the whole repository (might be much faster)."
)
return EXIT_ERROR
return
if args.forced == 2:
deleted = False
@ -52,7 +52,7 @@ class DeleteMixIn:
self.print_warning('Done. Run "borg check --repair" to clean up the mess.', wc=None)
else:
self.print_warning("Aborted.", wc=None)
return self.exit_code
return
stats = Statistics(iec=args.iec)
with Cache(repository, manifest, progress=args.progress, lock_wait=self.lock_wait, iec=args.iec) as cache:
@ -92,8 +92,6 @@ class DeleteMixIn:
if args.stats:
log_multi(str(stats), logger=logging.getLogger("borg.output.stats"))
return self.exit_code
def build_parser_delete(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_archive_filters_group

View File

@ -77,8 +77,6 @@ class DiffMixIn:
for pattern in matcher.get_unmatched_include_patterns():
self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
return self.exit_code
def build_parser_diff(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog
from ._common import define_exclusion_group

View File

@ -101,7 +101,6 @@ class ExtractMixIn:
if pi:
# clear progress output
pi.finish()
return self.exit_code
def build_parser_extract(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -475,12 +475,10 @@ class HelpMixIn:
msg_lines += [" Commands: %s" % ", ".join(sorted(commands.keys()))]
msg_lines += [" Topics: %s" % ", ".join(sorted(self.helptext.keys()))]
parser.error("\n".join(msg_lines))
return self.exit_code
def do_subcommand_help(self, parser, args):
"""display infos about subcommand"""
parser.print_help()
return EXIT_SUCCESS
do_maincommand_help = do_subcommand_help

View File

@ -50,14 +50,11 @@ class InfoMixIn:
.strip()
.format(**info)
)
if self.exit_code:
break
if not args.json and len(archive_names) - i:
print()
if args.json:
json_print(basic_json_data(manifest, cache=cache, extra={"archives": output_data}))
return self.exit_code
def build_parser_info(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_archive_filters_group

View File

@ -28,7 +28,6 @@ class KeysMixIn:
if hasattr(key, "find_key"):
# print key location to make backing it up easier
logger.info("Key location: %s", key.find_key())
return EXIT_SUCCESS
@with_repository(exclusive=True, manifest=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
def do_change_location(self, args, repository, manifest, cache):
@ -48,7 +47,7 @@ class KeysMixIn:
key_new = Blake2CHPOKeyfileKey(repository)
else:
print("Change not needed or not supported.")
return EXIT_WARNING
return
if args.key_mode == "repokey":
if isinstance(key, AESOCBKeyfileKey):
key_new = AESOCBRepoKey(repository)
@ -60,7 +59,7 @@ class KeysMixIn:
key_new = Blake2CHPORepoKey(repository)
else:
print("Change not needed or not supported.")
return EXIT_WARNING
return
for name in ("repository_id", "crypt_key", "id_key", "chunk_seed", "sessionid", "cipher"):
value = getattr(key, name)
@ -89,8 +88,6 @@ class KeysMixIn:
key.remove(key.target) # remove key from current location
logger.info(f"Key moved to {loc}")
return EXIT_SUCCESS
@with_repository(lock=False, exclusive=False, manifest=False, cache=False)
def do_key_export(self, args, repository):
"""Export the repository key for backup"""
@ -108,7 +105,6 @@ class KeysMixIn:
manager.export(args.path)
except IsADirectoryError:
raise CommandError(f"'{args.path}' must be a file, not a directory")
return EXIT_SUCCESS
@with_repository(lock=False, exclusive=False, manifest=False, cache=False)
def do_key_import(self, args, repository):
@ -124,7 +120,6 @@ class KeysMixIn:
if args.path != "-" and not os.path.exists(args.path):
raise CommandError("input file does not exist: " + args.path)
manager.import_keyfile(args)
return EXIT_SUCCESS
def build_parser_keys(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -40,8 +40,6 @@ class ListMixIn:
else:
_list_inner(cache=None)
return self.exit_code
def build_parser_list(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_exclusion_group

View File

@ -4,7 +4,7 @@ import subprocess
from ._common import with_repository
from ..cache import Cache
from ..constants import * # NOQA
from ..helpers import prepare_subprocess_env
from ..helpers import prepare_subprocess_env, set_ec
from ..manifest import Manifest
from ..logger import create_logger
@ -33,7 +33,8 @@ class LocksMixIn:
env = prepare_subprocess_env(system=True)
try:
# we exit with the return code we get from the subprocess
return subprocess.call([args.command] + args.args, env=env)
rc = subprocess.call([args.command] + args.args, env=env)
set_ec(rc)
finally:
# we need to commit the "no change" operation we did to the manifest
# because it created a new segment file in the repository. if we would
@ -48,7 +49,6 @@ class LocksMixIn:
"""Break the repository lock (e.g. in case it was left by a dead borg."""
repository.break_lock()
Cache.break_lock(repository)
return self.exit_code
def build_parser_locks(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -30,7 +30,7 @@ class MountMixIn:
if not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
raise RTError(f"{args.mountpoint}: Mountpoint must be a **writable** directory")
return self._do_mount(args)
self._do_mount(args)
@with_repository(compatibility=(Manifest.Operation.READ,))
def _do_mount(self, args, repository, manifest):
@ -44,11 +44,10 @@ class MountMixIn:
except RuntimeError:
# Relevant error message already printed to stderr by FUSE
raise RTError("FUSE mount failed")
return self.exit_code
def do_umount(self, args):
"""un-mount the FUSE filesystem"""
return umount(args.mountpoint)
umount(args.mountpoint)
def build_parser_mount_umount(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -178,7 +178,6 @@ class PruneMixIn:
checkpoint_func()
if args.stats:
log_multi(str(stats), logger=logging.getLogger("borg.output.stats"))
return self.exit_code
def build_parser_prune(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -178,8 +178,6 @@ class RCompressMixIn:
print(f"Kept as is: {stats_process['kept_count']}")
print(f"Total: {stats_process['recompressed_count'] + stats_process['kept_count']}")
return self.exit_code
def build_parser_rcompress(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -4,7 +4,7 @@ from ._common import with_repository, with_other_repository, Highlander
from ..cache import Cache
from ..constants import * # NOQA
from ..crypto.key import key_creator, key_argument_names
from ..helpers import EXIT_WARNING
from ..helpers import CancelledByUser
from ..helpers import location_validator, Location
from ..helpers import parse_storage_quota
from ..manifest import Manifest
@ -28,7 +28,7 @@ class RCreateMixIn:
key = key_creator(repository, args, other_key=other_key)
except (EOFError, KeyboardInterrupt):
repository.destroy()
return EXIT_WARNING
raise CancelledByUser()
manifest = Manifest(key, repository)
manifest.key = key
manifest.write()
@ -51,7 +51,6 @@ class RCreateMixIn:
" borg key export -r REPOSITORY --qr-html encrypted-key-backup.html\n"
"2. Write down the borg key passphrase and store it at safe place.\n"
)
return self.exit_code
def build_parser_rcreate(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -86,7 +86,6 @@ class RDeleteMixIn:
logger.info("Cache deleted.")
else:
logger.info("Would delete cache.")
return self.exit_code
def build_parser_rdelete(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -53,7 +53,6 @@ class RecreateMixIn:
manifest.write()
repository.commit(compact=False)
cache.commit()
return self.exit_code
def build_parser_recreate(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -19,7 +19,6 @@ class RenameMixIn:
manifest.write()
repository.commit(compact=False)
cache.commit()
return self.exit_code
def build_parser_rename(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -72,7 +72,6 @@ class RInfoMixIn:
print(output)
print(str(cache))
return self.exit_code
def build_parser_rinfo(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -36,8 +36,6 @@ class RListMixIn:
if args.json:
json_print(basic_json_data(manifest, extra={"archives": output_data}))
return self.exit_code
def build_parser_rlist(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog, define_archive_filters_group

View File

@ -2,7 +2,6 @@ import argparse
from ._common import Highlander
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..helpers import parse_storage_quota
from ..remote import RepositoryServer
@ -21,7 +20,6 @@ class ServeMixIn:
storage_quota=args.storage_quota,
use_socket=args.use_socket,
).serve()
return EXIT_SUCCESS
def build_parser_serve(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -86,8 +86,6 @@ class TarMixIn:
with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=False) as _stream:
self._export_tar(args, archive, _stream)
return self.exit_code
def _export_tar(self, args, archive, tarstream):
matcher = build_matcher(args.patterns, args.paths)
@ -241,7 +239,6 @@ class TarMixIn:
for pattern in matcher.get_unmatched_include_patterns():
self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
return self.exit_code
@with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.WRITE,))
def do_import_tar(self, args, repository, manifest, cache):
@ -257,8 +254,6 @@ class TarMixIn:
with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=True) as _stream:
self._import_tar(args, repository, manifest, manifest.key, cache, _stream)
return self.exit_code
def _import_tar(self, args, repository, manifest, key, cache, tarstream):
t0 = archive_ts_now()
t0_monotonic = time.monotonic()

View File

@ -5,7 +5,7 @@ from ..archive import Archive
from ..compress import CompressionSpec
from ..constants import * # NOQA
from ..crypto.key import uses_same_id_hash, uses_same_chunker_secret
from ..helpers import EXIT_SUCCESS, EXIT_ERROR, Error
from ..helpers import Error
from ..helpers import location_validator, Location, archivename_validator, comment_validator
from ..helpers import format_file_size
from ..manifest import Manifest
@ -23,22 +23,20 @@ class TransferMixIn:
key = manifest.key
other_key = other_manifest.key
if not uses_same_id_hash(other_key, key):
self.print_error(
raise Error(
"You must keep the same ID hash ([HMAC-]SHA256 or BLAKE2b) or deduplication will break. "
"Use a related repository!"
)
return EXIT_ERROR
if not uses_same_chunker_secret(other_key, key):
self.print_error(
raise Error(
"You must use the same chunker secret or deduplication will break. " "Use a related repository!"
)
return EXIT_ERROR
dry_run = args.dry_run
args.consider_checkpoints = True
archive_names = tuple(x.name for x in other_manifest.archives.list_considering(args))
if not archive_names:
return EXIT_SUCCESS
return
an_errors = []
for archive_name in archive_names:
@ -47,10 +45,8 @@ class TransferMixIn:
except argparse.ArgumentTypeError as err:
an_errors.append(str(err))
if an_errors:
self.print_error("Invalid archive names detected, please rename them before transfer:")
for err_msg in an_errors:
self.print_error(err_msg)
return EXIT_ERROR
an_errors.insert(0, "Invalid archive names detected, please rename them before transfer:")
raise Error("\n".join(an_errors))
ac_errors = []
for archive_name in archive_names:
@ -58,20 +54,17 @@ class TransferMixIn:
try:
comment_validator(archive.metadata.get("comment", ""))
except argparse.ArgumentTypeError as err:
ac_errors.append((archive_name, str(err)))
ac_errors.append(f"{archive_name}: {err}")
if ac_errors:
self.print_error("Invalid archive comments detected, please fix them before transfer:")
for archive_name, err_msg in ac_errors:
self.print_error(f"{archive_name}: {err_msg}")
return EXIT_ERROR
ac_errors.insert(0, "Invalid archive comments detected, please fix them before transfer:")
raise Error("\n".join(ac_errors))
from .. import upgrade as upgrade_mod
try:
UpgraderCls = getattr(upgrade_mod, f"Upgrader{args.upgrader}")
except AttributeError:
self.print_error(f"No such upgrader: {args.upgrader}")
return EXIT_ERROR
raise Error(f"No such upgrader: {args.upgrader}")
if UpgraderCls is not upgrade_mod.UpgraderFrom12To20 and other_manifest.repository.version == 1:
raise Error("To transfer from a borg 1.x repo, you need to use: --upgrader=From12To20")
@ -177,7 +170,6 @@ class TransferMixIn:
f"transfer_size: {format_file_size(transfer_size)} "
f"present_size: {format_file_size(present_size)}"
)
return EXIT_SUCCESS
def build_parser_transfer(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -2,7 +2,6 @@ import argparse
from .. import __version__
from ..constants import * # NOQA
from ..helpers import EXIT_SUCCESS
from ..remote import RemoteRepository
from ..logger import create_logger
@ -22,7 +21,6 @@ class VersionMixIn:
else:
server_version = client_version
print(f"{format_version(client_version)} / {format_version(server_version)}")
return EXIT_SUCCESS
def build_parser_version(self, subparsers, common_parser, mid_common_parser):
from ._common import process_epilog

View File

@ -81,8 +81,7 @@ def add_warning(msg, *args, **kwargs):
"""
The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
warning or error occurred during their operation. This is different from archiver.exit_code, which is only accessible
from the archiver object.
warning or error occurred during their operation.
Note: keep this in helpers/__init__.py as the code expects to be able to assign to helpers.exit_code.
"""

View File

@ -519,11 +519,14 @@ def os_stat(*, path=None, parent_fd=None, name=None, follow_symlinks=False):
def umount(mountpoint):
from . import set_ec
env = prepare_subprocess_env(system=True)
try:
return subprocess.call(["fusermount", "-u", mountpoint], env=env)
rc = subprocess.call(["fusermount", "-u", mountpoint], env=env)
except FileNotFoundError:
return subprocess.call(["umount", mountpoint], env=env)
rc = subprocess.call(["umount", mountpoint], env=env)
set_ec(rc)
# below is a slightly modified tempfile.mkstemp that has an additional mode parameter.

View File

@ -76,7 +76,6 @@ def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b"", binary_outpu
if archiver is None:
archiver = Archiver()
archiver.prerun_checks = lambda *args: None
archiver.exit_code = EXIT_SUCCESS
helpers.exit_code = EXIT_SUCCESS
helpers.warnings_list = []
try:

View File

@ -3,7 +3,7 @@ import pytest
from ...constants import * # NOQA
from . import RK_ENCRYPTION, create_test_files, cmd, generate_archiver_tests
from ...helpers import CommandError
from ...helpers import CommandError, Error
pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,binary") # NOQA
@ -23,8 +23,13 @@ def test_config(archivers, request):
assert "id" in output
assert "last_segment_checked" not in output
output = cmd(archiver, "config", "last_segment_checked", exit_code=1)
assert "No option " in output
if archiver.FORK_DEFAULT:
output = cmd(archiver, "config", "last_segment_checked", exit_code=2)
assert "No option " in output
else:
with pytest.raises(Error):
cmd(archiver, "config", "last_segment_checked")
cmd(archiver, "config", "last_segment_checked", "123")
output = cmd(archiver, "config", "last_segment_checked")
assert output == "123" + os.linesep
@ -39,7 +44,11 @@ def test_config(archivers, request):
output = cmd(archiver, "config", cfg_key)
assert output == cfg_value + os.linesep
cmd(archiver, "config", "--delete", cfg_key)
cmd(archiver, "config", cfg_key, exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "config", cfg_key, exit_code=2)
else:
with pytest.raises(Error):
cmd(archiver, "config", cfg_key)
cmd(archiver, "config", "--list", "--delete", exit_code=2)
if archiver.FORK_DEFAULT:
@ -47,4 +56,8 @@ def test_config(archivers, request):
else:
with pytest.raises(CommandError):
cmd(archiver, "config")
cmd(archiver, "config", "invalid-option", exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "config", "invalid-option", exit_code=2)
else:
with pytest.raises(Error):
cmd(archiver, "config", "invalid-option")

View File

@ -7,7 +7,7 @@ import pytest
from ...constants import * # NOQA
from ...crypto.file_integrity import FileIntegrityError
from ...helpers import bin_to_hex
from ...helpers import bin_to_hex, Error
from . import cmd, create_src_archive, create_test_files, RK_ENCRYPTION
@ -22,7 +22,11 @@ def test_check_corrupted_repository(archiver):
fd.seek(100)
fd.write(b"XXXX")
cmd(archiver, "check", exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "check", exit_code=1)
else:
with pytest.raises(Error):
cmd(archiver, "check")
def corrupt_archiver(archiver):

View File

@ -3,7 +3,7 @@ from unittest.mock import patch
import pytest
from ...helpers.errors import Error
from ...helpers.errors import Error, CancelledByUser
from ...constants import * # NOQA
from ...crypto.key import FlexiKey
from ...repository import Repository
@ -37,7 +37,12 @@ def test_rcreate_interrupt(archivers, request):
raise EOFError
with patch.object(FlexiKey, "create", raise_eof):
cmd(archiver, "rcreate", RK_ENCRYPTION, exit_code=1)
if archiver.FORK_DEFAULT:
cmd(archiver, "rcreate", RK_ENCRYPTION, exit_code=2)
else:
with pytest.raises(CancelledByUser):
cmd(archiver, "rcreate", RK_ENCRYPTION)
assert not os.path.exists(archiver.repository_location)