
Merge branch '1.0-maint' into merge-1.0-maint

# Conflicts:
#	setup.py
#	src/borg/archiver.py
#	src/borg/helpers.py
Thomas Waldmann 2016-09-27 21:31:41 +02:00
commit 2a864be84f
12 changed files with 343 additions and 163 deletions

Vagrantfile
View file

@@ -61,9 +61,9 @@ def packages_darwin
     # install all the (security and other) updates
     sudo softwareupdate --install --all
     # get osxfuse 3.x pre-release code from github:
-    curl -s -L https://github.com/osxfuse/osxfuse/releases/download/osxfuse-3.4.1/osxfuse-3.4.1.dmg >osxfuse.dmg
+    curl -s -L https://github.com/osxfuse/osxfuse/releases/download/osxfuse-3.5.1/osxfuse-3.5.1.dmg >osxfuse.dmg
     MOUNTDIR=$(echo `hdiutil mount osxfuse.dmg | tail -1 | awk '{$1="" ; print $0}'` | xargs -0 echo) \
-      && sudo installer -pkg "${MOUNTDIR}/Extras/FUSE for macOS 3.4.1.pkg" -target /
+      && sudo installer -pkg "${MOUNTDIR}/Extras/FUSE for macOS 3.5.1.pkg" -target /
     sudo chown -R vagrant /usr/local  # brew must be able to create stuff here
     ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
     brew update

View file

@@ -158,7 +158,7 @@ The documentation (in reStructuredText format, .rst) is in docs/.
 To build the html version of it, you need to have sphinx installed::

-  pip3 install sphinx  # important: this will install sphinx with Python 3
+  pip3 install sphinx sphinx_rtd_theme  # important: this will install sphinx with Python 3

 Now run::

View file

@@ -248,8 +248,13 @@ For automated backups the passphrase can be specified using the
 the key in case it gets corrupted or lost. Also keep your passphrase
 at a safe place.
-The backup that is encrypted with that key/passphrase won't help you
-with that, of course.
+You can make backups using :ref:`borg_key_export` subcommand.
+
+If you want to print a backup of your key to paper use the ``--paper``
+option of this command and print the result.
+
+A backup inside of the backup that is encrypted with that key/passphrase
+won't help you with that, of course.

 .. _remote_repos:

View file

@@ -538,6 +538,12 @@ borgfs
 standalone binary will have to manually create a symlink (see
 :ref:`pyinstaller-binary`).

+.. include:: usage/key_export.rst.inc
+
+.. include:: usage/key_import.rst.inc
+
 .. include:: usage/change-passphrase.rst.inc

 Examples

View file

@@ -168,19 +168,33 @@ def finalize_options(self):
     def run(self):
         print('generating usage docs')
+        if not os.path.exists('docs/usage'):
+            os.mkdir('docs/usage')
         # allows us to build docs without the C modules fully loaded during help generation
         from borg.archiver import Archiver
         parser = Archiver(prog='borg').parser
+        self.generate_level("", parser, Archiver)
+
+    def generate_level(self, prefix, parser, Archiver):
+        is_subcommand = False
         choices = {}
         for action in parser._actions:
-            if action.choices is not None:
-                choices.update(action.choices)
+            if action.choices is not None and 'SubParsersAction' in str(action.__class__):
+                is_subcommand = True
+                for cmd, parser in action.choices.items():
+                    choices[prefix + cmd] = parser
+        if prefix and not choices:
+            return
         print('found commands: %s' % list(choices.keys()))
-        if not os.path.exists('docs/usage'):
-            os.mkdir('docs/usage')
         for command, parser in choices.items():
             print('generating help for %s' % command)
-            with open('docs/usage/%s.rst.inc' % command, 'w') as doc:
+            if self.generate_level(command + " ", parser, Archiver):
+                return
+            with open('docs/usage/%s.rst.inc' % command.replace(" ", "_"), 'w') as doc:
                 doc.write(".. IMPORTANT: this file is auto-generated from borg's built-in help, do not edit!\n\n")
                 if command == 'help':
                     for topic in Archiver.helptext:
@@ -191,8 +205,9 @@ def run(self):
                         doc.write(Archiver.helptext[topic])
                 else:
                     params = {"command": command,
+                              "command_": command.replace(' ', '_'),
                               "underline": '-' * len('borg ' + command)}
-                    doc.write(".. _borg_{command}:\n\n".format(**params))
+                    doc.write(".. _borg_{command_}:\n\n".format(**params))
                     doc.write("borg {command}\n{underline}\n::\n\n    borg {command}".format(**params))
                     self.write_usage(parser, doc)
                     epilog = parser.epilog
@@ -200,9 +215,13 @@ def run(self):
                     self.write_options(parser, doc)
                     doc.write("\n\nDescription\n~~~~~~~~~~~\n")
                     doc.write(epilog)
-        common_options = [group for group in choices['create']._action_groups if group.title == 'Common options'][0]
-        with open('docs/usage/common-options.rst.inc', 'w') as doc:
-            self.write_options_group(common_options, doc, False)
+        if 'create' in choices:
+            common_options = [group for group in choices['create']._action_groups if group.title == 'Common options'][0]
+            with open('docs/usage/common-options.rst.inc', 'w') as doc:
+                self.write_options_group(common_options, doc, False)
+        return is_subcommand

     def write_usage(self, parser, fp):
         if any(len(o.option_strings) for o in parser._actions):
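For context on the recursion above: argparse exposes nested commands as a _SubParsersAction whose choices dict maps command names to their sub-parsers, which is what generate_level walks to build keys like "key export". A minimal standalone sketch of the same discovery pattern (collect_commands and the example parsers are illustrative, not part of this patch):

import argparse

def collect_commands(parser, prefix=""):
    """Recursively map 'cmd', 'cmd subcmd', ... to their argparse sub-parsers."""
    found = {}
    for action in parser._actions:
        # nested commands show up as a _SubParsersAction holding name -> parser
        if isinstance(action, argparse._SubParsersAction):
            for name, sub in action.choices.items():
                found[prefix + name] = sub
                found.update(collect_commands(sub, prefix + name + " "))
    return found

root = argparse.ArgumentParser(prog='borg')
subs = root.add_subparsers()
key = subs.add_parser('key')
key_subs = key.add_subparsers()
key_subs.add_parser('export')

print(sorted(collect_commands(root)))  # ['key', 'key export']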

View file

@@ -39,7 +39,7 @@
 from .helpers import dir_is_tagged, is_slow_msgpack, yes, sysinfo
 from .helpers import log_multi
 from .helpers import parse_pattern, PatternMatcher, PathPrefixPattern
-from .helpers import signal_handler
+from .helpers import signal_handler, raising_signal_handler, SigHup, SigTerm
 from .helpers import ErrorIgnoringTextIOWrapper
 from .helpers import ProgressIndicatorPercent
 from .item import Item
@@ -200,7 +200,8 @@ def do_check(self, args, repository):
             msg = ("'check --repair' is an experimental feature that might result in data loss." +
                    "\n" +
                    "Type 'YES' if you understand this and want to continue: ")
-            if not yes(msg, false_msg="Aborting.", truish=('YES', ),
+            if not yes(msg, false_msg="Aborting.", invalid_msg="Invalid answer, aborting.",
+                       truish=('YES', ), retry=False,
                        env_var_override='BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
                 return EXIT_ERROR
         if args.repo_only and args.verify_data:
@@ -798,8 +799,8 @@ def do_delete(self, args, repository):
                 msg.append(format_archive(archive_info))
             msg.append("Type 'YES' if you understand this and want to continue: ")
             msg = '\n'.join(msg)
-            if not yes(msg, false_msg="Aborting.", truish=('YES', ),
-                       env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
+            if not yes(msg, false_msg="Aborting.", invalid_msg='Invalid answer, aborting.', truish=('YES', ),
+                       retry=False, env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
                 self.exit_code = EXIT_ERROR
                 return self.exit_code
             repository.destroy()
@@ -1621,10 +1622,37 @@ def build_parser(self, prog=None):
         subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
                                type=location_validator(archive=False))

-        subparser = subparsers.add_parser('key-export', parents=[common_parser], add_help=False,
-                                          description=self.do_key_export.__doc__,
+        subparser = subparsers.add_parser('key', add_help=False,
+                                          description="Manage a keyfile or repokey of a repository",
                                           epilog="",
                                           formatter_class=argparse.RawDescriptionHelpFormatter,
+                                          help='manage repository key')
+
+        key_parsers = subparser.add_subparsers(title='required arguments', metavar='<command>')
+
+        key_export_epilog = textwrap.dedent("""
+        If repository encryption is used, the repository is inaccessible
+        without the key. This command allows to backup this essential key.
+
+        There are two backup formats. The normal backup format is suitable for
+        digital storage as a file. The ``--paper`` backup format is optimized
+        for printing and typing in while importing, with per line checks to
+        reduce problems with manual input.
+
+        For repositories using keyfile encryption the key is saved locally
+        on the system that is capable of doing backups. To guard against loss
+        of this key, the key needs to be backed up independently of the main
+        data backup.
+
+        For repositories using the repokey encryption the key is saved in the
+        repository in the config file. A backup is thus not strictly needed,
+        but guards against the repository becoming inaccessible if the file
+        is damaged for some reason.
+        """)
+        subparser = key_parsers.add_parser('export', parents=[common_parser], add_help=False,
+                                           description=self.do_key_export.__doc__,
+                                           epilog=key_export_epilog,
+                                           formatter_class=argparse.RawDescriptionHelpFormatter,
                                            help='export repository key for backup')
         subparser.set_defaults(func=self.do_key_export)
         subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
@@ -1635,9 +1663,17 @@ def build_parser(self, prog=None):
                                default=False,
                                help='Create an export suitable for printing and later type-in')

-        subparser = subparsers.add_parser('key-import', parents=[common_parser], add_help=False,
+        key_import_epilog = textwrap.dedent("""
+        This command allows to restore a key previously backed up with the
+        export command.
+
+        If the ``--paper`` option is given, the import will be an interactive
+        process in which each line is checked for plausibility before
+        proceeding to the next line. For this format PATH must not be given.
+        """)
+        subparser = key_parsers.add_parser('import', parents=[common_parser], add_help=False,
                                            description=self.do_key_import.__doc__,
-                                           epilog="",
+                                           epilog=key_import_epilog,
                                            formatter_class=argparse.RawDescriptionHelpFormatter,
                                            help='import repository key from backup')
         subparser.set_defaults(func=self.do_key_import)
@@ -2345,6 +2381,22 @@ def build_parser(self, prog=None):
         subparser.add_argument('topic', metavar='TOPIC', type=str, nargs='?',
                                help='additional help on TOPIC')

+        debug_epilog = textwrap.dedent("""
+        These commands are not intended for normal use and potentially very
+        dangerous if used incorrectly.
+
+        They exist to improve debugging capabilities without direct system access, e.g.
+        in case you ever run into some severe malfunction. Use them only if you know
+        what you are doing or if a trusted developer tells you what to do.""")
+
+        subparser = subparsers.add_parser('debug', add_help=False,
+                                          description='debugging command (not intended for normal use)',
+                                          epilog=debug_epilog,
+                                          formatter_class=argparse.RawDescriptionHelpFormatter,
+                                          help='debugging command (not intended for normal use)')
+
+        debug_parsers = subparser.add_subparsers(title='required arguments', metavar='<command>')
+
         debug_info_epilog = textwrap.dedent("""
         This command displays some system information that might be useful for bug
         reports and debugging problems. If a traceback happens, this information is
@@ -2357,6 +2409,13 @@ def build_parser(self, prog=None):
                                           help='show system infos for debugging / bug reports (debug)')
         subparser.set_defaults(func=self.do_debug_info)

+        subparser = debug_parsers.add_parser('info', parents=[common_parser], add_help=False,
+                                             description=self.do_debug_info.__doc__,
+                                             epilog=debug_info_epilog,
+                                             formatter_class=argparse.RawDescriptionHelpFormatter,
+                                             help='show system infos for debugging / bug reports (debug)')
+        subparser.set_defaults(func=self.do_debug_info)
+
         debug_dump_archive_items_epilog = textwrap.dedent("""
         This command dumps raw (but decrypted and decompressed) archive items (only metadata) to files.
         """)
@@ -2370,6 +2429,16 @@ def build_parser(self, prog=None):
                                type=location_validator(archive=True),
                                help='archive to dump')

+        subparser = debug_parsers.add_parser('dump-archive-items', parents=[common_parser], add_help=False,
+                                             description=self.do_debug_dump_archive_items.__doc__,
+                                             epilog=debug_dump_archive_items_epilog,
+                                             formatter_class=argparse.RawDescriptionHelpFormatter,
+                                             help='dump archive items (metadata) (debug)')
+        subparser.set_defaults(func=self.do_debug_dump_archive_items)
+        subparser.add_argument('location', metavar='ARCHIVE',
+                               type=location_validator(archive=True),
+                               help='archive to dump')
+
         debug_dump_repo_objs_epilog = textwrap.dedent("""
         This command dumps raw (but decrypted and decompressed) repo objects to files.
         """)
@@ -2383,6 +2452,16 @@ def build_parser(self, prog=None):
                                type=location_validator(archive=False),
                                help='repo to dump')

+        subparser = debug_parsers.add_parser('dump-repo-objs', parents=[common_parser], add_help=False,
+                                             description=self.do_debug_dump_repo_objs.__doc__,
+                                             epilog=debug_dump_repo_objs_epilog,
+                                             formatter_class=argparse.RawDescriptionHelpFormatter,
+                                             help='dump repo objects (debug)')
+        subparser.set_defaults(func=self.do_debug_dump_repo_objs)
+        subparser.add_argument('location', metavar='REPOSITORY',
+                               type=location_validator(archive=False),
+                               help='repo to dump')
+
         debug_get_obj_epilog = textwrap.dedent("""
         This command gets an object from the repository.
         """)
@@ -2400,6 +2479,20 @@ def build_parser(self, prog=None):
         subparser.add_argument('path', metavar='PATH', type=str,
                                help='file to write object data into')

+        subparser = debug_parsers.add_parser('get-obj', parents=[common_parser], add_help=False,
+                                             description=self.do_debug_get_obj.__doc__,
+                                             epilog=debug_get_obj_epilog,
+                                             formatter_class=argparse.RawDescriptionHelpFormatter,
+                                             help='get object from repository (debug)')
+        subparser.set_defaults(func=self.do_debug_get_obj)
+        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
+                               type=location_validator(archive=False),
+                               help='repository to use')
+        subparser.add_argument('id', metavar='ID', type=str,
+                               help='hex object ID to get from the repo')
+        subparser.add_argument('path', metavar='PATH', type=str,
+                               help='file to write object data into')
+
         debug_put_obj_epilog = textwrap.dedent("""
         This command puts objects into the repository.
         """)
@@ -2415,6 +2508,18 @@ def build_parser(self, prog=None):
         subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
                                help='file(s) to read and create object(s) from')

+        subparser = debug_parsers.add_parser('put-obj', parents=[common_parser], add_help=False,
+                                             description=self.do_debug_put_obj.__doc__,
+                                             epilog=debug_put_obj_epilog,
+                                             formatter_class=argparse.RawDescriptionHelpFormatter,
+                                             help='put object to repository (debug)')
+        subparser.set_defaults(func=self.do_debug_put_obj)
+        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
+                               type=location_validator(archive=False),
+                               help='repository to use')
+        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
+                               help='file(s) to read and create object(s) from')
+
         debug_delete_obj_epilog = textwrap.dedent("""
         This command deletes objects from the repository.
         """)
@@ -2429,6 +2534,19 @@ def build_parser(self, prog=None):
                                help='repository to use')
         subparser.add_argument('ids', metavar='IDs', nargs='+', type=str,
                                help='hex object ID(s) to delete from the repo')

+        subparser = debug_parsers.add_parser('delete-obj', parents=[common_parser], add_help=False,
+                                             description=self.do_debug_delete_obj.__doc__,
+                                             epilog=debug_delete_obj_epilog,
+                                             formatter_class=argparse.RawDescriptionHelpFormatter,
+                                             help='delete object from repository (debug)')
+        subparser.set_defaults(func=self.do_debug_delete_obj)
+        subparser.add_argument('location', metavar='REPOSITORY', nargs='?', default='',
+                               type=location_validator(archive=False),
+                               help='repository to use')
+        subparser.add_argument('ids', metavar='IDs', nargs='+', type=str,
+                               help='hex object ID(s) to delete from the repo')
+
         return parser

     def get_args(self, argv, cmd):
@@ -2494,59 +2612,28 @@ def run(self, args):
         return args.func(args)


-def sig_info_handler(signum, stack):  # pragma: no cover
+def sig_info_handler(sig_no, stack):  # pragma: no cover
     """search the stack for infos about the currently processed file and print them"""
-    for frame in inspect.getouterframes(stack):
-        func, loc = frame[3], frame[0].f_locals
-        if func in ('process_file', '_process', ):  # create op
-            path = loc['path']
-            try:
-                pos = loc['fd'].tell()
-                total = loc['st'].st_size
-            except Exception:
-                pos, total = 0, 0
-            logger.info("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
-            break
-        if func in ('extract_item', ):  # extract op
-            path = loc['item'].path
-            try:
-                pos = loc['fd'].tell()
-            except Exception:
-                pos = 0
-            logger.info("{0} {1}/???".format(path, format_file_size(pos)))
-            break
-
-
-class SIGTERMReceived(BaseException):
-    pass
-
-
-def sig_term_handler(signum, stack):
-    raise SIGTERMReceived
-
-
-class SIGHUPReceived(BaseException):
-    pass
-
-
-def sig_hup_handler(signum, stack):
-    raise SIGHUPReceived
-
-
-def setup_signal_handlers():  # pragma: no cover
-    sigs = []
-    if hasattr(signal, 'SIGUSR1'):
-        sigs.append(signal.SIGUSR1)  # kill -USR1 pid
-    if hasattr(signal, 'SIGINFO'):
-        sigs.append(signal.SIGINFO)  # kill -INFO pid (or ctrl-t)
-    for sig in sigs:
-        signal.signal(sig, sig_info_handler)
-    # If we received SIGTERM or SIGHUP, catch them and raise a proper exception
-    # that can be handled for an orderly exit. SIGHUP is important especially
-    # for systemd systems, where logind sends it when a session exits, in
-    # addition to any traditional use.
-    signal.signal(signal.SIGTERM, sig_term_handler)
-    signal.signal(signal.SIGHUP, sig_hup_handler)
+    with signal_handler(sig_no, signal.SIG_IGN):
+        for frame in inspect.getouterframes(stack):
+            func, loc = frame[3], frame[0].f_locals
+            if func in ('process_file', '_process', ):  # create op
+                path = loc['path']
+                try:
+                    pos = loc['fd'].tell()
+                    total = loc['st'].st_size
+                except Exception:
+                    pos, total = 0, 0
+                logger.info("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
+                break
+            if func in ('extract_item', ):  # extract op
+                path = loc['item'].path
+                try:
+                    pos = loc['fd'].tell()
+                except Exception:
+                    pos = 0
+                logger.info("{0} {1}/???".format(path, format_file_size(pos)))
+                break


 def main():  # pragma: no cover
@@ -2558,68 +2645,79 @@ def main():  # pragma: no cover
     # issues when print()-ing unicode file names
     sys.stdout = ErrorIgnoringTextIOWrapper(sys.stdout.buffer, sys.stdout.encoding, 'replace', line_buffering=True)
     sys.stderr = ErrorIgnoringTextIOWrapper(sys.stderr.buffer, sys.stderr.encoding, 'replace', line_buffering=True)
-    setup_signal_handlers()
-    archiver = Archiver()
-    msg = tb = None
-    tb_log_level = logging.ERROR
-    try:
-        args = archiver.get_args(sys.argv, os.environ.get('SSH_ORIGINAL_COMMAND'))
-    except Error as e:
-        msg = e.get_message()
-        tb_log_level = logging.ERROR if e.traceback else logging.DEBUG
-        tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
-        # we might not have logging setup yet, so get out quickly
-        print(msg, file=sys.stderr)
-        if tb_log_level == logging.ERROR:
-            print(tb, file=sys.stderr)
-        sys.exit(e.exit_code)
-    try:
-        exit_code = archiver.run(args)
-    except Error as e:
-        msg = e.get_message()
-        tb_log_level = logging.ERROR if e.traceback else logging.DEBUG
-        tb = "%s\n%s" % (traceback.format_exc(), sysinfo())
-        exit_code = e.exit_code
-    except RemoteRepository.RPCError as e:
-        msg = "%s %s" % (e.remote_type, e.name)
-        important = e.remote_type not in ('LockTimeout', )
-        tb_log_level = logging.ERROR if important else logging.DEBUG
-        tb = sysinfo()
-        exit_code = EXIT_ERROR
-    except Exception:
-        msg = 'Local Exception'
-        tb_log_level = logging.ERROR
-        tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
-        exit_code = EXIT_ERROR
-    except KeyboardInterrupt:
-        msg = 'Keyboard interrupt'
-        tb_log_level = logging.DEBUG
-        tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
-        exit_code = EXIT_ERROR
-    except SIGTERMReceived:
-        msg = 'Received SIGTERM'
-        tb_log_level = logging.DEBUG
-        tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
-        exit_code = EXIT_ERROR
-    except SIGHUPReceived:
-        msg = 'Received SIGHUP.'
-        exit_code = EXIT_ERROR
-    if msg:
-        logger.error(msg)
-    if tb:
-        logger.log(tb_log_level, tb)
-    if args.show_rc:
-        rc_logger = logging.getLogger('borg.output.show-rc')
-        exit_msg = 'terminating with %s status, rc %d'
-        if exit_code == EXIT_SUCCESS:
-            rc_logger.info(exit_msg % ('success', exit_code))
-        elif exit_code == EXIT_WARNING:
-            rc_logger.warning(exit_msg % ('warning', exit_code))
-        elif exit_code == EXIT_ERROR:
-            rc_logger.error(exit_msg % ('error', exit_code))
-        else:
-            rc_logger.error(exit_msg % ('abnormal', exit_code or 666))
-    sys.exit(exit_code)
+
+    # If we receive SIGINT (ctrl-c), SIGTERM (kill) or SIGHUP (kill -HUP),
+    # catch them and raise a proper exception that can be handled for an
+    # orderly exit.
+    # SIGHUP is important especially for systemd systems, where logind
+    # sends it when a session exits, in addition to any traditional use.
+    # Output some info if we receive SIGUSR1 or SIGINFO (ctrl-t).
+    with signal_handler('SIGINT', raising_signal_handler(KeyboardInterrupt)), \
+         signal_handler('SIGHUP', raising_signal_handler(SigHup)), \
+         signal_handler('SIGTERM', raising_signal_handler(SigTerm)), \
+         signal_handler('SIGUSR1', sig_info_handler), \
+         signal_handler('SIGINFO', sig_info_handler):
+        archiver = Archiver()
+        msg = tb = None
+        tb_log_level = logging.ERROR
+        try:
+            args = archiver.get_args(sys.argv, os.environ.get('SSH_ORIGINAL_COMMAND'))
+        except Error as e:
+            msg = e.get_message()
+            tb_log_level = logging.ERROR if e.traceback else logging.DEBUG
+            tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
+            # we might not have logging setup yet, so get out quickly
+            print(msg, file=sys.stderr)
+            if tb_log_level == logging.ERROR:
+                print(tb, file=sys.stderr)
+            sys.exit(e.exit_code)
+        try:
+            exit_code = archiver.run(args)
+        except Error as e:
+            msg = e.get_message()
+            tb_log_level = logging.ERROR if e.traceback else logging.DEBUG
+            tb = "%s\n%s" % (traceback.format_exc(), sysinfo())
+            exit_code = e.exit_code
+        except RemoteRepository.RPCError as e:
+            msg = "%s %s" % (e.remote_type, e.name)
+            important = e.remote_type not in ('LockTimeout', )
+            tb_log_level = logging.ERROR if important else logging.DEBUG
+            tb = sysinfo()
+            exit_code = EXIT_ERROR
+        except Exception:
+            msg = 'Local Exception'
+            tb_log_level = logging.ERROR
+            tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
+            exit_code = EXIT_ERROR
+        except KeyboardInterrupt:
+            msg = 'Keyboard interrupt'
+            tb_log_level = logging.DEBUG
+            tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
+            exit_code = EXIT_ERROR
+        except SigTerm:
+            msg = 'Received SIGTERM'
+            tb_log_level = logging.DEBUG
+            tb = '%s\n%s' % (traceback.format_exc(), sysinfo())
+            exit_code = EXIT_ERROR
+        except SigHup:
+            msg = 'Received SIGHUP.'
+            exit_code = EXIT_ERROR
+        if msg:
+            logger.error(msg)
+        if tb:
+            logger.log(tb_log_level, tb)
+        if args.show_rc:
+            rc_logger = logging.getLogger('borg.output.show-rc')
+            exit_msg = 'terminating with %s status, rc %d'
+            if exit_code == EXIT_SUCCESS:
+                rc_logger.info(exit_msg % ('success', exit_code))
+            elif exit_code == EXIT_WARNING:
+                rc_logger.warning(exit_msg % ('warning', exit_code))
+            elif exit_code == EXIT_ERROR:
+                rc_logger.error(exit_msg % ('error', exit_code))
+            else:
+                rc_logger.error(exit_msg % ('abnormal', exit_code or 666))
+        sys.exit(exit_code)


 if __name__ == '__main__':

View file

@@ -78,7 +78,8 @@ def __init__(self, repository, key, manifest, path=None, sync=True, do_files=Fal
                 msg = ("Warning: Attempting to access a previously unknown unencrypted repository!" +
                        "\n" +
                        "Do you want to continue? [yN] ")
-                if not yes(msg, false_msg="Aborting.", env_var_override='BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK'):
+                if not yes(msg, false_msg="Aborting.", invalid_msg="Invalid answer, aborting.",
+                           retry=False, env_var_override='BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK'):
                     raise self.CacheInitAbortedError()
             self.create()
         self.open(lock_wait=lock_wait)
@@ -88,7 +89,8 @@ def __init__(self, repository, key, manifest, path=None, sync=True, do_files=Fal
             msg = ("Warning: The repository at location {} was previously located at {}".format(repository._location.canonical_path(), self.previous_location) +
                    "\n" +
                    "Do you want to continue? [yN] ")
-            if not yes(msg, false_msg="Aborting.", env_var_override='BORG_RELOCATED_REPO_ACCESS_IS_OK'):
+            if not yes(msg, false_msg="Aborting.", invalid_msg="Invalid answer, aborting.",
+                       retry=False, env_var_override='BORG_RELOCATED_REPO_ACCESS_IS_OK'):
                 raise self.RepositoryAccessAborted()

         if sync and self.manifest.id != self.manifest_id:

View file

@@ -1,4 +1,5 @@
 import argparse
+import contextlib
 import grp
 import hashlib
 import logging
@@ -19,7 +20,6 @@
 import uuid
 from binascii import hexlify
 from collections import namedtuple, deque, abc
-from contextlib import contextmanager
 from datetime import datetime, timezone, timedelta
 from fnmatch import translate
 from functools import wraps, partial, lru_cache
@@ -1054,9 +1054,8 @@ def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
         default=False, retry=True, env_var_override=None, ofile=None, input=input):
     """Output <msg> (usually a question) and let user input an answer.
     Qualifies the answer according to falsish, truish and defaultish as True, False or <default>.
-    If it didn't qualify and retry_msg is None (no retries wanted),
-    return the default [which defaults to False]. Otherwise let user retry
-    answering until answer is qualified.
+    If it didn't qualify and retry is False (no retries wanted), return the default [which
+    defaults to False]. If retry is True let user retry answering until answer is qualified.

     If env_var_override is given and this var is present in the environment, do not ask
     the user, but just use the env var contents as answer as if it was typed in.
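The call sites touched elsewhere in this commit (check --repair, delete, the cache warnings) all follow the same non-retrying pattern; a minimal hedged sketch of such a call, with an illustrative environment variable name and a locally defined stand-in for borg's exit constant:

from borg.helpers import yes

EXIT_ERROR = 2  # stands in for borg's exit status constant

def confirm_destructive_action():
    msg = ("This operation is destructive.\n"
           "Type 'YES' if you understand this and want to continue: ")
    # retry=False: anything other than 'YES' (typed or via the env override) aborts immediately
    if not yes(msg, false_msg="Aborting.", invalid_msg="Invalid answer, aborting.",
               truish=('YES', ), retry=False,
               env_var_override='BORG_EXAMPLE_I_KNOW_WHAT_I_AM_DOING'):
        return EXIT_ERROR
    return 0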
@@ -1665,15 +1664,6 @@ def heuristic_lz4(self, compr_args, chunk):
         return compr_args, Chunk(data, **meta)


-@contextmanager
-def signal_handler(signo, handler):
-    old_signal_handler = signal.signal(signo, handler)
-    try:
-        yield
-    finally:
-        signal.signal(signo, old_signal_handler)
-
-
 class ErrorIgnoringTextIOWrapper(io.TextIOWrapper):
     def read(self, n):
         if not self.closed:
@@ -1698,6 +1688,52 @@ def write(self, s):
             return len(s)


+class SignalException(BaseException):
+    """base class for all signal-based exceptions"""
+
+
+class SigHup(SignalException):
+    """raised on SIGHUP signal"""
+
+
+class SigTerm(SignalException):
+    """raised on SIGTERM signal"""
+
+
+@contextlib.contextmanager
+def signal_handler(sig, handler):
+    """
+    when entering context, set up signal handler <handler> for signal <sig>.
+    when leaving context, restore original signal handler.
+
+    <sig> can be either a str giving a signal.SIGXXX attribute name (it
+    won't crash if the attribute name does not exist as some names are platform
+    specific) or an int giving a signal number.
+
+    <handler> is any handler value as accepted by signal.signal(sig, handler).
+    """
+    if isinstance(sig, str):
+        sig = getattr(signal, sig, None)
+    if sig is not None:
+        orig_handler = signal.signal(sig, handler)
+    try:
+        yield
+    finally:
+        if sig is not None:
+            signal.signal(sig, orig_handler)
+
+
+def raising_signal_handler(exc_cls):
+    def handler(sig_no, frame):
+        # setting SIG_IGN avoids that an incoming second signal of this
+        # kind would raise a 2nd exception while we still process the
+        # exception handler for exc_cls for the 1st signal.
+        signal.signal(sig_no, signal.SIG_IGN)
+        raise exc_cls
+    return handler
+
+
 def swidth_slice(string, max_width):
     """
     Return a slice of *max_width* cells from *string*.
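A short usage sketch of the two new helpers, mirroring how archiver.main() composes them; it assumes the borg package from this tree is importable, and do_work is a placeholder:

from borg.helpers import signal_handler, raising_signal_handler, SigTerm

def do_work():
    pass  # placeholder for a long-running operation

# the handler is installed only for the duration of the with-block, then restored
with signal_handler('SIGTERM', raising_signal_handler(SigTerm)):
    try:
        do_work()
    except SigTerm:
        print('Received SIGTERM')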

View file

@@ -228,8 +228,9 @@ def getpass(cls, prompt):
     @classmethod
     def verification(cls, passphrase):
-        if yes('Do you want your passphrase to be displayed for verification? [yN]: ',
-               env_var_override='BORG_DISPLAY_PASSPHRASE'):
+        msg = 'Do you want your passphrase to be displayed for verification? [yN]: '
+        if yes(msg, retry_msg=msg, invalid_msg='Invalid answer, try again.',
+               retry=True, env_var_override='BORG_DISPLAY_PASSPHRASE'):
             print('Your passphrase (between double-quotes): "%s"' % passphrase,
                   file=sys.stderr)
             print('Make sure the passphrase displayed above is exactly what you wanted.',

View file

@@ -98,7 +98,7 @@ def grouped(s):
                 i += 1
             return ret

-        export = 'To restore key use borg key-import --paper /path/to/repo\n\n'
+        export = 'To restore key use borg key import --paper /path/to/repo\n\n'
         binary = a2b_base64(self.keyblob)

         export += 'BORG PAPER KEY v1\n'

View file

@@ -980,6 +980,13 @@ def iter_objects(self, segment, include_data=False, read_data=True):
             else:
                 yield tag, key, offset, size
             offset += size
+            # we must get the fd via get_fd() here again as we yielded to our caller and it might
+            # have triggered closing of the fd we had before (e.g. by calling io.read() for
+            # different segment(s)).
+            # by calling get_fd() here again we also make our fd "recently used" so it likely
+            # does not get kicked out of self.fds LRUcache.
+            fd = self.get_fd(segment)
+            fd.seek(offset)
             header = fd.read(self.header_fmt.size)

     def recover_segment(self, segment, filename):
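For background on the comment above: self.fds is an LRU cache of open segment files, so a generator that yields to its caller may find its fd evicted and closed by the time it resumes. A simplified, hedged sketch of that interplay (FdCache and iter_records are illustrative, not borg's actual LRUCache or segment format):

from collections import OrderedDict

class FdCache:
    """tiny LRU cache of open files; evicted entries are closed"""
    def __init__(self, capacity=2):
        self.capacity = capacity
        self.fds = OrderedDict()

    def get_fd(self, path):
        if path in self.fds:
            self.fds.move_to_end(path)  # mark as recently used
            return self.fds[path]
        fd = open(path, 'rb')
        self.fds[path] = fd
        if len(self.fds) > self.capacity:
            _, old = self.fds.popitem(last=False)
            old.close()  # eviction closes the file
        return fd

def iter_records(cache, path, size=16):
    offset = 0
    while True:
        fd = cache.get_fd(path)  # re-fetch: our fd may have been evicted while we were suspended
        fd.seek(offset)
        data = fd.read(size)
        if not data:
            return
        yield data  # the caller may read other files here, possibly evicting our fd
        offset += len(data)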

View file

@@ -76,7 +76,7 @@ def exec_cmd(*args, archiver=None, fork=False, exe=None, **kw):
             sys.stdin, sys.stdout, sys.stderr = stdin, stdout, stderr


-# check if the binary "borg.exe" is available
+# check if the binary "borg.exe" is available (for local testing a symlink to virtualenv/bin/borg should do)
 try:
     exec_cmd('help', exe='borg.exe', fork=True)
     BORG_EXES = ['python', 'binary', ]
@@ -1815,7 +1815,7 @@ def test_key_export_keyfile(self):
         export_file = self.output_path + '/exported'
         self.cmd('init', self.repository_location, '--encryption', 'keyfile')
         repo_id = self._extract_repository_id(self.repository_path)
-        self.cmd('key-export', self.repository_location, export_file)
+        self.cmd('key', 'export', self.repository_location, export_file)

         with open(export_file, 'r') as fd:
             export_contents = fd.read()
@@ -1831,7 +1831,7 @@ def test_key_export_keyfile(self):
         os.unlink(key_file)

-        self.cmd('key-import', self.repository_location, export_file)
+        self.cmd('key', 'import', self.repository_location, export_file)

         with open(key_file, 'r') as fd:
             key_contents2 = fd.read()
@@ -1842,7 +1842,7 @@ def test_key_export_repokey(self):
         export_file = self.output_path + '/exported'
         self.cmd('init', self.repository_location, '--encryption', 'repokey')
         repo_id = self._extract_repository_id(self.repository_path)
-        self.cmd('key-export', self.repository_location, export_file)
+        self.cmd('key', 'export', self.repository_location, export_file)

         with open(export_file, 'r') as fd:
             export_contents = fd.read()
@@ -1861,7 +1861,7 @@ def test_key_export_repokey(self):
         with Repository(self.repository_path) as repository:
             repository.save_key(b'')

-        self.cmd('key-import', self.repository_location, export_file)
+        self.cmd('key', 'import', self.repository_location, export_file)

         with Repository(self.repository_path) as repository:
             repo_key2 = RepoKey(repository)
@@ -1873,17 +1873,23 @@ def test_key_import_errors(self):
         export_file = self.output_path + '/exported'
         self.cmd('init', self.repository_location, '--encryption', 'keyfile')

-        self.cmd('key-import', self.repository_location, export_file, exit_code=EXIT_ERROR)
+        self.cmd('key', 'import', self.repository_location, export_file, exit_code=EXIT_ERROR)

         with open(export_file, 'w') as fd:
             fd.write('something not a key\n')

-        self.assert_raises(NotABorgKeyFile, lambda: self.cmd('key-import', self.repository_location, export_file))
+        if self.FORK_DEFAULT:
+            self.cmd('key', 'import', self.repository_location, export_file, exit_code=2)
+        else:
+            self.assert_raises(NotABorgKeyFile, lambda: self.cmd('key', 'import', self.repository_location, export_file))

         with open(export_file, 'w') as fd:
             fd.write('BORG_KEY a0a0a0\n')

-        self.assert_raises(RepoIdMismatch, lambda: self.cmd('key-import', self.repository_location, export_file))
+        if self.FORK_DEFAULT:
+            self.cmd('key', 'import', self.repository_location, export_file, exit_code=2)
+        else:
+            self.assert_raises(RepoIdMismatch, lambda: self.cmd('key', 'import', self.repository_location, export_file))

     def test_key_export_paperkey(self):
         repo_id = 'e294423506da4e1ea76e8dcdf1a3919624ae3ae496fddf905610c351d3f09239'
@@ -1898,12 +1904,12 @@ def test_key_export_paperkey(self):
             fd.write(KeyfileKey.FILE_ID + ' ' + repo_id + '\n')
             fd.write(b2a_base64(b'abcdefghijklmnopqrstu').decode())

-        self.cmd('key-export', '--paper', self.repository_location, export_file)
+        self.cmd('key', 'export', '--paper', self.repository_location, export_file)

         with open(export_file, 'r') as fd:
             export_contents = fd.read()

-        assert export_contents == """To restore key use borg key-import --paper /path/to/repo
+        assert export_contents == """To restore key use borg key import --paper /path/to/repo

 BORG PAPER KEY v1
 id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02