Mirror of https://github.com/borgbackup/borg.git
undelete: undelete soft-deleted archives, fixes #8500
parent 7ed8ed56d2, commit 9fabc19e6b
5 changed files with 169 additions and 3 deletions
@@ -500,7 +500,7 @@ def __init__(
             self.tags = set()
         else:
             if name_is_id:
-                # we also go over the manifest here to avoid quick&dirty deleted archives,
+                # we also go over the manifest here to avoid soft-deleted archives,
                 # except if we explicitly request one via deleted=True.
                 info = self.manifest.archives.get_by_id(name, deleted=deleted)
             else:
@@ -92,6 +92,7 @@ def get_func(args):
 from .tag_cmd import TagMixIn
 from .tar_cmds import TarMixIn
 from .transfer_cmd import TransferMixIn
+from .undelete_cmd import UnDeleteMixIn
 from .version_cmd import VersionMixIn


@@ -124,6 +125,7 @@ class Archiver(
     TagMixIn,
     TarMixIn,
     TransferMixIn,
+    UnDeleteMixIn,
     VersionMixIn,
 ):
     def __init__(self, lock_wait=None, prog=None):
@@ -364,6 +366,7 @@ def build_parser(self):
         self.build_parser_tag(subparsers, common_parser, mid_common_parser)
         self.build_parser_tar(subparsers, common_parser, mid_common_parser)
         self.build_parser_transfer(subparsers, common_parser, mid_common_parser)
+        self.build_parser_undelete(subparsers, common_parser, mid_common_parser)
         self.build_parser_version(subparsers, common_parser, mid_common_parser)
         return parser

src/borg/archiver/undelete_cmd.py (new file, 90 lines)
@@ -0,0 +1,90 @@
import argparse
import logging

from ._common import with_repository
from ..constants import *  # NOQA
from ..helpers import format_archive, CommandError, bin_to_hex, archivename_validator
from ..manifest import Manifest

from ..logger import create_logger

logger = create_logger()


class UnDeleteMixIn:
    @with_repository(manifest=False)
    def do_undelete(self, args, repository):
        """Undelete archives"""
        self.output_list = args.output_list
        dry_run = args.dry_run
        manifest = Manifest.load(repository, (Manifest.Operation.DELETE,))
        if args.name:
            archive_infos = [manifest.archives.get_one([args.name], deleted=True)]
        else:
            args.deleted = True
            archive_infos = manifest.archives.list_considering(args)
        count = len(archive_infos)
        if count == 0:
            return
        if not args.name and not args.match_archives and args.first == 0 and args.last == 0:
            raise CommandError("Aborting: if you really want to undelete all archives, please use -a 'sh:*'.")

        undeleted = False
        logger_list = logging.getLogger("borg.output.list")
        for i, archive_info in enumerate(archive_infos, 1):
            name, id, hex_id = archive_info.name, archive_info.id, bin_to_hex(archive_info.id)
            try:
                if not dry_run:
                    manifest.archives.undelete_by_id(id)
            except KeyError:
                self.print_warning(f"Archive {name} {hex_id} not found ({i}/{count}).")
            else:
                undeleted = True
                if self.output_list:
                    msg = "Would undelete: {} ({}/{})" if dry_run else "Undeleted archive: {} ({}/{})"
                    logger_list.info(msg.format(format_archive(archive_info), i, count))
        if dry_run:
            logger.info("Finished dry-run.")
        elif undeleted:
            manifest.write()
            self.print_warning("Done.", wc=None)
        else:
            self.print_warning("Aborted.", wc=None)
        return

    def build_parser_undelete(self, subparsers, common_parser, mid_common_parser):
        from ._common import process_epilog, define_archive_filters_group

        undelete_epilog = process_epilog(
            """
        This command undeletes archives in the repository.

        Important: Undeleting archives is only possible before compacting.
        Once ``borg compact`` has run, all disk space occupied only by the
        deleted archives will be freed and undelete is not possible anymore.

        When in doubt, use ``--dry-run --list`` to see what would be undeleted.

        You can undelete multiple archives by specifying a matching pattern,
        using the ``--match-archives PATTERN`` option (for more info on these patterns,
        see :ref:`borg_patterns`).
        """
        )
        subparser = subparsers.add_parser(
            "undelete",
            parents=[common_parser],
            add_help=False,
            description=self.do_undelete.__doc__,
            epilog=undelete_epilog,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            help="undelete archive",
        )
        subparser.set_defaults(func=self.do_undelete)
        subparser.add_argument("-n", "--dry-run", dest="dry_run", action="store_true", help="do not change repository")
        subparser.add_argument(
            "--list", dest="output_list", action="store_true", help="output verbose list of archives"
        )
        define_archive_filters_group(subparser)
        subparser.add_argument(
            "name", metavar="NAME", nargs="?", type=archivename_validator, help="specify the archive name"
        )
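For quick orientation, a usage sketch of the new command, built only from the options defined above (``my-archive`` is a placeholder archive name; repository selection, e.g. via the BORG_REPO environment variable, is assumed and omitted):

    # preview which soft-deleted archives would come back, without changing anything
    borg undelete --dry-run --list -a 'sh:*'

    # undelete a single archive by name
    borg undelete my-archive

    # undelete all soft-deleted archives; the explicit -a 'sh:*' pattern is required as a safety check
    borg undelete -a 'sh:*'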
@@ -336,6 +336,12 @@ def delete_by_id(self, id):
         assert not self.legacy
         self.repository.store_move(f"archives/{bin_to_hex(id)}", delete=True)  # soft-delete

+    def undelete_by_id(self, id):
+        # undelete an archive
+        assert isinstance(id, bytes)
+        assert not self.legacy
+        self.repository.store_move(f"archives/{bin_to_hex(id)}", undelete=True)
+
     def list(
         self,
         *,

@@ -408,10 +414,10 @@ def list_considering(self, args):
             deleted=getattr(args, "deleted", False),
         )

-    def get_one(self, match, *, match_end=r"\Z"):
+    def get_one(self, match, *, match_end=r"\Z", deleted=False):
         """get exactly one archive matching <match>"""
         assert match is not None
-        archive_infos = self._matching_info_tuples(match, match_end)
+        archive_infos = self._matching_info_tuples(match, match_end, deleted=deleted)
         if len(archive_infos) != 1:
             raise CommandError(f"{match} needed to match precisely one archive, but matched {len(archive_infos)}.")
         return archive_infos[0]
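At the CLI level, the soft-delete/undelete pairing introduced above works out to the following hedged lifecycle sketch (``my-archive`` is a placeholder; repository selection via BORG_REPO is again assumed):

    borg delete my-archive      # soft-delete: the archives/<id> entry in the store is moved aside
    borg undelete my-archive    # move it back; only possible before compacting
    borg compact                # frees the space of deleted archives; undelete no longer works afterwards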
src/borg/testsuite/archiver/undelete_cmd_test.py (new file, 67 lines)
@ -0,0 +1,67 @@
|
|||
from ...constants import * # NOQA
|
||||
from . import cmd, create_regular_file, generate_archiver_tests, RK_ENCRYPTION
|
||||
|
||||
pytest_generate_tests = lambda metafunc: generate_archiver_tests(metafunc, kinds="local,remote,binary") # NOQA
|
||||
|
||||
|
||||
def test_undelete_single(archivers, request):
|
||||
archiver = request.getfixturevalue(archivers)
|
||||
create_regular_file(archiver.input_path, "file1", size=1024 * 80)
|
||||
cmd(archiver, "repo-create", RK_ENCRYPTION)
|
||||
cmd(archiver, "create", "normal", "input")
|
||||
cmd(archiver, "create", "deleted", "input")
|
||||
cmd(archiver, "delete", "deleted")
|
||||
output = cmd(archiver, "repo-list")
|
||||
assert "normal" in output
|
||||
assert "deleted" not in output
|
||||
cmd(archiver, "undelete", "deleted")
|
||||
output = cmd(archiver, "repo-list")
|
||||
assert "normal" in output
|
||||
assert "deleted" in output # it's back!
|
||||
cmd(archiver, "check")
|
||||
|
||||
|
||||
def test_undelete_multiple_dryrun(archivers, request):
|
||||
archiver = request.getfixturevalue(archivers)
|
||||
create_regular_file(archiver.input_path, "file1", size=1024 * 80)
|
||||
cmd(archiver, "repo-create", RK_ENCRYPTION)
|
||||
cmd(archiver, "create", "normal", "input")
|
||||
cmd(archiver, "create", "deleted1", "input")
|
||||
cmd(archiver, "create", "deleted2", "input")
|
||||
cmd(archiver, "delete", "deleted1")
|
||||
cmd(archiver, "delete", "deleted2")
|
||||
output = cmd(archiver, "repo-list")
|
||||
assert "normal" in output
|
||||
assert "deleted1" not in output
|
||||
assert "deleted2" not in output
|
||||
output = cmd(archiver, "undelete", "--dry-run", "--list", "-a", "sh:*")
|
||||
assert "normal" not in output # not a candidate for undeletion
|
||||
assert "deleted1" in output # candidate for undeletion
|
||||
assert "deleted2" in output # candidate for undeletion
|
||||
output = cmd(archiver, "repo-list") # nothing change, it was a dry-run
|
||||
assert "normal" in output
|
||||
assert "deleted1" not in output
|
||||
assert "deleted2" not in output
|
||||
|
||||
|
||||
def test_undelete_multiple_run(archivers, request):
|
||||
archiver = request.getfixturevalue(archivers)
|
||||
create_regular_file(archiver.input_path, "file1", size=1024 * 80)
|
||||
cmd(archiver, "repo-create", RK_ENCRYPTION)
|
||||
cmd(archiver, "create", "normal", "input")
|
||||
cmd(archiver, "create", "deleted1", "input")
|
||||
cmd(archiver, "create", "deleted2", "input")
|
||||
cmd(archiver, "delete", "deleted1")
|
||||
cmd(archiver, "delete", "deleted2")
|
||||
output = cmd(archiver, "repo-list")
|
||||
assert "normal" in output
|
||||
assert "deleted1" not in output
|
||||
assert "deleted2" not in output
|
||||
output = cmd(archiver, "undelete", "--list", "-a", "sh:*")
|
||||
assert "normal" not in output # not undeleted
|
||||
assert "deleted1" in output # undeleted
|
||||
assert "deleted2" in output # undeleted
|
||||
output = cmd(archiver, "repo-list") # nothing change, it was a dry-run
|
||||
assert "normal" in output
|
||||
assert "deleted1" in output
|
||||
assert "deleted2" in output
|
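A hedged sketch for running just these tests locally, assuming a borg development checkout with pytest and the test dependencies installed:

    python -m pytest -v src/borg/testsuite/archiver/undelete_cmd_test.py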