1
0
Fork 0
mirror of https://github.com/borgbackup/borg.git synced 2024-12-21 23:33:07 +00:00

repo-list: --deleted lists deleted archives

This commit is contained in:
Thomas Waldmann 2024-11-02 13:32:00 +01:00
parent 088d59d814
commit 7ed8ed56d2
No known key found for this signature in database
GPG key ID: 243ACFA951F78E01
6 changed files with 58 additions and 19 deletions

View file

@ -458,6 +458,7 @@ def __init__(
end=None,
log_json=False,
iec=False,
deleted=False,
):
name_is_id = isinstance(name, bytes)
self.cwd = os.getcwd()
@ -499,8 +500,9 @@ def __init__(
self.tags = set()
else:
if name_is_id:
# we also go over the manifest here to avoid quick&dirty deleted archives
info = self.manifest.archives.get_by_id(name)
# we also go over the manifest here to avoid quick&dirty deleted archives,
# except if we explicitly request one via deleted=True.
info = self.manifest.archives.get_by_id(name, deleted=deleted)
else:
info = self.manifest.archives.get(name)
if info is None:

View file

@ -369,7 +369,9 @@ def define_exclusion_group(subparser, **kwargs):
return exclude_group
def define_archive_filters_group(subparser, *, sort_by=True, first_last=True, oldest_newest=True, older_newer=True):
def define_archive_filters_group(
subparser, *, sort_by=True, first_last=True, oldest_newest=True, older_newer=True, deleted=False
):
filters_group = subparser.add_argument_group(
"Archive filters", "Archive filters can be applied to repository targets."
)
@ -456,6 +458,11 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True, ol
help="consider archives newer than (now - TIMESPAN), e.g. 7d or 12m.",
)
if deleted:
filters_group.add_argument(
"--deleted", dest="deleted", action="store_true", help="consider only deleted archives."
)
return filters_group

View file

@ -26,7 +26,7 @@ def do_repo_list(self, args, repository, manifest):
"BORG_RLIST_FORMAT",
"{id:.8} {time} {archive:<15} {tags:<10} {username:<10} {hostname:<10} {comment:.40}{NL}",
)
formatter = ArchiveFormatter(format, repository, manifest, manifest.key, iec=args.iec)
formatter = ArchiveFormatter(format, repository, manifest, manifest.key, iec=args.iec, deleted=args.deleted)
output_data = []
@ -113,4 +113,4 @@ def build_parser_repo_list(self, subparsers, common_parser, mid_common_parser):
"but keys used in it are added to the JSON output. "
"Some keys are always present. Note: JSON can only represent text.",
)
define_archive_filters_group(subparser)
define_archive_filters_group(subparser, deleted=True)

View file

@ -718,7 +718,7 @@ class ArchiveFormatter(BaseFormatter):
("size", "nfiles"),
)
def __init__(self, format, repository, manifest, key, *, iec=False):
def __init__(self, format, repository, manifest, key, *, iec=False, deleted=False):
static_data = {} # here could be stuff on repo level, above archive level
static_data.update(self.FIXED_KEYS)
super().__init__(format, static_data)
@ -728,6 +728,7 @@ def __init__(self, format, repository, manifest, key, *, iec=False):
self.name = None
self.id = None
self._archive = None
self.deleted = deleted # True if we want to deal with deleted archives.
self.iec = iec
self.format_keys = {f[1] for f in Formatter().parse(format)}
self.call_keys = {
@ -772,7 +773,7 @@ def archive(self):
if self._archive is None or self._archive.id != self.id:
from ..archive import Archive
self._archive = Archive(self.manifest, self.id, iec=self.iec)
self._archive = Archive(self.manifest, self.id, iec=self.iec, deleted=self.deleted)
return self._archive
def get_meta(self, key, default=None):

View file

@ -101,11 +101,17 @@ def finish(self, manifest):
manifest_archives = StableDict(self._get_raw_dict())
return manifest_archives
def ids(self):
def ids(self, *, deleted=False):
# yield the binary IDs of all archives
if not self.legacy:
try:
infos = list(self.repository.store_list("archives"))
infos = list(self.repository.store_list("archives", deleted=deleted))
if deleted:
# hack: store_list(deleted=True) yields deleted AND not deleted items,
# guess this should be fixed in a future borgstore release.
# for now, we remove the not-deleted archives here:
not_deleted_infos = set(self.repository.store_list("archives", deleted=False))
infos = [info for info in infos if info not in not_deleted_infos]
except ObjectNotFound:
infos = []
for info in infos:
@ -156,13 +162,13 @@ def _get_archive_meta(self, id: bytes) -> dict:
)
return metadata
def _infos(self):
def _infos(self, *, deleted=False):
# yield the infos of all archives
for id in self.ids():
for id in self.ids(deleted=deleted):
yield self._get_archive_meta(id)
def _info_tuples(self):
for info in self._infos():
def _info_tuples(self, *, deleted=False):
for info in self._infos(deleted=deleted):
yield ArchiveInfo(
name=info["name"],
id=info["id"],
@ -172,8 +178,8 @@ def _info_tuples(self):
host=info["hostname"],
)
def _matching_info_tuples(self, match_patterns, match_end):
archive_infos = list(self._info_tuples())
def _matching_info_tuples(self, match_patterns, match_end, *, deleted=False):
archive_infos = list(self._info_tuples(deleted=deleted))
if match_patterns:
assert isinstance(match_patterns, list), f"match_pattern is a {type(match_patterns)}"
for match in match_patterns:
@ -279,13 +285,14 @@ def get(self, name, raw=False):
else:
return dict(name=name, id=values["id"], time=values["time"])
def get_by_id(self, id, raw=False):
def get_by_id(self, id, raw=False, *, deleted=False):
assert isinstance(id, bytes)
if not self.legacy:
if id in self.ids(): # check directory
if id in self.ids(deleted=deleted): # check directory
# looks like this archive id is in the archives directory, thus it is NOT deleted.
# OR we have explicitly requested a soft-deleted archive via deleted=True.
archive_info = self._get_archive_meta(id)
if archive_info["exists"]:
if archive_info["exists"]: # True means we have found Archive metadata in the repo.
if not raw:
ts = parse_timestamp(archive_info["time"])
archive_info = ArchiveInfo(
@ -342,6 +349,7 @@ def list(
newer=None,
oldest=None,
newest=None,
deleted=False,
):
"""
Return list of ArchiveInfo instances according to the parameters.
@ -363,7 +371,7 @@ def list(
if isinstance(sort_by, (str, bytes)):
raise TypeError("sort_by must be a sequence of str")
archive_infos = self._matching_info_tuples(match, match_end)
archive_infos = self._matching_info_tuples(match, match_end, deleted=deleted)
if any([oldest, newest, older, newer]):
archive_infos = filter_archives_by_date(
@ -397,6 +405,7 @@ def list_considering(self, args):
newer=getattr(args, "newer", None),
oldest=getattr(args, "oldest", None),
newest=getattr(args, "newest", None),
deleted=getattr(args, "deleted", False),
)
def get_one(self, match, *, match_end=r"\Z"):

View file

@ -98,3 +98,23 @@ def test_repo_list_json(archivers, request):
assert "keyfile" not in list_repo["encryption"]
archive0 = list_repo["archives"][0]
checkts(archive0["time"])
def test_repo_list_deleted(archivers, request):
    """repo-list hides soft-deleted archives by default; --deleted shows only them."""
    archiver = request.getfixturevalue(archivers)
    cmd(archiver, "repo-create", RK_ENCRYPTION)
    # Create two archives that will stay and two that will be soft-deleted.
    for archive_name in ("normal1", "deleted1", "normal2", "deleted2"):
        cmd(archiver, "create", archive_name, src_dir)
    cmd(archiver, "delete", "-a", "sh:deleted*")

    default_listing = cmd(archiver, "repo-list")
    deleted_listing = cmd(archiver, "repo-list", "--deleted")

    for archive_name in ("normal1", "normal2"):
        assert archive_name in default_listing
        assert archive_name not in deleted_listing
    for archive_name in ("deleted1", "deleted2"):
        assert archive_name not in default_listing
        assert archive_name in deleted_listing