borg export-tar: fix memory leak with ssh: remote repository, fixes #5568

also: added a comment on how to avoid this kind of memory leak.
Thomas Waldmann 2020-12-17 22:28:42 +01:00
parent 6a18c47464
commit 24d3400dd4
2 changed files with 4 additions and 1 deletion
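The leak works roughly like this: with preload=True, the remote repository code fetches chunk data ahead of time and caches the responses, and fetch_many() only removes a response from that cache when told is_preloaded=True; otherwise it issues fresh requests and the cached responses pile up for the whole run. A minimal toy model of that bookkeeping (PreloadingRepo and backend are hypothetical names, not borg's actual RemoteRepository code) makes the mismatch easy to see:

    # Toy model of the preload bookkeeping -- names are illustrative, not borg's code.
    class PreloadingRepo:
        def __init__(self, backend):
            self.backend = backend    # chunk id -> chunk data, stands in for the server
            self.preloaded = {}       # responses fetched ahead of time

        def preload(self, ids):
            # eagerly fetch so a later sequential read does not stall on round trips
            for id in ids:
                self.preloaded[id] = self.backend[id]

        def fetch_many(self, ids, is_preloaded=False):
            for id in ids:
                if is_preloaded:
                    # consume the cached response, freeing it
                    yield self.preloaded.pop(id)
                else:
                    # fetch again; the cached copy is never popped and
                    # accumulates for every chunk of every item -> the leak
                    yield self.backend[id]

Calling fetch_many(ids) after preload(ids) in this model leaves every entry in self.preloaded behind; only is_preloaded=True drains it.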

src/borg/archive.py

@@ -523,6 +523,8 @@ Utilization of max. archive size: {csize_max:.0%}
         return filter(item) if filter else True
 
     def iter_items(self, filter=None, partial_extract=False, preload=False, hardlink_masters=None):
+        # note: when calling this with preload=True, later fetch_many() must be called with
+        # is_preloaded=True or the RemoteRepository code will leak memory!
         assert not (filter and partial_extract and preload) or hardlink_masters is not None
         for item in self.pipeline.unpack_many(self.metadata.items, partial_extract=partial_extract,
                                               preload=preload, hardlink_masters=hardlink_masters,
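A caller honoring this contract pairs the two flags, roughly like the following sketch (not verbatim borg code; process() is a hypothetical consumer, and the chunk-tuple unpacking mirrors the archiver.py hunk below):

    for item in archive.iter_items(preload=True):   # queues chunk fetches ahead of time
        ids = [chunk_id for chunk_id, _, _ in item.chunks]
        for data in archive.pipeline.fetch_many(ids, is_preloaded=True):  # consumes them
            process(data)                           # hypothetical consumer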

src/borg/archiver.py

@@ -1031,7 +1031,8 @@ class Archiver:
             """
             Return a file-like object that reads from the chunks of *item*.
             """
-            chunk_iterator = archive.pipeline.fetch_many([chunk_id for chunk_id, _, _ in item.chunks])
+            chunk_iterator = archive.pipeline.fetch_many([chunk_id for chunk_id, _, _ in item.chunks],
+                                                         is_preloaded=True)
             if pi:
                 info = [remove_surrogates(item.path)]
             return ChunkIteratorFileWrapper(chunk_iterator,
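This hunk is the actual fix: export-tar builds its file-like readers from items that were iterated with preload in effect, so passing is_preloaded=True here consumes the preloaded responses instead of requesting every chunk a second time and leaving the cached copies behind.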