diff --git a/borg/archive.py b/borg/archive.py
index 4498c72a3..a3a133171 100644
--- a/borg/archive.py
+++ b/borg/archive.py
@@ -99,6 +99,15 @@ class DownloadPipeline:
         self.key = key
 
     def unpack_many(self, ids, filter=None, preload=False):
+        """
+        Return an iterator of items.
+
+        *ids* is a list of chunk IDs of an item stream. *filter* is a callable that decides
+        whether an item is yielded. If *preload* is True, the data chunks of every yielded item are preloaded.
+
+        Warning: if *preload* is True, all data chunks of every yielded item must be retrieved;
+        otherwise the preloaded chunks accumulate in RemoteRepository and create a memory leak.
+        """
         unpacker = msgpack.Unpacker(use_list=False)
         for data in self.fetch_many(ids):
             unpacker.feed(data)
diff --git a/borg/testsuite/archiver.py b/borg/testsuite/archiver.py
index db1d5c9e9..f14edd533 100644
--- a/borg/testsuite/archiver.py
+++ b/borg/testsuite/archiver.py
@@ -1329,11 +1329,11 @@ class RemoteArchiverTestCase(ArchiverTestCase):
 
     def test_strip_components_doesnt_leak(self):
         self.cmd('init', self.repository_location)
-        self.create_regular_file('dir/file', contents=b"test file contents 123")
-        self.create_regular_file('dir/file2', contents=b"test file contents 345")
-        self.create_regular_file('skipped', contents=b"test file contents 567")
-        self.create_regular_file('skipped2', contents=b"test file contentsasdasd")
-        self.create_regular_file('skipped4', contents=b"sdfdsgdgfhttztu")
+        self.create_regular_file('dir/file', contents=b"test file contents 1")
+        self.create_regular_file('dir/file2', contents=b"test file contents 2")
+        self.create_regular_file('skipped-file1', contents=b"test file contents 3")
+        self.create_regular_file('skipped-file2', contents=b"test file contents 4")
+        self.create_regular_file('skipped-file3', contents=b"test file contents 5")
         self.cmd('create', self.repository_location + '::test', 'input')
         marker = 'cached responses left in RemoteRepository'
         with changedir('output'):
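
Context for the docstring warning above: with preload=True, a caller of unpack_many() must fetch the data chunks of every item it is handed, or the preloaded responses pile up in RemoteRepository (the condition the test's marker string checks for). The sketch below illustrates such a consumer under that assumption; it is not part of the patch, and the names want(), extract_selected(), pipeline and item_stream_ids are hypothetical.

    # Hypothetical consumer of DownloadPipeline.unpack_many(), sketching the
    # docstring's warning: with preload=True, every yielded item's chunks must
    # be fetched, otherwise the preloaded responses leak in RemoteRepository.

    def want(item):
        # hypothetical filter: only yield items below 'dir/'
        return item[b'path'].startswith(b'dir/')

    def extract_selected(pipeline, item_stream_ids):
        for item in pipeline.unpack_many(item_stream_ids, filter=want, preload=True):
            chunk_ids = [chunk_id for chunk_id, _, _ in item.get(b'chunks', ())]
            # Consume every preloaded chunk, even if the data is discarded;
            # skipping this step is exactly what accumulates cached responses.
            for data in pipeline.fetch_many(chunk_ids):
                pass  # a real extractor would write data to the output file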