mirror of
https://github.com/borgbackup/borg.git
synced 2024-12-26 01:37:20 +00:00
Archiver.do_extract: Fix leak of downloaded chunk contents caused by preloading
Include the condition that the path is non-empty after applying strip_components into the filter passed to iter_items. All filtering of files to extract must be done in the filter callable used in archive.iter_items, because iter_items will preload all chunks used in the items it returns. If those items are not actually extracted, the chunks accumulate in the responses dict.
This commit is contained in:
parent
35a0ab2183
commit
c84ad6b7b1
1 changed file with 4 additions and 3 deletions
|
@ -371,12 +371,13 @@ def do_extract(self, args, repository, manifest, key, archive):
|
|||
sparse = args.sparse
|
||||
strip_components = args.strip_components
|
||||
dirs = []
|
||||
for item in archive.iter_items(lambda item: matcher.match(item[b'path']), preload=True):
|
||||
filter = lambda item: matcher.match(item[b'path'])
|
||||
if strip_components:
|
||||
filter = lambda item: matcher.match(item[b'path']) and os.sep.join(item[b'path'].split(os.sep)[strip_components:])
|
||||
for item in archive.iter_items(filter, preload=True):
|
||||
orig_path = item[b'path']
|
||||
if strip_components:
|
||||
item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
|
||||
if not item[b'path']:
|
||||
continue
|
||||
if not args.dry_run:
|
||||
while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
|
||||
dir_item = dirs.pop(-1)
|
||||
|
|
Loading…
Reference in a new issue