Archiver.do_extract: Fix leak of downloaded chunk contents caused by preloading

Include a condition that the path is non-empty after applying strip_components
in the filter passed to iter_items.

All filtering of files to extract must be done in the filter callable used in
archive.iter_items, because iter_items will preload all chunks used in the items
it returns. If those items are not actually extracted, their chunks accumulate
in the responses dict.
This commit is contained in:
Martin Hostettler 2016-08-05 22:26:59 +02:00 committed by Marian Beermann
parent 35a0ab2183
commit c84ad6b7b1
1 changed file with 4 additions and 3 deletions

View File

@ -371,12 +371,13 @@ class Archiver:
sparse = args.sparse sparse = args.sparse
strip_components = args.strip_components strip_components = args.strip_components
dirs = [] dirs = []
for item in archive.iter_items(lambda item: matcher.match(item[b'path']), preload=True): filter = lambda item: matcher.match(item[b'path'])
if strip_components:
filter = lambda item: matcher.match(item[b'path']) and os.sep.join(item[b'path'].split(os.sep)[strip_components:])
for item in archive.iter_items(filter, preload=True):
orig_path = item[b'path'] orig_path = item[b'path']
if strip_components: if strip_components:
item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:]) item[b'path'] = os.sep.join(orig_path.split(os.sep)[strip_components:])
if not item[b'path']:
continue
if not args.dry_run: if not args.dry_run:
while dirs and not item[b'path'].startswith(dirs[-1][b'path']): while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
dir_item = dirs.pop(-1) dir_item = dirs.pop(-1)