mirror of https://github.com/borgbackup/borg.git
extract: indent code, no semantics change
prepare for an extract_helper context manager (some changes may seem superfluous, but see the following changesets)
parent 3cc1cdd2ed
commit cda7465038
1 changed file with 31 additions and 30 deletions
@@ -579,36 +579,37 @@ def make_parent(path):
                 elif chunks is not None:
                     # assign chunks to this item, since the item which had the chunks was not extracted
                     item.chunks = chunks
-            if hardlink_set:
-                return
-            if sparse and self.zeros is None:
-                self.zeros = b'\0' * (1 << self.chunker_params[1])
-            with backup_io('open'):
-                fd = open(path, 'wb')
-            with fd:
-                ids = [c.id for c in item.chunks]
-                for data in self.pipeline.fetch_many(ids, is_preloaded=True):
-                    if pi:
-                        pi.show(increase=len(data), info=[remove_surrogates(item.path)])
-                    with backup_io('write'):
-                        if sparse and self.zeros.startswith(data):
-                            # all-zero chunk: create a hole in a sparse file
-                            fd.seek(len(data), 1)
-                        else:
-                            fd.write(data)
-                with backup_io('truncate_and_attrs'):
-                    pos = item_chunks_size = fd.tell()
-                    fd.truncate(pos)
-                    fd.flush()
-                    self.restore_attrs(path, item, fd=fd.fileno())
-            if 'size' in item:
-                item_size = item.size
-                if item_size != item_chunks_size:
-                    logger.warning('{}: size inconsistency detected: size {}, chunks size {}'.format(
-                        item.path, item_size, item_chunks_size))
-            if has_damaged_chunks:
-                logger.warning('File %s has damaged (all-zero) chunks. Try running borg check --repair.' %
-                               remove_surrogates(item.path))
+            if True:
+                if hardlink_set:
+                    return
+                if sparse and self.zeros is None:
+                    self.zeros = b'\0' * (1 << self.chunker_params[1])
+                with backup_io('open'):
+                    fd = open(path, 'wb')
+                with fd:
+                    ids = [c.id for c in item.chunks]
+                    for data in self.pipeline.fetch_many(ids, is_preloaded=True):
+                        if pi:
+                            pi.show(increase=len(data), info=[remove_surrogates(item.path)])
+                        with backup_io('write'):
+                            if sparse and self.zeros.startswith(data):
+                                # all-zero chunk: create a hole in a sparse file
+                                fd.seek(len(data), 1)
+                            else:
+                                fd.write(data)
+                    with backup_io('truncate_and_attrs'):
+                        pos = item_chunks_size = fd.tell()
+                        fd.truncate(pos)
+                        fd.flush()
+                        self.restore_attrs(path, item, fd=fd.fileno())
+                if 'size' in item:
+                    item_size = item.size
+                    if item_size != item_chunks_size:
+                        logger.warning('{}: size inconsistency detected: size {}, chunks size {}'.format(
+                            item.path, item_size, item_chunks_size))
+                if has_damaged_chunks:
+                    logger.warning('File %s has damaged (all-zero) chunks. Try running borg check --repair.' %
+                                   remove_surrogates(item.path))
             if not hardlink_set and hardlink_masters:  # 2nd term, is it correct/needed?
                 # Update master entry with extracted file path, so that following hardlinks don't extract twice.
                 hardlink_masters[item.get('source') or original_path] = (None, path)
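For readers following the commit message: below is a minimal sketch, assuming a plain @contextmanager and the names visible in the hunk above, of what an extract_helper context manager could look like. The function signature and parameter list are assumptions for illustration, not borg's actual API; only the hardlink bookkeeping is taken from the diff.

    import os
    from contextlib import contextmanager

    @contextmanager
    def extract_helper(item, path, original_path, hardlink_masters):
        # Hypothetical sketch, not borg's real implementation.
        # Before the body runs: if this item is a hardlink whose master was
        # already extracted, link to it instead of extracting the data again.
        hardlink_set = False
        source = item.get('source')
        if source is not None:
            chunks, link_target = hardlink_masters.get(source, (None, None))
            if link_target:
                os.link(link_target, path)  # master already on disk: hard link it
                hardlink_set = True
            elif chunks is not None:
                item.chunks = chunks  # master not extracted: reuse its chunks
        yield hardlink_set
        # After the body has extracted the content (skipped if the body raised):
        # record the extracted path so following hardlinks don't extract twice.
        if not hardlink_set and hardlink_masters:
            hardlink_masters[source or original_path] = (None, path)

With such a helper, the block this commit indents under if True: would become the body of a with statement, something like:

    with extract_helper(item, path, original_path, hardlink_masters) as hardlink_set:
        if hardlink_set:
            return
        ...  # open fd, fetch chunks, write data or seek over sparse holes, restore attrs

which is presumably what the "following changesets" mentioned in the commit message do; the superfluous-looking if True: exists only so this preparatory commit stays a pure re-indentation with no semantic change.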