Mirror of https://github.com/borgbackup/borg.git (synced 2025-02-24 23:13:25 +00:00)
Merge pull request #3274 from ThomasWaldmann/chunks-healthy-recreate-fix-1.1
recreate / chunks_healthy fixes (1.1-maint)
Commit 5c28f48596
2 changed files with 26 additions and 4 deletions
@@ -917,6 +917,10 @@ def chunk_processor(data):
                 return chunk_entry

         item.chunks = []
+        # if we rechunkify, we'll get a fundamentally different chunks list, thus we need
+        # to get rid of .chunks_healthy, as it might not correspond to .chunks any more.
+        if getattr(self, 'recreate_rechunkify', False) and 'chunks_healthy' in item:
+            del item.chunks_healthy
         from_chunk = 0
         part_number = 1
         for data in chunk_iter:
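Why the new lines are needed: chunks_healthy is a parallel copy of the item's original chunks list, kept so that a later borg check --repair can swap healthy chunk references back in. Re-chunking with different chunker params produces a chunks list with different boundaries and usually a different length, so the stored healthy list no longer corresponds entry-for-entry. A minimal sketch of that mismatch, using a toy fixed-size chunker and plain dicts (borg really uses a content-defined buzhash chunker and its Item class):

    # Toy model only -- fixed-size chunking, plain dicts instead of borg's Item.
    def toy_chunker(data, chunk_size):
        return [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]

    data = b'x' * 1000
    item = {
        'chunks': toy_chunker(data, 100),          # 10 chunks under the old params
        'chunks_healthy': toy_chunker(data, 100),  # parallel list, one entry per chunk
    }

    # recreate with new chunker params re-chunks the file:
    item['chunks'] = toy_chunker(data, 300)        # now only 4 chunks

    # the 10-entry healthy list cannot describe the 4-entry chunks list,
    # hence the `del item.chunks_healthy` in the hunk above
    if len(item['chunks']) != len(item.get('chunks_healthy', [])):
        del item['chunks_healthy']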
@@ -1374,7 +1378,12 @@ def replacement_chunk(size):
             has_chunks_healthy = 'chunks_healthy' in item
             chunks_current = item.chunks
             chunks_healthy = item.chunks_healthy if has_chunks_healthy else chunks_current
-            assert len(chunks_current) == len(chunks_healthy)
+            if has_chunks_healthy and len(chunks_current) != len(chunks_healthy):
+                # should never happen, but there was issue #3218.
+                logger.warning('{}: Invalid chunks_healthy metadata removed!'.format(item.path))
+                del item.chunks_healthy
+                has_chunks_healthy = False
+                chunks_healthy = chunks_current
             for chunk_current, chunk_healthy in zip(chunks_current, chunks_healthy):
                 chunk_id, size, csize = chunk_healthy
                 if chunk_id not in self.chunks:
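This hunk, presumably in the archive checker's file-verification path (replacement_chunk appears in its context), trades a hard assert for graceful degradation: archives that hit issue #3218 ended up with a chunks_healthy list that no longer matched chunks, and the assert made the whole check crash instead of repairing. A minimal sketch of the pattern on a plain dict, with the stdlib logger standing in for borg's logging setup:

    import logging

    logger = logging.getLogger(__name__)

    def sanitize_chunks_healthy(item):
        # Sketch of the defensive pattern above; borg's Item uses attribute
        # access where this toy uses dict keys.
        has_chunks_healthy = 'chunks_healthy' in item
        chunks_current = item['chunks']
        chunks_healthy = item['chunks_healthy'] if has_chunks_healthy else chunks_current
        if has_chunks_healthy and len(chunks_current) != len(chunks_healthy):
            # warn and self-heal the metadata instead of dying on an AssertionError
            logger.warning('%s: Invalid chunks_healthy metadata removed!', item['path'])
            del item['chunks_healthy']

    item = {'path': 'home/user/file', 'chunks': [1, 2, 3], 'chunks_healthy': [1, 2]}
    sanitize_chunks_healthy(item)
    assert 'chunks_healthy' not in item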
@@ -1630,15 +1639,17 @@ def item_is_hardlink_master(item):
             if not matcher.match(item.path):
                 self.print_file_status('x', item.path)
                 if item_is_hardlink_master(item):
-                    hardlink_masters[item.path] = (item.get('chunks'), None)
+                    hardlink_masters[item.path] = (item.get('chunks'), item.get('chunks_healthy'), None)
                 continue
             if target_is_subset and hardlinkable(item.mode) and item.get('source') in hardlink_masters:
                 # master of this hard link is outside the target subset
-                chunks, new_source = hardlink_masters[item.source]
+                chunks, chunks_healthy, new_source = hardlink_masters[item.source]
                 if new_source is None:
                     # First item to use this master, move the chunks
                     item.chunks = chunks
-                    hardlink_masters[item.source] = (None, item.path)
+                    if chunks_healthy is not None:
+                        item.chunks_healthy = chunks_healthy
+                    hardlink_masters[item.source] = (None, None, item.path)
                     del item.source
                 else:
                     # Master was already moved, only update this item's source
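This hunk widens the hardlink_masters bookkeeping from a (chunks, new_source) pair to a (chunks, chunks_healthy, new_source) triple: when recreate operates on a subset and a hard link's master falls outside it, the first slave inside the subset becomes the new master and must inherit chunks_healthy along with chunks, or the healing metadata would silently be lost. A condensed, hypothetical model of that bookkeeping (plain dicts, no matcher or archive objects):

    hardlink_masters = {}

    def register_excluded_master(item):
        # master is outside the recreate subset: remember chunks AND chunks_healthy
        hardlink_masters[item['path']] = (item.get('chunks'), item.get('chunks_healthy'), None)

    def adopt_master(item):
        chunks, chunks_healthy, new_source = hardlink_masters[item['source']]
        if new_source is None:
            # first slave inside the subset becomes the new master
            item['chunks'] = chunks
            if chunks_healthy is not None:
                item['chunks_healthy'] = chunks_healthy  # now travels with the chunks
            hardlink_masters[item['source']] = (None, None, item['path'])
            del item['source']
        else:
            item['source'] = new_source  # master already moved, just repoint

    master = {'path': 'a', 'chunks': [1, 2], 'chunks_healthy': [1, 9]}
    slave = {'path': 'b', 'source': 'a'}
    register_excluded_master(master)
    adopt_master(slave)
    assert slave['chunks_healthy'] == [1, 9]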
@@ -3563,6 +3563,17 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True):
         deduplicated size of the archives using the previous chunker params.
         When recompressing expect approx. (throughput / checkpoint-interval) in space usage,
         assuming all chunks are recompressed.
+
+        If you recently ran borg check --repair and it had to fix lost chunks with all-zero
+        replacement chunks, please first run another backup for the same data and re-run
+        borg check --repair afterwards to heal any archives that had lost chunks which are
+        still generated from the input data.
+
+        Important: running borg recreate to re-chunk will remove the chunks_healthy
+        metadata of all items with replacement chunks, so healing will not be possible
+        any more after re-chunking (it is also unlikely it would ever work: due to the
+        change of chunking parameters, the missing chunk likely will never be seen again
+        even if you still have the data that produced it).
         """)
         subparser = subparsers.add_parser('recreate', parents=[common_parser], add_help=False,
                                           description=self.do_recreate.__doc__,
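The docs added here describe the heal workflow that the code changes protect: check --repair substitutes all-zero chunks for lost ones and stashes the original references in chunks_healthy; if a later backup re-creates the missing chunks, another check --repair swaps them back in. A heavily simplified model of that final swap, under the assumption that healing amounts to "every chunk id in chunks_healthy is present in the repository again" (try_heal is a hypothetical helper, not borg's actual check/repair API):

    def try_heal(item, repo_chunk_ids):
        # item: dict with 'chunks' / 'chunks_healthy' lists of (id, size, csize)
        healthy = item.get('chunks_healthy')
        if healthy is None:
            return False  # nothing recorded -- e.g. after re-chunking (first hunk)
        if not all(chunk_id in repo_chunk_ids for chunk_id, size, csize in healthy):
            return False  # some chunk is still missing, keep waiting
        item['chunks'] = healthy          # swap the all-zero replacements back out
        del item['chunks_healthy']
        return True

    repo = {b'id1', b'id2'}
    item = {'chunks': [(b'zeros', 10, 10)], 'chunks_healthy': [(b'id1', 10, 10)]}
    assert try_heal(item, repo) and item['chunks'][0][0] == b'id1'

This also makes the "Important" caveat concrete: after re-chunking, chunks_healthy is gone and try_heal() can never succeed; even if the list were kept, the new chunker params would likely never re-produce a chunk with the missing id.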