mirror of https://github.com/borgbackup/borg.git
improve borg check --repair healing tests, see #8302
Test the healing more thoroughly:
- check that the correct original chunks list is preserved in .chunks_healthy
- check that .chunks_healthy is removed after healing
- check that another borg check --repair run does not find something to heal again

Also fix a datatype inconsistency for item.chunks_healthy list members: they are now post-processed in the same way as item.chunks, so they have type ChunkListEntry rather than plain tuple.
parent 5ef115b99f
commit 85d7bdd75f
2 changed files with 17 additions and 3 deletions
@@ -282,6 +282,8 @@ class DownloadPipeline:
             for item in items:
                 if 'chunks' in item:
                     item.chunks = [ChunkListEntry(*e) for e in item.chunks]
+                if 'chunks_healthy' in item:
+                    item.chunks_healthy = [ChunkListEntry(*e) for e in item.chunks_healthy]
 
             if filter:
                 items = [item for item in items if filter(item)]
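For context, a standalone sketch of what the two added lines accomplish. ChunkListEntry's field layout (id, size, csize) is assumed here for illustration, and a plain dict stands in for borg's Item class:

    from collections import namedtuple

    # illustrative stand-in for borg's ChunkListEntry; the real definition lives elsewhere in borg
    ChunkListEntry = namedtuple('ChunkListEntry', 'id size csize')

    def normalize(item):
        # msgpack decoding yields plain tuples; re-wrap them so both lists carry the same entry type
        if 'chunks' in item:
            item['chunks'] = [ChunkListEntry(*e) for e in item['chunks']]
        if 'chunks_healthy' in item:
            item['chunks_healthy'] = [ChunkListEntry(*e) for e in item['chunks_healthy']]
        return item

    item = normalize({'chunks': [(b'id0', 10, 9)], 'chunks_healthy': [(b'id0', 10, 9)]})
    # a namedtuple compares equal to a plain tuple with the same values,
    # but only the namedtuple supports attribute access like entry.id:
    assert item['chunks'][0] == (b'id0', 10, 9)
    assert item['chunks_healthy'][0].id == b'id0'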
@@ -3959,14 +3959,20 @@ class ArchiverCheckTestCase(ArchiverTestCaseBase):
         self.cmd('check', self.repository_location, exit_code=0)
         output = self.cmd('list', '--format={health}#{path}{LF}', self.repository_location + '::archive1', exit_code=0)
         self.assert_in('broken#', output)
-        # check that the file in the old archives has now a different chunk list without the killed chunk
+        # check that the file in the old archives has now a different chunk list without the killed chunk.
+        # also check that the correct original chunks list is preserved in item.chunks_healthy.
         for archive_name in ('archive1', 'archive2'):
             archive, repository = self.open_archive(archive_name)
             with repository:
                 for item in archive.iter_items():
                     if item.path.endswith('testsuite/archiver.py'):
-                        self.assert_not_equal(valid_chunks, item.chunks)
+                        self.assert_equal(len(valid_chunks), len(item.chunks))
                         self.assert_not_in(killed_chunk, item.chunks)
+                        self.assert_not_equal(valid_chunks, item.chunks)
+                        self.assert_in('chunks_healthy', item)
+                        self.assert_equal(len(valid_chunks), len(item.chunks_healthy))
+                        self.assert_in(killed_chunk, item.chunks_healthy)
+                        self.assert_equal(valid_chunks, item.chunks_healthy)
                         break
                 else:
                     self.fail('should not happen')
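The assertions above pin down the expected state of a damaged, not yet healed item: the chunk list keeps its length, the killed chunk is replaced by a substitute entry, and the original list is preserved verbatim in chunks_healthy. A minimal simulation of that bookkeeping follows; it is a hypothetical sketch with (id, size, csize) tuples, not borg's actual repair code:

    # hypothetical data; entries mimic (id, size, csize) chunk list entries
    valid_chunks = [(b'id0', 10, 9), (b'id1', 20, 18), (b'id2', 30, 27)]
    killed_chunk = (b'id1', 20, 18)

    def mark_damaged(chunks, missing, replacement):
        # keep the original list aside and substitute a replacement entry for the missing chunk
        healthy = list(chunks)
        repaired = [replacement if entry == missing else entry for entry in chunks]
        return repaired, healthy

    item_chunks, item_chunks_healthy = mark_damaged(valid_chunks, killed_chunk, (b'all-zero', 20, 18))

    # the same invariants the test asserts:
    assert len(item_chunks) == len(valid_chunks)
    assert killed_chunk not in item_chunks
    assert item_chunks != valid_chunks
    assert item_chunks_healthy == valid_chunks
    assert killed_chunk in item_chunks_healthy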
@@ -3977,19 +3983,25 @@ class ArchiverCheckTestCase(ArchiverTestCaseBase):
         output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0)
         self.assert_in('Healed previously missing file chunk', output)
         self.assert_in('testsuite/archiver.py: Completely healed previously damaged file!', output)
-        # check that the file in the old archives has the correct chunks again
+        # check that the file in the old archives has the correct chunks again.
+        # also check that chunks_healthy list is removed as it is not needed any more.
         for archive_name in ('archive1', 'archive2'):
             archive, repository = self.open_archive(archive_name)
             with repository:
                 for item in archive.iter_items():
                     if item.path.endswith('testsuite/archiver.py'):
                         self.assert_equal(valid_chunks, item.chunks)
+                        self.assert_not_in('chunks_healthy', item)
                         break
                 else:
                     self.fail('should not happen')
         # list is also all-healthy again
         output = self.cmd('list', '--format={health}#{path}{LF}', self.repository_location + '::archive1', exit_code=0)
         self.assert_not_in('broken#', output)
+        # check should be fine now (and not show it has healed anything).
+        output = self.cmd('check', '-v', '--repair', self.repository_location, exit_code=0)
+        self.assert_not_in('Healed previously missing file chunk', output)
+        self.assert_not_in('testsuite/archiver.py: Completely healed previously damaged file!', output)
 
     def test_missing_archive_item_chunk(self):
         archive, repository = self.open_archive('archive1')
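The repair-path assertions describe the fully healed state: once every original chunk can be fetched again, the chunks list is restored from chunks_healthy and that helper list is dropped, so a further borg check --repair pass finds nothing left to heal. Continuing the hypothetical sketch (again not borg's actual code):

    # hypothetical healed-state sketch; a dict stands in for the item
    valid_chunks = [(b'id0', 10, 9), (b'id1', 20, 18), (b'id2', 30, 27)]
    item = {
        'chunks': [(b'id0', 10, 9), (b'all-zero', 20, 18), (b'id2', 30, 27)],
        'chunks_healthy': list(valid_chunks),
    }

    def heal(item, available_ids):
        # restore and clean up only if every originally referenced chunk id is retrievable again
        healthy = item.get('chunks_healthy')
        if healthy and all(chunk_id in available_ids for chunk_id, _, _ in healthy):
            item['chunks'] = healthy
            del item['chunks_healthy']

    heal(item, available_ids={b'id0', b'id1', b'id2'})
    assert item['chunks'] == valid_chunks
    assert 'chunks_healthy' not in item

    # a second pass is a no-op: nothing left to heal
    heal(item, available_ids={b'id0', b'id1', b'id2'})
    assert item['chunks'] == valid_chunks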