Merge pull request #2693 from ThomasWaldmann/hashindex-keyerror

implement Hashindex.__delete__ KeyError, more tests, fix double delete
TW 2017-06-17 23:48:27 +02:00 committed by GitHub
commit fa9940f0fe
5 changed files with 24 additions and 8 deletions

src/borg/_hashindex.c

@@ -160,6 +160,8 @@ hashindex_lookup(HashIndex *index, const void *key, int *start_idx)
         }
         else if(BUCKET_MATCHES_KEY(index, idx, key)) {
             if (didx != -1) {
+                // note: although lookup is logically a read-only operation,
+                // we optimize (change) the hashindex here "on the fly".
                 memcpy(BUCKET_ADDR(index, didx), BUCKET_ADDR(index, idx), index->bucket_size);
                 BUCKET_MARK_DELETED(index, idx);
                 idx = didx;
@@ -592,7 +594,7 @@ hashindex_delete(HashIndex *index, const void *key)
 {
     int idx = hashindex_lookup(index, key, NULL);
     if (idx < 0) {
-        return 1;
+        return -1;
     }
     BUCKET_MARK_DELETED(index, idx);
     index->num_entries -= 1;

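The first hunk only adds a comment, but it documents behavior the rest of this commit relies on: hashindex_lookup is not a pure read, it relocates a matching entry into the first tombstoned bucket it probed past. The second hunk is the actual fix: deleting a missing key used to return 1 (indistinguishable from success) and now returns -1, giving the convention 1 = deleted, -1 = key not found, 0 = internal error. A rough pure-Python sketch of both ideas, using a hypothetical FlatHash class rather than borg's real C bucket layout:

    # Hypothetical sketch, not borg's implementation: open addressing with tombstones,
    # lookup-time compaction, and the new delete return convention.
    EMPTY, DELETED = object(), object()

    class FlatHash:
        def __init__(self, nbuckets=16):
            # a bucket is EMPTY, DELETED (a tombstone) or a (key, value) pair;
            # a real table would also track its fill level and resize
            self.buckets = [EMPTY] * nbuckets

        def _lookup(self, key):
            # return the bucket index holding key, or -1 if absent; if a tombstone
            # was probed past on the way, move the found entry into it
            didx = -1
            idx = hash(key) % len(self.buckets)
            while True:
                slot = self.buckets[idx]
                if slot is EMPTY:
                    return -1
                if slot is DELETED:
                    if didx == -1:
                        didx = idx                    # first tombstone on the probe path
                elif slot[0] == key:
                    if didx != -1:
                        # although lookup is logically read-only, compact "on the fly"
                        self.buckets[didx] = slot
                        self.buckets[idx] = DELETED
                        idx = didx
                    return idx
                idx = (idx + 1) % len(self.buckets)

        def put(self, key, value):
            idx = self._lookup(key)
            if idx < 0:                               # not present: probe for a free slot
                idx = hash(key) % len(self.buckets)
                while self.buckets[idx] is not EMPTY and self.buckets[idx] is not DELETED:
                    idx = (idx + 1) % len(self.buckets)
            self.buckets[idx] = (key, value)

        def delete(self, key):
            # mirror hashindex_delete: 1 = deleted, -1 = key not found
            idx = self._lookup(key)
            if idx < 0:
                return -1
            self.buckets[idx] = DELETED
            return 1

In the real index the relocation is a memcpy over the tombstoned bucket; the sketch only keeps the probe logic.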
src/borg/archive.py

@@ -1328,8 +1328,8 @@ class ArchiveChecker:
         Missing and/or incorrect data is repaired when detected
         """
-        # Exclude the manifest from chunks
-        del self.chunks[Manifest.MANIFEST_ID]
+        # Exclude the manifest from chunks (manifest entry might be already deleted from self.chunks)
+        self.chunks.pop(Manifest.MANIFEST_ID, None)
 
         def mark_as_possibly_superseded(id_):
             if self.chunks.get(id_, ChunkIndexEntry(0, 0, 0)).refcount == 0:

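This is the "fix double delete" part of the commit message: now that deleting a missing key raises KeyError (see hashindex.pyx below), a second removal of the manifest id would abort the checker, so it switches from del to pop(key, None), which is a no-op when the entry is already gone. The same pattern on a plain dict, standing in for self.chunks, which exposes the same pop(key, default) as the diff shows:

    chunks = {b'manifest-id': (1, 100, 100)}   # stand-in for self.chunks
    chunks.pop(b'manifest-id', None)           # removes the entry
    chunks.pop(b'manifest-id', None)           # already gone: returns None, no KeyError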
src/borg/hashindex.pyx

@@ -9,7 +9,7 @@ from libc.errno cimport errno
 from cpython.exc cimport PyErr_SetFromErrnoWithFilename
 from cpython.buffer cimport PyBUF_SIMPLE, PyObject_GetBuffer, PyBuffer_Release
 
-API_VERSION = '1.1_04'
+API_VERSION = '1.1_05'
 
 
 cdef extern from "_hashindex.c":
@@ -117,7 +117,12 @@ cdef class IndexBase:
 
     def __delitem__(self, key):
        assert len(key) == self.key_size
-        if not hashindex_delete(self.index, <char *>key):
+        rc = hashindex_delete(self.index, <char *>key)
+        if rc == 1:
+            return  # success
+        if rc == -1:
+            raise KeyError(key)
+        if rc == 0:
             raise Exception('hashindex_delete failed')
 
     def get(self, key, default=None):

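At the Python level, __delitem__ now maps the three return codes onto dict semantics: 1 returns silently, -1 becomes KeyError(key), and 0 still raises a generic Exception. A usage sketch, assuming a built source tree where ChunkIndex() with default capacity and 32-byte keys works as it does in the test suite:

    from borg.hashindex import ChunkIndex

    idx = ChunkIndex()
    key = bytes(32)                  # 32-byte chunk id
    idx[key] = (1, 100, 100)         # (refcount, size, csize)

    del idx[key]                     # rc == 1: removed
    try:
        del idx[key]                 # rc == -1: KeyError instead of silent "success"
    except KeyError:
        pass                         # delete-if-present, like chunks.pop(id, None) above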
src/borg/helpers.py

@@ -131,7 +131,7 @@ class MandatoryFeatureUnsupported(Error):
 
 def check_extension_modules():
     from . import platform, compress, item
-    if hashindex.API_VERSION != '1.1_04':
+    if hashindex.API_VERSION != '1.1_05':
         raise ExtensionModuleError
     if chunker.API_VERSION != '1.1_01':
         raise ExtensionModuleError

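The API_VERSION bump on both sides is the usual lockstep guard: a stale compiled hashindex extension would still report '1.1_04' (and still return 1 for missing keys), so check_extension_modules() fails fast instead of letting the new KeyError contract silently not hold. Roughly, assuming an importable borg package:

    from borg import hashindex, helpers

    assert hashindex.API_VERSION == '1.1_05'   # true for a freshly built extension
    helpers.check_extension_modules()          # raises ExtensionModuleError on a mismatch
                                               # (chunker and the other modules are checked too)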
src/borg/testsuite/hashindex.py

@@ -44,6 +44,15 @@ class HashIndexTestCase(BaseTestCase):
         # Test delete
         for x in range(50):
             del idx[H(x)]
+        # Test some keys still in there
+        for x in range(50, 100):
+            assert H(x) in idx
+        # Test some keys not there any more
+        for x in range(50):
+            assert H(x) not in idx
+        # Test delete non-existing key
+        for x in range(50):
+            self.assert_raises(KeyError, idx.__delitem__, H(x))
         self.assert_equal(len(idx), 50)
         idx_name = tempfile.NamedTemporaryFile()
         idx.write(idx_name.name)
@@ -64,11 +73,11 @@
 
     def test_nsindex(self):
         self._generic_test(NSIndex, lambda x: (x, x),
-                           'b96ec1ddabb4278cc92261ee171f7efc979dc19397cc5e89b778f05fa25bf93f')
+                           '85f72b036c692c8266e4f51ccf0cff2147204282b5e316ae508d30a448d88fef')
 
     def test_chunkindex(self):
         self._generic_test(ChunkIndex, lambda x: (x, x, x),
-                           '9d437a1e145beccc790c69e66ba94fc17bd982d83a401c9c6e524609405529d8')
+                           'c83fdf33755fc37879285f2ecfc5d1f63b97577494902126b6fb6f3e4d852488')
 
     def test_resize(self):
         n = 2000  # Must be >= MIN_BUCKETS
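The two reference checksums change because the new in / not in assertions alter the on-disk layout, not because the index holds different data: they run lookups over an index that still contains 50 tombstones, and, per the comment added in _hashindex.c, a successful lookup relocates the entry over such a tombstone, so the buckets that idx.write() serializes (and therefore the hash) come out differently even though the logical contents are unchanged. Reusing the FlatHash sketch from the _hashindex.c section above to show the same effect:

    fh = FlatHash(nbuckets=16)
    fh.put(0, 'a')
    fh.put(16, 'b')                 # collides with key 0, lands in the next bucket
    assert fh.delete(0) == 1        # leaves a tombstone in bucket 0
    assert fh._lookup(16) == 0      # a membership check relocates 16 into bucket 0
    assert fh.delete(0) == -1       # and a second delete of 0 now reports "not found"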