Ver Fonte

Merge pull request #2188 from rciorba/hashindex_test

add extra test for the hashindex
TW há 8 anos
pai
commit
9ae4bf023e
1 ficheiro alterado com 31 adições e 0 exclusões
  1. 31 0
      src/borg/testsuite/hashindex.py

+ 31 - 0
src/borg/testsuite/hashindex.py

@@ -125,6 +125,37 @@ class HashIndexTestCase(BaseTestCase):
         assert unique_chunks == 3
 
 
class HashIndexExtraTestCase(BaseTestCase):
    """Extra hashindex tests, deliberately kept out of the selftest."""

    def test_chunk_indexer(self):
        # Stress the indexer close to its maximum load factor (see
        # hash_sizes in _hashindex.c) -- resize/collision bugs tend to
        # surface right at that boundary.
        n_keys = int(65537 * ChunkIndex.MAX_LOAD_FACTOR) - 10
        index = ChunkIndex(n_keys)
        all_keys = [hashlib.sha256(H(i)).digest() for i in range(n_keys)]
        # Split roughly 2/3 : 1/3 -- the last third gets deleted again.
        split = 2 * n_keys // 3
        keep_keys, delete_keys = all_keys[:split], all_keys[split:]

        for pos, key in enumerate(keep_keys):
            index[key] = (pos, pos, pos)
        for pos, key in enumerate(delete_keys):
            index[key] = (pos, pos, pos)

        # Deleting the last third must not disturb the surviving entries.
        for key in delete_keys:
            del index[key]
        for pos, key in enumerate(keep_keys):
            assert index[key] == (pos, pos, pos)
        for key in delete_keys:
            assert index.get(key) is None

        # Remove every remaining entry as well ...
        for key in keep_keys:
            del index[key]
        # ... which must leave the index completely empty.
        assert list(index.iteritems()) == []
+
+
 class HashIndexSizeTestCase(BaseTestCase):
     def test_size_on_disk(self):
         idx = ChunkIndex()