
testsuite: add TestRepositoryCache

Marian Beermann 8 years ago
commit 7f04e00ba2
1 changed file with 90 additions and 1 deletion

src/borg/testsuite/remote.py  +90 -1

@@ -1,9 +1,13 @@
+import errno
 import os
 import time
+from unittest.mock import patch
 
 import pytest
 
-from ..remote import SleepingBandwidthLimiter
+from ..remote import SleepingBandwidthLimiter, RepositoryCache
+from ..repository import Repository
+from .hashindex import H
 
 
 class TestSleepingBandwidthLimiter:
@@ -58,3 +62,88 @@ class TestSleepingBandwidthLimiter:
         now += 10
         self.expect_write(5, b"1")
         it.write(5, b"1")
+
+
+class TestRepositoryCache:
+    @pytest.fixture
+    def repository(self, tmpdir):
+        self.repository_location = os.path.join(str(tmpdir), 'repository')
+        with Repository(self.repository_location, exclusive=True, create=True) as repository:
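+            # Seed the repository with two 4 byte objects and one 100 byte object.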
+            repository.put(H(1), b'1234')
+            repository.put(H(2), b'5678')
+            repository.put(H(3), bytes(100))
+            yield repository
+
+    @pytest.fixture
+    def cache(self, repository):
+        return RepositoryCache(repository)
+
+    def test_simple(self, cache: RepositoryCache):
+        # Single get()s are not cached, since they are used for unique objects like archives.
+        assert cache.get(H(1)) == b'1234'
+        assert cache.misses == 1
+        assert cache.hits == 0
+
+        assert list(cache.get_many([H(1)])) == [b'1234']
+        assert cache.misses == 2
+        assert cache.hits == 0
+
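+        # A repeated get_many() for the same key is served from the cache.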
+        assert list(cache.get_many([H(1)])) == [b'1234']
+        assert cache.misses == 2
+        assert cache.hits == 1
+
+        assert cache.get(H(1)) == b'1234'
+        assert cache.misses == 2
+        assert cache.hits == 2
+
+    def test_backoff(self, cache: RepositoryCache):
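+        # Stand-in for cache.query_size_limit() that forces the limit to zero,
+        # so the next backoff() evicts everything.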
+        def query_size_limit():
+            cache.size_limit = 0
+
+        assert list(cache.get_many([H(1), H(2)])) == [b'1234', b'5678']
+        assert cache.misses == 2
+        assert cache.evictions == 0
+        iterator = cache.get_many([H(1), H(3), H(2)])
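+        # get_many() yields lazily; H(3) and H(2) are only resolved as the generator is consumed.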
+        assert next(iterator) == b'1234'
+
+        # Force cache to back off
+        qsl = cache.query_size_limit
+        cache.query_size_limit = query_size_limit
+        cache.backoff()
+        cache.query_size_limit = qsl
+        # Evicted H(1) and H(2)
+        assert cache.evictions == 2
+        assert H(1) not in cache.cache
+        assert H(2) not in cache.cache
+        assert next(iterator) == bytes(100)
+        assert cache.slow_misses == 0
+        # H(2) was in the cache when get_many() was called, but it has been
+        # evicted while iterating the generator, so it counts as a slow miss.
+        assert next(iterator) == b'5678'
+        assert cache.slow_misses == 1
+
+    def test_enospc(self, cache: RepositoryCache):
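+        # Stand-in for builtins.open: a file-like context manager whose write()
+        # always fails with ENOSPC.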
+        class enospc_open:
+            def __init__(self, *args):
+                pass
+
+            def __enter__(self):
+                return self
+
+            def __exit__(self, exc_type, exc_val, exc_tb):
+                pass
+
+            def write(self, data):
+                raise OSError(errno.ENOSPC, 'foo')
+
+        iterator = cache.get_many([H(1), H(2), H(3)])
+        assert next(iterator) == b'1234'
+
+        with patch('builtins.open', enospc_open):
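+            # Fetching H(2) still succeeds; only writing it to the cache fails,
+            # which bumps the enospc counter.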
+            assert next(iterator) == b'5678'
+            assert cache.enospc == 1
+            # Unlike test_backoff, query_size_limit was not patched to lower
+            # size_limit, so nothing was actually evicted.
+            assert cache.evictions == 0
+
+        assert next(iterator) == bytes(100)