
testsuite: simplify ArchiverCorruptionTestCase

Marian Beermann, 8 years ago
parent commit 9032aa062b
3 changed files with 22 additions and 25 deletions:
  1. src/borg/cache.py (+2 -2)
  2. src/borg/crypto/file_integrity.py (+3 -3)
  3. src/borg/testsuite/archiver.py (+17 -20)

src/borg/cache.py (+2 -2)

@@ -264,10 +264,10 @@ class CacheConfig:
                 # is modified and then written out.), not re-created.
                 # Thus, older versions will leave our [integrity] section alone, making the section's data invalid.
                 # Therefore, we also add the manifest ID to this section and
-                # can discern whether an older version interfere by comparing the manifest IDs of this section
+                # can discern whether an older version interfered by comparing the manifest IDs of this section
                 # and the main [cache] section.
                 self.integrity = {}
-                logger.warning('Cache integrity data lost: old Borg version modified the cache.')
+                logger.warning('Cache integrity data not available: old Borg version modified the cache.')
         except configparser.NoSectionError:
             logger.debug('Cache integrity: No integrity data found (files, chunks). Cache is from old version.')
             self.integrity = {}

src/borg/crypto/file_integrity.py (+3 -3)

@@ -135,13 +135,13 @@ class IntegrityCheckedFile(FileLikeWrapper):
     @classmethod
     def parse_integrity_data(cls, path: str, data: str, hasher: SHA512FileHashingWrapper):
         try:
-            integrity_file = json.loads(data)
+            integrity_data = json.loads(data)
             # Provisions for agility now, implementation later, but make sure the on-disk joint is oiled.
-            algorithm = integrity_file['algorithm']
+            algorithm = integrity_data['algorithm']
             if algorithm != hasher.ALGORITHM:
                 logger.warning('Cannot verify integrity of %s: Unknown algorithm %r', path, algorithm)
                 return
-            digests = integrity_file['digests']
+            digests = integrity_data['digests']
             # Require at least presence of the final digest
             digests['final']
             return digests
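
The renamed integrity_data holds parsed JSON of roughly this shape; the 'algorithm' and 'digests'/'final' keys are visible in the diff above, while the digest value below is a placeholder:

    import json

    # Example integrity data as parse_integrity_data() consumes it: an algorithm
    # name (checked against hasher.ALGORITHM) plus per-part digests, of which at
    # least 'final' must be present.
    data = '{"algorithm": "SHA512", "digests": {"final": "placeholder-hex-digest"}}'
    integrity_data = json.loads(data)
    assert integrity_data['algorithm'] == 'SHA512'
    assert 'final' in integrity_data['digests']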

src/borg/testsuite/archiver.py (+17 -20)

@@ -2889,15 +2889,19 @@ class RemoteArchiverTestCase(ArchiverTestCase):
 
 
 class ArchiverCorruptionTestCase(ArchiverTestCaseBase):
+    def setUp(self):
+        super().setUp()
+        self.create_test_files()
+        self.cmd('init', '--encryption=repokey', self.repository_location)
+        self.cache_path = json.loads(self.cmd('info', self.repository_location, '--json'))['cache']['path']
+
     def corrupt(self, file):
         with open(file, 'r+b') as fd:
             fd.seek(-1, io.SEEK_END)
             fd.write(b'1')
 
     def test_cache_chunks(self):
-        self.cmd('init', '--encryption=repokey', self.repository_location)
-        cache_path = json.loads(self.cmd('info', self.repository_location, '--json'))['cache']['path']
-        self.corrupt(os.path.join(cache_path, 'chunks'))
+        self.corrupt(os.path.join(self.cache_path, 'chunks'))
 
         if self.FORK_DEFAULT:
             out = self.cmd('info', self.repository_location, exit_code=2)
@@ -2907,11 +2911,8 @@ class ArchiverCorruptionTestCase(ArchiverTestCaseBase):
                 self.cmd('info', self.repository_location)
 
     def test_cache_files(self):
-        self.create_test_files()
-        self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('create', self.repository_location + '::test', 'input')
-        cache_path = json.loads(self.cmd('info', self.repository_location, '--json'))['cache']['path']
-        self.corrupt(os.path.join(cache_path, 'files'))
+        self.corrupt(os.path.join(self.cache_path, 'files'))
 
         if self.FORK_DEFAULT:
             out = self.cmd('create', self.repository_location + '::test1', 'input', exit_code=2)
@@ -2921,42 +2922,38 @@ class ArchiverCorruptionTestCase(ArchiverTestCaseBase):
                 self.cmd('create', self.repository_location + '::test1', 'input')
 
     def test_chunks_archive(self):
-        self.create_test_files()
-        self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('create', self.repository_location + '::test1', 'input')
         # Find ID of test1 so we can corrupt it later :)
         target_id = self.cmd('list', self.repository_location, '--format={id}{LF}').strip()
         self.cmd('create', self.repository_location + '::test2', 'input')
+
+        # Force cache sync, creating archive chunks of test1 and test2 in chunks.archive.d
         self.cmd('delete', '--cache-only', self.repository_location)
+        self.cmd('info', self.repository_location, '--json')
 
-        cache_path = json.loads(self.cmd('info', self.repository_location, '--json'))['cache']['path']
-        chunks_archive = os.path.join(cache_path, 'chunks.archive.d')
+        chunks_archive = os.path.join(self.cache_path, 'chunks.archive.d')
         assert len(os.listdir(chunks_archive)) == 4  # two archives, one chunks cache and one .integrity file each
 
         self.corrupt(os.path.join(chunks_archive, target_id))
 
         # Trigger cache sync by changing the manifest ID in the cache config
-        config_path = os.path.join(cache_path, 'config')
+        config_path = os.path.join(self.cache_path, 'config')
         config = ConfigParser(interpolation=None)
         config.read(config_path)
         config.set('cache', 'manifest', bin_to_hex(bytes(32)))
         with open(config_path, 'w') as fd:
             config.write(fd)
 
-        # Cache sync will notice corrupted archive chunks, but automatically recover.
+        # Cache sync notices corrupted archive chunks, but automatically recovers.
         out = self.cmd('create', '-v', self.repository_location + '::test3', 'input', exit_code=1)
         assert 'Reading cached archive chunk index for test1' in out
         assert 'Cached archive chunk index of test1 is corrupted' in out
         assert 'Fetching and building archive index for test1' in out
 
-    def test_old_version_intefered(self):
-        self.create_test_files()
-        self.cmd('init', '--encryption=repokey', self.repository_location)
-        cache_path = json.loads(self.cmd('info', self.repository_location, '--json'))['cache']['path']
-
+    def test_old_version_interfered(self):
         # Modify the main manifest ID without touching the manifest ID in the integrity section.
         # This happens if a version without integrity checking modifies the cache.
-        config_path = os.path.join(cache_path, 'config')
+        config_path = os.path.join(self.cache_path, 'config')
         config = ConfigParser(interpolation=None)
         config.read(config_path)
         config.set('cache', 'manifest', bin_to_hex(bytes(32)))
@@ -2964,7 +2961,7 @@ class ArchiverCorruptionTestCase(ArchiverTestCaseBase):
             config.write(fd)
 
         out = self.cmd('info', self.repository_location)
-        assert 'Cache integrity data lost: old Borg version modified the cache.' in out
+        assert 'Cache integrity data not available: old Borg version modified the cache.' in out
 
 
 class DiffArchiverTestCase(ArchiverTestCaseBase):
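
As an aside, the corrupt() helper kept in the test case overwrites a file's last byte with b'1'. A standalone sketch of the same seek-from-end technique; the temp-file handling here is illustrative, not part of the commit:

    import io
    import os
    import tempfile

    def corrupt_last_byte(path: str) -> None:
        # Open for in-place binary update, seek to the last byte, overwrite it.
        # Note: if the last byte already is b'1', the write changes nothing, so
        # a hardened variant could flip a bit instead of writing a constant.
        with open(path, 'r+b') as fd:
            fd.seek(-1, io.SEEK_END)
            fd.write(b'1')

    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(b'hello')
    corrupt_last_byte(tmp.name)
    with open(tmp.name, 'rb') as fd:
        assert fd.read() == b'hell1'
    os.unlink(tmp.name)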