
Merge pull request #2134 from textshell/feature/mandatory-features

Add minimal version of in-repository mandatory feature flags.
textshell, 8 years ago
commit 41193eac51
6 changed files with 280 additions and 32 deletions
  1. borg/archive.py (+1, -1)
  2. borg/archiver.py (+31, -17)
  3. borg/cache.py (+49, -2)
  4. borg/helpers.py (+66, -2)
  5. borg/testsuite/archiver.py (+112, -10)
  6. conftest.py (+21, -0)

borg/archive.py (+1, -1)

@@ -870,7 +870,7 @@ class ArchiveChecker:
             self.manifest = self.rebuild_manifest()
         else:
             try:
-                self.manifest, _ = Manifest.load(repository, key=self.key)
+                self.manifest, _ = Manifest.load(repository, (Manifest.Operation.CHECK,), key=self.key)
             except IntegrityError as exc:
                 logger.error('Repository manifest is corrupted: %s', exc)
                 self.error_found = True
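
The hunk above shows the new calling convention: Manifest.load() now takes a tuple of Manifest.Operation values naming what the caller is about to do, so the manifest can refuse the operation when it carries mandatory feature flags this client does not know. A minimal caller-side sketch (the open_for_check() helper and its arguments are made up for illustration, not part of this diff):

    from borg.helpers import Manifest, MandatoryFeatureUnsupported

    def open_for_check(repository, key):
        # Declare that we intend to run CHECK-class operations on this repository.
        try:
            manifest, key = Manifest.load(repository, (Manifest.Operation.CHECK,), key=key)
        except MandatoryFeatureUnsupported as exc:
            # exc.args[0] is the list of unknown mandatory feature names, e.g. ['unknown-feature']
            raise SystemExit('this repository needs a newer borg: %s' % exc.args[0])
        return manifest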

borg/archiver.py (+31, -17)

@@ -56,7 +56,8 @@ def argument(args, str_or_bool):
     return str_or_bool
 
 
-def with_repository(fake=False, invert_fake=False, create=False, lock=True, exclusive=False, manifest=True, cache=False):
+def with_repository(fake=False, invert_fake=False, create=False, lock=True,
+                    exclusive=False, manifest=True, cache=False, compatibility=None):
     """
     Method decorator for subcommand-handling methods: do_XYZ(self, args, repository, …)
 
@@ -67,7 +68,20 @@ def with_repository(fake=False, invert_fake=False, create=False, lock=True, excl
     :param exclusive: (str or bool) lock repository exclusively (for writing)
     :param manifest: load manifest and key, pass them as keyword arguments
     :param cache: open cache, pass it as keyword argument (implies manifest)
+    :param compatibility: mandatory if not create and (manifest or cache), specifies mandatory feature categories to check
     """
+
+    if not create and (manifest or cache):
+        if compatibility is None:
+            raise AssertionError("with_repository decorator used without compatibility argument")
+        if type(compatibility) is not tuple:
+            raise AssertionError("with_repository decorator compatibility argument must be of type tuple")
+    else:
+        if compatibility is not None:
+            raise AssertionError("with_repository called with compatibility argument but would not check" + repr(compatibility))
+        if create:
+            compatibility = Manifest.NO_OPERATION_CHECK
+
     def decorator(method):
         @functools.wraps(method)
         def wrapper(self, args, **kwargs):
@@ -84,7 +98,7 @@ def with_repository(fake=False, invert_fake=False, create=False, lock=True, excl
                                         append_only=append_only)
             with repository:
                 if manifest or cache:
-                    kwargs['manifest'], kwargs['key'] = Manifest.load(repository)
+                    kwargs['manifest'], kwargs['key'] = Manifest.load(repository, compatibility)
                 if cache:
                     with Cache(repository, kwargs['key'], kwargs['manifest'],
                                do_files=getattr(args, 'cache_files', False), lock_wait=self.lock_wait) as cache_:
@@ -176,7 +190,7 @@ class Archiver:
             return EXIT_WARNING
         return EXIT_SUCCESS
 
-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.CHECK,))
     def do_change_passphrase(self, args, repository, manifest, key):
         """Change repository key file passphrase"""
         if not hasattr(key, 'change_passphrase'):
@@ -241,7 +255,7 @@ class Archiver:
         logger.info('Key updated')
         return EXIT_SUCCESS
 
-    @with_repository(fake='dry_run', exclusive=True)
+    @with_repository(fake='dry_run', exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     def do_create(self, args, repository, manifest=None, key=None):
         """Create new archive"""
         matcher = PatternMatcher(fallback=True)
@@ -424,7 +438,7 @@ class Archiver:
                 return matcher.match(item[b'path'])
         return item_filter
 
-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
     def do_extract(self, args, repository, manifest, key, archive):
         """Extract archive contents"""
@@ -490,7 +504,7 @@ class Archiver:
                 self.print_warning("Include pattern '%s' never matched.", pattern)
         return self.exit_code
 
-    @with_repository(exclusive=True, cache=True)
+    @with_repository(exclusive=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
     @with_archive
     def do_rename(self, args, repository, manifest, key, cache, archive):
         """Rename an existing archive"""
@@ -506,7 +520,7 @@ class Archiver:
         """Delete an existing repository or archive"""
         if args.location.archive:
             archive_name = args.location.archive
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, (Manifest.Operation.DELETE,))
 
             if args.forced == 2:
                 try:
@@ -537,7 +551,7 @@ class Archiver:
             if not args.cache_only:
                 msg = []
                 try:
-                    manifest, key = Manifest.load(repository)
+                    manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
                 except NoManifestError:
                     msg.append("You requested to completely DELETE the repository *including* all archives it may contain.")
                     msg.append("This repository seems to have no manifest, so we can't tell anything about its contents.")
@@ -573,7 +587,7 @@ class Archiver:
 
         return self._do_mount(args)
 
-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     def _do_mount(self, args, repository, manifest, key):
         from .fuse import FuseOperations
 
@@ -595,7 +609,7 @@ class Archiver:
         """un-mount the FUSE filesystem"""
         return umount(args.mountpoint)
 
-    @with_repository()
+    @with_repository(compatibility=(Manifest.Operation.READ,))
     def do_list(self, args, repository, manifest, key):
         """List archive or repository contents"""
         if args.location.archive:
@@ -681,7 +695,7 @@ class Archiver:
                     print(format_archive(archive_info))
         return self.exit_code
 
-    @with_repository(cache=True)
+    @with_repository(cache=True, compatibility=(Manifest.Operation.READ,))
     @with_archive
     def do_info(self, args, repository, manifest, key, archive, cache):
         """Show archive details such as disk space used"""
@@ -699,7 +713,7 @@ class Archiver:
         print(str(cache))
         return self.exit_code
 
-    @with_repository(exclusive=True)
+    @with_repository(exclusive=True, compatibility=(Manifest.Operation.DELETE,))
     def do_prune(self, args, repository, manifest, key):
         """Prune repository archives according to specified rules"""
         if not any((args.hourly, args.daily,
@@ -759,7 +773,7 @@ class Archiver:
     def do_upgrade(self, args, repository, manifest=None, key=None):
         """upgrade a repository from a previous version"""
         if args.tam:
-            manifest, key = Manifest.load(repository, force_tam_not_required=args.force)
+            manifest, key = Manifest.load(repository, (Manifest.Operation.CHECK,), force_tam_not_required=args.force)
 
             if not hasattr(key, 'change_passphrase'):
                 print('This repository is not encrypted, cannot enable TAM.')
@@ -784,7 +798,7 @@ class Archiver:
                 open(tam_file, 'w').close()
                 print('Updated security database')
         elif args.disable_tam:
-            manifest, key = Manifest.load(repository, force_tam_not_required=True)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK, force_tam_not_required=True)
             if tam_required(repository):
                 os.unlink(tam_required_file(repository))
             if key.tam_required:
@@ -817,7 +831,7 @@ class Archiver:
         print(sysinfo())
         return EXIT_SUCCESS
 
-    @with_repository()
+    @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
         """dump (decrypted, decompressed) archive items metadata (not: data)"""
         archive = Archive(repository, key, manifest, args.location.archive)
@@ -830,7 +844,7 @@ class Archiver:
         print('Done.')
         return EXIT_SUCCESS
 
-    @with_repository()
+    @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_repo_objs(self, args, repository, manifest, key):
         """dump (decrypted, decompressed) repo objects"""
         marker = None
@@ -904,7 +918,7 @@ class Archiver:
         print('Done.')
         return EXIT_SUCCESS
 
-    @with_repository(manifest=False, exclusive=True, cache=True)
+    @with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_refcount_obj(self, args, repository, manifest, key, cache):
         """display refcounts for the objects with the given IDs"""
         for hex_id in args.ids:
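
Every subcommand now states up front which operation categories it performs, and the decorator refuses to be used without a compatibility tuple whenever it will load a manifest or cache. A rough sketch of how a hypothetical new command could be declared (do_frobnicate is invented for illustration and not part of this diff):

    from borg.archiver import with_repository
    from borg.helpers import Manifest

    class MyArchiver:
        # READ: the command only reads archive data, so it is locked out only when the
        # repository marks a READ feature as mandatory.
        @with_repository(compatibility=(Manifest.Operation.READ,))
        def do_frobnicate(self, args, repository, manifest, key):
            ...

        # Omitting compatibility= while manifest/cache are loaded fails as soon as the
        # decorator factory runs (i.e. at class definition time), not at the first
        # repository access:
        #   @with_repository()   # AssertionError: used without compatibility argument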

borg/cache.py (+49, -2)

@@ -9,8 +9,8 @@ import shutil
 from .key import PlaintextKey
 from .logger import create_logger
 logger = create_logger()
-from .helpers import Error, get_cache_dir, decode_dict, int_to_bigint, \
-    bigint_to_int, format_file_size, yes, bin_to_hex, Location, safe_ns
+from .helpers import Error, Manifest, get_cache_dir, decode_dict, int_to_bigint, \
+    bigint_to_int, format_file_size, yes, bin_to_hex, Location, safe_ns, parse_stringified_list
 from .locking import Lock
 from .hashindex import ChunkIndex
 
@@ -84,6 +84,11 @@ class Cache:
                 self.begin_txn()
                 self.commit()
 
+            if not self.check_cache_compatibility():
+                self.wipe_cache()
+
+            self.update_compatibility()
+
             if sync and self.manifest.id != self.manifest_id:
                 # If repository is older than the cache something fishy is going on
                 if self.timestamp and self.timestamp > manifest.timestamp:
@@ -94,6 +99,7 @@ class Cache:
                 # Make sure an encrypted repository has not been swapped for an unencrypted repository
                 if self.key_type is not None and self.key_type != str(key.TYPE):
                     raise self.EncryptionMethodMismatch()
+
                 self.sync()
                 self.commit()
         except:
@@ -175,6 +181,8 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         self.timestamp = self.config.get('cache', 'timestamp', fallback=None)
         self.key_type = self.config.get('cache', 'key_type', fallback=None)
         self.previous_location = self.config.get('cache', 'previous_location', fallback=None)
+        self.ignored_features = set(parse_stringified_list(self.config.get('cache', 'ignored_features', fallback='')))
+        self.mandatory_features = set(parse_stringified_list(self.config.get('cache', 'mandatory_features', fallback='')))
         self.chunks = ChunkIndex.read(os.path.join(self.path, 'chunks').encode('utf-8'))
         self.files = None
 
@@ -240,6 +248,8 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         self.config.set('cache', 'timestamp', self.manifest.timestamp)
         self.config.set('cache', 'key_type', str(self.key.TYPE))
         self.config.set('cache', 'previous_location', self.repository._location.canonical_path())
+        self.config.set('cache', 'ignored_features', ','.join(self.ignored_features))
+        self.config.set('cache', 'mandatory_features', ','.join(self.mandatory_features))
         with open(os.path.join(self.path, 'config'), 'w') as fd:
             self.config.write(fd)
         self.chunks.write(os.path.join(self.path, 'chunks').encode('utf-8'))
@@ -390,6 +400,43 @@ Chunk index:    {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
             self.do_cache = os.path.isdir(archive_path)
             self.chunks = create_master_idx(self.chunks)
 
+    def check_cache_compatibility(self):
+        my_features = Manifest.SUPPORTED_REPO_FEATURES
+        if self.ignored_features & my_features:
+            # The cache might not contain references to chunks that need a feature that is mandatory for some operation
+            # and which this version supports. To avoid corruption while executing that operation, force a rebuild.
+            return False
+        if not self.mandatory_features <= my_features:
+            # The cache was built with consideration of at least one feature that this version does not understand.
+            # This client might misinterpret the cache. Thus force a rebuild.
+            return False
+        return True
+
+    def wipe_cache(self):
+        logger.warning("Discarding incompatible cache and forcing a cache rebuild")
+        archive_path = os.path.join(self.path, 'chunks.archive.d')
+        if os.path.isdir(archive_path):
+            shutil.rmtree(os.path.join(self.path, 'chunks.archive.d'))
+            os.makedirs(os.path.join(self.path, 'chunks.archive.d'))
+        self.chunks = ChunkIndex()
+        with open(os.path.join(self.path, 'files'), 'wb'):
+            pass  # empty file
+        self.manifest_id = ''
+        self.config.set('cache', 'manifest', '')
+
+        self.ignored_features = set()
+        self.mandatory_features = set()
+
+    def update_compatibility(self):
+        operation_to_features_map = self.manifest.get_all_mandatory_features()
+        my_features = Manifest.SUPPORTED_REPO_FEATURES
+        repo_features = set()
+        for operation, features in operation_to_features_map.items():
+            repo_features.update(features)
+
+        self.ignored_features.update(repo_features - my_features)
+        self.mandatory_features.update(repo_features & my_features)
+
     def add_chunk(self, id, data, stats):
         if not self.txn_active:
             self.begin_txn()
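
The cache now records two string sets in its config: mandatory_features (repository features this client knows and honoured while building the cache) and ignored_features (mandatory features it did not understand and therefore ignored). The compatibility decision is pure set arithmetic; here is a small self-contained sketch of the same rules, using plain sets instead of a Cache object:

    SUPPORTED_REPO_FEATURES = frozenset()  # this borg version implements no optional repo features yet

    def cache_is_compatible(ignored_features, mandatory_features):
        # A feature we previously ignored is now supported: the cache may be missing chunk
        # references that the feature-aware code relies on, so it has to be rebuilt.
        if ignored_features & SUPPORTED_REPO_FEATURES:
            return False
        # The cache was built honouring a feature this client does not understand at all,
        # so it might be misinterpreted; rebuild it as well.
        if not mandatory_features <= SUPPORTED_REPO_FEATURES:
            return False
        return True

    # e.g. a cache written by a newer client that honoured 'unknown-feature' gets wiped:
    assert cache_is_compatible(set(), {'unknown-feature'}) is False
    assert cache_is_compatible(set(), set()) is True

The two sets round-trip through the cache config as comma-separated strings, which is what parse_stringified_list() in helpers.py (below) parses back on load.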

borg/helpers.py (+66, -2)

@@ -2,6 +2,7 @@ import argparse
 from binascii import hexlify
 from collections import namedtuple
 import contextlib
+import enum
 from functools import wraps
 import grp
 import os
@@ -110,6 +111,10 @@
     """Formatting Error: "{}".format({}): {}({})"""
 
 
+class MandatoryFeatureUnsupported(Error):
+    """Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""
+
+
 def check_extension_modules():
     from . import platform, compress
     if hashindex.API_VERSION != '1.0_01':
@@ -126,6 +131,34 @@
 
 class Manifest:
 
+    @enum.unique
+    class Operation(enum.Enum):
+        # The comments here only roughly describe the scope of each feature. In the end, additions need to be
+        # based on the potential problems older clients could cause when accessing newer repositories and the
+        # trade-offs of locking such versions out versus still allowing access. Since all older versions and their
+        # exact behaviours are known when a new feature is introduced, the categorisation sometimes might not match
+        # the general descriptions below.
+
+        # The READ operation describes which features are needed to safely list and extract the archives in the
+        # repository.
+        READ = 'read'
+        # The CHECK operation is for all operations that need either to understand every detail
+        # of the repository (for consistency checks and repairs) or are seldom used functions that just
+        # should use the most restrictive feature set because more fine grained compatibility tracking is
+        # not needed.
+        CHECK = 'check'
+        # The WRITE operation is for adding archives. Features here ensure that older clients don't add archives
+        # in an old format, or are used to lock out clients that for other reasons can no longer safely add new
+        # archives.
+        WRITE = 'write'
+        # The DELETE operation is for all operations (like archive deletion) that need a 100% correct reference
+        # count and need to be able to find all (directly and indirectly) referenced chunks of a given archive.
+        DELETE = 'delete'
+
+    NO_OPERATION_CHECK = tuple()
+
+    SUPPORTED_REPO_FEATURES = frozenset([])
+
     MANIFEST_ID = b'\0' * 32
 
     def __init__(self, key, repository, item_keys=None):
@@ -139,7 +172,7 @@ class Manifest:
         self.timestamp = None
 
     @classmethod
-    def load(cls, repository, key=None, force_tam_not_required=False):
+    def load(cls, repository, operations, key=None, force_tam_not_required=False):
         from .key import key_factory, tam_required_file, tam_required
         from .repository import Repository
         from .archive import ITEM_KEYS
@@ -153,7 +186,7 @@ class Manifest:
         data = key.decrypt(None, cdata)
         m, manifest.tam_verified = key.unpack_and_verify_manifest(data, force_tam_not_required=force_tam_not_required)
         manifest.id = key.id_hash(data)
-        if not m.get(b'version') == 1:
+        if m.get(b'version') not in (1, 2):
             raise ValueError('Invalid manifest version')
         manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
         manifest.timestamp = m.get(b'timestamp')
@@ -173,8 +206,34 @@ class Manifest:
             if not manifest_required and security_required:
                 logger.debug('Manifest is TAM verified and says TAM is *not* required, updating security database...')
                 os.unlink(tam_required_file(repository))
+        manifest.check_repository_compatibility(operations)
         return manifest, key
 
+    def check_repository_compatibility(self, operations):
+        for operation in operations:
+            assert isinstance(operation, self.Operation)
+            feature_flags = self.config.get(b'feature_flags', None)
+            if feature_flags is None:
+                return
+            if operation.value.encode() not in feature_flags:
+                continue
+            requirements = feature_flags[operation.value.encode()]
+            if b'mandatory' in requirements:
+                unsupported = set(requirements[b'mandatory']) - self.SUPPORTED_REPO_FEATURES
+                if unsupported:
+                    raise MandatoryFeatureUnsupported([f.decode() for f in unsupported])
+
+    def get_all_mandatory_features(self):
+        result = {}
+        feature_flags = self.config.get(b'feature_flags', None)
+        if feature_flags is None:
+            return result
+
+        for operation, requirements in feature_flags.items():
+            if b'mandatory' in requirements:
+                result[operation.decode()] = set([feature.decode() for feature in requirements[b'mandatory']])
+        return result
+
     def write(self):
         if self.key.tam_required:
             self.config[b'tam_required'] = True
@@ -881,6 +940,11 @@ def bin_to_hex(binary):
     return hexlify(binary).decode('ascii')
 
 
+def parse_stringified_list(s):
+    l = re.split(" *, *", s)
+    return [item for item in l if item != '']
+
+
 class Location:
     """Object representing a repository / archive location
     """

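In the manifest the flags are stored under the msgpack config key b'feature_flags', a mapping from operation name to a dict that may contain a b'mandatory' list of feature names. A hedged illustration of how check_repository_compatibility() treats such an entry, assuming a manifest object obtained via Manifest.load() as above (the feature name 'nonexistent-feature' is made up for the example):

    from borg.helpers import Manifest, MandatoryFeatureUnsupported

    def describe_compatibility(manifest):
        # A newer client could have written something like:
        # manifest.config[b'feature_flags'] = {b'read': {b'mandatory': [b'nonexistent-feature']}}
        try:
            # only the requested operations are checked
            manifest.check_repository_compatibility((Manifest.Operation.READ,))
        except MandatoryFeatureUnsupported as exc:
            return 'READ needs a newer borg, missing features: %s' % exc.args[0]
        # operations without an entry (or repositories without feature_flags) always pass
        manifest.check_repository_compatibility((Manifest.Operation.WRITE,))
        return 'all requested operations permitted'

Since SUPPORTED_REPO_FEATURES is still an empty frozenset, any mandatory feature at all counts as unsupported for this version; the constant exists so that later releases can list the features they actually implement.
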
borg/testsuite/archiver.py (+112, -10)

@@ -25,7 +25,7 @@ from ..archiver import Archiver
 from ..cache import Cache
 from ..crypto import bytes_to_long, num_aes_blocks
 from ..helpers import Manifest, PatternMatcher, parse_pattern, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, bin_to_hex, \
-    get_security_dir, MAX_S
+    get_security_dir, MAX_S, MandatoryFeatureUnsupported, Location
 from ..key import RepoKey, KeyfileKey, Passphrase, TAMRequiredError
 from ..keymanager import RepoIdMismatch, NotABorgKeyFile
 from ..remote import RemoteRepository, PathNotAllowed
@@ -248,7 +248,7 @@ class ArchiverTestCaseBase(BaseTestCase):
     def open_archive(self, name):
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             archive = Archive(repository, key, manifest, name)
         return archive, repository
 
@@ -815,7 +815,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.cmd('extract', '--dry-run', self.repository_location + '::test.4')
         # Make sure both archives have been renamed
         with Repository(self.repository_path) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
         self.assert_equal(len(manifest.archives), 2)
         self.assert_in('test.3', manifest.archives)
         self.assert_in('test.4', manifest.archives)
@@ -853,7 +853,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.cmd('init', '--encryption=none', self.repository_location)
         self.create_src_archive('test')
         with Repository(self.repository_path, exclusive=True) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             archive = Archive(repository, key, manifest, 'test')
             for item in archive.iter_items():
                 if 'chunks' in item:
@@ -870,7 +870,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.cmd('init', '--encryption=none', self.repository_location)
         self.create_src_archive('test')
         with Repository(self.repository_path, exclusive=True) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             archive = Archive(repository, key, manifest, 'test')
             id = archive.metadata[b'items'][0]
             repository.put(id, b'corrupted items metadata stream chunk')
@@ -915,9 +915,111 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.cmd('create', '--dry-run', self.repository_location + '::test', 'input')
         # Make sure no archive has been created
         with Repository(self.repository_path) as repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
         self.assert_equal(len(manifest.archives), 0)
 
+    def add_unknown_feature(self, operation):
+        with Repository(self.repository_path, exclusive=True) as repository:
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            manifest.config[b'feature_flags'] = {operation.value.encode(): {b'mandatory': [b'unknown-feature']}}
+            manifest.write()
+            repository.commit()
+
+    def cmd_raises_unknown_feature(self, args):
+        if self.FORK_DEFAULT:
+            self.cmd(*args, exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises(MandatoryFeatureUnsupported) as excinfo:
+                self.cmd(*args)
+            assert excinfo.value.args == (['unknown-feature'],)
+
+    def test_unknown_feature_on_create(self):
+        print(self.cmd('init', self.repository_location))
+        self.add_unknown_feature(Manifest.Operation.WRITE)
+        self.cmd_raises_unknown_feature(['create', self.repository_location + '::test', 'input'])
+
+    def test_unknown_feature_on_change_passphrase(self):
+        print(self.cmd('init', self.repository_location))
+        self.add_unknown_feature(Manifest.Operation.CHECK)
+        self.cmd_raises_unknown_feature(['change-passphrase', self.repository_location])
+
+    def test_unknown_feature_on_read(self):
+        print(self.cmd('init', self.repository_location))
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.READ)
+        with changedir('output'):
+            self.cmd_raises_unknown_feature(['extract', self.repository_location + '::test'])
+
+        self.cmd_raises_unknown_feature(['list', self.repository_location])
+        self.cmd_raises_unknown_feature(['info', self.repository_location + '::test'])
+
+    def test_unknown_feature_on_rename(self):
+        print(self.cmd('init', self.repository_location))
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.CHECK)
+        self.cmd_raises_unknown_feature(['rename', self.repository_location + '::test', 'other'])
+
+    def test_unknown_feature_on_delete(self):
+        print(self.cmd('init', self.repository_location))
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.DELETE)
+        # delete of an archive raises
+        self.cmd_raises_unknown_feature(['delete', self.repository_location + '::test'])
+        self.cmd_raises_unknown_feature(['prune', '--keep-daily=3', self.repository_location])
+        # delete of the whole repository ignores features
+        self.cmd('delete', self.repository_location)
+
+    @unittest.skipUnless(has_llfuse, 'llfuse not installed')
+    def test_unknown_feature_on_mount(self):
+        self.cmd('init', self.repository_location)
+        self.cmd('create', self.repository_location + '::test', 'input')
+        self.add_unknown_feature(Manifest.Operation.READ)
+        mountpoint = os.path.join(self.tmpdir, 'mountpoint')
+        os.mkdir(mountpoint)
+        # XXX this might hang if it doesn't raise an error
+        self.cmd_raises_unknown_feature(['mount', self.repository_location + '::test', mountpoint])
+
+    @pytest.mark.allow_cache_wipe
+    def test_unknown_mandatory_feature_in_cache(self):
+        if self.prefix:
+            path_prefix = 'ssh://__testsuite__'
+        else:
+            path_prefix = ''
+
+        print(self.cmd('init', self.repository_location))
+
+        with Repository(self.repository_path, exclusive=True) as repository:
+            if path_prefix:
+                repository._location = Location(self.repository_location)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            with Cache(repository, key, manifest) as cache:
+                cache.begin_txn()
+                cache.mandatory_features = set(['unknown-feature'])
+                cache.commit()
+
+        if self.FORK_DEFAULT:
+            self.cmd('create', self.repository_location + '::test', 'input')
+        else:
+            called = False
+            wipe_cache_safe = Cache.wipe_cache
+
+            def wipe_wrapper(*args):
+                nonlocal called
+                called = True
+                wipe_cache_safe(*args)
+
+            with patch.object(Cache, 'wipe_cache', wipe_wrapper):
+                self.cmd('create', self.repository_location + '::test', 'input')
+
+            assert called
+
+        with Repository(self.repository_path, exclusive=True) as repository:
+            if path_prefix:
+                repository._location = Location(self.repository_location)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
+            with Cache(repository, key, manifest) as cache:
+                assert cache.mandatory_features == set([])
+
     def test_progress(self):
         self.create_regular_file('file1', size=1024 * 80)
         self.cmd('init', self.repository_location)
@@ -1594,7 +1696,7 @@ class ArchiverCheckTestCase(ArchiverTestCaseBase):
         # This bug can still live on in Borg repositories (through borg upgrade).
         archive, repository = self.open_archive('archive1')
         with repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             with Cache(repository, key, manifest) as cache:
                 archive = Archive(repository, key, manifest, '0.13', cache=cache, create=True)
                 archive.items_buffer.add({
@@ -1611,7 +1713,7 @@ class ArchiverCheckTestCase(ArchiverTestCaseBase):
 class ManifestAuthenticationTest(ArchiverTestCaseBase):
     def spoof_manifest(self, repository):
         with repository:
-            _, key = Manifest.load(repository)
+            _, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},
@@ -1624,7 +1726,7 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
         self.cmd('init', self.repository_location)
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
-            manifest, key = Manifest.load(repository)
+            manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},
@@ -1641,7 +1743,7 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
             shutil.rmtree(get_security_dir(bin_to_hex(repository.id)))
-            _, key = Manifest.load(repository)
+            _, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
             key.tam_required = False
             key.change_passphrase(key._passphrase)
 

conftest.py (+21, -0)

@@ -4,6 +4,8 @@ import sys
 
 import pytest
 
+import borg.cache
+
 # needed to get pretty assertion failures in unit tests:
 if hasattr(pytest, 'register_assert_rewrite'):
     pytest.register_assert_rewrite('borg.testsuite')
@@ -35,3 +37,22 @@ def clean_env(tmpdir_factory, monkeypatch):
     keys = [key for key in os.environ if key.startswith('BORG_')]
     for key in keys:
         monkeypatch.delenv(key, raising=False)
+
+
+class DefaultPatches:
+    def __init__(self, request):
+        self.org_cache_wipe_cache = borg.cache.Cache.wipe_cache
+
+        def wipe_should_not_be_called(*a, **kw):
+            raise AssertionError("Cache wipe was triggered, if this is part of the test add @pytest.mark.allow_cache_wipe")
+        if 'allow_cache_wipe' not in request.keywords:
+            borg.cache.Cache.wipe_cache = wipe_should_not_be_called
+        request.addfinalizer(self.undo)
+
+    def undo(self):
+        borg.cache.Cache.wipe_cache = self.org_cache_wipe_cache
+
+
+@pytest.fixture(autouse=True)
+def default_patches(request):
+    return DefaultPatches(request)