
Switch to context manager for running "dump_data_sources" before/after hooks (#790).

Dan Helfman 3 months ago
parent
commit
e06c6740f2
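
For orientation: this commit replaces the explicit before/after command hook plumbing that dispatch.call_hook() used to perform with a Before_after_hooks context manager, which each data source hook now enters around its dump work. Below is a minimal sketch of the new call pattern, using only names that appear in the hunks that follow; the one-line config dict is an assumed placeholder for illustration:

    import borgmatic.hooks.command

    config = {'commands': [], 'umask': None}  # assumed placeholder configuration

    with borgmatic.hooks.command.Before_after_hooks(
        command_hooks=config.get('commands'),
        before_after='dump_data_sources',
        hook_name='postgresql',
        umask=config.get('umask'),
        dry_run=True,
    ):
        pass  # the hook's dump logic runs here, bracketed by "before"/"after" command hooks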

+ 27 - 9
borgmatic/commands/borgmatic.py

@@ -97,7 +97,9 @@ def run_configuration(config_filename, config, config_paths, arguments):
         )
 
     command.execute_hooks(
-        command.filter_hooks(config.get('commands'), before='configuration', action_names=arguments.keys()),
+        command.filter_hooks(
+            config.get('commands'), before='configuration', action_names=arguments.keys()
+        ),
         config.get('umask'),
         global_arguments.dry_run,
         configuration_filename=config_filename,
@@ -235,7 +237,9 @@ def run_configuration(config_filename, config, config_paths, arguments):
 
     if encountered_error:
         command.execute_hooks(
-            command.filter_hooks(config.get('commands'), after='error', action_names=arguments.keys()),
+            command.filter_hooks(
+                config.get('commands'), after='error', action_names=arguments.keys()
+            ),
             config.get('umask'),
             global_arguments.dry_run,
             configuration_filename=config_filename,
@@ -245,7 +249,9 @@ def run_configuration(config_filename, config, config_paths, arguments):
         )
 
     command.execute_hooks(
-        command.filter_hooks(config.get('commands'), after='configuration', action_names=arguments.keys()),
+        command.filter_hooks(
+            config.get('commands'), after='configuration', action_names=arguments.keys()
+        ),
         config.get('umask'),
         global_arguments.dry_run,
         configuration_filename=config_filename,
@@ -314,7 +320,9 @@ def run_actions(
     skip_actions = set(get_skip_actions(config, arguments))
 
     command.execute_hooks(
-        command.filter_hooks(config.get('commands'), before='repository', action_names=arguments.keys()),
+        command.filter_hooks(
+            config.get('commands'), before='repository', action_names=arguments.keys()
+        ),
         config.get('umask'),
         global_arguments.dry_run,
         **hook_context,
@@ -325,7 +333,9 @@ def run_actions(
             continue
 
         command.execute_hooks(
-            command.filter_hooks(config.get('commands'), before='action', action_names=arguments.keys()),
+            command.filter_hooks(
+                config.get('commands'), before='action', action_names=arguments.keys()
+            ),
             config.get('umask'),
             global_arguments.dry_run,
             **hook_context,
@@ -543,14 +553,18 @@ def run_actions(
             )
 
         command.execute_hooks(
-            command.filter_hooks(config.get('commands'), after='action', action_names=arguments.keys()),
+            command.filter_hooks(
+                config.get('commands'), after='action', action_names=arguments.keys()
+            ),
             config.get('umask'),
             global_arguments.dry_run,
             **hook_context,
         )
 
     command.execute_hooks(
-        command.filter_hooks(config.get('commands'), after='repository', action_names=arguments.keys()),
+        command.filter_hooks(
+            config.get('commands'), after='repository', action_names=arguments.keys()
+        ),
         config.get('umask'),
         global_arguments.dry_run,
         **hook_context,
@@ -838,7 +852,9 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
         try:
             for config_filename, config in configs.items():
                 command.execute_hooks(
-                    command.filter_hooks(config.get('commands'), before='everything', action_names=arguments.keys()),
+                    command.filter_hooks(
+                        config.get('commands'), before='everything', action_names=arguments.keys()
+                    ),
                     config.get('umask'),
                     arguments['global'].dry_run,
                     configuration_filename=config_filename,
@@ -889,7 +905,9 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
         try:
             for config_filename, config in configs.items():
                 command.execute_hooks(
-                    command.filter_hooks(config.get('commands'), after='everything', action_names=arguments.keys()),
+                    command.filter_hooks(
+                        config.get('commands'), after='everything', action_names=arguments.keys()
+                    ),
                     config.get('umask'),
                     arguments['global'].dry_run,
                     configuration_filename=config_filename,

+ 11 - 2
borgmatic/config/generate.py

@@ -56,7 +56,11 @@ def schema_to_sample_configuration(schema, source_config, level=0, parent_is_seq
 
     if schema_type == 'array' or (isinstance(schema_type, list) and 'array' in schema_type):
         config = ruamel.yaml.comments.CommentedSeq(
-            [schema_to_sample_configuration(schema['items'], source_config, level, parent_is_sequence=True)]
+            [
+                schema_to_sample_configuration(
+                    schema['items'], source_config, level, parent_is_sequence=True
+                )
+            ]
         )
         add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
     elif schema_type == 'object' or (isinstance(schema_type, list) and 'object' in schema_type):
@@ -65,7 +69,12 @@ def schema_to_sample_configuration(schema, source_config, level=0, parent_is_seq
 
         config = ruamel.yaml.comments.CommentedMap(
             [
-                (field_name, schema_to_sample_configuration(sub_schema, source_config.get(field_name, {}), level + 1))
+                (
+                    field_name,
+                    schema_to_sample_configuration(
+                        sub_schema, source_config.get(field_name, {}), level + 1
+                    ),
+                )
                 for field_name, sub_schema in get_properties(schema).items()
             ]
         )

+ 1 - 1
borgmatic/config/normalize.py

@@ -98,7 +98,7 @@ def normalize_commands(config_filename, config):
         for preposition in ('before', 'after'):
             option_name = f'{preposition}_{"backup" if action_name == "create" else action_name}'
             commands = config.pop(option_name, None)
-            
+
             if not commands:
                 continue
 

+ 68 - 2
borgmatic/hooks/command.py

@@ -59,7 +59,9 @@ def filter_hooks(command_hooks, before=None, after=None, hook_name=None, action_
         if before is None or hook_config.get('before') == before
         if after is None or hook_config.get('after') == after
         if hook_name is None or config_hook_names is None or hook_name in config_hook_names
-        if action_names is None or config_action_names is None or set(config_action_names or ()).intersection(set(action_names))
+        if action_names is None
+        or config_action_names is None
+        or set(config_action_names or ()).intersection(set(action_names))
     )
 
 
@@ -115,7 +117,9 @@ def execute_hooks(command_hooks, umask, dry_run, **context):
 
                 borgmatic.execute.execute_command(
                     [command],
-                    output_log_level=(logging.ERROR if hook_config.get('after') == 'error' else logging.ANSWER),
+                    output_log_level=(
+                        logging.ERROR if hook_config.get('after') == 'error' else logging.ANSWER
+                    ),
                     shell=True,
                     environment=make_environment(os.environ),
                 )
@@ -124,6 +128,68 @@ def execute_hooks(command_hooks, umask, dry_run, **context):
                 os.umask(original_umask)
 
 
+class Before_after_hooks:
+    '''
+    A Python context manager for executing command hooks both before and after the wrapped code.
+
+    Example use as a context manager:
+
+        with borgmatic.hooks.command.Before_after_hooks(
+            command_hooks=config.get('commands'),
+            before_after='do_stuff',
+            hook_name='myhook',
+            umask=config.get('umask'),
+            dry_run=dry_run,
+        ):
+            do()
+            some()
+            stuff()
+
+    With that context manager in place, "before" command hooks execute before the wrapped code runs,
+    and "after" command hooks execute after the wrapped code completes.
+    '''
+
+    def __init__(self, command_hooks, before_after, hook_name, umask, dry_run, **context):
+        '''
+        Given a sequence of command hook configuration dicts, the before/after name, the name of the
+        calling hook, a umask to run commands with, a dry run flag, and any context for the executed
+        commands, save those data points for use below.
+        '''
+        self.command_hooks = command_hooks
+        self.before_after = before_after
+        self.hook_name = hook_name
+        self.umask = umask
+        self.dry_run = dry_run
+        self.context = context
+
+    def __enter__(self):
+        '''
+        Run the configured "before" command hooks that match the initialized data points.
+        '''
+        execute_hooks(
+            borgmatic.hooks.command.filter_hooks(
+                self.command_hooks, before=self.before_after, hook_name=self.hook_name
+            ),
+            self.umask,
+            self.dry_run,
+            **self.context,
+        )
+
+    def __exit__(self, exception, value, traceback):
+        '''
+        Run the configured "after" command hooks that match the initialized data points.
+        '''
+        execute_hooks(
+            borgmatic.hooks.command.filter_hooks(
+                self.command_hooks, after=self.before_after, hook_name=self.hook_name
+            ),
+            self.umask,
+            self.dry_run,
+            **self.context,
+        )
+
+
 def considered_soft_failure(error):
     '''
     Given a configuration filename and an exception object, return whether the exception object

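To make the enter/exit semantics above concrete, here is a stripped-down, self-contained sketch of the same technique. The class and method names are stand-ins, not borgmatic's API: filter_hooks() plus execute_hooks() are collapsed into one toy method. The property worth noting is that __exit__() still runs the "after" hooks when the wrapped code raises:

    class Sketch_before_after_hooks:
        '''Toy stand-in for Before_after_hooks, for illustration only.'''

        def __init__(self, command_hooks, before_after):
            self.command_hooks = command_hooks or []
            self.before_after = before_after

        def run_matching(self, preposition):
            # Collapses filter_hooks() + execute_hooks() into a print().
            for hook in self.command_hooks:
                if hook.get(preposition) == self.before_after:
                    print(f'{preposition} hook: {hook["run"]}')

        def __enter__(self):
            self.run_matching('before')

        def __exit__(self, exception_type, value, traceback):
            # Runs whether or not the wrapped block raised an exception.
            self.run_matching('after')

    hooks = [
        {'before': 'dump_data_sources', 'run': 'echo starting'},
        {'after': 'dump_data_sources', 'run': 'echo done'},
    ]

    try:
        with Sketch_before_after_hooks(hooks, 'dump_data_sources'):
            raise RuntimeError('dump failed')
    except RuntimeError:
        pass  # the "after" hook already ran before the exception propagated
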
+ 35 - 27
borgmatic/hooks/data_source/bootstrap.py

@@ -6,6 +6,7 @@ import os
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
+import borgmatic.hooks.command
 
 logger = logging.getLogger(__name__)
 
@@ -37,38 +38,45 @@ def dump_data_sources(
     if hook_config and hook_config.get('store_config_files') is False:
         return []
 
-    borgmatic_manifest_path = os.path.join(
-        borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
-    )
-
-    if dry_run:
-        return []
-
-    os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
-
-    with open(borgmatic_manifest_path, 'w') as manifest_file:
-        json.dump(
-            {
-                'borgmatic_version': importlib.metadata.version('borgmatic'),
-                'config_paths': config_paths,
-            },
-            manifest_file,
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='bootstrap',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        borgmatic_manifest_path = os.path.join(
+            borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
         )
 
-    patterns.extend(
-        borgmatic.borg.pattern.Pattern(
-            config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK
+        if dry_run:
+            return []
+
+        os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
+
+        with open(borgmatic_manifest_path, 'w') as manifest_file:
+            json.dump(
+                {
+                    'borgmatic_version': importlib.metadata.version('borgmatic'),
+                    'config_paths': config_paths,
+                },
+                manifest_file,
+            )
+
+        patterns.extend(
+            borgmatic.borg.pattern.Pattern(
+                config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK
+            )
+            for config_path in config_paths
         )
-        for config_path in config_paths
-    )
-    patterns.append(
-        borgmatic.borg.pattern.Pattern(
-            os.path.join(borgmatic_runtime_directory, 'bootstrap'),
-            source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        patterns.append(
+            borgmatic.borg.pattern.Pattern(
+                os.path.join(borgmatic_runtime_directory, 'bootstrap'),
+                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+            )
         )
-    )
 
-    return []
+        return []
 
 
 def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):

+ 37 - 29
borgmatic/hooks/data_source/btrfs.py

@@ -9,6 +9,7 @@ import subprocess
 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
+import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot
 
 logger = logging.getLogger(__name__)
@@ -204,41 +205,48 @@ def dump_data_sources(
 
     If this is a dry run, then don't actually snapshot anything.
     '''
-    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-    logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='btrfs',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+        logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
+
+        # Based on the configured patterns, determine Btrfs subvolumes to back up. Only consider those
+        # patterns that came from actual user configuration (as opposed to, say, other hooks).
+        btrfs_command = hook_config.get('btrfs_command', 'btrfs')
+        findmnt_command = hook_config.get('findmnt_command', 'findmnt')
+        subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)
+
+        if not subvolumes:
+            logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')
+
+        # Snapshot each subvolume, rewriting patterns to use their snapshot paths.
+        for subvolume in subvolumes:
+            logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')
+
+            snapshot_path = make_snapshot_path(subvolume.path)
 
-    # Based on the configured patterns, determine Btrfs subvolumes to back up. Only consider those
-    # patterns that came from actual user configuration (as opposed to, say, other hooks).
-    btrfs_command = hook_config.get('btrfs_command', 'btrfs')
-    findmnt_command = hook_config.get('findmnt_command', 'findmnt')
-    subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)
-
-    if not subvolumes:
-        logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')
-
-    # Snapshot each subvolume, rewriting patterns to use their snapshot paths.
-    for subvolume in subvolumes:
-        logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')
-
-        snapshot_path = make_snapshot_path(subvolume.path)
-
-        if dry_run:
-            continue
+            if dry_run:
+                continue
 
-        snapshot_subvolume(btrfs_command, subvolume.path, snapshot_path)
+            snapshot_subvolume(btrfs_command, subvolume.path, snapshot_path)
 
-        for pattern in subvolume.contained_patterns:
-            snapshot_pattern = make_borg_snapshot_pattern(subvolume.path, pattern)
+            for pattern in subvolume.contained_patterns:
+                snapshot_pattern = make_borg_snapshot_pattern(subvolume.path, pattern)
 
-            # Attempt to update the pattern in place, since pattern order matters to Borg.
-            try:
-                patterns[patterns.index(pattern)] = snapshot_pattern
-            except ValueError:
-                patterns.append(snapshot_pattern)
+                # Attempt to update the pattern in place, since pattern order matters to Borg.
+                try:
+                    patterns[patterns.index(pattern)] = snapshot_pattern
+                except ValueError:
+                    patterns.append(snapshot_pattern)
 
-        patterns.append(make_snapshot_exclude_pattern(subvolume.path))
+            patterns.append(make_snapshot_exclude_pattern(subvolume.path))
 
-    return []
+        return []
 
 
 def delete_snapshot(btrfs_command, snapshot_path):  # pragma: no cover

+ 70 - 62
borgmatic/hooks/data_source/lvm.py

@@ -10,6 +10,7 @@ import subprocess
 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
+import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot
 
 logger = logging.getLogger(__name__)
@@ -197,77 +198,84 @@ def dump_data_sources(
 
     If this is a dry run, then don't actually snapshot anything.
     '''
-    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-    logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
-
-    # List logical volumes to get their mount points, but only consider those patterns that came
-    # from actual user configuration (as opposed to, say, other hooks).
-    lsblk_command = hook_config.get('lsblk_command', 'lsblk')
-    requested_logical_volumes = get_logical_volumes(lsblk_command, patterns)
-
-    # Snapshot each logical volume, rewriting source directories to use the snapshot paths.
-    snapshot_suffix = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
-    normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
-
-    if not requested_logical_volumes:
-        logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')
-
-    for logical_volume in requested_logical_volumes:
-        snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
-        logger.debug(
-            f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
-        )
-
-        if not dry_run:
-            snapshot_logical_volume(
-                hook_config.get('lvcreate_command', 'lvcreate'),
-                snapshot_name,
-                logical_volume.device_path,
-                hook_config.get('snapshot_size', DEFAULT_SNAPSHOT_SIZE),
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='lvm',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+        logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
+
+        # List logical volumes to get their mount points, but only consider those patterns that came
+        # from actual user configuration (as opposed to, say, other hooks).
+        lsblk_command = hook_config.get('lsblk_command', 'lsblk')
+        requested_logical_volumes = get_logical_volumes(lsblk_command, patterns)
+
+        # Snapshot each logical volume, rewriting source directories to use the snapshot paths.
+        snapshot_suffix = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
+        normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
+
+        if not requested_logical_volumes:
+            logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')
+
+        for logical_volume in requested_logical_volumes:
+            snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
+            logger.debug(
+                f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
             )
 
-        # Get the device path for the snapshot we just created.
-        try:
-            snapshot = get_snapshots(
-                hook_config.get('lvs_command', 'lvs'), snapshot_name=snapshot_name
-            )[0]
-        except IndexError:
-            raise ValueError(f'Cannot find LVM snapshot {snapshot_name}')
-
-        # Mount the snapshot into a particular named temporary directory so that the snapshot ends
-        # up in the Borg archive at the "original" logical volume mount point path.
-        snapshot_mount_path = os.path.join(
-            normalized_runtime_directory,
-            'lvm_snapshots',
-            hashlib.shake_256(logical_volume.mount_point.encode('utf-8')).hexdigest(
-                MOUNT_POINT_HASH_LENGTH
-            ),
-            logical_volume.mount_point.lstrip(os.path.sep),
-        )
+            if not dry_run:
+                snapshot_logical_volume(
+                    hook_config.get('lvcreate_command', 'lvcreate'),
+                    snapshot_name,
+                    logical_volume.device_path,
+                    hook_config.get('snapshot_size', DEFAULT_SNAPSHOT_SIZE),
+                )
 
-        logger.debug(
-            f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
-        )
+            # Get the device path for the snapshot we just created.
+            try:
+                snapshot = get_snapshots(
+                    hook_config.get('lvs_command', 'lvs'), snapshot_name=snapshot_name
+                )[0]
+            except IndexError:
+                raise ValueError(f'Cannot find LVM snapshot {snapshot_name}')
+
+            # Mount the snapshot into a particular named temporary directory so that the snapshot ends
+            # up in the Borg archive at the "original" logical volume mount point path.
+            snapshot_mount_path = os.path.join(
+                normalized_runtime_directory,
+                'lvm_snapshots',
+                hashlib.shake_256(logical_volume.mount_point.encode('utf-8')).hexdigest(
+                    MOUNT_POINT_HASH_LENGTH
+                ),
+                logical_volume.mount_point.lstrip(os.path.sep),
+            )
 
-        if dry_run:
-            continue
+            logger.debug(
+                f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
+            )
 
-        mount_snapshot(
-            hook_config.get('mount_command', 'mount'), snapshot.device_path, snapshot_mount_path
-        )
+            if dry_run:
+                continue
 
-        for pattern in logical_volume.contained_patterns:
-            snapshot_pattern = make_borg_snapshot_pattern(
-                pattern, logical_volume, normalized_runtime_directory
+            mount_snapshot(
+                hook_config.get('mount_command', 'mount'), snapshot.device_path, snapshot_mount_path
             )
 
-            # Attempt to update the pattern in place, since pattern order matters to Borg.
-            try:
-                patterns[patterns.index(pattern)] = snapshot_pattern
-            except ValueError:
-                patterns.append(snapshot_pattern)
+            for pattern in logical_volume.contained_patterns:
+                snapshot_pattern = make_borg_snapshot_pattern(
+                    pattern, logical_volume, normalized_runtime_directory
+                )
+
+                # Attempt to update the pattern in place, since pattern order matters to Borg.
+                try:
+                    patterns[patterns.index(pattern)] = snapshot_pattern
+                except ValueError:
+                    patterns.append(snapshot_pattern)
 
-    return []
+        return []
 
 
 def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover

+ 58 - 50
borgmatic/hooks/data_source/mariadb.py

@@ -6,6 +6,7 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
+import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import (
     execute_command,
@@ -242,71 +243,78 @@ def dump_data_sources(
     Also append the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-    processes = []
-
-    logger.info(f'Dumping MariaDB databases{dry_run_label}')
-
-    for database in databases:
-        dump_path = make_dump_path(borgmatic_runtime_directory)
-        username = borgmatic.hooks.credential.parse.resolve_credential(
-            database.get('username'), config
-        )
-        password = borgmatic.hooks.credential.parse.resolve_credential(
-            database.get('password'), config
-        )
-        environment = dict(os.environ)
-        dump_database_names = database_names_to_dump(
-            database, config, username, password, environment, dry_run
-        )
-
-        if not dump_database_names:
-            if dry_run:
-                continue
-
-            raise ValueError('Cannot find any MariaDB databases to dump.')
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='mariadb',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+        processes = []
+
+        logger.info(f'Dumping MariaDB databases{dry_run_label}')
+
+        for database in databases:
+            dump_path = make_dump_path(borgmatic_runtime_directory)
+            username = borgmatic.hooks.credential.parse.resolve_credential(
+                database.get('username'), config
+            )
+            password = borgmatic.hooks.credential.parse.resolve_credential(
+                database.get('password'), config
+            )
+            environment = dict(os.environ)
+            dump_database_names = database_names_to_dump(
+                database, config, username, password, environment, dry_run
+            )
 
-        if database['name'] == 'all' and database.get('format'):
-            for dump_name in dump_database_names:
-                renamed_database = copy.copy(database)
-                renamed_database['name'] = dump_name
+            if not dump_database_names:
+                if dry_run:
+                    continue
+
+                raise ValueError('Cannot find any MariaDB databases to dump.')
+
+            if database['name'] == 'all' and database.get('format'):
+                for dump_name in dump_database_names:
+                    renamed_database = copy.copy(database)
+                    renamed_database['name'] = dump_name
+                    processes.append(
+                        execute_dump_command(
+                            renamed_database,
+                            config,
+                            username,
+                            password,
+                            dump_path,
+                            (dump_name,),
+                            environment,
+                            dry_run,
+                            dry_run_label,
+                        )
+                    )
+            else:
                 processes.append(
                     execute_dump_command(
-                        renamed_database,
+                        database,
                         config,
                         username,
                         password,
                         dump_path,
-                        (dump_name,),
+                        dump_database_names,
                         environment,
                         dry_run,
                         dry_run_label,
                     )
                 )
-        else:
-            processes.append(
-                execute_dump_command(
-                    database,
-                    config,
-                    username,
-                    password,
-                    dump_path,
-                    dump_database_names,
-                    environment,
-                    dry_run,
-                    dry_run_label,
-                )
-            )
 
-    if not dry_run:
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'mariadb_databases'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        if not dry_run:
+            patterns.append(
+                borgmatic.borg.pattern.Pattern(
+                    os.path.join(borgmatic_runtime_directory, 'mariadb_databases'),
+                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+                )
             )
-        )
 
-    return [process for process in processes if process]
+        return [process for process in processes if process]
 
 
 def remove_data_source_dumps(

+ 45 - 36
borgmatic/hooks/data_source/mongodb.py

@@ -4,6 +4,7 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
+import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import execute_command, execute_command_with_processes
 from borgmatic.hooks.data_source import dump
@@ -48,45 +49,53 @@ def dump_data_sources(
     Also append the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-
-    logger.info(f'Dumping MongoDB databases{dry_run_label}')
-
-    processes = []
-    for database in databases:
-        name = database['name']
-        dump_filename = dump.make_data_source_dump_filename(
-            make_dump_path(borgmatic_runtime_directory),
-            name,
-            database.get('hostname'),
-            database.get('port'),
-        )
-        dump_format = database.get('format', 'archive')
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='mongodb',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+
+        logger.info(f'Dumping MongoDB databases{dry_run_label}')
+
+        processes = []
+
+        for database in databases:
+            name = database['name']
+            dump_filename = dump.make_data_source_dump_filename(
+                make_dump_path(borgmatic_runtime_directory),
+                name,
+                database.get('hostname'),
+                database.get('port'),
+            )
+            dump_format = database.get('format', 'archive')
 
-        logger.debug(
-            f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
-        )
-        if dry_run:
-            continue
-
-        command = build_dump_command(database, config, dump_filename, dump_format)
-
-        if dump_format == 'directory':
-            dump.create_parent_directory_for_dump(dump_filename)
-            execute_command(command, shell=True)
-        else:
-            dump.create_named_pipe_for_dump(dump_filename)
-            processes.append(execute_command(command, shell=True, run_to_completion=False))
-
-    if not dry_run:
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'mongodb_databases'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+            logger.debug(
+                f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
+            )
+            if dry_run:
+                continue
+
+            command = build_dump_command(database, config, dump_filename, dump_format)
+
+            if dump_format == 'directory':
+                dump.create_parent_directory_for_dump(dump_filename)
+                execute_command(command, shell=True)
+            else:
+                dump.create_named_pipe_for_dump(dump_filename)
+                processes.append(execute_command(command, shell=True, run_to_completion=False))
+
+        if not dry_run:
+            patterns.append(
+                borgmatic.borg.pattern.Pattern(
+                    os.path.join(borgmatic_runtime_directory, 'mongodb_databases'),
+                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+                )
             )
-        )
 
-    return processes
+        return processes
 
 
 def make_password_config_file(password):

+ 58 - 50
borgmatic/hooks/data_source/mysql.py

@@ -5,6 +5,7 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
+import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 import borgmatic.hooks.data_source.mariadb
 from borgmatic.execute import (
@@ -169,71 +170,78 @@ def dump_data_sources(
     Also append the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-    processes = []
-
-    logger.info(f'Dumping MySQL databases{dry_run_label}')
-
-    for database in databases:
-        dump_path = make_dump_path(borgmatic_runtime_directory)
-        username = borgmatic.hooks.credential.parse.resolve_credential(
-            database.get('username'), config
-        )
-        password = borgmatic.hooks.credential.parse.resolve_credential(
-            database.get('password'), config
-        )
-        environment = dict(os.environ)
-        dump_database_names = database_names_to_dump(
-            database, config, username, password, environment, dry_run
-        )
-
-        if not dump_database_names:
-            if dry_run:
-                continue
-
-            raise ValueError('Cannot find any MySQL databases to dump.')
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='mysql',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+        processes = []
+
+        logger.info(f'Dumping MySQL databases{dry_run_label}')
+
+        for database in databases:
+            dump_path = make_dump_path(borgmatic_runtime_directory)
+            username = borgmatic.hooks.credential.parse.resolve_credential(
+                database.get('username'), config
+            )
+            password = borgmatic.hooks.credential.parse.resolve_credential(
+                database.get('password'), config
+            )
+            environment = dict(os.environ)
+            dump_database_names = database_names_to_dump(
+                database, config, username, password, environment, dry_run
+            )
 
-        if database['name'] == 'all' and database.get('format'):
-            for dump_name in dump_database_names:
-                renamed_database = copy.copy(database)
-                renamed_database['name'] = dump_name
+            if not dump_database_names:
+                if dry_run:
+                    continue
+
+                raise ValueError('Cannot find any MySQL databases to dump.')
+
+            if database['name'] == 'all' and database.get('format'):
+                for dump_name in dump_database_names:
+                    renamed_database = copy.copy(database)
+                    renamed_database['name'] = dump_name
+                    processes.append(
+                        execute_dump_command(
+                            renamed_database,
+                            config,
+                            username,
+                            password,
+                            dump_path,
+                            (dump_name,),
+                            environment,
+                            dry_run,
+                            dry_run_label,
+                        )
+                    )
+            else:
                 processes.append(
                     execute_dump_command(
-                        renamed_database,
+                        database,
                         config,
                         username,
                         password,
                         dump_path,
-                        (dump_name,),
+                        dump_database_names,
                         environment,
                         dry_run,
                         dry_run_label,
                     )
                 )
-        else:
-            processes.append(
-                execute_dump_command(
-                    database,
-                    config,
-                    username,
-                    password,
-                    dump_path,
-                    dump_database_names,
-                    environment,
-                    dry_run,
-                    dry_run_label,
-                )
-            )
 
-    if not dry_run:
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'mysql_databases'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        if not dry_run:
+            patterns.append(
+                borgmatic.borg.pattern.Pattern(
+                    os.path.join(borgmatic_runtime_directory, 'mysql_databases'),
+                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+                )
             )
-        )
 
-    return [process for process in processes if process]
+        return [process for process in processes if process]
 
 
 def remove_data_source_dumps(

+ 108 - 92
borgmatic/hooks/data_source/postgresql.py

@@ -7,6 +7,7 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
+import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import (
     execute_command,
@@ -141,112 +142,127 @@ def dump_data_sources(
 
     Raise ValueError if the databases to dump cannot be determined.
     '''
-    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-    processes = []
-
-    logger.info(f'Dumping PostgreSQL databases{dry_run_label}')
-
-    for database in databases:
-        environment = make_environment(database, config)
-        dump_path = make_dump_path(borgmatic_runtime_directory)
-        dump_database_names = database_names_to_dump(database, config, environment, dry_run)
-
-        if not dump_database_names:
-            if dry_run:
-                continue
-
-            raise ValueError('Cannot find any PostgreSQL databases to dump.')
-
-        for database_name in dump_database_names:
-            dump_format = database.get('format', None if database_name == 'all' else 'custom')
-            compression = database.get('compression')
-            default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
-            dump_command = tuple(
-                shlex.quote(part)
-                for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
-            )
-            dump_filename = dump.make_data_source_dump_filename(
-                dump_path,
-                database_name,
-                database.get('hostname'),
-                database.get('port'),
-            )
-            if os.path.exists(dump_filename):
-                logger.warning(
-                    f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='postgresql',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+        processes = []
+
+        logger.info(f'Dumping PostgreSQL databases{dry_run_label}')
+
+        for database in databases:
+            environment = make_environment(database, config)
+            dump_path = make_dump_path(borgmatic_runtime_directory)
+            dump_database_names = database_names_to_dump(database, config, environment, dry_run)
+
+            if not dump_database_names:
+                if dry_run:
+                    continue
+
+                raise ValueError('Cannot find any PostgreSQL databases to dump.')
+
+            for database_name in dump_database_names:
+                dump_format = database.get('format', None if database_name == 'all' else 'custom')
+                compression = database.get('compression')
+                default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
+                dump_command = tuple(
+                    shlex.quote(part)
+                    for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
                 )
-                continue
-
-            command = (
-                dump_command
-                + (
-                    '--no-password',
-                    '--clean',
-                    '--if-exists',
+                dump_filename = dump.make_data_source_dump_filename(
+                    dump_path,
+                    database_name,
+                    database.get('hostname'),
+                    database.get('port'),
                 )
-                + (('--host', shlex.quote(database['hostname'])) if 'hostname' in database else ())
-                + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
-                + (
-                    (
-                        '--username',
-                        shlex.quote(
-                            borgmatic.hooks.credential.parse.resolve_credential(
-                                database['username'], config
-                            )
-                        ),
+                if os.path.exists(dump_filename):
+                    logger.warning(
+                        f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                     )
-                    if 'username' in database
-                    else ()
-                )
-                + (('--no-owner',) if database.get('no_owner', False) else ())
-                + (('--format', shlex.quote(dump_format)) if dump_format else ())
-                + (('--compress', shlex.quote(str(compression))) if compression is not None else ())
-                + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
-                + (
-                    tuple(shlex.quote(option) for option in database['options'].split(' '))
-                    if 'options' in database
-                    else ()
+                    continue
+
+                command = (
+                    dump_command
+                    + (
+                        '--no-password',
+                        '--clean',
+                        '--if-exists',
+                    )
+                    + (
+                        ('--host', shlex.quote(database['hostname']))
+                        if 'hostname' in database
+                        else ()
+                    )
+                    + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
+                    + (
+                        (
+                            '--username',
+                            shlex.quote(
+                                borgmatic.hooks.credential.parse.resolve_credential(
+                                    database['username'], config
+                                )
+                            ),
+                        )
+                        if 'username' in database
+                        else ()
+                    )
+                    + (('--no-owner',) if database.get('no_owner', False) else ())
+                    + (('--format', shlex.quote(dump_format)) if dump_format else ())
+                    + (
+                        ('--compress', shlex.quote(str(compression)))
+                        if compression is not None
+                        else ()
+                    )
+                    + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
+                    + (
+                        tuple(shlex.quote(option) for option in database['options'].split(' '))
+                        if 'options' in database
+                        else ()
+                    )
+                    + (() if database_name == 'all' else (shlex.quote(database_name),))
+                    # Use shell redirection rather than the --file flag to sidestep synchronization issues
+                    # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
+                    # format in particular, a named destination is required, and redirection doesn't work.
+                    + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
                 )
-                + (() if database_name == 'all' else (shlex.quote(database_name),))
-                # Use shell redirection rather than the --file flag to sidestep synchronization issues
-                # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
-                # format in particular, a named destination is required, and redirection doesn't work.
-                + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
-            )
 
-            logger.debug(
-                f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
-            )
-            if dry_run:
-                continue
-
-            if dump_format == 'directory':
-                dump.create_parent_directory_for_dump(dump_filename)
-                execute_command(
-                    command,
-                    shell=True,
-                    environment=environment,
+                logger.debug(
+                    f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
                 )
-            else:
-                dump.create_named_pipe_for_dump(dump_filename)
-                processes.append(
+                if dry_run:
+                    continue
+
+                if dump_format == 'directory':
+                    dump.create_parent_directory_for_dump(dump_filename)
                     execute_command(
                         command,
                         shell=True,
                         environment=environment,
-                        run_to_completion=False,
                     )
-                )
+                else:
+                    dump.create_named_pipe_for_dump(dump_filename)
+                    processes.append(
+                        execute_command(
+                            command,
+                            shell=True,
+                            environment=environment,
+                            run_to_completion=False,
+                        )
+                    )
 
-    if not dry_run:
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'postgresql_databases'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        if not dry_run:
+            patterns.append(
+                borgmatic.borg.pattern.Pattern(
+                    os.path.join(borgmatic_runtime_directory, 'postgresql_databases'),
+                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+                )
             )
-        )
 
-    return processes
+        return processes
 
 
 def remove_data_source_dumps(

+ 53 - 45
borgmatic/hooks/data_source/sqlite.py

@@ -4,6 +4,7 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
+import borgmatic.hooks.command
 from borgmatic.execute import execute_command, execute_command_with_processes
 from borgmatic.hooks.data_source import dump
 
@@ -47,55 +48,62 @@ def dump_data_sources(
     Also append the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-    processes = []
-
-    logger.info(f'Dumping SQLite databases{dry_run_label}')
-
-    for database in databases:
-        database_path = database['path']
-
-        if database['name'] == 'all':
-            logger.warning('The "all" database name has no meaning for SQLite databases')
-        if not os.path.exists(database_path):
-            logger.warning(
-                f'No SQLite database at {database_path}; an empty database will be created and dumped'
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='sqlite',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+        processes = []
+
+        logger.info(f'Dumping SQLite databases{dry_run_label}')
+
+        for database in databases:
+            database_path = database['path']
+
+            if database['name'] == 'all':
+                logger.warning('The "all" database name has no meaning for SQLite databases')
+            if not os.path.exists(database_path):
+                logger.warning(
+                    f'No SQLite database at {database_path}; an empty database will be created and dumped'
+                )
+
+            dump_path = make_dump_path(borgmatic_runtime_directory)
+            dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])
+
+            if os.path.exists(dump_filename):
+                logger.warning(
+                    f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
+                )
+                continue
+
+            command = (
+                'sqlite3',
+                shlex.quote(database_path),
+                '.dump',
+                '>',
+                shlex.quote(dump_filename),
             )
-
-        dump_path = make_dump_path(borgmatic_runtime_directory)
-        dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])
-
-        if os.path.exists(dump_filename):
-            logger.warning(
-                f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
+            logger.debug(
+                f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
             )
-            continue
-
-        command = (
-            'sqlite3',
-            shlex.quote(database_path),
-            '.dump',
-            '>',
-            shlex.quote(dump_filename),
-        )
-        logger.debug(
-            f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
-        )
-        if dry_run:
-            continue
-
-        dump.create_named_pipe_for_dump(dump_filename)
-        processes.append(execute_command(command, shell=True, run_to_completion=False))
-
-    if not dry_run:
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'sqlite_databases'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+            if dry_run:
+                continue
+
+            dump.create_named_pipe_for_dump(dump_filename)
+            processes.append(execute_command(command, shell=True, run_to_completion=False))
+
+        if not dry_run:
+            patterns.append(
+                borgmatic.borg.pattern.Pattern(
+                    os.path.join(borgmatic_runtime_directory, 'sqlite_databases'),
+                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+                )
             )
-        )
 
-    return processes
+        return processes
 
 
 def remove_data_source_dumps(

+ 59 - 51
borgmatic/hooks/data_source/zfs.py

@@ -9,6 +9,7 @@ import subprocess
 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
+import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot
 
 logger = logging.getLogger(__name__)
@@ -243,64 +244,71 @@ def dump_data_sources(
 
     If this is a dry run, then don't actually snapshot anything.
     '''
-    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-    logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
-
-    # List ZFS datasets to get their mount points, but only consider those patterns that came from
-    # actual user configuration (as opposed to, say, other hooks).
-    zfs_command = hook_config.get('zfs_command', 'zfs')
-    requested_datasets = get_datasets_to_backup(zfs_command, patterns)
-
-    # Snapshot each dataset, rewriting patterns to use the snapshot paths.
-    snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
-    normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
-
-    if not requested_datasets:
-        logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')
-
-    for dataset in requested_datasets:
-        full_snapshot_name = f'{dataset.name}@{snapshot_name}'
-        logger.debug(
-            f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
-        )
-
-        if not dry_run:
-            snapshot_dataset(zfs_command, full_snapshot_name)
-
-        # Mount the snapshot into a particular named temporary directory so that the snapshot ends
-        # up in the Borg archive at the "original" dataset mount point path.
-        snapshot_mount_path = os.path.join(
-            normalized_runtime_directory,
-            'zfs_snapshots',
-            hashlib.shake_256(dataset.mount_point.encode('utf-8')).hexdigest(
-                MOUNT_POINT_HASH_LENGTH
-            ),
-            dataset.mount_point.lstrip(os.path.sep),
-        )
+    with borgmatic.hooks.command.Before_after_hooks(
+        command_hooks=config.get('commands'),
+        before_after='dump_data_sources',
+        hook_name='zfs',
+        umask=config.get('umask'),
+        dry_run=dry_run,
+    ):
+        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+        logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
+
+        # List ZFS datasets to get their mount points, but only consider those patterns that came from
+        # actual user configuration (as opposed to, say, other hooks).
+        zfs_command = hook_config.get('zfs_command', 'zfs')
+        requested_datasets = get_datasets_to_backup(zfs_command, patterns)
+
+        # Snapshot each dataset, rewriting patterns to use the snapshot paths.
+        snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
+        normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
+
+        if not requested_datasets:
+            logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')
+
+        for dataset in requested_datasets:
+            full_snapshot_name = f'{dataset.name}@{snapshot_name}'
+            logger.debug(
+                f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
+            )
 
-        logger.debug(
-            f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
-        )
+            if not dry_run:
+                snapshot_dataset(zfs_command, full_snapshot_name)
+
+            # Mount the snapshot into a particular named temporary directory so that the snapshot ends
+            # up in the Borg archive at the "original" dataset mount point path.
+            snapshot_mount_path = os.path.join(
+                normalized_runtime_directory,
+                'zfs_snapshots',
+                hashlib.shake_256(dataset.mount_point.encode('utf-8')).hexdigest(
+                    MOUNT_POINT_HASH_LENGTH
+                ),
+                dataset.mount_point.lstrip(os.path.sep),
+            )
 
-        if dry_run:
-            continue
+            logger.debug(
+                f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
+            )
 
-        mount_snapshot(
-            hook_config.get('mount_command', 'mount'), full_snapshot_name, snapshot_mount_path
-        )
+            if dry_run:
+                continue
 
-        for pattern in dataset.contained_patterns:
-            snapshot_pattern = make_borg_snapshot_pattern(
-                pattern, dataset, normalized_runtime_directory
+            mount_snapshot(
+                hook_config.get('mount_command', 'mount'), full_snapshot_name, snapshot_mount_path
             )
 
-            # Attempt to update the pattern in place, since pattern order matters to Borg.
-            try:
-                patterns[patterns.index(pattern)] = snapshot_pattern
-            except ValueError:
-                patterns.append(snapshot_pattern)
+            for pattern in dataset.contained_patterns:
+                snapshot_pattern = make_borg_snapshot_pattern(
+                    pattern, dataset, normalized_runtime_directory
+                )
+
+                # Attempt to update the pattern in place, since pattern order matters to Borg.
+                try:
+                    patterns[patterns.index(pattern)] = snapshot_pattern
+                except ValueError:
+                    patterns.append(snapshot_pattern)
 
-    return []
+        return []
 
 
 def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover

+ 1 - 14
borgmatic/hooks/dispatch.py

@@ -63,20 +63,7 @@ def call_hook(function_name, config, hook_name, *args, **kwargs):
 
     logger.debug(f'Calling {hook_name} hook function {function_name}')
 
-    borgmatic.hooks.command.execute_hooks(
-        borgmatic.hooks.command.filter_hooks(config.get('commands'), before=function_name, hook_name=hook_name),
-        config.get('umask'),
-        dry_run=False,  # FIXME: Need to get this from somewhere.
-    )
-
-    try:
-        return getattr(module, function_name)(hook_config, config, *args, **kwargs)
-    finally:
-        borgmatic.hooks.command.execute_hooks(
-            borgmatic.hooks.command.filter_hooks(config.get('commands'), after=function_name, hook_name=hook_name),
-            config.get('umask'),
-            dry_run=False,  # FIXME: Need to get this from somewhere.
-        )
+    return getattr(module, function_name)(hook_config, config, *args, **kwargs)
 
 
 def call_hooks(function_name, config, hook_type, *args, **kwargs):
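
The deletion above is the flip side of the new context manager: call_hook() no longer brackets every hook function with command hooks itself, which also retires the dry_run FIXME, since each call site now passes its real dry_run flag to Before_after_hooks. The removed logic had roughly this try/finally shape, stubbed here so the sketch stands alone; __exit__() preserves the same run-"after"-even-on-error guarantee:

    def execute_command_hooks(preposition):
        print(f'running {preposition} command hooks')  # stand-in for execute_hooks()

    def call_hook_old_style(hook_function):
        execute_command_hooks('before')
        try:
            return hook_function()
        finally:
            # Mirrors the finally: block deleted from call_hook() above.
            execute_command_hooks('after')

    call_hook_old_style(lambda: print('hook function body'))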