Browse Source

merge upstream

Vandal 4 months ago
parent
commit
2045706faa
39 changed files with 1327 additions and 910 deletions
  1. 10 0
      NEWS
  2. 33 0
      borgmatic/actions/import_key.py
  3. 1 3
      borgmatic/borg/extract.py
  4. 2 0
      borgmatic/borg/feature.py
  5. 70 0
      borgmatic/borg/import_key.py
  6. 7 1
      borgmatic/borg/prune.py
  7. 26 1
      borgmatic/commands/arguments.py
  8. 18 0
      borgmatic/commands/borgmatic.py
  9. 36 47
      borgmatic/config/schema.yaml
  10. 9 3
      borgmatic/config/validate.py
  11. 16 8
      borgmatic/hooks/command.py
  12. 27 35
      borgmatic/hooks/data_source/bootstrap.py
  13. 25 33
      borgmatic/hooks/data_source/btrfs.py
  14. 62 70
      borgmatic/hooks/data_source/lvm.py
  15. 50 58
      borgmatic/hooks/data_source/mariadb.py
  16. 37 45
      borgmatic/hooks/data_source/mongodb.py
  17. 50 58
      borgmatic/hooks/data_source/mysql.py
  18. 92 108
      borgmatic/hooks/data_source/postgresql.py
  19. 52 57
      borgmatic/hooks/data_source/sqlite.py
  20. 51 59
      borgmatic/hooks/data_source/zfs.py
  21. 21 18
      docs/how-to/add-preparation-and-cleanup-steps-to-backups.md
  22. 0 1
      docs/how-to/set-up-backups.md
  23. 7 1
      tests/integration/config/test_schema.py
  24. 20 0
      tests/unit/actions/test_import_key.py
  25. 2 2
      tests/unit/borg/test_extract.py
  26. 279 0
      tests/unit/borg/test_import_key.py
  27. 66 0
      tests/unit/borg/test_prune.py
  28. 20 0
      tests/unit/commands/test_borgmatic.py
  29. 47 2
      tests/unit/config/test_validate.py
  30. 0 7
      tests/unit/hooks/data_source/test_bootstrap.py
  31. 0 18
      tests/unit/hooks/data_source/test_btrfs.py
  32. 0 21
      tests/unit/hooks/data_source/test_lvm.py
  33. 0 18
      tests/unit/hooks/data_source/test_mariadb.py
  34. 0 21
      tests/unit/hooks/data_source/test_mongodb.py
  35. 0 18
      tests/unit/hooks/data_source/test_mysql.py
  36. 0 42
      tests/unit/hooks/data_source/test_postgresql.py
  37. 143 18
      tests/unit/hooks/data_source/test_sqlite.py
  38. 0 15
      tests/unit/hooks/data_source/test_zfs.py
  39. 48 122
      tests/unit/hooks/test_command.py

+ 10 - 0
NEWS

@@ -1,9 +1,19 @@
 2.0.0.dev0
+ * #345: Add a "key import" action to import a repository key from backup.
  * #790, #821: Deprecate all "before_*", "after_*" and "on_error" command hooks in favor of more
    flexible "commands:". See the documentation for more information:
    https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
+ * #790: BREAKING: For both new and deprecated command hooks, run a configured "after" hook even if
+   an error occurs first. This allows you to perform cleanup steps that correspond to "before"
+   preparation commands—even when something goes wrong.
+ * #790: BREAKING: Run all command hooks (both new and deprecated) respecting the
+   "working_directory" option if configured, meaning that hook commands are run in that directory.
+ * #836: Add a custom command option for the SQLite hook.
+ * #1010: When using Borg 2, don't pass the "--stats" flag to "borg prune".
  * #1020: Document a database use case involving a temporary database client container:
    https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#containers
+ * #1037: Fix an error with the "extract" action when both a remote repository and a
+   "working_directory" are used.
 
 1.9.14
  * #409: With the PagerDuty monitoring hook, send borgmatic logs to PagerDuty so they show up in the

+ 33 - 0
borgmatic/actions/import_key.py

@@ -0,0 +1,33 @@
+import logging
+
+import borgmatic.borg.import_key
+import borgmatic.config.validate
+
+logger = logging.getLogger(__name__)
+
+
+def run_import_key(
+    repository,
+    config,
+    local_borg_version,
+    import_arguments,
+    global_arguments,
+    local_path,
+    remote_path,
+):
+    '''
+    Run the "key import" action for the given repository.
+    '''
+    if import_arguments.repository is None or borgmatic.config.validate.repositories_match(
+        repository, import_arguments.repository
+    ):
+        logger.info('Importing repository key')
+        borgmatic.borg.import_key.import_key(
+            repository['path'],
+            config,
+            local_borg_version,
+            import_arguments,
+            global_arguments,
+            local_path=local_path,
+            remote_path=remote_path,
+        )

+ 1 - 3
borgmatic/borg/extract.py

@@ -134,9 +134,7 @@ def extract_archive(
             # Make the repository path absolute so the destination directory used below via changing
             # the working directory doesn't prevent Borg from finding the repo. But also apply the
             # user's configured working directory (if any) to the repo path.
-            borgmatic.config.validate.normalize_repository_path(
-                os.path.join(working_directory or '', repository)
-            ),
+            borgmatic.config.validate.normalize_repository_path(repository, working_directory),
             archive,
             local_borg_version,
         )

+ 2 - 0
borgmatic/borg/feature.py

@@ -17,6 +17,7 @@ class Feature(Enum):
     MATCH_ARCHIVES = 11
     EXCLUDED_FILES_MINUS = 12
     ARCHIVE_SERIES = 13
+    NO_PRUNE_STATS = 14
 
 
 FEATURE_TO_MINIMUM_BORG_VERSION = {
@@ -33,6 +34,7 @@ FEATURE_TO_MINIMUM_BORG_VERSION = {
     Feature.MATCH_ARCHIVES: parse('2.0.0b3'),  # borg --match-archives
     Feature.EXCLUDED_FILES_MINUS: parse('2.0.0b5'),  # --list --filter uses "-" for excludes
     Feature.ARCHIVE_SERIES: parse('2.0.0b11'),  # identically named archives form a series
+    Feature.NO_PRUNE_STATS: parse('2.0.0b10'),  # prune --stats is not available
 }
 
 

+ 70 - 0
borgmatic/borg/import_key.py

@@ -0,0 +1,70 @@
+import logging
+import os
+
+import borgmatic.config.paths
+import borgmatic.logger
+from borgmatic.borg import environment, flags
+from borgmatic.execute import DO_NOT_CAPTURE, execute_command
+
+logger = logging.getLogger(__name__)
+
+
+def import_key(
+    repository_path,
+    config,
+    local_borg_version,
+    import_arguments,
+    global_arguments,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a local or remote repository path, a configuration dict, the local Borg version, import
+    arguments, global arguments, and optional local and remote Borg paths, import the repository
+    key from the path indicated in the import arguments.
+
+    If the path is empty or "-", then read the key from stdin.
+
+    Raise ValueError if the path is given and it does not exist.
+    '''
+    umask = config.get('umask', None)
+    lock_wait = config.get('lock_wait', None)
+    working_directory = borgmatic.config.paths.get_working_directory(config)
+
+    if import_arguments.path and import_arguments.path != '-':
+        if not os.path.exists(os.path.join(working_directory or '', import_arguments.path)):
+            raise ValueError(f'Path {import_arguments.path} does not exist. Aborting.')
+
+        input_file = None
+    else:
+        input_file = DO_NOT_CAPTURE
+
+    full_command = (
+        (local_path, 'key', 'import')
+        + (('--remote-path', remote_path) if remote_path else ())
+        + (('--umask', str(umask)) if umask else ())
+        + (('--log-json',) if global_arguments.log_json else ())
+        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
+        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+        + flags.make_flags('paper', import_arguments.paper)
+        + flags.make_repository_flags(
+            repository_path,
+            local_borg_version,
+        )
+        + ((import_arguments.path,) if input_file is None else ())
+    )
+
+    if global_arguments.dry_run:
+        logger.info('Skipping key import (dry run)')
+        return
+
+    execute_command(
+        full_command,
+        input_file=input_file,
+        output_log_level=logging.INFO,
+        environment=environment.make_environment(config),
+        working_directory=working_directory,
+        borg_local_path=local_path,
+        borg_exit_codes=config.get('borg_exit_codes'),
+    )

+ 7 - 1
borgmatic/borg/prune.py

@@ -75,7 +75,13 @@ def prune_archives(
         + (('--umask', str(umask)) if umask else ())
         + (('--log-json',) if global_arguments.log_json else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
-        + (('--stats',) if prune_arguments.stats and not dry_run else ())
+        + (
+            ('--stats',)
+            if prune_arguments.stats
+            and not dry_run
+            and not feature.available(feature.Feature.NO_PRUNE_STATS, local_borg_version)
+            else ()
+        )
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + flags.make_flags_from_arguments(
             prune_arguments,

+ 26 - 1
borgmatic/commands/arguments.py

@@ -547,7 +547,7 @@ def make_parsers():
         dest='stats',
         default=False,
         action='store_true',
-        help='Display statistics of the pruned archive',
+        help='Display statistics of the pruned archive [Borg 1 only]',
     )
     prune_group.add_argument(
         '--list', dest='list_archives', action='store_true', help='List archives kept/pruned'
@@ -1479,6 +1479,31 @@ def make_parsers():
         '-h', '--help', action='help', help='Show this help message and exit'
     )
 
+    key_import_parser = key_parsers.add_parser(
+        'import',
+        help='Import a copy of the repository key from backup',
+        description='Import a copy of the repository key from backup',
+        add_help=False,
+    )
+    key_import_group = key_import_parser.add_argument_group('key import arguments')
+    key_import_group.add_argument(
+        '--paper',
+        action='store_true',
+        help='Import interactively from a backup done with --paper',
+    )
+    key_import_group.add_argument(
+        '--repository',
+        help='Path of repository to import the key into, defaults to the configured repository if there is only one, quoted globs supported',
+    )
+    key_import_group.add_argument(
+        '--path',
+        metavar='PATH',
+        help='Path of the backup file to import the key from, defaults to stdin',
+    )
+    key_import_group.add_argument(
+        '-h', '--help', action='help', help='Show this help message and exit'
+    )
+
     key_change_passphrase_parser = key_parsers.add_parser(
         'change-passphrase',
         help='Change the passphrase protecting the repository key',

+ 18 - 0
borgmatic/commands/borgmatic.py

@@ -21,6 +21,7 @@ import borgmatic.actions.delete
 import borgmatic.actions.export_key
 import borgmatic.actions.export_tar
 import borgmatic.actions.extract
+import borgmatic.actions.import_key
 import borgmatic.actions.info
 import borgmatic.actions.list
 import borgmatic.actions.mount
@@ -33,6 +34,7 @@ import borgmatic.actions.restore
 import borgmatic.actions.transfer
 import borgmatic.commands.completion.bash
 import borgmatic.commands.completion.fish
+import borgmatic.config.paths
 from borgmatic.borg import umount as borg_umount
 from borgmatic.borg import version as borg_version
 from borgmatic.commands.arguments import parse_arguments
@@ -207,6 +209,7 @@ def run_configuration(config_filename, config, config_paths, arguments):
                 command_hooks=config.get('commands'),
                 before_after='configuration',
                 umask=config.get('umask'),
+                working_directory=borgmatic.config.paths.get_working_directory(config),
                 dry_run=global_arguments.dry_run,
                 action_names=arguments.keys(),
                 configuration_filename=config_filename,
@@ -286,6 +289,7 @@ def run_configuration(config_filename, config, config_paths, arguments):
                 config.get('commands'), after='error', action_names=arguments.keys()
             ),
             config.get('umask'),
+            borgmatic.config.paths.get_working_directory(config),
             global_arguments.dry_run,
             configuration_filename=config_filename,
             log_file=arguments['global'].log_file or '',
@@ -342,6 +346,7 @@ def run_actions(
         command_hooks=config.get('commands'),
         before_after='repository',
         umask=config.get('umask'),
+        working_directory=borgmatic.config.paths.get_working_directory(config),
         dry_run=global_arguments.dry_run,
         action_names=arguments.keys(),
         **hook_context,
@@ -354,6 +359,7 @@ def run_actions(
                 command_hooks=config.get('commands'),
                 before_after='action',
                 umask=config.get('umask'),
+                working_directory=borgmatic.config.paths.get_working_directory(config),
                 dry_run=global_arguments.dry_run,
                 action_names=arguments.keys(),
                 **hook_context,
@@ -528,6 +534,16 @@ def run_actions(
                         local_path,
                         remote_path,
                     )
+                elif action_name == 'import' and action_name not in skip_actions:
+                    borgmatic.actions.import_key.run_import_key(
+                        repository,
+                        config,
+                        local_borg_version,
+                        action_arguments,
+                        global_arguments,
+                        local_path,
+                        remote_path,
+                    )
                 elif action_name == 'change-passphrase' and action_name not in skip_actions:
                     borgmatic.actions.change_passphrase.run_change_passphrase(
                         repository,
@@ -854,6 +870,7 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
                     config.get('commands'), before='everything', action_names=arguments.keys()
                 ),
                 config.get('umask'),
+                borgmatic.config.paths.get_working_directory(config),
                 arguments['global'].dry_run,
                 configuration_filename=config_filename,
                 log_file=arguments['global'].log_file or '',
@@ -908,6 +925,7 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
                     config.get('commands'), after='everything', action_names=arguments.keys()
                 ),
                 config.get('umask'),
+                borgmatic.config.paths.get_working_directory(config),
                 arguments['global'].dry_run,
                 configuration_filename=config_filename,
                 log_file=arguments['global'].log_file or '',

+ 36 - 47
borgmatic/config/schema.yaml

@@ -959,7 +959,6 @@ properties:
                               - repository
                               - configuration
                               - everything
-                              - dump_data_sources
                           description: |
                               Name for the point in borgmatic's execution that
                               the commands should be run before (required if
@@ -972,19 +971,7 @@ properties:
                               repositories in the current configuration file.
                                * "everything" runs before all configuration
                               files.
-                               * "dump_data_sources" runs before each data
-                              source is dumped.
                           example: action
-                      hooks:
-                          type: array
-                          items:
-                              type: string
-                          description: |
-                              List of names of other hooks that this command
-                              hook applies to. Defaults to all hooks of the
-                              relevant type. Only supported for the
-                              "dump_data_sources" hook.
-                          example: postgresql
                       when:
                           type: array
                           items:
@@ -1013,9 +1000,7 @@ properties:
                                   - borg
                           description: |
                               List of actions for which the commands will be
-                              run. Defaults to running for all actions. Ignored
-                              for "dump_data_sources", which by its nature only
-                              runs for "create".
+                              run. Defaults to running for all actions.
                           example: [create, prune, compact, check]
                       run:
                           type: array
@@ -1037,7 +1022,6 @@ properties:
                               - configuration
                               - everything
                               - error
-                              - dump_data_sources
                           description: |
                               Name for the point in borgmatic's execution that
                               the commands should be run after (required if
@@ -1051,19 +1035,7 @@ properties:
                                * "everything" runs after all configuration
                               files.
                                * "error" runs after an error occurs.
-                               * "dump_data_sources" runs after each data
-                              source is dumped.
                           example: action
-                      hooks:
-                          type: array
-                          items:
-                              type: string
-                          description: |
-                              List of names of other hooks that this command
-                              hook applies to. Defaults to all hooks of the
-                              relevant type. Only supported for the
-                              "dump_data_sources" hook.
-                          example: postgresql
                       when:
                           type: array
                           items:
@@ -1093,9 +1065,7 @@ properties:
                           description: |
                               Only trigger the hook when borgmatic is run with
                               particular actions listed here. Defaults to
-                              running for all actions. Ignored for
-                              "dump_data_sources", which by its nature only runs
-                              for "create".
+                              running for all actions.
                           example: [create, prune, compact, check]
                       run:
                           type: array
@@ -1267,11 +1237,11 @@ properties:
                         Command to use instead of "pg_dump" or "pg_dumpall".
                         This can be used to run a specific pg_dump version
                         (e.g., one inside a running container). If you run it
-                        from within a container, make sure to mount your
-                        host's ".borgmatic" folder into the container using
-                        the same directory structure. Defaults to "pg_dump"
-                        for single database dump or "pg_dumpall" to dump all
-                        databases.
+                        from within a container, make sure to mount the path in
+                        the "user_runtime_directory" option from the host into
+                        the container at the same location. Defaults to
+                        "pg_dump" for single database dump or "pg_dumpall" to
+                        dump all databases.
                     example: docker exec my_pg_container pg_dump
                 pg_restore_command:
                     type: string
@@ -1408,10 +1378,11 @@ properties:
                     description: |
                         Command to use instead of "mariadb-dump". This can be
                         used to run a specific mariadb_dump version (e.g., one
-                        inside a running container). If you run it from within
-                        a container, make sure to mount your host's
-                        ".borgmatic" folder into the container using the same
-                        directory structure. Defaults to "mariadb-dump".
+                        inside a running container). If you run it from within a
+                        container, make sure to mount the path in the
+                        "user_runtime_directory" option from the host into the
+                        container at the same location. Defaults to
+                        "mariadb-dump".
                     example: docker exec mariadb_container mariadb-dump
                 mariadb_command:
                     type: string
@@ -1550,12 +1521,12 @@ properties:
                 mysql_dump_command:
                     type: string
                     description: |
-                        Command to use instead of "mysqldump". This can be
-                        used to run a specific mysql_dump version (e.g., one
-                        inside a running container). If you run it from within
-                        a container, make sure to mount your host's
-                        ".borgmatic" folder into the container using the same
-                        directory structure. Defaults to "mysqldump".
+                        Command to use instead of "mysqldump". This can be used
+                        to run a specific mysql_dump version (e.g., one inside a
+                        running container). If you run it from within a
+                        container, make sure to mount the path in the
+                        "user_runtime_directory" option from the host into the
+                        container at the same location. Defaults to "mysqldump".
                     example: docker exec mysql_container mysqldump
                 mysql_command:
                     type: string
@@ -1642,6 +1613,24 @@ properties:
                         Path to the SQLite database file to restore to. Defaults
                         to the "path" option.
                     example: /var/lib/sqlite/users.db
+                sqlite_command:
+                    type: string
+                    description: |
+                        Command to use instead of "sqlite3". This can be used to
+                        run a specific sqlite3 version (e.g., one inside a
+                        running container). If you run it from within a
+                        container, make sure to mount the path in the
+                        "user_runtime_directory" option from the host into the
+                        container at the same location. Defaults to "sqlite3".
+                    example: docker exec sqlite_container sqlite3
+                sqlite_restore_command:
+                    type: string
+                    description: |
+                        Command to run when restoring a database instead
+                        of "sqlite3". This can be used to run a specific
+                        sqlite3 version (e.g., one inside a running container).
+                        Defaults to "sqlite3".
+                    example: docker exec sqlite_container sqlite3
     mongodb_databases:
         type: array
         items:

+ 9 - 3
borgmatic/config/validate.py

@@ -138,16 +138,22 @@ def parse_configuration(config_filename, schema_filename, overrides=None, resolv
     return config, config_paths, logs
 
 
-def normalize_repository_path(repository):
+def normalize_repository_path(repository, base=None):
     '''
     Given a repository path, return the absolute path of it (for local repositories).
     Optionally resolve relative paths against a given base path, e.g. the configured working
     directory.
     '''
     # A colon in the repository could mean that it's either a file:// URL or a remote repository.
     # If it's a remote repository, we don't want to normalize it. If it's a file:// URL, we do.
     if ':' not in repository:
-        return os.path.abspath(repository)
+        return (
+            os.path.abspath(os.path.join(base, repository)) if base else os.path.abspath(repository)
+        )
     elif repository.startswith('file://'):
-        return os.path.abspath(repository.partition('file://')[-1])
+        local_path = repository.partition('file://')[-1]
+        return (
+            os.path.abspath(os.path.join(base, local_path)) if base else os.path.abspath(local_path)
+        )
     else:
         return repository
 

+ 16 - 8
borgmatic/hooks/command.py

@@ -55,22 +55,20 @@ def filter_hooks(command_hooks, before=None, after=None, hook_name=None, action_
     return tuple(
         hook_config
         for hook_config in command_hooks or ()
-        for config_hook_names in (hook_config.get('hooks'),)
         for config_action_names in (hook_config.get('when'),)
         if before is None or hook_config.get('before') == before
         if after is None or hook_config.get('after') == after
-        if hook_name is None or config_hook_names is None or hook_name in config_hook_names
         if action_names is None
         or config_action_names is None
         or set(config_action_names or ()).intersection(set(action_names))
     )
 
 
-def execute_hooks(command_hooks, umask, dry_run, **context):
+def execute_hooks(command_hooks, umask, working_directory, dry_run, **context):
     '''
-    Given a sequence of command hook dicts from configuration, a umask to execute with (or None),
-    and whether this is a dry run, run the commands for each hook. Or don't run them if this is a
-    dry run.
+    Given a sequence of command hook dicts from configuration, a umask to execute with (or None), a
+    working directory to execute with, and whether this is a dry run, run the commands for each
+    hook. Or don't run them if this is a dry run.
 
     The context contains optional values interpolated by name into the hook commands.
 
@@ -123,6 +121,7 @@ def execute_hooks(command_hooks, umask, dry_run, **context):
                     ),
                     shell=True,
                     environment=make_environment(os.environ),
+                    working_directory=working_directory,
                 )
         finally:
             if original_umask:
@@ -155,6 +154,7 @@ class Before_after_hooks:
         command_hooks,
         before_after,
         umask,
+        working_directory,
         dry_run,
         hook_name=None,
         action_names=None,
@@ -162,12 +162,14 @@ class Before_after_hooks:
     ):
         '''
         Given a sequence of command hook configuration dicts, the before/after name, a umask to run
-        commands with, a dry run flag, the name of the calling hook, a sequence of action names, and
-        any context for the executed commands, save those data points for use below.
+        commands with, a working directory to run commands with, a dry run flag, the name of the
+        calling hook, a sequence of action names, and any context for the executed commands, save
+        those data points for use below.
         '''
         self.command_hooks = command_hooks
         self.before_after = before_after
         self.umask = umask
+        self.working_directory = working_directory
         self.dry_run = dry_run
         self.hook_name = hook_name
         self.action_names = action_names
@@ -186,6 +188,7 @@ class Before_after_hooks:
                     action_names=self.action_names,
                 ),
                 self.umask,
+                self.working_directory,
                 self.dry_run,
                 **self.context,
             )
@@ -193,6 +196,10 @@ class Before_after_hooks:
             if considered_soft_failure(error):
                 return
 
+            # Trigger the after hook manually, since raising here will prevent it from being run
+            # otherwise.
+            self.__exit__(None, None, None)
+
             raise ValueError(f'Error running before {self.before_after} hook: {error}')
 
     def __exit__(self, exception_type, exception, traceback):
@@ -208,6 +215,7 @@ class Before_after_hooks:
                     action_names=self.action_names,
                 ),
                 self.umask,
+                self.working_directory,
                 self.dry_run,
                 **self.context,
             )

+ 27 - 35
borgmatic/hooks/data_source/bootstrap.py

@@ -6,7 +6,6 @@ import os
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 
 logger = logging.getLogger(__name__)
 
@@ -38,45 +37,38 @@ def dump_data_sources(
     if hook_config and hook_config.get('store_config_files') is False:
         return []
 
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='bootstrap',
-    ):
-        borgmatic_manifest_path = os.path.join(
-            borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
+    borgmatic_manifest_path = os.path.join(
+        borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
+    )
+
+    if dry_run:
+        return []
+
+    os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
+
+    with open(borgmatic_manifest_path, 'w') as manifest_file:
+        json.dump(
+            {
+                'borgmatic_version': importlib.metadata.version('borgmatic'),
+                'config_paths': config_paths,
+            },
+            manifest_file,
         )
 
-        if dry_run:
-            return []
-
-        os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
-
-        with open(borgmatic_manifest_path, 'w') as manifest_file:
-            json.dump(
-                {
-                    'borgmatic_version': importlib.metadata.version('borgmatic'),
-                    'config_paths': config_paths,
-                },
-                manifest_file,
-            )
-
-        patterns.extend(
-            borgmatic.borg.pattern.Pattern(
-                config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK
-            )
-            for config_path in config_paths
+    patterns.extend(
+        borgmatic.borg.pattern.Pattern(
+            config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK
         )
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'bootstrap'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
-            )
+        for config_path in config_paths
+    )
+    patterns.append(
+        borgmatic.borg.pattern.Pattern(
+            os.path.join(borgmatic_runtime_directory, 'bootstrap'),
+            source=borgmatic.borg.pattern.Pattern_source.HOOK,
         )
+    )
 
-        return []
+    return []
 
 
 def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):

+ 25 - 33
borgmatic/hooks/data_source/btrfs.py

@@ -9,7 +9,6 @@ import subprocess
 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
-import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot
 
 logger = logging.getLogger(__name__)
@@ -250,48 +249,41 @@ def dump_data_sources(
 
     If this is a dry run, then don't actually snapshot anything.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='btrfs',
-    ):
-        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-        logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
+    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+    logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
 
-        # Based on the configured patterns, determine Btrfs subvolumes to backup. Only consider those
-        # patterns that came from actual user configuration (as opposed to, say, other hooks).
-        btrfs_command = hook_config.get('btrfs_command', 'btrfs')
-        findmnt_command = hook_config.get('findmnt_command', 'findmnt')
-        subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)
+    # Based on the configured patterns, determine Btrfs subvolumes to backup. Only consider those
+    # patterns that came from actual user configuration (as opposed to, say, other hooks).
+    btrfs_command = hook_config.get('btrfs_command', 'btrfs')
+    findmnt_command = hook_config.get('findmnt_command', 'findmnt')
+    subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)
 
-        if not subvolumes:
-            logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')
+    if not subvolumes:
+        logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')
 
-        # Snapshot each subvolume, rewriting patterns to use their snapshot paths.
-        for subvolume in subvolumes:
-            logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')
+    # Snapshot each subvolume, rewriting patterns to use their snapshot paths.
+    for subvolume in subvolumes:
+        logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')
 
-            snapshot_path = make_snapshot_path(subvolume.path)
+        snapshot_path = make_snapshot_path(subvolume.path)
 
-            if dry_run:
-                continue
+        if dry_run:
+            continue
 
-            snapshot_subvolume(btrfs_command, subvolume.path, snapshot_path)
+        snapshot_subvolume(btrfs_command, subvolume.path, snapshot_path)
 
-            for pattern in subvolume.contained_patterns:
-                snapshot_pattern = make_borg_snapshot_pattern(subvolume.path, pattern)
+        for pattern in subvolume.contained_patterns:
+            snapshot_pattern = make_borg_snapshot_pattern(subvolume.path, pattern)
 
-                # Attempt to update the pattern in place, since pattern order matters to Borg.
-                try:
-                    patterns[patterns.index(pattern)] = snapshot_pattern
-                except ValueError:
-                    patterns.append(snapshot_pattern)
+            # Attempt to update the pattern in place, since pattern order matters to Borg.
+            try:
+                patterns[patterns.index(pattern)] = snapshot_pattern
+            except ValueError:
+                patterns.append(snapshot_pattern)
 
-            patterns.append(make_snapshot_exclude_pattern(subvolume.path))
+        patterns.append(make_snapshot_exclude_pattern(subvolume.path))
 
-        return []
+    return []
 
 
 def delete_snapshot(btrfs_command, snapshot_path):  # pragma: no cover

+ 62 - 70
borgmatic/hooks/data_source/lvm.py

@@ -10,7 +10,6 @@ import subprocess
 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
-import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot
 
 logger = logging.getLogger(__name__)
@@ -198,84 +197,77 @@ def dump_data_sources(
 
     If this is a dry run, then don't actually snapshot anything.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='lvm',
-    ):
-        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-        logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
-
-        # List logical volumes to get their mount points, but only consider those patterns that came
-        # from actual user configuration (as opposed to, say, other hooks).
-        lsblk_command = hook_config.get('lsblk_command', 'lsblk')
-        requested_logical_volumes = get_logical_volumes(lsblk_command, patterns)
-
-        # Snapshot each logical volume, rewriting source directories to use the snapshot paths.
-        snapshot_suffix = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
-        normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
-
-        if not requested_logical_volumes:
-            logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')
-
-        for logical_volume in requested_logical_volumes:
-            snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
-            logger.debug(
-                f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
-            )
+    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+    logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
 
-            if not dry_run:
-                snapshot_logical_volume(
-                    hook_config.get('lvcreate_command', 'lvcreate'),
-                    snapshot_name,
-                    logical_volume.device_path,
-                    hook_config.get('snapshot_size', DEFAULT_SNAPSHOT_SIZE),
-                )
+    # List logical volumes to get their mount points, but only consider those patterns that came
+    # from actual user configuration (as opposed to, say, other hooks).
+    lsblk_command = hook_config.get('lsblk_command', 'lsblk')
+    requested_logical_volumes = get_logical_volumes(lsblk_command, patterns)
 
-            # Get the device path for the snapshot we just created.
-            try:
-                snapshot = get_snapshots(
-                    hook_config.get('lvs_command', 'lvs'), snapshot_name=snapshot_name
-                )[0]
-            except IndexError:
-                raise ValueError(f'Cannot find LVM snapshot {snapshot_name}')
-
-            # Mount the snapshot into a particular named temporary directory so that the snapshot ends
-            # up in the Borg archive at the "original" logical volume mount point path.
-            snapshot_mount_path = os.path.join(
-                normalized_runtime_directory,
-                'lvm_snapshots',
-                hashlib.shake_256(logical_volume.mount_point.encode('utf-8')).hexdigest(
-                    MOUNT_POINT_HASH_LENGTH
-                ),
-                logical_volume.mount_point.lstrip(os.path.sep),
-            )
+    # Snapshot each logical volume, rewriting source directories to use the snapshot paths.
+    snapshot_suffix = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
+    normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
 
-            logger.debug(
-                f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
-            )
+    if not requested_logical_volumes:
+        logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')
 
-            if dry_run:
-                continue
+    for logical_volume in requested_logical_volumes:
+        snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
+        logger.debug(
+            f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
+        )
 
-            mount_snapshot(
-                hook_config.get('mount_command', 'mount'), snapshot.device_path, snapshot_mount_path
+        if not dry_run:
+            snapshot_logical_volume(
+                hook_config.get('lvcreate_command', 'lvcreate'),
+                snapshot_name,
+                logical_volume.device_path,
+                hook_config.get('snapshot_size', DEFAULT_SNAPSHOT_SIZE),
             )
 
-            for pattern in logical_volume.contained_patterns:
-                snapshot_pattern = make_borg_snapshot_pattern(
-                    pattern, logical_volume, normalized_runtime_directory
-                )
+        # Get the device path for the snapshot we just created.
+        try:
+            snapshot = get_snapshots(
+                hook_config.get('lvs_command', 'lvs'), snapshot_name=snapshot_name
+            )[0]
+        except IndexError:
+            raise ValueError(f'Cannot find LVM snapshot {snapshot_name}')
+
+        # Mount the snapshot into a particular named temporary directory so that the snapshot ends
+        # up in the Borg archive at the "original" logical volume mount point path.
+        snapshot_mount_path = os.path.join(
+            normalized_runtime_directory,
+            'lvm_snapshots',
+            hashlib.shake_256(logical_volume.mount_point.encode('utf-8')).hexdigest(
+                MOUNT_POINT_HASH_LENGTH
+            ),
+            logical_volume.mount_point.lstrip(os.path.sep),
+        )
+
+        logger.debug(
+            f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
+        )
+
+        if dry_run:
+            continue
 
-                # Attempt to update the pattern in place, since pattern order matters to Borg.
-                try:
-                    patterns[patterns.index(pattern)] = snapshot_pattern
-                except ValueError:
-                    patterns.append(snapshot_pattern)
+        mount_snapshot(
+            hook_config.get('mount_command', 'mount'), snapshot.device_path, snapshot_mount_path
+        )
+
+        for pattern in logical_volume.contained_patterns:
+            snapshot_pattern = make_borg_snapshot_pattern(
+                pattern, logical_volume, normalized_runtime_directory
+            )
+
+            # Attempt to update the pattern in place, since pattern order matters to Borg.
+            try:
+                patterns[patterns.index(pattern)] = snapshot_pattern
+            except ValueError:
+                patterns.append(snapshot_pattern)
 
-        return []
+    return []
 
 
 def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover

+ 50 - 58
borgmatic/hooks/data_source/mariadb.py

@@ -6,7 +6,6 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import (
     execute_command,
@@ -243,78 +242,71 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='mariadb',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
-
-        logger.info(f'Dumping MariaDB databases{dry_run_label}')
-
-        for database in databases:
-            dump_path = make_dump_path(borgmatic_runtime_directory)
-            username = borgmatic.hooks.credential.parse.resolve_credential(
-                database.get('username'), config
-            )
-            password = borgmatic.hooks.credential.parse.resolve_credential(
-                database.get('password'), config
-            )
-            environment = dict(os.environ)
-            dump_database_names = database_names_to_dump(
-                database, config, username, password, environment, dry_run
-            )
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []
 
-            if not dump_database_names:
-                if dry_run:
-                    continue
-
-                raise ValueError('Cannot find any MariaDB databases to dump.')
-
-            if database['name'] == 'all' and database.get('format'):
-                for dump_name in dump_database_names:
-                    renamed_database = copy.copy(database)
-                    renamed_database['name'] = dump_name
-                    processes.append(
-                        execute_dump_command(
-                            renamed_database,
-                            config,
-                            username,
-                            password,
-                            dump_path,
-                            (dump_name,),
-                            environment,
-                            dry_run,
-                            dry_run_label,
-                        )
-                    )
-            else:
+    logger.info(f'Dumping MariaDB databases{dry_run_label}')
+
+    for database in databases:
+        dump_path = make_dump_path(borgmatic_runtime_directory)
+        username = borgmatic.hooks.credential.parse.resolve_credential(
+            database.get('username'), config
+        )
+        password = borgmatic.hooks.credential.parse.resolve_credential(
+            database.get('password'), config
+        )
+        environment = dict(os.environ)
+        dump_database_names = database_names_to_dump(
+            database, config, username, password, environment, dry_run
+        )
+
+        if not dump_database_names:
+            if dry_run:
+                continue
+
+            raise ValueError('Cannot find any MariaDB databases to dump.')
+
+        if database['name'] == 'all' and database.get('format'):
+            for dump_name in dump_database_names:
+                renamed_database = copy.copy(database)
+                renamed_database['name'] = dump_name
                 processes.append(
                     execute_dump_command(
-                        database,
+                        renamed_database,
                         config,
                         username,
                         password,
                         dump_path,
-                        dump_database_names,
+                        (dump_name,),
                         environment,
                         dry_run,
                         dry_run_label,
                     )
                 )
-
-        if not dry_run:
-            patterns.append(
-                borgmatic.borg.pattern.Pattern(
-                    os.path.join(borgmatic_runtime_directory, 'mariadb_databases'),
-                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        else:
+            processes.append(
+                execute_dump_command(
+                    database,
+                    config,
+                    username,
+                    password,
+                    dump_path,
+                    dump_database_names,
+                    environment,
+                    dry_run,
+                    dry_run_label,
                 )
             )
 
-        return [process for process in processes if process]
+    if not dry_run:
+        patterns.append(
+            borgmatic.borg.pattern.Pattern(
+                os.path.join(borgmatic_runtime_directory, 'mariadb_databases'),
+                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+            )
+        )
+
+    return [process for process in processes if process]
 
 
 def remove_data_source_dumps(

+ 37 - 45
borgmatic/hooks/data_source/mongodb.py

@@ -4,7 +4,6 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import execute_command, execute_command_with_processes
 from borgmatic.hooks.data_source import dump
@@ -49,53 +48,46 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='mongodb',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-
-        logger.info(f'Dumping MongoDB databases{dry_run_label}')
-
-        processes = []
-
-        for database in databases:
-            name = database['name']
-            dump_filename = dump.make_data_source_dump_filename(
-                make_dump_path(borgmatic_runtime_directory),
-                name,
-                database.get('hostname'),
-                database.get('port'),
-            )
-            dump_format = database.get('format', 'archive')
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
 
-            logger.debug(
-                f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
-            )
-            if dry_run:
-                continue
-
-            command = build_dump_command(database, config, dump_filename, dump_format)
-
-            if dump_format == 'directory':
-                dump.create_parent_directory_for_dump(dump_filename)
-                execute_command(command, shell=True)
-            else:
-                dump.create_named_pipe_for_dump(dump_filename)
-                processes.append(execute_command(command, shell=True, run_to_completion=False))
-
-        if not dry_run:
-            patterns.append(
-                borgmatic.borg.pattern.Pattern(
-                    os.path.join(borgmatic_runtime_directory, 'mongodb_databases'),
-                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
-                )
+    logger.info(f'Dumping MongoDB databases{dry_run_label}')
+
+    processes = []
+
+    for database in databases:
+        name = database['name']
+        dump_filename = dump.make_data_source_dump_filename(
+            make_dump_path(borgmatic_runtime_directory),
+            name,
+            database.get('hostname'),
+            database.get('port'),
+        )
+        dump_format = database.get('format', 'archive')
+
+        logger.debug(
+            f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
+        )
+        if dry_run:
+            continue
+
+        command = build_dump_command(database, config, dump_filename, dump_format)
+
+        if dump_format == 'directory':
+            dump.create_parent_directory_for_dump(dump_filename)
+            execute_command(command, shell=True)
+        else:
+            dump.create_named_pipe_for_dump(dump_filename)
+            processes.append(execute_command(command, shell=True, run_to_completion=False))
+
+    if not dry_run:
+        patterns.append(
+            borgmatic.borg.pattern.Pattern(
+                os.path.join(borgmatic_runtime_directory, 'mongodb_databases'),
+                source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
+        )
 
-        return processes
+    return processes
 
 
 def make_password_config_file(password):

+ 50 - 58
borgmatic/hooks/data_source/mysql.py

@@ -5,7 +5,6 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 import borgmatic.hooks.data_source.mariadb
 from borgmatic.execute import (
@@ -170,78 +169,71 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='mysql',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
-
-        logger.info(f'Dumping MySQL databases{dry_run_label}')
-
-        for database in databases:
-            dump_path = make_dump_path(borgmatic_runtime_directory)
-            username = borgmatic.hooks.credential.parse.resolve_credential(
-                database.get('username'), config
-            )
-            password = borgmatic.hooks.credential.parse.resolve_credential(
-                database.get('password'), config
-            )
-            environment = dict(os.environ)
-            dump_database_names = database_names_to_dump(
-                database, config, username, password, environment, dry_run
-            )
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []
 
-            if not dump_database_names:
-                if dry_run:
-                    continue
-
-                raise ValueError('Cannot find any MySQL databases to dump.')
-
-            if database['name'] == 'all' and database.get('format'):
-                for dump_name in dump_database_names:
-                    renamed_database = copy.copy(database)
-                    renamed_database['name'] = dump_name
-                    processes.append(
-                        execute_dump_command(
-                            renamed_database,
-                            config,
-                            username,
-                            password,
-                            dump_path,
-                            (dump_name,),
-                            environment,
-                            dry_run,
-                            dry_run_label,
-                        )
-                    )
-            else:
+    logger.info(f'Dumping MySQL databases{dry_run_label}')
+
+    for database in databases:
+        dump_path = make_dump_path(borgmatic_runtime_directory)
+        username = borgmatic.hooks.credential.parse.resolve_credential(
+            database.get('username'), config
+        )
+        password = borgmatic.hooks.credential.parse.resolve_credential(
+            database.get('password'), config
+        )
+        environment = dict(os.environ)
+        dump_database_names = database_names_to_dump(
+            database, config, username, password, environment, dry_run
+        )
+
+        if not dump_database_names:
+            if dry_run:
+                continue
+
+            raise ValueError('Cannot find any MySQL databases to dump.')
+
+        if database['name'] == 'all' and database.get('format'):
+            for dump_name in dump_database_names:
+                renamed_database = copy.copy(database)
+                renamed_database['name'] = dump_name
                 processes.append(
                     execute_dump_command(
-                        database,
+                        renamed_database,
                         config,
                         username,
                         password,
                         dump_path,
-                        dump_database_names,
+                        (dump_name,),
                         environment,
                         dry_run,
                         dry_run_label,
                     )
                 )
-
-        if not dry_run:
-            patterns.append(
-                borgmatic.borg.pattern.Pattern(
-                    os.path.join(borgmatic_runtime_directory, 'mysql_databases'),
-                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        else:
+            processes.append(
+                execute_dump_command(
+                    database,
+                    config,
+                    username,
+                    password,
+                    dump_path,
+                    dump_database_names,
+                    environment,
+                    dry_run,
+                    dry_run_label,
                 )
             )
 
-        return [process for process in processes if process]
+    if not dry_run:
+        patterns.append(
+            borgmatic.borg.pattern.Pattern(
+                os.path.join(borgmatic_runtime_directory, 'mysql_databases'),
+                source=borgmatic.borg.pattern.Pattern_source.HOOK,
+            )
+        )
+
+    return [process for process in processes if process]
 
 
 def remove_data_source_dumps(

+ 92 - 108
borgmatic/hooks/data_source/postgresql.py

@@ -7,7 +7,6 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import (
     execute_command,
@@ -142,127 +141,112 @@ def dump_data_sources(
 
     Raise ValueError if the databases to dump cannot be determined.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='postgresql',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
-
-        logger.info(f'Dumping PostgreSQL databases{dry_run_label}')
-
-        for database in databases:
-            environment = make_environment(database, config)
-            dump_path = make_dump_path(borgmatic_runtime_directory)
-            dump_database_names = database_names_to_dump(database, config, environment, dry_run)
-
-            if not dump_database_names:
-                if dry_run:
-                    continue
-
-                raise ValueError('Cannot find any PostgreSQL databases to dump.')
-
-            for database_name in dump_database_names:
-                dump_format = database.get('format', None if database_name == 'all' else 'custom')
-                compression = database.get('compression')
-                default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
-                dump_command = tuple(
-                    shlex.quote(part)
-                    for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []
+
+    logger.info(f'Dumping PostgreSQL databases{dry_run_label}')
+
+    for database in databases:
+        environment = make_environment(database, config)
+        dump_path = make_dump_path(borgmatic_runtime_directory)
+        dump_database_names = database_names_to_dump(database, config, environment, dry_run)
+
+        if not dump_database_names:
+            if dry_run:
+                continue
+
+            raise ValueError('Cannot find any PostgreSQL databases to dump.')
+
+        for database_name in dump_database_names:
+            dump_format = database.get('format', None if database_name == 'all' else 'custom')
+            compression = database.get('compression')
+            default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
+            dump_command = tuple(
+                shlex.quote(part)
+                for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
+            )
+            dump_filename = dump.make_data_source_dump_filename(
+                dump_path,
+                database_name,
+                database.get('hostname'),
+                database.get('port'),
+            )
+            if os.path.exists(dump_filename):
+                logger.warning(
+                    f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                 )
-                dump_filename = dump.make_data_source_dump_filename(
-                    dump_path,
-                    database_name,
-                    database.get('hostname'),
-                    database.get('port'),
+                continue
+
+            command = (
+                dump_command
+                + (
+                    '--no-password',
+                    '--clean',
+                    '--if-exists',
                 )
-                if os.path.exists(dump_filename):
-                    logger.warning(
-                        f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
-                    )
-                    continue
-
-                command = (
-                    dump_command
-                    + (
-                        '--no-password',
-                        '--clean',
-                        '--if-exists',
-                    )
-                    + (
-                        ('--host', shlex.quote(database['hostname']))
-                        if 'hostname' in database
-                        else ()
-                    )
-                    + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
-                    + (
-                        (
-                            '--username',
-                            shlex.quote(
-                                borgmatic.hooks.credential.parse.resolve_credential(
-                                    database['username'], config
-                                )
-                            ),
-                        )
-                        if 'username' in database
-                        else ()
+                + (('--host', shlex.quote(database['hostname'])) if 'hostname' in database else ())
+                + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
+                + (
+                    (
+                        '--username',
+                        shlex.quote(
+                            borgmatic.hooks.credential.parse.resolve_credential(
+                                database['username'], config
+                            )
+                        ),
                     )
-                    + (('--no-owner',) if database.get('no_owner', False) else ())
-                    + (('--format', shlex.quote(dump_format)) if dump_format else ())
-                    + (
-                        ('--compress', shlex.quote(str(compression)))
-                        if compression is not None
-                        else ()
-                    )
-                    + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
-                    + (
-                        tuple(shlex.quote(option) for option in database['options'].split(' '))
-                        if 'options' in database
-                        else ()
-                    )
-                    + (() if database_name == 'all' else (shlex.quote(database_name),))
-                    # Use shell redirection rather than the --file flag to sidestep synchronization issues
-                    # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
-                    # format in a particular, a named destination is required, and redirection doesn't work.
-                    + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
+                    if 'username' in database
+                    else ()
                 )
-
-                logger.debug(
-                    f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
+                + (('--no-owner',) if database.get('no_owner', False) else ())
+                + (('--format', shlex.quote(dump_format)) if dump_format else ())
+                + (('--compress', shlex.quote(str(compression))) if compression is not None else ())
+                + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
+                + (
+                    tuple(shlex.quote(option) for option in database['options'].split(' '))
+                    if 'options' in database
+                    else ()
                 )
-                if dry_run:
-                    continue
+                + (() if database_name == 'all' else (shlex.quote(database_name),))
+                # Use shell redirection rather than the --file flag to sidestep synchronization issues
+                # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
+                # format in a particular, a named destination is required, and redirection doesn't work.
+                + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
+            )
 
-                if dump_format == 'directory':
-                    dump.create_parent_directory_for_dump(dump_filename)
+            logger.debug(
+                f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
+            )
+            if dry_run:
+                continue
+
+            if dump_format == 'directory':
+                dump.create_parent_directory_for_dump(dump_filename)
+                execute_command(
+                    command,
+                    shell=True,
+                    environment=environment,
+                )
+            else:
+                dump.create_named_pipe_for_dump(dump_filename)
+                processes.append(
                     execute_command(
                         command,
                         shell=True,
                         environment=environment,
+                        run_to_completion=False,
                     )
-                else:
-                    dump.create_named_pipe_for_dump(dump_filename)
-                    processes.append(
-                        execute_command(
-                            command,
-                            shell=True,
-                            environment=environment,
-                            run_to_completion=False,
-                        )
-                    )
-
-        if not dry_run:
-            patterns.append(
-                borgmatic.borg.pattern.Pattern(
-                    os.path.join(borgmatic_runtime_directory, 'postgresql_databases'),
-                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
                 )
+
+    if not dry_run:
+        patterns.append(
+            borgmatic.borg.pattern.Pattern(
+                os.path.join(borgmatic_runtime_directory, 'postgresql_databases'),
+                source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
+        )
 
-        return processes
+    return processes
 
 
 def remove_data_source_dumps(

+ 52 - 57
borgmatic/hooks/data_source/sqlite.py

@@ -4,7 +4,6 @@ import shlex
 
 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 from borgmatic.execute import execute_command, execute_command_with_processes
 from borgmatic.hooks.data_source import dump
 
@@ -48,62 +47,58 @@ def dump_data_sources(
     Also append the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='sqlite',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
-
-        logger.info(f'Dumping SQLite databases{dry_run_label}')
-
-        for database in databases:
-            database_path = database['path']
-
-            if database['name'] == 'all':
-                logger.warning('The "all" database name has no meaning for SQLite databases')
-            if not os.path.exists(database_path):
-                logger.warning(
-                    f'No SQLite database at {database_path}; an empty database will be created and dumped'
-                )
-
-            dump_path = make_dump_path(borgmatic_runtime_directory)
-            dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])
-
-            if os.path.exists(dump_filename):
-                logger.warning(
-                    f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
-                )
-                continue
-
-            command = (
-                'sqlite3',
-                shlex.quote(database_path),
-                '.dump',
-                '>',
-                shlex.quote(dump_filename),
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []
+
+    logger.info(f'Dumping SQLite databases{dry_run_label}')
+
+    for database in databases:
+        database_path = database['path']
+
+        if database['name'] == 'all':
+            logger.warning('The "all" database name has no meaning for SQLite databases')
+        if not os.path.exists(database_path):
+            logger.warning(
+                f'No SQLite database at {database_path}; an empty database will be created and dumped'
             )
-            logger.debug(
-                f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
+
+        dump_path = make_dump_path(borgmatic_runtime_directory)
+        dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])
+
+        if os.path.exists(dump_filename):
+            logger.warning(
+                f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
             )
-            if dry_run:
-                continue
-
-            dump.create_named_pipe_for_dump(dump_filename)
-            processes.append(execute_command(command, shell=True, run_to_completion=False))
-
-        if not dry_run:
-            patterns.append(
-                borgmatic.borg.pattern.Pattern(
-                    os.path.join(borgmatic_runtime_directory, 'sqlite_databases'),
-                    source=borgmatic.borg.pattern.Pattern_source.HOOK,
-                )
+            continue
+
+        sqlite_command = tuple(
+            shlex.quote(part) for part in shlex.split(database.get('sqlite_command') or 'sqlite3')
+        )
+        command = sqlite_command + (
+            shlex.quote(database_path),
+            '.dump',
+            '>',
+            shlex.quote(dump_filename),
+        )
+
+        logger.debug(
+            f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
+        )
+        if dry_run:
+            continue
+
+        dump.create_named_pipe_for_dump(dump_filename)
+        processes.append(execute_command(command, shell=True, run_to_completion=False))
+
+    if not dry_run:
+        patterns.append(
+            borgmatic.borg.pattern.Pattern(
+                os.path.join(borgmatic_runtime_directory, 'sqlite_databases'),
+                source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
+        )
 
-        return processes
+    return processes
 
 
 def remove_data_source_dumps(
@@ -168,11 +163,11 @@ def restore_data_source_dump(
     except FileNotFoundError:  # pragma: no cover
         pass
 
-    restore_command = (
-        'sqlite3',
-        database_path,
+    sqlite_restore_command = tuple(
+        shlex.quote(part)
+        for part in shlex.split(data_source.get('sqlite_restore_command') or 'sqlite3')
     )
-
+    restore_command = sqlite_restore_command + (shlex.quote(database_path),)
     # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
     # if the restore paths don't exist in the archive.
     execute_command_with_processes(

+ 51 - 59
borgmatic/hooks/data_source/zfs.py

@@ -9,7 +9,6 @@ import subprocess
 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
-import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot
 
 logger = logging.getLogger(__name__)
@@ -244,71 +243,64 @@ def dump_data_sources(
 
     If this is a dry run, then don't actually snapshot anything.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='zfs',
-    ):
-        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-        logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
-
-        # List ZFS datasets to get their mount points, but only consider those patterns that came from
-        # actual user configuration (as opposed to, say, other hooks).
-        zfs_command = hook_config.get('zfs_command', 'zfs')
-        requested_datasets = get_datasets_to_backup(zfs_command, patterns)
-
-        # Snapshot each dataset, rewriting patterns to use the snapshot paths.
-        snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
-        normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
-
-        if not requested_datasets:
-            logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')
-
-        for dataset in requested_datasets:
-            full_snapshot_name = f'{dataset.name}@{snapshot_name}'
-            logger.debug(
-                f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
-            )
+    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+    logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
 
-            if not dry_run:
-                snapshot_dataset(zfs_command, full_snapshot_name)
-
-            # Mount the snapshot into a particular named temporary directory so that the snapshot ends
-            # up in the Borg archive at the "original" dataset mount point path.
-            snapshot_mount_path = os.path.join(
-                normalized_runtime_directory,
-                'zfs_snapshots',
-                hashlib.shake_256(dataset.mount_point.encode('utf-8')).hexdigest(
-                    MOUNT_POINT_HASH_LENGTH
-                ),
-                dataset.mount_point.lstrip(os.path.sep),
-            )
+    # List ZFS datasets to get their mount points, but only consider those patterns that came from
+    # actual user configuration (as opposed to, say, other hooks).
+    zfs_command = hook_config.get('zfs_command', 'zfs')
+    requested_datasets = get_datasets_to_backup(zfs_command, patterns)
 
-            logger.debug(
-                f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
-            )
+    # Snapshot each dataset, rewriting patterns to use the snapshot paths.
+    snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
+    normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
 
-            if dry_run:
-                continue
+    if not requested_datasets:
+        logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')
 
-            mount_snapshot(
-                hook_config.get('mount_command', 'mount'), full_snapshot_name, snapshot_mount_path
-            )
+    for dataset in requested_datasets:
+        full_snapshot_name = f'{dataset.name}@{snapshot_name}'
+        logger.debug(
+            f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
+        )
 
-            for pattern in dataset.contained_patterns:
-                snapshot_pattern = make_borg_snapshot_pattern(
-                    pattern, dataset, normalized_runtime_directory
-                )
+        if not dry_run:
+            snapshot_dataset(zfs_command, full_snapshot_name)
+
+        # Mount the snapshot into a particular named temporary directory so that the snapshot ends
+        # up in the Borg archive at the "original" dataset mount point path.
+        snapshot_mount_path = os.path.join(
+            normalized_runtime_directory,
+            'zfs_snapshots',
+            hashlib.shake_256(dataset.mount_point.encode('utf-8')).hexdigest(
+                MOUNT_POINT_HASH_LENGTH
+            ),
+            dataset.mount_point.lstrip(os.path.sep),
+        )
 
-                # Attempt to update the pattern in place, since pattern order matters to Borg.
-                try:
-                    patterns[patterns.index(pattern)] = snapshot_pattern
-                except ValueError:
-                    patterns.append(snapshot_pattern)
+        logger.debug(
+            f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
+        )
+
+        if dry_run:
+            continue
+
+        mount_snapshot(
+            hook_config.get('mount_command', 'mount'), full_snapshot_name, snapshot_mount_path
+        )
+
+        for pattern in dataset.contained_patterns:
+            snapshot_pattern = make_borg_snapshot_pattern(
+                pattern, dataset, normalized_runtime_directory
+            )
+
+            # Attempt to update the pattern in place, since pattern order matters to Borg.
+            try:
+                patterns[patterns.index(pattern)] = snapshot_pattern
+            except ValueError:
+                patterns.append(snapshot_pattern)
 
-        return []
+    return []
 
 
 def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover

+ 21 - 18
docs/how-to/add-preparation-and-cleanup-steps-to-backups.md

@@ -66,21 +66,15 @@ Each command in the `commands:` list has the following options:
  * `when`: Only trigger the hook when borgmatic is run with particular actions (`create`, `prune`, etc.) listed here. Defaults to running for all actions.
  * `run`: List of one or more shell commands or scripts to run when this command hook is triggered.
 
-There's also another command hook that works a little differently:
+An `after` command hook runs even if an error occurs in the corresponding
+`before` hook or between those two hooks. This allows you to perform cleanup
+steps that correspond to `before` preparation commands—even when something goes
+wrong. This is a departure from the way that the deprecated `after_*` hooks
+worked in borgmatic prior to version 2.0.0.
 
-```yaml
-commands:
-    - before: dump_data_sources
-      hooks: [postgresql]
-      run:
-          - echo "Right before the PostgreSQL database dump!"
-```
-
-This command hook has the following options:
-
- * `before` or `after`: `dump_data_sources`
- * `hooks`: Names of other hooks that this command hook applies to, e.g. `postgresql`, `mariadb`, `zfs`, `btrfs`, etc. Defaults to all hooks of the relevant type.
- * `run`: One or more shell commands or scripts to run when this command hook is triggered.
+Additionally, when command hooks run, they respect the `working_directory`
+option if it is configured, meaning that the hook commands are run in that
+directory.
 
 
 ### Order of execution
@@ -96,9 +90,6 @@ borgmatic for the `create` and `prune` actions. Here's the order of execution:
     * Run `before: configuration` hooks (from the first configuration file).
         * Run `before: repository` hooks (for the first repository).
             * Run `before: action` hooks for `create`.
-                * Run `before: dump_data_sources` hooks (e.g. for the PostgreSQL hook).
-                * Actually dump data sources (e.g. PostgreSQL databases).
-                * Run `after: dump_data_sources` hooks (e.g. for the PostgreSQL hook).
             * Actually run the `create` action (e.g. `borg create`).
             * Run `after: action` hooks for `create`.
             * Run `before: action` hooks for `prune`.
@@ -118,7 +109,10 @@ configuration files.
 command hooks worked a little differently. In these older versions of borgmatic,
 you can specify `before_backup` hooks to perform preparation steps before
 running backups and specify `after_backup` hooks to perform cleanup steps
-afterwards. Here's an example:
+afterwards. These deprecated command hooks still work in version 2.0.0+,
+although see below about a few semantic differences starting in that version.
+
+Here's an example of these deprecated hooks:
 
 ```yaml
 before_backup:
@@ -143,6 +137,15 @@ instance, `before_prune` runs before a `prune` action for a repository, while
 <span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
 these options in the `hooks:` section of your configuration.
 
+<span class="minilink minilink-addedin">New in version 2.0.0</span> An `after_*`
+command hook runs even if an error occurs in the corresponding `before_*` hook
+or between those two hooks. This allows you to perform cleanup steps that
+correspond to `before_*` preparation commands—even when something goes wrong.
+
+<span class="minilink minilink-addedin">New in version 2.0.0</span> When command
+hooks run, they respect the `working_directory` option if it is configured,
+meaning that the hook commands are run in that directory.
+
 <span class="minilink minilink-addedin">New in version 1.7.0</span> The
 `before_actions` and `after_actions` hooks run before/after all the actions
 (like `create`, `prune`, etc.) for each repository. These hooks are a good

+ 0 - 1
docs/how-to/set-up-backups.md

@@ -311,7 +311,6 @@ Then, from the directory where you downloaded it:
 
 ```bash
 sudo mv borgmatic /etc/cron.d/borgmatic
-sudo chmod +x /etc/cron.d/borgmatic
 ```
 
 If borgmatic is installed at a different location than

+ 7 - 1
tests/integration/config/test_schema.py

@@ -14,7 +14,13 @@ def test_schema_line_length_stays_under_limit():
         assert len(line.rstrip('\n')) <= MAXIMUM_LINE_LENGTH
 
 
-ACTIONS_MODULE_NAMES_TO_OMIT = {'arguments', 'change_passphrase', 'export_key', 'json'}
+ACTIONS_MODULE_NAMES_TO_OMIT = {
+    'arguments',
+    'change_passphrase',
+    'export_key',
+    'import_key',
+    'json',
+}
 ACTIONS_MODULE_NAMES_TO_ADD = {'key', 'umount'}
 
 

+ 20 - 0
tests/unit/actions/test_import_key.py

@@ -0,0 +1,20 @@
+from flexmock import flexmock
+
+from borgmatic.actions import import_key as module
+
+
+def test_run_import_key_does_not_raise():
+    flexmock(module.logger).answer = lambda message: None
+    flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
+    flexmock(module.borgmatic.borg.import_key).should_receive('import_key')
+    import_arguments = flexmock(repository=flexmock())
+
+    module.run_import_key(
+        repository={'path': 'repo'},
+        config={},
+        local_borg_version=None,
+        import_arguments=import_arguments,
+        global_arguments=flexmock(),
+        local_path=None,
+        remote_path=None,
+    )

+ 2 - 2
tests/unit/borg/test_extract.py

@@ -710,7 +710,7 @@ def test_extract_archive_uses_configured_working_directory_in_repo_path_and_dest
     )
     flexmock(module.borgmatic.config.validate).should_receive(
         'normalize_repository_path'
-    ).with_args('/working/dir/repo').and_return('/working/dir/repo').once()
+    ).with_args('repo', '/working/dir').and_return('/working/dir/repo').once()
 
     module.extract_archive(
         dry_run=False,
@@ -733,7 +733,7 @@ def test_extract_archive_uses_configured_working_directory_in_repo_path_when_des
     )
     flexmock(module.borgmatic.config.validate).should_receive(
         'normalize_repository_path'
-    ).with_args('/working/dir/repo').and_return('/working/dir/repo').once()
+    ).with_args('repo', '/working/dir').and_return('/working/dir/repo').once()
 
     module.extract_archive(
         dry_run=False,

+ 279 - 0
tests/unit/borg/test_import_key.py

@@ -0,0 +1,279 @@
+import logging
+
+import pytest
+from flexmock import flexmock
+
+from borgmatic.borg import import_key as module
+
+from ..test_verbosity import insert_logging_mock
+
+
+def insert_execute_command_mock(
+    command, input_file=module.DO_NOT_CAPTURE, working_directory=None, borg_exit_codes=None
+):
+
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module.borgmatic.config.paths).should_receive('get_working_directory').and_return(
+        working_directory,
+    )
+    flexmock(module).should_receive('execute_command').with_args(
+        command,
+        input_file=input_file,
+        output_log_level=module.logging.INFO,
+        environment=None,
+        working_directory=working_directory,
+        borg_local_path=command[0],
+        borg_exit_codes=borg_exit_codes,
+    ).once()
+
+
+def test_import_key_calls_borg_with_required_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_local_path():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg1', 'key', 'import', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+        local_path='borg1',
+    )
+
+
+def test_import_key_calls_borg_using_exit_codes():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    borg_exit_codes = flexmock()
+    insert_execute_command_mock(('borg', 'key', 'import', 'repo'), borg_exit_codes=borg_exit_codes)
+
+    module.import_key(
+        repository_path='repo',
+        config={'borg_exit_codes': borg_exit_codes},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_remote_path_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--remote-path', 'borg1', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+        remote_path='borg1',
+    )
+
+
+def test_import_key_calls_borg_with_umask_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--umask', '0770', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={'umask': '0770'},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_log_json_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--log-json', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=True),
+    )
+
+
+def test_import_key_calls_borg_with_lock_wait_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--lock-wait', '5', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={'lock_wait': '5'},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_with_log_info_calls_borg_with_info_parameter():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--info', 'repo'))
+    insert_logging_mock(logging.INFO)
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_with_log_debug_calls_borg_with_debug_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--debug', '--show-rc', 'repo'))
+    insert_logging_mock(logging.DEBUG)
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_paper_flags():
+    flexmock(module.flags).should_receive('make_flags').and_return(('--paper',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', '--paper', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=True, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_path_argument():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').with_args('source').and_return(True)
+    insert_execute_command_mock(('borg', 'key', 'import', 'repo', 'source'), input_file=None)
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path='source'),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_with_non_existent_path_raises():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module).should_receive('execute_command').never()
+
+    with pytest.raises(ValueError):
+        module.import_key(
+            repository_path='repo',
+            config={},
+            local_borg_version='1.2.3',
+            import_arguments=flexmock(paper=False, path='source'),
+            global_arguments=flexmock(dry_run=False, log_json=False),
+        )
+
+
+def test_import_key_with_stdin_path_calls_borg_without_path_argument():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', 'repo'))
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path='-'),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_with_dry_run_skips_borg_call():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    flexmock(module).should_receive('execute_command').never()
+
+    module.import_key(
+        repository_path='repo',
+        config={},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=True, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_working_directory():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').never()
+    insert_execute_command_mock(('borg', 'key', 'import', 'repo'), working_directory='/working/dir')
+
+    module.import_key(
+        repository_path='repo',
+        config={'working_directory': '/working/dir'},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path=None),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )
+
+
+def test_import_key_calls_borg_with_path_argument_and_working_directory():
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.os.path).should_receive('exists').with_args('/working/dir/source').and_return(
+        True
+    ).once()
+    insert_execute_command_mock(
+        ('borg', 'key', 'import', 'repo', 'source'),
+        input_file=None,
+        working_directory='/working/dir',
+    )
+
+    module.import_key(
+        repository_path='repo',
+        config={'working_directory': '/working/dir'},
+        local_borg_version='1.2.3',
+        import_arguments=flexmock(paper=False, path='source'),
+        global_arguments=flexmock(dry_run=False, log_json=False),
+    )

+ 66 - 0
tests/unit/borg/test_prune.py

@@ -210,6 +210,9 @@ def test_prune_archives_calls_borg_with_flags():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('repo',), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -228,6 +231,9 @@ def test_prune_archives_with_log_info_calls_borg_with_info_flag():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--info', 'repo'), logging.INFO)
     insert_logging_mock(logging.INFO)
 
@@ -247,6 +253,9 @@ def test_prune_archives_with_log_debug_calls_borg_with_debug_flag():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--debug', '--show-rc', 'repo'), logging.INFO)
     insert_logging_mock(logging.DEBUG)
 
@@ -266,6 +275,9 @@ def test_prune_archives_with_dry_run_calls_borg_with_dry_run_flag():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--dry-run', 'repo'), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -284,6 +296,9 @@ def test_prune_archives_with_local_path_calls_borg_via_local_path():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(('borg1',) + PRUNE_COMMAND[1:] + ('repo',), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -303,6 +318,9 @@ def test_prune_archives_with_exit_codes_calls_borg_using_them():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     borg_exit_codes = flexmock()
     insert_execute_command_mock(
         ('borg',) + PRUNE_COMMAND[1:] + ('repo',),
@@ -326,6 +344,9 @@ def test_prune_archives_with_remote_path_calls_borg_with_remote_path_flags():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--remote-path', 'borg1', 'repo'), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -345,6 +366,9 @@ def test_prune_archives_with_stats_calls_borg_with_stats_flag_and_answer_output_
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--stats', 'repo'), module.borgmatic.logger.ANSWER)
 
     prune_arguments = flexmock(stats=True, list_archives=False)
@@ -363,6 +387,9 @@ def test_prune_archives_with_files_calls_borg_with_list_flag_and_answer_output_l
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--list', 'repo'), module.borgmatic.logger.ANSWER)
 
     prune_arguments = flexmock(stats=False, list_archives=True)
@@ -382,6 +409,9 @@ def test_prune_archives_with_umask_calls_borg_with_umask_flags():
     config = {'umask': '077'}
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--umask', '077', 'repo'), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -400,6 +430,9 @@ def test_prune_archives_with_log_json_calls_borg_with_log_json_flag():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--log-json', 'repo'), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -419,6 +452,9 @@ def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
     config = {'lock_wait': 5}
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -437,6 +473,9 @@ def test_prune_archives_with_extra_borg_options_calls_borg_with_extra_options():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(PRUNE_COMMAND + ('--extra', '--options', 'repo'), logging.INFO)
 
     prune_arguments = flexmock(stats=False, list_archives=False)
@@ -471,6 +510,9 @@ def test_prune_archives_with_date_based_matching_calls_borg_with_date_based_flag
         )
     )
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module.borgmatic.config.paths).should_receive('get_working_directory').and_return(None)
     flexmock(module).should_receive('execute_command').with_args(
@@ -521,6 +563,9 @@ def test_prune_archives_calls_borg_with_working_directory():
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '1.2.3'
+    ).and_return(False)
     insert_execute_command_mock(
         PRUNE_COMMAND + ('repo',), logging.INFO, working_directory='/working/dir'
     )
@@ -534,3 +579,24 @@ def test_prune_archives_calls_borg_with_working_directory():
         global_arguments=flexmock(log_json=False),
         prune_arguments=prune_arguments,
     )
+
+
+def test_prune_archives_calls_borg_with_flags_and_when_feature_available():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.feature).should_receive('available').with_args(
+        module.feature.Feature.NO_PRUNE_STATS, '2.0.0b10'
+    ).and_return(True)
+    insert_execute_command_mock(PRUNE_COMMAND + ('repo',), logging.ANSWER)
+
+    prune_arguments = flexmock(stats=True, list_archives=False)
+    module.prune_archives(
+        dry_run=False,
+        repository_path='repo',
+        config={},
+        local_borg_version='2.0.0b10',
+        global_arguments=flexmock(log_json=False),
+        prune_arguments=prune_arguments,
+    )

+ 20 - 0
tests/unit/commands/test_borgmatic.py

@@ -1390,6 +1390,26 @@ def test_run_actions_runs_export_key():
     )
 
 
+def test_run_actions_runs_import_key():
+    flexmock(module).should_receive('add_custom_log_levels')
+    flexmock(module).should_receive('get_skip_actions').and_return([])
+    flexmock(module.command).should_receive('Before_after_hooks').and_return(flexmock())
+    flexmock(borgmatic.actions.import_key).should_receive('run_import_key').once()
+
+    tuple(
+        module.run_actions(
+            arguments={'global': flexmock(dry_run=False, log_file='foo'), 'import': flexmock()},
+            config_filename=flexmock(),
+            config={'repositories': []},
+            config_paths=[],
+            local_path=flexmock(),
+            remote_path=flexmock(),
+            local_borg_version=flexmock(),
+            repository={'path': 'repo'},
+        )
+    )
+
+
 def test_run_actions_runs_change_passphrase():
     flexmock(module).should_receive('add_custom_log_levels')
     flexmock(module).should_receive('get_skip_actions').and_return([])

+ 47 - 2
tests/unit/config/test_validate.py

@@ -94,13 +94,40 @@ def test_normalize_repository_path_passes_through_remote_repository():
     module.normalize_repository_path(repository) == repository
 
 
+def test_normalize_repository_path_passes_through_remote_repository_with_base_dir():
+    repository = 'example.org:test.borg'
+
+    flexmock(module.os.path).should_receive('abspath').never()
+    module.normalize_repository_path(repository, '/working') == repository
+
+
 def test_normalize_repository_path_passes_through_file_repository():
     repository = 'file:///foo/bar/test.borg'
-    flexmock(module.os.path).should_receive('abspath').and_return('/foo/bar/test.borg')
+    flexmock(module.os.path).should_receive('abspath').with_args('/foo/bar/test.borg').and_return(
+        '/foo/bar/test.borg'
+    )
 
     module.normalize_repository_path(repository) == '/foo/bar/test.borg'
 
 
+def test_normalize_repository_path_passes_through_absolute_file_repository_with_base_dir():
+    repository = 'file:///foo/bar/test.borg'
+    flexmock(module.os.path).should_receive('abspath').with_args('/foo/bar/test.borg').and_return(
+        '/foo/bar/test.borg'
+    )
+
+    module.normalize_repository_path(repository, '/working') == '/foo/bar/test.borg'
+
+
+def test_normalize_repository_path_resolves_relative_file_repository_with_base_dir():
+    repository = 'file://foo/bar/test.borg'
+    flexmock(module.os.path).should_receive('abspath').with_args(
+        '/working/foo/bar/test.borg'
+    ).and_return('/working/foo/bar/test.borg')
+
+    module.normalize_repository_path(repository, '/working') == '/working/foo/bar/test.borg'
+
+
 def test_normalize_repository_path_passes_through_absolute_repository():
     repository = '/foo/bar/test.borg'
     flexmock(module.os.path).should_receive('abspath').and_return(repository)
@@ -108,14 +135,32 @@ def test_normalize_repository_path_passes_through_absolute_repository():
     module.normalize_repository_path(repository) == repository
 
 
+def test_normalize_repository_path_passes_through_absolute_repository_with_base_dir():
+    repository = '/foo/bar/test.borg'
+    flexmock(module.os.path).should_receive('abspath').and_return(repository)
+
+    module.normalize_repository_path(repository, '/working') == repository
+
+
 def test_normalize_repository_path_resolves_relative_repository():
     repository = 'test.borg'
     absolute = '/foo/bar/test.borg'
-    flexmock(module.os.path).should_receive('abspath').and_return(absolute)
+    flexmock(module.os.path).should_receive('abspath').with_args(repository).and_return(absolute)
 
     module.normalize_repository_path(repository) == absolute
 
 
+def test_normalize_repository_path_resolves_relative_repository_with_base_dir():
+    repository = 'test.borg'
+    base = '/working'
+    absolute = '/working/test.borg'
+    flexmock(module.os.path).should_receive('abspath').with_args('/working/test.borg').and_return(
+        absolute
+    )
+
+    module.normalize_repository_path(repository, base) == absolute
+
+
 @pytest.mark.parametrize(
     'first,second,expected_result',
     (

+ 0 - 7
tests/unit/hooks/data_source/test_bootstrap.py

@@ -6,9 +6,6 @@ from borgmatic.hooks.data_source import bootstrap as module
 
 
 def test_dump_data_sources_creates_manifest_file():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module.os).should_receive('makedirs')
 
     flexmock(module.importlib.metadata).should_receive('version').and_return('1.0.0')
@@ -35,7 +32,6 @@ def test_dump_data_sources_creates_manifest_file():
 
 
 def test_dump_data_sources_with_store_config_files_false_does_not_create_manifest_file():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').never()
     flexmock(module.os).should_receive('makedirs').never()
     flexmock(module.json).should_receive('dump').never()
     hook_config = {'store_config_files': False}
@@ -51,9 +47,6 @@ def test_dump_data_sources_with_store_config_files_false_does_not_create_manifes
 
 
 def test_dump_data_sources_with_dry_run_does_not_create_manifest_file():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module.os).should_receive('makedirs').never()
     flexmock(module.json).should_receive('dump').never()
 

+ 0 - 18
tests/unit/hooks/data_source/test_btrfs.py

@@ -269,9 +269,6 @@ def test_make_borg_snapshot_pattern_includes_slashdot_hack_and_stripped_pattern_
 
 
 def test_dump_data_sources_snapshots_each_subvolume_and_updates_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}}
     flexmock(module).should_receive('get_subvolumes').and_return(
@@ -350,9 +347,6 @@ def test_dump_data_sources_snapshots_each_subvolume_and_updates_patterns():
 
 
 def test_dump_data_sources_uses_custom_btrfs_command_in_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {'btrfs_command': '/usr/local/bin/btrfs'}}
     flexmock(module).should_receive('get_subvolumes').and_return(
@@ -406,9 +400,6 @@ def test_dump_data_sources_uses_custom_btrfs_command_in_commands():
 
 
 def test_dump_data_sources_uses_custom_findmnt_command_in_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {'findmnt_command': '/usr/local/bin/findmnt'}}
     flexmock(module).should_receive('get_subvolumes').with_args(
@@ -464,9 +455,6 @@ def test_dump_data_sources_uses_custom_findmnt_command_in_commands():
 
 
 def test_dump_data_sources_with_dry_run_skips_snapshot_and_patterns_update():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}}
     flexmock(module).should_receive('get_subvolumes').and_return(
@@ -495,9 +483,6 @@ def test_dump_data_sources_with_dry_run_skips_snapshot_and_patterns_update():
 
 
 def test_dump_data_sources_without_matching_subvolumes_skips_snapshot_and_patterns_update():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}}
     flexmock(module).should_receive('get_subvolumes').and_return(())
@@ -522,9 +507,6 @@ def test_dump_data_sources_without_matching_subvolumes_skips_snapshot_and_patter
 
 
 def test_dump_data_sources_snapshots_adds_to_existing_exclude_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}, 'exclude_patterns': ['/bar']}
     flexmock(module).should_receive('get_subvolumes').and_return(

+ 0 - 21
tests/unit/hooks/data_source/test_lvm.py

@@ -282,9 +282,6 @@ def test_make_borg_snapshot_pattern_includes_slashdot_hack_and_stripped_pattern_
 
 
 def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     logical_volumes = (
@@ -354,9 +351,6 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
 
 
 def test_dump_data_sources_with_no_logical_volumes_skips_snapshots():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     flexmock(module).should_receive('get_logical_volumes').and_return(())
@@ -379,9 +373,6 @@ def test_dump_data_sources_with_no_logical_volumes_skips_snapshots():
 
 
 def test_dump_data_sources_uses_snapshot_size_for_snapshot():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {'snapshot_size': '1000PB'}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     logical_volumes = (
@@ -457,9 +448,6 @@ def test_dump_data_sources_uses_snapshot_size_for_snapshot():
 
 
 def test_dump_data_sources_uses_custom_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {
         'lvm': {
             'lsblk_command': '/usr/local/bin/lsblk',
@@ -546,9 +534,6 @@ def test_dump_data_sources_uses_custom_commands():
 
 
 def test_dump_data_sources_with_dry_run_skips_snapshots_and_does_not_touch_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     flexmock(module).should_receive('get_logical_volumes').and_return(
@@ -600,9 +585,6 @@ def test_dump_data_sources_with_dry_run_skips_snapshots_and_does_not_touch_patte
 
 
 def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/hmm')]
     logical_volumes = (
@@ -673,9 +655,6 @@ def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained
 
 
 def test_dump_data_sources_with_missing_snapshot_errors():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     flexmock(module).should_receive('get_logical_volumes').and_return(

+ 0 - 18
tests/unit/hooks/data_source/test_mariadb.py

@@ -237,9 +237,6 @@ def test_use_streaming_false_for_no_databases():
 
 
 def test_dump_data_sources_dumps_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -281,9 +278,6 @@ def test_dump_data_sources_dumps_each_database():
 
 
 def test_dump_data_sources_dumps_with_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -318,9 +312,6 @@ def test_dump_data_sources_dumps_with_password():
 
 
 def test_dump_data_sources_dumps_all_databases_at_once():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -352,9 +343,6 @@ def test_dump_data_sources_dumps_all_databases_at_once():
 
 
 def test_dump_data_sources_dumps_all_databases_separately_when_format_configured():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all', 'format': 'sql'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -862,9 +850,6 @@ def test_execute_dump_command_with_dry_run_skips_mariadb_dump():
 
 
 def test_dump_data_sources_errors_for_missing_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})
@@ -888,9 +873,6 @@ def test_dump_data_sources_errors_for_missing_all_databases():
 
 
 def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})

+ 0 - 21
tests/unit/hooks/data_source/test_mongodb.py

@@ -24,9 +24,6 @@ def test_use_streaming_false_for_no_databases():
 
 
 def test_dump_data_sources_runs_mongodump_for_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -56,9 +53,6 @@ def test_dump_data_sources_runs_mongodump_for_each_database():
 
 
 def test_dump_data_sources_with_dry_run_skips_mongodump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
@@ -81,9 +75,6 @@ def test_dump_data_sources_with_dry_run_skips_mongodump():
 
 
 def test_dump_data_sources_runs_mongodump_with_hostname_and_port():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -120,9 +111,6 @@ def test_dump_data_sources_runs_mongodump_with_hostname_and_port():
 
 
 def test_dump_data_sources_runs_mongodump_with_username_and_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {
             'name': 'foo',
@@ -174,9 +162,6 @@ def test_dump_data_sources_runs_mongodump_with_username_and_password():
 
 
 def test_dump_data_sources_runs_mongodump_with_directory_format():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'format': 'directory'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
@@ -204,9 +189,6 @@ def test_dump_data_sources_runs_mongodump_with_directory_format():
 
 
 def test_dump_data_sources_runs_mongodump_with_options():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'options': '--stuff=such'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -240,9 +222,6 @@ def test_dump_data_sources_runs_mongodump_with_options():
 
 
 def test_dump_data_sources_runs_mongodumpall_for_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')

+ 0 - 18
tests/unit/hooks/data_source/test_mysql.py

@@ -134,9 +134,6 @@ def test_use_streaming_false_for_no_databases():
 
 
 def test_dump_data_sources_dumps_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -175,9 +172,6 @@ def test_dump_data_sources_dumps_each_database():
 
 
 def test_dump_data_sources_dumps_with_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -212,9 +206,6 @@ def test_dump_data_sources_dumps_with_password():
 
 
 def test_dump_data_sources_dumps_all_databases_at_once():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -246,9 +237,6 @@ def test_dump_data_sources_dumps_all_databases_at_once():
 
 
 def test_dump_data_sources_dumps_all_databases_separately_when_format_configured():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all', 'format': 'sql'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -774,9 +762,6 @@ def test_execute_dump_command_with_dry_run_skips_mysqldump():
 
 
 def test_dump_data_sources_errors_for_missing_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})
@@ -800,9 +785,6 @@ def test_dump_data_sources_errors_for_missing_all_databases():
 
 
 def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})

+ 0 - 42
tests/unit/hooks/data_source/test_postgresql.py

@@ -236,9 +236,6 @@ def test_use_streaming_false_for_no_databases():
 
 
 def test_dump_data_sources_runs_pg_dump_for_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -287,9 +284,6 @@ def test_dump_data_sources_runs_pg_dump_for_each_database():
 
 
 def test_dump_data_sources_raises_when_no_database_names_to_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -307,9 +301,6 @@ def test_dump_data_sources_raises_when_no_database_names_to_dump():
 
 
 def test_dump_data_sources_does_not_raise_when_no_database_names_to_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -326,9 +317,6 @@ def test_dump_data_sources_does_not_raise_when_no_database_names_to_dump():
 
 
 def test_dump_data_sources_with_duplicate_dump_skips_pg_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -356,9 +344,6 @@ def test_dump_data_sources_with_duplicate_dump_skips_pg_dump():
 
 
 def test_dump_data_sources_with_dry_run_skips_pg_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -389,9 +374,6 @@ def test_dump_data_sources_with_dry_run_skips_pg_dump():
 
 
 def test_dump_data_sources_runs_pg_dump_with_hostname_and_port():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -438,9 +420,6 @@ def test_dump_data_sources_runs_pg_dump_with_hostname_and_port():
 
 
 def test_dump_data_sources_runs_pg_dump_with_username_and_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return(
@@ -487,9 +466,6 @@ def test_dump_data_sources_runs_pg_dump_with_username_and_password():
 
 
 def test_dump_data_sources_with_username_injection_attack_gets_escaped():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'username': 'postgres; naughty-command', 'password': 'trustsome1'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return(
@@ -536,9 +512,6 @@ def test_dump_data_sources_with_username_injection_attack_gets_escaped():
 
 
 def test_dump_data_sources_runs_pg_dump_with_directory_format():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'format': 'directory'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -583,9 +556,6 @@ def test_dump_data_sources_runs_pg_dump_with_directory_format():
 
 
 def test_dump_data_sources_runs_pg_dump_with_string_compression():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'compression': 'winrar'}]
     processes = [flexmock()]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -633,9 +603,6 @@ def test_dump_data_sources_runs_pg_dump_with_string_compression():
 
 
 def test_dump_data_sources_runs_pg_dump_with_integer_compression():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'compression': 0}]
     processes = [flexmock()]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -683,9 +650,6 @@ def test_dump_data_sources_runs_pg_dump_with_integer_compression():
 
 
 def test_dump_data_sources_runs_pg_dump_with_options():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'options': '--stuff=such'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -729,9 +693,6 @@ def test_dump_data_sources_runs_pg_dump_with_options():
 
 
 def test_dump_data_sources_runs_pg_dumpall_for_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -764,9 +725,6 @@ def test_dump_data_sources_runs_pg_dumpall_for_all_databases():
 
 
 def test_dump_data_sources_runs_non_default_pg_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'pg_dump_command': 'special_pg_dump --compress *'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})

+ 143 - 18
tests/unit/hooks/data_source/test_sqlite.py

@@ -17,9 +17,6 @@ def test_use_streaming_false_for_no_databases():
 
 
 def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'path': '/path/to/database', 'name': 'database'}]
 
     flexmock(module).should_receive('make_dump_path').and_return('/run/borgmatic')
@@ -44,9 +41,6 @@ def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
 
 
 def test_dump_data_sources_dumps_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1', 'name': 'database1'},
         {'path': '/path/to/database2', 'name': 'database2'},
@@ -77,9 +71,6 @@ def test_dump_data_sources_dumps_each_database():
 
 
 def test_dump_data_sources_with_path_injection_attack_gets_escaped():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1; naughty-command', 'name': 'database1'},
     ]
@@ -116,10 +107,49 @@ def test_dump_data_sources_with_path_injection_attack_gets_escaped():
     )
 
 
-def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
+def test_dump_data_sources_runs_non_default_sqlite_with_path_injection_attack_gets_escaped():
+    databases = [
+        {
+            'path': '/path/to/database1; naughty-command',
+            'name': 'database1',
+            'sqlite_command': 'custom_sqlite *',
+        },
+    ]
+    processes = [flexmock()]
+
+    flexmock(module).should_receive('make_dump_path').and_return('/run/borgmatic')
+    flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
+        '/run/borgmatic/database'
     )
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module.dump).should_receive('create_named_pipe_for_dump')
+    flexmock(module).should_receive('execute_command').with_args(
+        (
+            'custom_sqlite',  # custom sqlite command
+            "'*'",  # Should get shell escaped to prevent injection attacks.
+            "'/path/to/database1; naughty-command'",
+            '.dump',
+            '>',
+            '/run/borgmatic/database',
+        ),
+        shell=True,
+        run_to_completion=False,
+    ).and_return(processes[0])
+
+    assert (
+        module.dump_data_sources(
+            databases,
+            {},
+            config_paths=('test.yaml',),
+            borgmatic_runtime_directory='/run/borgmatic',
+            patterns=[],
+            dry_run=False,
+        )
+        == processes
+    )
+
+
+def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
     databases = [
         {'path': '/path/to/database1', 'name': 'database1'},
     ]
@@ -148,9 +178,6 @@ def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
 
 
 def test_dump_data_sources_with_name_all_warns_and_dumps_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1', 'name': 'all'},
     ]
@@ -181,9 +208,6 @@ def test_dump_data_sources_with_name_all_warns_and_dumps_all_databases():
 
 
 def test_dump_data_sources_does_not_dump_if_dry_run():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'path': '/path/to/database', 'name': 'database'}]
 
     flexmock(module).should_receive('make_dump_path').and_return('/run/borgmatic')
@@ -234,6 +258,41 @@ def test_restore_data_source_dump_restores_database():
     )
 
 
+def test_restore_data_source_dump_runs_non_default_sqlite_restores_database():
+    hook_config = [
+        {
+            'path': '/path/to/database',
+            'name': 'database',
+            'sqlite_restore_command': 'custom_sqlite *',
+        },
+        {'name': 'other'},
+    ]
+    extract_process = flexmock(stdout=flexmock())
+
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        (
+            'custom_sqlite',
+            "'*'",  # Should get shell escaped to prevent injection attacks.
+            '/path/to/database',
+        ),
+        processes=[extract_process],
+        output_log_level=logging.DEBUG,
+        input_file=extract_process.stdout,
+    ).once()
+
+    flexmock(module.os).should_receive('remove').once()
+
+    module.restore_data_source_dump(
+        hook_config,
+        {},
+        data_source=hook_config[0],
+        dry_run=False,
+        extract_process=extract_process,
+        connection_params={'restore_path': None},
+        borgmatic_runtime_directory='/run/borgmatic',
+    )
+
+
 def test_restore_data_source_dump_with_connection_params_uses_connection_params_for_restore():
     hook_config = [
         {'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'}
@@ -263,6 +322,38 @@ def test_restore_data_source_dump_with_connection_params_uses_connection_params_
     )
 
 
+def test_restore_data_source_dump_runs_non_default_sqlite_with_connection_params_uses_connection_params_for_restore():
+    hook_config = [
+        {'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'}
+    ]
+    extract_process = flexmock(stdout=flexmock())
+
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        (
+            'custom_sqlite',
+            'cli/path/to/database',
+        ),
+        processes=[extract_process],
+        output_log_level=logging.DEBUG,
+        input_file=extract_process.stdout,
+    ).once()
+
+    flexmock(module.os).should_receive('remove').once()
+
+    module.restore_data_source_dump(
+        hook_config,
+        {},
+        data_source={
+            'name': 'database',
+            'sqlite_restore_command': 'custom_sqlite',
+        },
+        dry_run=False,
+        extract_process=extract_process,
+        connection_params={'restore_path': 'cli/path/to/database'},
+        borgmatic_runtime_directory='/run/borgmatic',
+    )
+
+
 def test_restore_data_source_dump_without_connection_params_uses_restore_params_in_config_for_restore():
     hook_config = [
         {'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'}
@@ -292,6 +383,40 @@ def test_restore_data_source_dump_without_connection_params_uses_restore_params_
     )
 
 
+def test_restore_data_source_dump_runs_non_default_sqlite_without_connection_params_uses_restore_params_in_config_for_restore():
+    hook_config = [
+        {
+            'path': '/path/to/database',
+            'name': 'database',
+            'sqlite_restore_command': 'custom_sqlite',
+            'restore_path': 'config/path/to/database',
+        }
+    ]
+    extract_process = flexmock(stdout=flexmock())
+
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        (
+            'custom_sqlite',
+            'config/path/to/database',
+        ),
+        processes=[extract_process],
+        output_log_level=logging.DEBUG,
+        input_file=extract_process.stdout,
+    ).once()
+
+    flexmock(module.os).should_receive('remove').once()
+
+    module.restore_data_source_dump(
+        hook_config,
+        {},
+        data_source=hook_config[0],
+        dry_run=False,
+        extract_process=extract_process,
+        connection_params={'restore_path': None},
+        borgmatic_runtime_directory='/run/borgmatic',
+    )
+
+
 def test_restore_data_source_dump_does_not_restore_database_if_dry_run():
     hook_config = [{'path': '/path/to/database', 'name': 'database'}]
     extract_process = flexmock(stdout=flexmock())

+ 0 - 15
tests/unit/hooks/data_source/test_zfs.py

@@ -296,9 +296,6 @@ def test_make_borg_snapshot_pattern_includes_slashdot_hack_and_stripped_pattern_
 
 
 def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     dataset = flexmock(
         name='dataset',
         mount_point='/mnt/dataset',
@@ -341,9 +338,6 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
 
 
 def test_dump_data_sources_with_no_datasets_skips_snapshots():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module).should_receive('get_datasets_to_backup').and_return(())
     flexmock(module.os).should_receive('getpid').and_return(1234)
     flexmock(module).should_receive('snapshot_dataset').never()
@@ -366,9 +360,6 @@ def test_dump_data_sources_with_no_datasets_skips_snapshots():
 
 
 def test_dump_data_sources_uses_custom_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     dataset = flexmock(
         name='dataset',
         mount_point='/mnt/dataset',
@@ -418,9 +409,6 @@ def test_dump_data_sources_uses_custom_commands():
 
 
 def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module).should_receive('get_datasets_to_backup').and_return(
         (flexmock(name='dataset', mount_point='/mnt/dataset'),)
     )
@@ -445,9 +433,6 @@ def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_patter
 
 
 def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     dataset = flexmock(
         name='dataset',
         mount_point='/mnt/dataset',

+ 48 - 122
tests/unit/hooks/test_command.py

@@ -133,121 +133,6 @@ def test_make_environment_with_pyinstaller_and_LD_LIBRARY_PATH_ORIG_copies_it_in
                 },
             ),
         ),
-        (
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['postgresql'],
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['lvm'],
-                    'run': ['bar'],
-                },
-                {
-                    'after': 'dump_data_sources',
-                    'hooks': ['lvm'],
-                    'run': ['baz'],
-                },
-            ),
-            {
-                'before': 'dump_data_sources',
-                'hook_name': 'lvm',
-            },
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['lvm'],
-                    'run': ['bar'],
-                },
-            ),
-        ),
-        (
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['bar'],
-                },
-                {
-                    'after': 'dump_data_sources',
-                    'run': ['baz'],
-                },
-            ),
-            {
-                'before': 'dump_data_sources',
-                'hook_name': 'lvm',
-            },
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['bar'],
-                },
-            ),
-        ),
-        (
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['postgresql', 'zfs', 'lvm'],
-                    'run': ['foo'],
-                },
-            ),
-            {
-                'before': 'dump_data_sources',
-                'hook_name': 'lvm',
-            },
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['postgresql', 'zfs', 'lvm'],
-                    'run': ['foo'],
-                },
-            ),
-        ),
-        (
-            (
-                {
-                    'before': 'action',
-                    'when': ['create'],
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'action',
-                    'when': ['prune'],
-                    'run': ['bar'],
-                },
-                {
-                    'before': 'action',
-                    'when': ['compact'],
-                    'run': ['baz'],
-                },
-            ),
-            {
-                'before': 'action',
-                'action_names': ['create', 'compact', 'extract'],
-            },
-            (
-                {
-                    'before': 'action',
-                    'when': ['create'],
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'action',
-                    'when': ['compact'],
-                    'run': ['baz'],
-                },
-            ),
-        ),
         (
             (
                 {
@@ -305,11 +190,13 @@ def test_execute_hooks_invokes_each_hook_and_command():
             output_log_level=LOGGING_ANSWER,
             shell=True,
             environment={},
+            working_directory=None,
         ).once()
 
     module.execute_hooks(
         [{'before': 'create', 'run': ['foo']}, {'before': 'create', 'run': ['bar', 'baz']}],
         umask=None,
+        working_directory=None,
         dry_run=False,
     )
 
@@ -328,9 +215,35 @@ def test_execute_hooks_with_umask_sets_that_umask():
         output_log_level=logging.ANSWER,
         shell=True,
         environment={},
+        working_directory=None,
+    )
+
+    module.execute_hooks(
+        [{'before': 'create', 'run': ['foo']}], umask=77, working_directory=None, dry_run=False
+    )
+
+
+def test_execute_hooks_with_working_directory_executes_command_with_it():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = LOGGING_ANSWER
+    flexmock(module).should_receive('interpolate_context').replace_with(
+        lambda hook_description, command, context: command
+    )
+    flexmock(module).should_receive('make_environment').and_return({})
+    flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
+        ['foo'],
+        output_log_level=logging.ANSWER,
+        shell=True,
+        environment={},
+        working_directory='/working',
     )
 
-    module.execute_hooks([{'before': 'create', 'run': ['foo']}], umask=77, dry_run=False)
+    module.execute_hooks(
+        [{'before': 'create', 'run': ['foo']}],
+        umask=None,
+        working_directory='/working',
+        dry_run=False,
+    )
 
 
 def test_execute_hooks_with_dry_run_skips_commands():
@@ -342,11 +255,13 @@ def test_execute_hooks_with_dry_run_skips_commands():
     flexmock(module).should_receive('make_environment').and_return({})
     flexmock(module.borgmatic.execute).should_receive('execute_command').never()
 
-    module.execute_hooks([{'before': 'create', 'run': ['foo']}], umask=None, dry_run=True)
+    module.execute_hooks(
+        [{'before': 'create', 'run': ['foo']}], umask=None, working_directory=None, dry_run=True
+    )
 
 
 def test_execute_hooks_with_empty_commands_does_not_raise():
-    module.execute_hooks([], umask=None, dry_run=True)
+    module.execute_hooks([], umask=None, working_directory=None, dry_run=True)
 
 
 def test_execute_hooks_with_error_logs_as_error():
@@ -361,9 +276,12 @@ def test_execute_hooks_with_error_logs_as_error():
         output_log_level=logging.ERROR,
         shell=True,
         environment={},
+        working_directory=None,
     ).once()
 
-    module.execute_hooks([{'after': 'error', 'run': ['foo']}], umask=None, dry_run=False)
+    module.execute_hooks(
+        [{'after': 'error', 'run': ['foo']}], umask=None, working_directory=None, dry_run=False
+    )
 
 
 def test_execute_hooks_with_before_or_after_raises():
@@ -380,6 +298,7 @@ def test_execute_hooks_with_before_or_after_raises():
                 {'erstwhile': 'create', 'run': ['bar', 'baz']},
             ],
             umask=None,
+            working_directory=None,
             dry_run=False,
         )
 
@@ -398,11 +317,13 @@ def test_execute_hooks_without_commands_to_run_does_not_raise():
             output_log_level=LOGGING_ANSWER,
             shell=True,
             environment={},
+            working_directory=None,
         ).once()
 
     module.execute_hooks(
         [{'before': 'create', 'run': []}, {'before': 'create', 'run': ['foo', 'bar']}],
         umask=None,
+        working_directory=None,
         dry_run=False,
     )
 
@@ -430,6 +351,7 @@ def test_before_after_hooks_calls_command_hooks():
         command_hooks=commands,
         before_after='action',
         umask=1234,
+        working_directory='/working',
         dry_run=False,
         hook_name='myhook',
         action_names=['create'],
@@ -439,7 +361,7 @@ def test_before_after_hooks_calls_command_hooks():
         pass
 
 
-def test_before_after_hooks_with_before_error_raises_and_skips_after_hook():
+def test_before_after_hooks_with_before_error_runs_after_hook_and_raises():
     commands = [
         {'before': 'repository', 'run': ['foo', 'bar']},
         {'after': 'repository', 'run': ['baz']},
@@ -455,8 +377,8 @@ def test_before_after_hooks_with_before_error_raises_and_skips_after_hook():
         after='action',
         hook_name='myhook',
         action_names=['create'],
-    ).never()
-    flexmock(module).should_receive('execute_hooks').and_raise(OSError)
+    ).and_return(flexmock()).once()
+    flexmock(module).should_receive('execute_hooks').and_raise(OSError).and_return(None)
     flexmock(module).should_receive('considered_soft_failure').and_return(False)
 
     with pytest.raises(ValueError):
@@ -464,6 +386,7 @@ def test_before_after_hooks_with_before_error_raises_and_skips_after_hook():
             command_hooks=commands,
             before_after='action',
             umask=1234,
+            working_directory='/working',
             dry_run=False,
             hook_name='myhook',
             action_names=['create'],
@@ -497,6 +420,7 @@ def test_before_after_hooks_with_before_soft_failure_does_not_raise():
         command_hooks=commands,
         before_after='action',
         umask=1234,
+        working_directory='/working',
         dry_run=False,
         hook_name='myhook',
         action_names=['create'],
@@ -531,6 +455,7 @@ def test_before_after_hooks_with_after_error_raises():
             command_hooks=commands,
             before_after='action',
             umask=1234,
+            working_directory='/working',
             dry_run=False,
             hook_name='myhook',
             action_names=['create'],
@@ -564,6 +489,7 @@ def test_before_after_hooks_with_after_soft_failure_does_not_raise():
         command_hooks=commands,
         before_after='action',
         umask=1234,
+        working_directory='/working',
         dry_run=False,
         hook_name='myhook',
         action_names=['create'],