Revert "Log the repository path or label on every relevant log message, not just some logs (#635)."

This reverts commit 90c1161a8c3c52474f76ee0a96808ea5f0b21719.
Dan Helfman committed 4 months ago
commit 1232ba8045
53 changed files with 559 additions and 404 deletions:

  1. NEWS (+0 -3)
  2. borgmatic/actions/borg.py (+1 -1)
  3. borgmatic/actions/break_lock.py (+1 -1)
  4. borgmatic/actions/change_passphrase.py (+1 -1)
  5. borgmatic/actions/check.py (+21 -16)
  6. borgmatic/actions/compact.py (+2 -3)
  7. borgmatic/actions/config/bootstrap.py (+1 -0)
  8. borgmatic/actions/create.py (+6 -2)
  9. borgmatic/actions/delete.py (+1 -1)
  10. borgmatic/actions/export_key.py (+1 -1)
  11. borgmatic/actions/export_tar.py (+1 -1)
  12. borgmatic/actions/extract.py (+1 -1)
  13. borgmatic/actions/info.py (+1 -1)
  14. borgmatic/actions/list.py (+2 -2)
  15. borgmatic/actions/mount.py (+2 -2)
  16. borgmatic/actions/prune.py (+1 -1)
  17. borgmatic/actions/repo_create.py (+1 -1)
  18. borgmatic/actions/repo_delete.py (+1 -1)
  19. borgmatic/actions/repo_info.py (+1 -1)
  20. borgmatic/actions/repo_list.py (+1 -1)
  21. borgmatic/actions/restore.py (+14 -6)
  22. borgmatic/actions/transfer.py (+1 -1)
  23. borgmatic/borg/create.py (+8 -3)
  24. borgmatic/commands/borgmatic.py (+74 -76)
  25. borgmatic/config/paths.py (+8 -7)
  26. borgmatic/hooks/command.py (+9 -9)
  27. borgmatic/hooks/data_source/bootstrap.py (+15 -13)
  28. borgmatic/hooks/data_source/btrfs.py (+24 -20)
  29. borgmatic/hooks/data_source/dump.py (+2 -2)
  30. borgmatic/hooks/data_source/lvm.py (+28 -25)
  31. borgmatic/hooks/data_source/mariadb.py (+31 -25)
  32. borgmatic/hooks/data_source/mongodb.py (+22 -19)
  33. borgmatic/hooks/data_source/mysql.py (+31 -25)
  34. borgmatic/hooks/data_source/postgresql.py (+27 -23)
  35. borgmatic/hooks/data_source/sqlite.py (+24 -22)
  36. borgmatic/hooks/data_source/zfs.py (+24 -22)
  37. borgmatic/hooks/dispatch.py (+19 -18)
  38. borgmatic/logger.py (+5 -18)
  39. pyproject.toml (+1 -1)
  40. tests/unit/actions/test_check.py (+8 -0)
  41. tests/unit/actions/test_restore.py (+16 -0)
  42. tests/unit/borg/test_create.py (+3 -1)
  43. tests/unit/hooks/data_source/test_bootstrap.py (+7 -0)
  44. tests/unit/hooks/data_source/test_btrfs.py (+15 -0)
  45. tests/unit/hooks/data_source/test_lvm.py (+19 -0)
  46. tests/unit/hooks/data_source/test_mariadb.py (+20 -5)
  47. tests/unit/hooks/data_source/test_mongodb.py (+3 -1)
  48. tests/unit/hooks/data_source/test_mysql.py (+20 -5)
  49. tests/unit/hooks/data_source/test_postgresql.py (+3 -1)
  50. tests/unit/hooks/data_source/test_sqlite.py (+2 -2)
  51. tests/unit/hooks/data_source/test_zfs.py (+16 -0)
  52. tests/unit/hooks/test_command.py (+12 -12)
  53. tests/unit/hooks/test_dispatch.py (+1 -1)

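Editor's note on the pattern reverted throughout the diffs below: the reverted commit #635 had call sites log bare messages and relied on a centralized set_log_prefix() helper in borgmatic/logger.py to prepend the current configuration filename or repository label. This revert deletes that helper and restores explicit f-string prefixes at every call site, along with the extra log_prefix/config_filename arguments threaded through hooks. A minimal sketch of the two styles follows; the set_log_prefix() implementation shown is hypothetical (logger.py's actual body isn't in this excerpt), though a logging.Filter is one plausible way to do it.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

_prefix = None


class PrefixFilter(logging.Filter):
    # Hypothetical filter; the real logger.py implementation isn't shown here.
    def filter(self, record):
        if _prefix:
            record.msg = f'{_prefix}: {record.msg}'
        return True


for handler in logging.getLogger().handlers:
    handler.addFilter(PrefixFilter())


def set_log_prefix(prefix):
    # Centralized style (added by #635, removed by this revert): set the
    # prefix once and every subsequent record gets it automatically.
    global _prefix
    _prefix = prefix


repository = {'path': '/mnt/backups/repo.borg', 'label': 'offsite'}

set_log_prefix(repository.get('label', repository['path']))
logger.info('Running actions for repository')  # -> "offsite: Running actions for repository"

# Inline style (restored by this revert): each call site builds the prefix itself.
set_log_prefix(None)
logger.info(f'{repository.get("label", repository["path"])}: Running actions for repository')
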
NEWS (+0 -3)

@@ -1,6 +1,3 @@
-1.9.9.dev0
- * #635: Log the repository path or label on every relevant log message, not just some logs.
-
 1.9.8
  * #979: Fix root patterns so they don't have an invalid "sh:" prefix before getting passed to Borg.
  * Expand the recent contributors documentation section to include ticket submitters—not just code

borgmatic/actions/borg.py (+1 -1)

@@ -23,7 +23,7 @@ def run_borg(
         repository, borg_arguments.repository
     ):
         logger.info(
-            'Running arbitrary Borg command'
+            f'{repository.get("label", repository["path"])}: Running arbitrary Borg command'
         )
         archive_name = borgmatic.borg.repo_list.resolve_archive_name(
             repository['path'],

borgmatic/actions/break_lock.py (+1 -1)

@@ -22,7 +22,7 @@ def run_break_lock(
         repository, break_lock_arguments.repository
     ):
         logger.info(
-            'Breaking repository and cache locks'
+            f'{repository.get("label", repository["path"])}: Breaking repository and cache locks'
         )
         borgmatic.borg.break_lock.break_lock(
             repository['path'],

borgmatic/actions/change_passphrase.py (+1 -1)

@@ -25,7 +25,7 @@ def run_change_passphrase(
         )
     ):
         logger.info(
-            'Changing repository passphrase'
+            f'{repository.get("label", repository["path"])}: Changing repository passphrase'
         )
         borgmatic.borg.change_passphrase.change_passphrase(
             repository['path'],

borgmatic/actions/check.py (+21 -16)

@@ -363,6 +363,7 @@ def collect_spot_check_source_paths(
         borgmatic.hooks.dispatch.call_hooks(
             'use_streaming',
             config,
+            repository['path'],
             borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
         ).values()
     )
@@ -467,14 +468,15 @@ def compare_spot_check_hashes(
     global_arguments,
     local_path,
     remote_path,
+    log_prefix,
     source_paths,
 ):
     '''
     Given a repository configuration dict, the name of the latest archive, a configuration dict, the
     local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the
-    remote Borg path, and spot check source paths, compare the hashes for a sampling of the source
-    paths with hashes from corresponding paths in the given archive. Return a sequence of the paths
-    that fail that hash comparison.
+    remote Borg path, a log label, and spot check source paths, compare the hashes for a sampling of
+    the source paths with hashes from corresponding paths in the given archive. Return a sequence of
+    the paths that fail that hash comparison.
     '''
     # Based on the configured sample percentage, come up with a list of random sample files from the
     # source directories.
@@ -490,7 +492,7 @@ def compare_spot_check_hashes(
         if os.path.exists(os.path.join(working_directory or '', source_path))
     }
     logger.debug(
-        f'Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
+        f'{log_prefix}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
     )
 
     source_sample_paths_iterator = iter(source_sample_paths)
@@ -578,7 +580,8 @@ def spot_check(
     disk to those stored in the latest archive. If any differences are beyond configured tolerances,
     then the check fails.
     '''
-    logger.debug('Running spot check')
+    log_prefix = f'{repository.get("label", repository["path"])}'
+    logger.debug(f'{log_prefix}: Running spot check')
 
     try:
         spot_check_config = next(
@@ -601,7 +604,7 @@ def spot_check(
         remote_path,
         borgmatic_runtime_directory,
     )
-    logger.debug(f'{len(source_paths)} total source paths for spot check')
+    logger.debug(f'{log_prefix}: {len(source_paths)} total source paths for spot check')
 
     archive = borgmatic.borg.repo_list.resolve_archive_name(
         repository['path'],
@@ -612,7 +615,7 @@ def spot_check(
         local_path,
         remote_path,
     )
-    logger.debug(f'Using archive {archive} for spot check')
+    logger.debug(f'{log_prefix}: Using archive {archive} for spot check')
 
     archive_paths = collect_spot_check_archive_paths(
         repository,
@@ -624,11 +627,11 @@ def spot_check(
         remote_path,
         borgmatic_runtime_directory,
     )
-    logger.debug(f'{len(archive_paths)} total archive paths for spot check')
+    logger.debug(f'{log_prefix}: {len(archive_paths)} total archive paths for spot check')
 
     if len(source_paths) == 0:
         logger.debug(
-            f'Paths in latest archive but not source paths: {", ".join(set(archive_paths)) or "none"}'
+            f'{log_prefix}: Paths in latest archive but not source paths: {", ".join(set(archive_paths)) or "none"}'
         )
         raise ValueError(
             'Spot check failed: There are no source paths to compare against the archive'
@@ -641,10 +644,10 @@ def spot_check(
     if count_delta_percentage > spot_check_config['count_tolerance_percentage']:
         rootless_source_paths = set(path.lstrip(os.path.sep) for path in source_paths)
         logger.debug(
-            f'Paths in source paths but not latest archive: {", ".join(rootless_source_paths - set(archive_paths)) or "none"}'
+            f'{log_prefix}: Paths in source paths but not latest archive: {", ".join(rootless_source_paths - set(archive_paths)) or "none"}'
         )
         logger.debug(
-            f'Paths in latest archive but not source paths: {", ".join(set(archive_paths) - rootless_source_paths) or "none"}'
+            f'{log_prefix}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - rootless_source_paths) or "none"}'
         )
         raise ValueError(
             f'Spot check failed: {count_delta_percentage:.2f}% file count delta between source paths and latest archive (tolerance is {spot_check_config["count_tolerance_percentage"]}%)'
@@ -658,24 +661,25 @@ def spot_check(
         global_arguments,
         local_path,
         remote_path,
+        log_prefix,
         source_paths,
     )
 
     # Error if the percentage of failing hashes exceeds the configured tolerance percentage.
-    logger.debug(f'{len(failing_paths)} non-matching spot check hashes')
+    logger.debug(f'{log_prefix}: {len(failing_paths)} non-matching spot check hashes')
     data_tolerance_percentage = spot_check_config['data_tolerance_percentage']
     failing_percentage = (len(failing_paths) / len(source_paths)) * 100
 
     if failing_percentage > data_tolerance_percentage:
         logger.debug(
-            f'Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
+            f'{log_prefix}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
         )
         raise ValueError(
             f'Spot check failed: {failing_percentage:.2f}% of source paths with data not matching the latest archive (tolerance is {data_tolerance_percentage}%)'
         )
 
     logger.info(
-        f'Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
+        f'{log_prefix}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
     )
 
 
@@ -709,7 +713,8 @@ def run_check(
         **hook_context,
     )
 
-    logger.info(f'Running consistency checks')
+    log_prefix = repository.get('label', repository['path'])
+    logger.info(f'{log_prefix}: Running consistency checks')
 
     repository_id = borgmatic.borg.check.get_repository_id(
         repository['path'],
@@ -763,7 +768,7 @@ def run_check(
 
     if 'spot' in checks:
         with borgmatic.config.paths.Runtime_directory(
-            config
+            config, log_prefix
         ) as borgmatic_runtime_directory:
             spot_check(
                 repository,

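For context on the tolerance math these log_prefix changes thread through: failing_percentage is computed exactly as in the hunk above, while the file count delta formula isn't visible in this excerpt, so the sketch below assumes a simple absolute percentage difference. Values are illustrative only.

# Illustrative spot check numbers; paths and counts are made up.
source_paths = [f'/etc/file{index}' for index in range(100)]
archive_paths = [f'etc/file{index}' for index in range(98)]
failing_paths = ['/etc/file3']  # Paths whose hashes differed.

# Assumed formula; the real computation is outside this excerpt.
count_delta_percentage = abs(len(source_paths) - len(archive_paths)) / len(source_paths) * 100

# Matches the computation visible in the hunk above.
failing_percentage = (len(failing_paths) / len(source_paths)) * 100

print(f'{count_delta_percentage:.2f}% file count delta')  # 2.00%
print(f'{failing_percentage:.2f}% file data delta')       # 1.00%
# The spot check fails if either value exceeds its configured tolerance
# (count_tolerance_percentage / data_tolerance_percentage).
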
borgmatic/actions/compact.py (+2 -3)

@@ -38,7 +38,7 @@ def run_compact(
     )
     if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
         logger.info(
-            f'Compacting segments{dry_run_label}'
+            f'{repository.get("label", repository["path"])}: Compacting segments{dry_run_label}'
         )
         borgmatic.borg.compact.compact_segments(
             global_arguments.dry_run,
@@ -54,9 +54,8 @@ def run_compact(
         )
     else:  # pragma: nocover
         logger.info(
-            'Skipping compact (only available/needed in Borg 1.2+)'
+            f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)'
         )
-
     borgmatic.hooks.command.execute_hook(
         config.get('after_compact'),
         config.get('umask'),

borgmatic/actions/config/bootstrap.py (+1 -0)

@@ -45,6 +45,7 @@ def get_config_paths(archive_name, bootstrap_arguments, global_arguments, local_
     # still want to support reading the manifest from previously created archives as well.
     with borgmatic.config.paths.Runtime_directory(
         {'user_runtime_directory': bootstrap_arguments.user_runtime_directory},
+        bootstrap_arguments.repository,
     ) as borgmatic_runtime_directory:
         for base_directory in (
             'borgmatic',

borgmatic/actions/create.py (+6 -2)

@@ -283,15 +283,17 @@ def run_create(
         **hook_context,
     )
 
-    logger.info(f'Creating archive{dry_run_label}')
+    log_prefix = repository.get('label', repository['path'])
+    logger.info(f'{log_prefix}: Creating archive{dry_run_label}')
     working_directory = borgmatic.config.paths.get_working_directory(config)
 
     with borgmatic.config.paths.Runtime_directory(
-        config
+        config, log_prefix
     ) as borgmatic_runtime_directory:
         borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
             'remove_data_source_dumps',
             config,
+            repository['path'],
             borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
             borgmatic_runtime_directory,
             global_arguments.dry_run,
@@ -300,6 +302,7 @@ def run_create(
         active_dumps = borgmatic.hooks.dispatch.call_hooks(
             'dump_data_sources',
             config,
+            repository['path'],
             borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
             config_paths,
             borgmatic_runtime_directory,
@@ -336,6 +339,7 @@ def run_create(
         borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
             'remove_data_source_dumps',
             config,
+            config_filename,
             borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
             borgmatic_runtime_directory,
             global_arguments.dry_run,

borgmatic/actions/delete.py (+1 -1)

@@ -23,7 +23,7 @@ def run_delete(
     if delete_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, delete_arguments.repository
     ):
-        logger.answer('Deleting archives')
+        logger.answer(f'{repository.get("label", repository["path"])}: Deleting archives')
 
         archive_name = (
             borgmatic.borg.repo_list.resolve_archive_name(

borgmatic/actions/export_key.py (+1 -1)

@@ -21,7 +21,7 @@ def run_export_key(
     if export_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, export_arguments.repository
     ):
-        logger.info('Exporting repository key')
+        logger.info(f'{repository.get("label", repository["path"])}: Exporting repository key')
         borgmatic.borg.export_key.export_key(
             repository['path'],
             config,

borgmatic/actions/export_tar.py (+1 -1)

@@ -23,7 +23,7 @@ def run_export_tar(
         repository, export_tar_arguments.repository
     ):
         logger.info(
-            f'Exporting archive {export_tar_arguments.archive} as tar file'
+            f'{repository["path"]}: Exporting archive {export_tar_arguments.archive} as tar file'
         )
         borgmatic.borg.export_tar.export_tar_archive(
             global_arguments.dry_run,

borgmatic/actions/extract.py (+1 -1)

@@ -34,7 +34,7 @@ def run_extract(
         repository, extract_arguments.repository
     ):
         logger.info(
-            f'Extracting archive {extract_arguments.archive}'
+            f'{repository.get("label", repository["path"])}: Extracting archive {extract_arguments.archive}'
         )
         borgmatic.borg.extract.extract_archive(
             global_arguments.dry_run,

borgmatic/actions/info.py (+1 -1)

@@ -28,7 +28,7 @@ def run_info(
     ):
         if not info_arguments.json:
             logger.answer(
-                'Displaying archive summary information'
+                f'{repository.get("label", repository["path"])}: Displaying archive summary information'
             )
         archive_name = borgmatic.borg.repo_list.resolve_archive_name(
             repository['path'],

borgmatic/actions/list.py (+2 -2)

@@ -27,9 +27,9 @@ def run_list(
     ):
         if not list_arguments.json:
             if list_arguments.find_paths:  # pragma: no cover
-                logger.answer('Searching archives')
+                logger.answer(f'{repository.get("label", repository["path"])}: Searching archives')
             elif not list_arguments.archive:  # pragma: no cover
-                logger.answer('Listing archives')
+                logger.answer(f'{repository.get("label", repository["path"])}: Listing archives')
 
         archive_name = borgmatic.borg.repo_list.resolve_archive_name(
             repository['path'],

borgmatic/actions/mount.py (+2 -2)

@@ -24,10 +24,10 @@ def run_mount(
     ):
         if mount_arguments.archive:
             logger.info(
-                f'Mounting archive {mount_arguments.archive}'
+                f'{repository.get("label", repository["path"])}: Mounting archive {mount_arguments.archive}'
             )
         else:  # pragma: nocover
-            logger.info('Mounting repository')
+            logger.info(f'{repository.get("label", repository["path"])}: Mounting repository')
 
         borgmatic.borg.mount.mount_archive(
             repository['path'],

borgmatic/actions/prune.py (+1 -1)

@@ -35,7 +35,7 @@ def run_prune(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info(f'Pruning archives{dry_run_label}')
+    logger.info(f'{repository.get("label", repository["path"])}: Pruning archives{dry_run_label}')
     borgmatic.borg.prune.prune_archives(
         global_arguments.dry_run,
         repository['path'],

borgmatic/actions/repo_create.py (+1 -1)

@@ -23,7 +23,7 @@ def run_repo_create(
     ):
         return
 
-    logger.info('Creating repository')
+    logger.info(f'{repository.get("label", repository["path"])}: Creating repository')
     borgmatic.borg.repo_create.create_repository(
         global_arguments.dry_run,
         repository['path'],

borgmatic/actions/repo_delete.py (+1 -1)

@@ -21,7 +21,7 @@ def run_repo_delete(
         repository, repo_delete_arguments.repository
     ):
         logger.answer(
-            'Deleting repository'
+            f'{repository.get("label", repository["path"])}: Deleting repository'
             + (' cache' if repo_delete_arguments.cache_only else '')
         )
 

borgmatic/actions/repo_info.py (+1 -1)

@@ -26,7 +26,7 @@ def run_repo_info(
     ):
         if not repo_info_arguments.json:
             logger.answer(
-                'Displaying repository summary information'
+                f'{repository.get("label", repository["path"])}: Displaying repository summary information'
             )
 
         json_output = borgmatic.borg.repo_info.display_repository_info(

borgmatic/actions/repo_list.py (+1 -1)

@@ -25,7 +25,7 @@ def run_repo_list(
         repository, repo_list_arguments.repository
     ):
         if not repo_list_arguments.json:
-            logger.answer('Listing repository')
+            logger.answer(f'{repository.get("label", repository["path"])}: Listing repository')
 
         json_output = borgmatic.borg.repo_list.list_repository(
             repository['path'],

borgmatic/actions/restore.py (+14 -6)

@@ -71,10 +71,10 @@ def render_dump_metadata(dump):
     return metadata
 
 
-def get_configured_data_source(config, restore_dump):
+def get_configured_data_source(config, restore_dump, log_prefix):
     '''
     Search in the given configuration dict for dumps corresponding to the given dump to restore. If
-    there are multiple matches, error.
+    there are multiple matches, error. Log using the given log prefix.
 
     Return the found data source as a data source configuration dict or None if not found.
     '''
@@ -91,6 +91,7 @@ def get_configured_data_source(config, restore_dump):
             borgmatic.hooks.dispatch.call_hook(
                 function_name='get_default_port',
                 config=config,
+                log_prefix=log_prefix,
                 hook_name=hook_name,
             ),
         )
@@ -173,12 +174,13 @@ def restore_single_dump(
     )
 
     logger.info(
-        f'Restoring data source {dump_metadata}'
+        f'{repository.get("label", repository["path"])}: Restoring data source {dump_metadata}'
     )
 
     dump_patterns = borgmatic.hooks.dispatch.call_hooks(
         'make_data_source_dump_patterns',
         config,
+        repository['path'],
         borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
         borgmatic_runtime_directory,
         data_source['name'],
@@ -225,6 +227,7 @@ def restore_single_dump(
     borgmatic.hooks.dispatch.call_hook(
         function_name='restore_data_source_dump',
         config=config,
+        log_prefix=repository['path'],
         hook_name=hook_name,
         data_source=data_source,
         dry_run=global_arguments.dry_run,
@@ -316,7 +319,7 @@ def collect_dumps_from_archive(
             break
         else:
             logger.warning(
-                f'Ignoring invalid data source dump path "{dump_path}" in archive {archive}'
+                f'{repository}: Ignoring invalid data source dump path "{dump_path}" in archive {archive}'
             )
 
     return dumps_from_archive
@@ -441,14 +444,16 @@ def run_restore(
     ):
         return
 
-    logger.info(f'Restoring data sources from archive {restore_arguments.archive}')
+    log_prefix = repository.get('label', repository['path'])
+    logger.info(f'{log_prefix}: Restoring data sources from archive {restore_arguments.archive}')
 
     with borgmatic.config.paths.Runtime_directory(
-        config
+        config, log_prefix
     ) as borgmatic_runtime_directory:
         borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
             'remove_data_source_dumps',
             config,
+            repository['path'],
             borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
             borgmatic_runtime_directory,
             global_arguments.dry_run,
@@ -489,6 +494,7 @@ def run_restore(
             found_data_source = get_configured_data_source(
                 config,
                 restore_dump,
+                log_prefix=repository['path'],
             )
 
             # For a dump that wasn't found via an exact match in the configuration, try to fallback
@@ -497,6 +503,7 @@ def run_restore(
                 found_data_source = get_configured_data_source(
                     config,
                     Dump(restore_dump.hook_name, 'all', restore_dump.hostname, restore_dump.port),
+                    log_prefix=repository['path'],
                 )
 
                 if not found_data_source:
@@ -524,6 +531,7 @@ def run_restore(
         borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
             'remove_data_source_dumps',
             config,
+            repository['path'],
             borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
             borgmatic_runtime_directory,
             global_arguments.dry_run,

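The run_restore() hunks above show a two-step lookup: an exact match for the dump, then a fallback to a configured "all" dump with the same hook, hostname, and port. A condensed sketch of that fallback, assuming Dump is a namedtuple with the four fields used above and with a hypothetical find_dump() standing in for get_configured_data_source():

import collections

# Assumed shape; Dump's real definition is outside this excerpt.
Dump = collections.namedtuple('Dump', ('hook_name', 'data_source_name', 'hostname', 'port'))

configured = {
    Dump('postgresql_databases', 'all', 'localhost', 5432): {'name': 'all'},
}


def find_dump(dump):  # Hypothetical stand-in for get_configured_data_source().
    return configured.get(dump)


restore_dump = Dump('postgresql_databases', 'users', 'localhost', 5432)
found_data_source = find_dump(restore_dump)

if not found_data_source:
    # Fall back to an "all" dump covering every data source on that host.
    found_data_source = find_dump(
        Dump(restore_dump.hook_name, 'all', restore_dump.hostname, restore_dump.port)
    )

print(found_data_source)  # {'name': 'all'}
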
borgmatic/actions/transfer.py (+1 -1)

@@ -18,7 +18,7 @@ def run_transfer(
     Run the "transfer" action for the given repository.
     '''
     logger.info(
-        'Transferring archives to repository'
+        f'{repository.get("label", repository["path"])}: Transferring archives to repository'
     )
     borgmatic.borg.transfer.transfer_archives(
         global_arguments.dry_run,

borgmatic/borg/create.py (+8 -3)

@@ -20,12 +20,14 @@ from borgmatic.execute import (
 logger = logging.getLogger(__name__)
 
 
-def write_patterns_file(patterns, borgmatic_runtime_directory, patterns_file=None):
+def write_patterns_file(patterns, borgmatic_runtime_directory, log_prefix, patterns_file=None):
     '''
     Given a sequence of patterns as borgmatic.borg.pattern.Pattern instances, write them to a named
     temporary file in the given borgmatic runtime directory and return the file object so it can
     continue to exist on disk as long as the caller needs it.
 
+    Use the given log prefix in any logging.
+
     If an optional open pattern file is given, append to it instead of making a new temporary file.
     Return None if no patterns are provided.
     '''
@@ -41,7 +43,7 @@ def write_patterns_file(patterns, borgmatic_runtime_directory, patterns_file=Non
         f'{pattern.type.value} {pattern.style.value}{":" if pattern.style.value else ""}{pattern.path}'
         for pattern in patterns
     )
-    logger.debug(f'Writing patterns to {patterns_file.name}:\n{patterns_output}')
+    logger.debug(f'{log_prefix}: Writing patterns to {patterns_file.name}:\n{patterns_output}')
 
     patterns_file.write(patterns_output)
     patterns_file.flush()
@@ -215,7 +217,9 @@ def make_base_create_command(
     if config.get('source_directories_must_exist', False):
         check_all_root_patterns_exist(patterns)
 
-    patterns_file = write_patterns_file(patterns, borgmatic_runtime_directory)
+    patterns_file = write_patterns_file(
+        patterns, borgmatic_runtime_directory, log_prefix=repository_path
+    )
     checkpoint_interval = config.get('checkpoint_interval', None)
     checkpoint_volume = config.get('checkpoint_volume', None)
     chunker_params = config.get('chunker_params', None)
@@ -330,6 +334,7 @@ def make_base_create_command(
                     for special_file_path in special_file_paths
                 ),
                 borgmatic_runtime_directory,
+                log_prefix=repository_path,
                 patterns_file=patterns_file,
             )
 

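Why write_patterns_file() returns the file object rather than just a path: tempfile.NamedTemporaryFile deletes the file once the object is closed or garbage collected, so the caller must hold the reference for as long as Borg needs to read the patterns file. A minimal sketch of that lifetime concern (make_patterns_file() is a stand-in, not borgmatic's actual helper):

import tempfile


def make_patterns_file(lines, runtime_directory):
    # Delete-on-close semantics: the file vanishes when this object goes away.
    patterns_file = tempfile.NamedTemporaryFile('w', dir=runtime_directory)
    patterns_file.write('\n'.join(lines))
    patterns_file.flush()  # Make the contents visible to the Borg subprocess.
    return patterns_file   # Caller keeps this alive until Borg has run.


patterns_file = make_patterns_file(['R /home', '- home/*/.cache'], '/tmp')
print(patterns_file.name)  # e.g. /tmp/tmpXXXXXXXX, passed to Borg via --patterns-from.
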
borgmatic/commands/borgmatic.py (+74 -76)

@@ -39,7 +39,7 @@ from borgmatic.commands.arguments import parse_arguments
 from borgmatic.config import checks, collect, validate
 from borgmatic.hooks import command, dispatch
 from borgmatic.hooks.monitoring import monitor
-from borgmatic.logger import DISABLED, add_custom_log_levels, configure_logging, should_do_markup, set_log_prefix
+from borgmatic.logger import DISABLED, add_custom_log_levels, configure_logging, should_do_markup
 from borgmatic.signals import configure_signals
 from borgmatic.verbosity import verbosity_to_log_level
 
@@ -86,12 +86,12 @@ def run_configuration(config_filename, config, config_paths, arguments):
 
     if skip_actions:
         logger.debug(
-            f"Skipping {'/'.join(skip_actions)} action{'s' if len(skip_actions) > 1 else ''} due to configured skip_actions"
+            f"{config_filename}: Skipping {'/'.join(skip_actions)} action{'s' if len(skip_actions) > 1 else ''} due to configured skip_actions"
         )
 
     try:
         local_borg_version = borg_version.local_borg_version(config, local_path)
-        logger.debug(f'Borg {local_borg_version}')
+        logger.debug(f'{config_filename}: Borg {local_borg_version}')
     except (OSError, CalledProcessError, ValueError) as error:
         yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
         return
@@ -101,6 +101,7 @@ def run_configuration(config_filename, config, config_paths, arguments):
             dispatch.call_hooks(
                 'initialize_monitor',
                 config,
+                config_filename,
                 dispatch.Hook_type.MONITORING,
                 monitoring_log_level,
                 global_arguments.dry_run,
@@ -109,13 +110,14 @@ def run_configuration(config_filename, config, config_paths, arguments):
             dispatch.call_hooks(
                 'ping_monitor',
                 config,
+                config_filename,
                 dispatch.Hook_type.MONITORING,
                 monitor.State.START,
                 monitoring_log_level,
                 global_arguments.dry_run,
             )
     except (OSError, CalledProcessError) as error:
-        if command.considered_soft_failure(error):
+        if command.considered_soft_failure(config_filename, error):
             return
 
         encountered_error = error
@@ -128,59 +130,55 @@ def run_configuration(config_filename, config, config_paths, arguments):
                 (repo, 0),
             )
 
-        try:
-            while not repo_queue.empty():
-                repository, retry_num = repo_queue.get()
-                set_log_prefix(repository.get('label', repository['path'])) 
-                logger.debug(
-                    'Running actions for repository'
+        while not repo_queue.empty():
+            repository, retry_num = repo_queue.get()
+            logger.debug(
+                f'{repository.get("label", repository["path"])}: Running actions for repository'
+            )
+            timeout = retry_num * retry_wait
+            if timeout:
+                logger.warning(
+                    f'{repository.get("label", repository["path"])}: Sleeping {timeout}s before next retry'
                 )
-                timeout = retry_num * retry_wait
-                if timeout:
-                    logger.warning(
-                        f'Sleeping {timeout}s before next retry'
-                    )
-                    time.sleep(timeout)
-                try:
-                    yield from run_actions(
-                        arguments=arguments,
-                        config_filename=config_filename,
-                        config=config,
-                        config_paths=config_paths,
-                        local_path=local_path,
-                        remote_path=remote_path,
-                        local_borg_version=local_borg_version,
-                        repository=repository,
+                time.sleep(timeout)
+            try:
+                yield from run_actions(
+                    arguments=arguments,
+                    config_filename=config_filename,
+                    config=config,
+                    config_paths=config_paths,
+                    local_path=local_path,
+                    remote_path=remote_path,
+                    local_borg_version=local_borg_version,
+                    repository=repository,
+                )
+            except (OSError, CalledProcessError, ValueError) as error:
+                if retry_num < retries:
+                    repo_queue.put(
+                        (repository, retry_num + 1),
                     )
-                except (OSError, CalledProcessError, ValueError) as error:
-                    if retry_num < retries:
-                        repo_queue.put(
-                            (repository, retry_num + 1),
-                        )
-                        tuple(  # Consume the generator so as to trigger logging.
-                            log_error_records(
-                                f'{repository.get("label", repository["path"])}: Error running actions for repository',
-                                error,
-                                levelno=logging.WARNING,
-                                log_command_error_output=True,
-                            )
-                        )
-                        logger.warning(
-                            f'Retrying... attempt {retry_num + 1}/{retries}'
+                    tuple(  # Consume the generator so as to trigger logging.
+                        log_error_records(
+                            f'{repository.get("label", repository["path"])}: Error running actions for repository',
+                            error,
+                            levelno=logging.WARNING,
+                            log_command_error_output=True,
                         )
-                        continue
+                    )
+                    logger.warning(
+                        f'{repository.get("label", repository["path"])}: Retrying... attempt {retry_num + 1}/{retries}'
+                    )
+                    continue
 
-                    if command.considered_soft_failure(error):
-                        continue
+                if command.considered_soft_failure(config_filename, error):
+                    continue
 
-                    yield from log_error_records(
-                        f'Error running actions for repository',
-                        error,
-                    )
-                    encountered_error = error
-                    error_repository = repository['path']
-        finally:
-            set_log_prefix(config_filename)
+                yield from log_error_records(
+                    f'{repository.get("label", repository["path"])}: Error running actions for repository',
+                    error,
+                )
+                encountered_error = error
+                error_repository = repository['path']
 
     try:
         if monitoring_hooks_are_activated:
@@ -188,13 +186,14 @@ def run_configuration(config_filename, config, config_paths, arguments):
             dispatch.call_hooks(
                 'ping_monitor',
                 config,
+                config_filename,
                 dispatch.Hook_type.MONITORING,
                 monitor.State.LOG,
                 monitoring_log_level,
                 global_arguments.dry_run,
             )
     except (OSError, CalledProcessError) as error:
-        if not command.considered_soft_failure(error):
+        if not command.considered_soft_failure(config_filename, error):
             encountered_error = error
             yield from log_error_records(f'{repository["path"]}: Error pinging monitor', error)
 
@@ -204,6 +203,7 @@ def run_configuration(config_filename, config, config_paths, arguments):
                 dispatch.call_hooks(
                     'ping_monitor',
                     config,
+                    config_filename,
                     dispatch.Hook_type.MONITORING,
                     monitor.State.FINISH,
                     monitoring_log_level,
@@ -212,12 +212,13 @@ def run_configuration(config_filename, config, config_paths, arguments):
                 dispatch.call_hooks(
                     'destroy_monitor',
                     config,
+                    config_filename,
                     dispatch.Hook_type.MONITORING,
                     monitoring_log_level,
                     global_arguments.dry_run,
                 )
         except (OSError, CalledProcessError) as error:
-            if command.considered_soft_failure(error):
+            if command.considered_soft_failure(config_filename, error):
                 return
 
             encountered_error = error
@@ -238,6 +239,7 @@ def run_configuration(config_filename, config, config_paths, arguments):
             dispatch.call_hooks(
                 'ping_monitor',
                 config,
+                config_filename,
                 dispatch.Hook_type.MONITORING,
                 monitor.State.FAIL,
                 monitoring_log_level,
@@ -246,12 +248,13 @@ def run_configuration(config_filename, config, config_paths, arguments):
             dispatch.call_hooks(
                 'destroy_monitor',
                 config,
+                config_filename,
                 dispatch.Hook_type.MONITORING,
                 monitoring_log_level,
                 global_arguments.dry_run,
             )
         except (OSError, CalledProcessError) as error:
-            if command.considered_soft_failure(error):
+            if command.considered_soft_failure(config_filename, error):
                 return
 
             yield from log_error_records(f'{config_filename}: Error running on-error hook', error)
@@ -816,28 +819,23 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
 
     # Execute the actions corresponding to each configuration file.
     json_results = []
-
-    try:
-        for config_filename, config in configs.items():
-            set_log_prefix(config_filename) 
-            results = list(run_configuration(config_filename, config, config_paths, arguments))
-            error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
-
-            if error_logs:
-                yield from log_error_records('An error occurred')
-                yield from error_logs
-            else:
-                yield logging.makeLogRecord(
-                    dict(
-                        levelno=logging.INFO,
-                        levelname='INFO',
-                        msg='Successfully ran configuration file',
-                    )
+    for config_filename, config in configs.items():
+        results = list(run_configuration(config_filename, config, config_paths, arguments))
+        error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
+
+        if error_logs:
+            yield from log_error_records(f'{config_filename}: An error occurred')
+            yield from error_logs
+        else:
+            yield logging.makeLogRecord(
+                dict(
+                    levelno=logging.INFO,
+                    levelname='INFO',
+                    msg=f'{config_filename}: Successfully ran configuration file',
                 )
-                if results:
-                    json_results.extend(results)
-    finally:
-        set_log_prefix(None) 
+            )
+            if results:
+                json_results.extend(results)
 
     if 'umount' in arguments:
         logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")

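The restored run_configuration() loop above implements retry with linear backoff: each failed repository is re-queued with an incremented retry count, and the next attempt first sleeps retry_num * retry_wait seconds. Stripped down to its skeleton, with a stub standing in for run_actions() (how the real code obtains retries and retry_wait isn't shown in this excerpt):

import queue
import time

retries = 2
retry_wait = 5

repo_queue = queue.Queue()
for repo in ({'path': '/repos/a.borg'}, {'path': '/repos/b.borg'}):
    repo_queue.put((repo, 0))


def run_actions(repository):  # Stub for borgmatic's real run_actions().
    pass


while not repo_queue.empty():
    repository, retry_num = repo_queue.get()
    timeout = retry_num * retry_wait
    if timeout:
        time.sleep(timeout)  # Linear backoff: 5s before retry 1, 10s before retry 2, ...
    try:
        run_actions(repository)
    except (OSError, ValueError):
        if retry_num < retries:
            repo_queue.put((repository, retry_num + 1))  # Re-queue for another attempt.
            continue
        raise  # Out of retries; the real code records the error instead of raising.
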
borgmatic/config/paths.py (+8 -7)

@@ -76,13 +76,14 @@ class Runtime_directory:
     automatically gets cleaned up as necessary.
     '''
 
-    def __init__(self, config):
+    def __init__(self, config, log_prefix):
         '''
-        Given a configuration dict determine the borgmatic runtime directory, creating a secure,
-        temporary directory within it if necessary. Defaults to $XDG_RUNTIME_DIR/./borgmatic or
-        $RUNTIME_DIRECTORY/./borgmatic or $TMPDIR/borgmatic-[random]/./borgmatic or
-        $TEMP/borgmatic-[random]/./borgmatic or /tmp/borgmatic-[random]/./borgmatic where "[random]"
-        is a randomly generated string intended to avoid path collisions.
+        Given a configuration dict and a log prefix, determine the borgmatic runtime directory,
+        creating a secure, temporary directory within it if necessary. Defaults to
+        $XDG_RUNTIME_DIR/./borgmatic or $RUNTIME_DIRECTORY/./borgmatic or
+        $TMPDIR/borgmatic-[random]/./borgmatic or $TEMP/borgmatic-[random]/./borgmatic or
+        /tmp/borgmatic-[random]/./borgmatic where "[random]" is a randomly generated string intended
+        to avoid path collisions.
 
         If XDG_RUNTIME_DIR or RUNTIME_DIRECTORY is set and already ends in "/borgmatic", then don't
         tack on a second "/borgmatic" path component.
@@ -126,7 +127,7 @@ class Runtime_directory:
         )
         os.makedirs(self.runtime_path, mode=0o700, exist_ok=True)
 
-        logger.debug(f'Using runtime directory {os.path.normpath(self.runtime_path)}')
+        logger.debug(f'{log_prefix}: Using runtime directory {os.path.normpath(self.runtime_path)}')
 
     def __enter__(self):
         '''

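Call sites elsewhere in this diff (run_create(), run_check(), run_restore(), and the bootstrap action) show how the revised constructor is used: the log prefix now rides along with the config. A short usage sketch with an illustrative config value:

import borgmatic.config.paths

config = {'user_runtime_directory': '/run/user/1000'}  # Illustrative value.
log_prefix = 'offsite'  # The repository label, falling back to its path.

with borgmatic.config.paths.Runtime_directory(config, log_prefix) as borgmatic_runtime_directory:
    # The directory is created mode 0o700 and cleaned up on exit as necessary.
    print(borgmatic_runtime_directory)
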
borgmatic/hooks/command.py (+9 -9)

@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
 SOFT_FAIL_EXIT_CODE = 75
 
 
-def interpolate_context(hook_description, command, context):
+def interpolate_context(config_filename, hook_description, command, context):
     '''
     Given a config filename, a hook description, a single hook command, and a dict of context
     names/values, interpolate the values by "{name}" into the command and return the result.
@@ -22,7 +22,7 @@ def interpolate_context(hook_description, command, context):
 
     for unsupported_variable in re.findall(r'{\w+}', command):
         logger.warning(
-            f"Variable '{unsupported_variable}' is not supported in {hook_description} hook"
+            f"{config_filename}: Variable '{unsupported_variable}' is not supported in {hook_description} hook"
         )
 
     return command
@@ -54,26 +54,26 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
     Raise subprocesses.CalledProcessError if an error occurs in a hook.
     '''
     if not commands:
-        logger.debug(f'No commands to run for {description} hook')
+        logger.debug(f'{config_filename}: No commands to run for {description} hook')
         return
 
     dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
 
     context['configuration_filename'] = config_filename
     commands = [
-        interpolate_context(description, command, context) for command in commands
+        interpolate_context(config_filename, description, command, context) for command in commands
     ]
 
     if len(commands) == 1:
-        logger.info(f'Running command for {description} hook{dry_run_label}')
+        logger.info(f'{config_filename}: Running command for {description} hook{dry_run_label}')
     else:
         logger.info(
-            f'Running {len(commands)} commands for {description} hook{dry_run_label}',
+            f'{config_filename}: Running {len(commands)} commands for {description} hook{dry_run_label}',
         )
 
     if umask:
         parsed_umask = int(str(umask), 8)
-        logger.debug(f'Set hook umask to {oct(parsed_umask)}')
+        logger.debug(f'{config_filename}: Set hook umask to {oct(parsed_umask)}')
         original_umask = os.umask(parsed_umask)
     else:
         original_umask = None
@@ -94,7 +94,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
             os.umask(original_umask)
 
 
-def considered_soft_failure(error):
+def considered_soft_failure(config_filename, error):
     '''
     Given a configuration filename and an exception object, return whether the exception object
     represents a subprocess.CalledProcessError with a return code of SOFT_FAIL_EXIT_CODE. If so,
@@ -106,7 +106,7 @@ def considered_soft_failure(error):
 
     if exit_code == SOFT_FAIL_EXIT_CODE:
         logger.info(
-            f'Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining repository actions',
+            f'{config_filename}: Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining repository actions',
         )
         return True
 

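For reference, the soft-failure contract that considered_soft_failure() enforces: a hook command exiting with SOFT_FAIL_EXIT_CODE (75) makes borgmatic skip the remaining repository actions instead of reporting an error. A hook script might use it like this (the mount-point check is illustrative, not from the source):

import os
import sys

SOFT_FAIL_EXIT_CODE = 75  # Matches the constant at the top of command.py.

# E.g. a before_backup hook: bail out quietly when this machine's backup
# drive isn't mounted, rather than failing the whole run.
if not os.path.ismount('/mnt/backup-drive'):  # Illustrative path.
    sys.exit(SOFT_FAIL_EXIT_CODE)
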
borgmatic/hooks/data_source/bootstrap.py (+15 -13)

@@ -10,7 +10,7 @@ import borgmatic.config.paths
 logger = logging.getLogger(__name__)
 
 
-def use_streaming(hook_config, config):  # pragma: no cover
+def use_streaming(hook_config, config, log_prefix):  # pragma: no cover
     '''
     Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
     '''
@@ -20,17 +20,18 @@ def use_streaming(hook_config, config):  # pragma: no cover
 def dump_data_sources(
     hook_config,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
     dry_run,
 ):
     '''
-    Given a bootstrap configuration dict, a configuration dict, the borgmatic configuration file
-    paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
-    create a borgmatic manifest file to store the paths of the configuration files used to create
-    the archive. But skip this if the bootstrap store_config_files option is False or if this is a
-    dry run.
+    Given a bootstrap configuration dict, a configuration dict, a log prefix, the borgmatic
+    configuration file paths, the borgmatic runtime directory, the configured patterns, and whether
+    this is a dry run, create a borgmatic manifest file to store the paths of the configuration
+    files used to create the archive. But skip this if the bootstrap store_config_files option is
+    False or if this is a dry run.
 
     Return an empty sequence, since there are no ongoing dump processes from this hook.
     '''
@@ -63,11 +64,11 @@ def dump_data_sources(
     return []
 
 
-def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
+def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
-    Given a bootstrap configuration dict, a configuration dict, the borgmatic runtime directory, and
-    whether this is a dry run, then remove the manifest file created above. If this is a dry run,
-    then don't actually remove anything.
+    Given a bootstrap configuration dict, a configuration dict, a log prefix, the borgmatic runtime
+    directory, and whether this is a dry run, then remove the manifest file created above. If this
+    is a dry run, then don't actually remove anything.
     '''
     dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
 
@@ -78,13 +79,13 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         'bootstrap',
     )
     logger.debug(
-        f'Looking for bootstrap manifest files to remove in {manifest_glob}{dry_run_label}'
+        f'{log_prefix}: Looking for bootstrap manifest files to remove in {manifest_glob}{dry_run_label}'
     )
 
     for manifest_directory in glob.glob(manifest_glob):
         manifest_file_path = os.path.join(manifest_directory, 'manifest.json')
         logger.debug(
-            f'Removing bootstrap manifest at {manifest_file_path}{dry_run_label}'
+            f'{log_prefix}: Removing bootstrap manifest at {manifest_file_path}{dry_run_label}'
         )
 
         if dry_run:
@@ -102,7 +103,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
 
 
 def make_data_source_dump_patterns(
-    hook_config, config, borgmatic_runtime_directory, name=None
+    hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
     Restores are implemented via the separate, purpose-specific "bootstrap" action rather than the
@@ -114,6 +115,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,

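The bootstrap hunks above, and the btrfs/dump/LVM ones below, all make the same mechanical change: every data-source hook module implements a shared interface, and this revert restores a log_prefix parameter in each of its functions. Condensed from the signatures visible in this diff (bodies elided; restore_data_source_dump() takes further parameters that the hunk above cuts off):

def use_streaming(hook_config, config, log_prefix):
    ...

def dump_data_sources(hook_config, config, log_prefix, config_paths,
                      borgmatic_runtime_directory, patterns, dry_run):
    ...

def remove_data_source_dumps(hook_config, config, log_prefix,
                             borgmatic_runtime_directory, dry_run):
    ...

def make_data_source_dump_patterns(hook_config, config, log_prefix,
                                   borgmatic_runtime_directory, name=None):
    ...

def restore_data_source_dump(hook_config, config, log_prefix, data_source,
                             dry_run, extract_process):
    # Remaining parameters elided; the excerpt ends mid-signature.
    ...
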
borgmatic/hooks/data_source/btrfs.py (+24 -20)

@@ -14,7 +14,7 @@ import borgmatic.hooks.data_source.snapshot
 logger = logging.getLogger(__name__)
 
 
-def use_streaming(hook_config, config):  # pragma: no cover
+def use_streaming(hook_config, config, log_prefix):  # pragma: no cover
     '''
     Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
     '''
@@ -211,24 +211,26 @@ def snapshot_subvolume(btrfs_command, subvolume_path, snapshot_path):  # pragma:
 def dump_data_sources(
     hook_config,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
     dry_run,
 ):
     '''
-    Given a Btrfs configuration dict, a configuration dict, the borgmatic configuration file paths,
-    the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
-    auto-detect and snapshot any Btrfs subvolume mount points listed in the given patterns. Also
-    update those patterns, replacing subvolume mount points with corresponding snapshot directories
-    so they get stored in the Borg archive instead.
+    Given a Btrfs configuration dict, a configuration dict, a log prefix, the borgmatic
+    configuration file paths, the borgmatic runtime directory, the configured patterns, and whether
+    this is a dry run, auto-detect and snapshot any Btrfs subvolume mount points listed in the given
+    patterns. Also update those patterns, replacing subvolume mount points with corresponding
+    snapshot directories so they get stored in the Borg archive instead. Use the log prefix in any
+    log entries.
 
     Return an empty sequence, since there are no ongoing dump processes from this hook.
 
     If this is a dry run, then don't actually snapshot anything.
     '''
     dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-    logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
+    logger.info(f'{log_prefix}: Snapshotting Btrfs subvolumes{dry_run_label}')
 
     # Based on the configured patterns, determine Btrfs subvolumes to backup.
     btrfs_command = hook_config.get('btrfs_command', 'btrfs')
@@ -236,11 +238,11 @@ def dump_data_sources(
     subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)
 
     if not subvolumes:
-        logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')
+        logger.warning(f'{log_prefix}: No Btrfs subvolumes found to snapshot{dry_run_label}')
 
     # Snapshot each subvolume, rewriting patterns to use their snapshot paths.
     for subvolume in subvolumes:
-        logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')
+        logger.debug(f'{log_prefix}: Creating Btrfs snapshot for {subvolume.path} subvolume')
 
         snapshot_path = make_snapshot_path(subvolume.path)
 
@@ -278,11 +280,12 @@ def delete_snapshot(btrfs_command, snapshot_path):  # pragma: no cover
     )
 
 
-def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
+def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
-    Given a Btrfs configuration dict, a configuration dict, the borgmatic runtime directory, and
-    whether this is a dry run, delete any Btrfs snapshots created by borgmatic. If this is a dry run
-    or Btrfs isn't configured in borgmatic's configuration, then don't actually remove anything.
+    Given a Btrfs configuration dict, a configuration dict, a log prefix, the borgmatic runtime
+    directory, and whether this is a dry run, delete any Btrfs snapshots created by borgmatic. Use
+    the log prefix in any log entries. If this is a dry run or Btrfs isn't configured in borgmatic's
+    configuration, then don't actually remove anything.
     '''
     if hook_config is None:
         return
@@ -295,10 +298,10 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
     try:
         all_subvolumes = get_subvolumes(btrfs_command, findmnt_command)
     except FileNotFoundError as error:
-        logger.debug(f'Could not find "{error.filename}" command')
+        logger.debug(f'{log_prefix}: Could not find "{error.filename}" command')
         return
     except subprocess.CalledProcessError as error:
-        logger.debug(error)
+        logger.debug(f'{log_prefix}: {error}')
         return
 
     # Reversing the sorted subvolumes ensures that we remove longer mount point paths of child
@@ -310,14 +313,14 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         )
 
         logger.debug(
-            f'Looking for snapshots to remove in {subvolume_snapshots_glob}{dry_run_label}'
+            f'{log_prefix}: Looking for snapshots to remove in {subvolume_snapshots_glob}{dry_run_label}'
         )
 
         for snapshot_path in glob.glob(subvolume_snapshots_glob):
             if not os.path.isdir(snapshot_path):
                 continue
 
-            logger.debug(f'Deleting Btrfs snapshot {snapshot_path}{dry_run_label}')
+            logger.debug(f'{log_prefix}: Deleting Btrfs snapshot {snapshot_path}{dry_run_label}')
 
             if dry_run:
                 continue
@@ -325,10 +328,10 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
             try:
                 delete_snapshot(btrfs_command, snapshot_path)
             except FileNotFoundError:
-                logger.debug(f'Could not find "{btrfs_command}" command')
+                logger.debug(f'{log_prefix}: Could not find "{btrfs_command}" command')
                 return
             except subprocess.CalledProcessError as error:
-                logger.debug(error)
+                logger.debug(f'{log_prefix}: {error}')
                 return
 
             # Strip off the subvolume path from the end of the snapshot path and then delete the
@@ -337,7 +340,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
 
 
 def make_data_source_dump_patterns(
-    hook_config, config, borgmatic_runtime_directory, name=None
+    hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
     Restores aren't implemented, because stored files can be extracted directly with "extract".
@@ -348,6 +351,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,

borgmatic/hooks/data_source/dump.py (+2 -2)

@@ -46,14 +46,14 @@ def create_named_pipe_for_dump(dump_path):
     os.mkfifo(dump_path, mode=0o600)
 
 
-def remove_data_source_dumps(dump_path, data_source_type_name, dry_run):
+def remove_data_source_dumps(dump_path, data_source_type_name, log_prefix, dry_run):
     '''
     Remove all data source dumps in the given dump directory path (including the directory itself).
     If this is a dry run, then don't actually remove anything.
     '''
     dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
 
-    logger.debug(f'Removing {data_source_type_name} data source dumps{dry_run_label}')
+    logger.debug(f'{log_prefix}: Removing {data_source_type_name} data source dumps{dry_run_label}')
 
     if dry_run:
         return

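create_named_pipe_for_dump() above (via os.mkfifo) underpins the streaming dumps used by the database hooks: a dump command writes into the FIFO while Borg reads from it, so the dump never lands on disk. A minimal sketch with a stand-in writer (in borgmatic the writer would be something like pg_dump, and the reader is Borg):

import os
import subprocess
import tempfile

dump_path = os.path.join(tempfile.mkdtemp(), 'users')
os.mkfifo(dump_path, mode=0o600)  # Owner-only pipe, as in the hunk above.

# Stand-in writer; in borgmatic this is the database dump command.
writer = subprocess.Popen(['sh', '-c', f'echo "-- dump contents" > {dump_path}'])

# Stand-in reader; in borgmatic, Borg consumes the pipe while archiving.
with open(dump_path) as dump_file:
    print(dump_file.read())

writer.wait()
os.remove(dump_path)
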
borgmatic/hooks/data_source/lvm.py (+28 -25)

@@ -14,7 +14,7 @@ import borgmatic.hooks.data_source.snapshot
 logger = logging.getLogger(__name__)
 
 
-def use_streaming(hook_config, config):  # pragma: no cover
+def use_streaming(hook_config, config, log_prefix):  # pragma: no cover
     '''
     Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
     '''
@@ -161,24 +161,26 @@ DEFAULT_SNAPSHOT_SIZE = '10%ORIGIN'
 def dump_data_sources(
     hook_config,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
     dry_run,
 ):
     '''
-    Given an LVM configuration dict, a configuration dict, the borgmatic configuration file paths,
-    the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
-    auto-detect and snapshot any LVM logical volume mount points listed in the given patterns. Also
-    update those patterns, replacing logical volume mount points with corresponding snapshot
-    directories so they get stored in the Borg archive instead.
+    Given an LVM configuration dict, a configuration dict, a log prefix, the borgmatic configuration
+    file paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry
+    run, auto-detect and snapshot any LVM logical volume mount points listed in the given patterns.
+    Also update those patterns, replacing logical volume mount points with corresponding snapshot
+    directories so they get stored in the Borg archive instead. Use the log prefix in any log
+    entries.
 
     Return an empty sequence, since there are no ongoing dump processes from this hook.
 
     If this is a dry run, then don't actually snapshot anything.
     '''
     dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-    logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
+    logger.info(f'{log_prefix}: Snapshotting LVM logical volumes{dry_run_label}')
 
     # List logical volumes to get their mount points.
     lsblk_command = hook_config.get('lsblk_command', 'lsblk')
@@ -189,12 +191,12 @@ def dump_data_sources(
     normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
 
     if not requested_logical_volumes:
-        logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')
+        logger.warning(f'{log_prefix}: No LVM logical volumes found to snapshot{dry_run_label}')
 
     for logical_volume in requested_logical_volumes:
         snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
         logger.debug(
-            f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
+            f'{log_prefix}: Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
         )
 
         if not dry_run:
@@ -222,7 +224,7 @@ def dump_data_sources(
         )
 
         logger.debug(
-            f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
+            f'{log_prefix}: Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
         )
 
         if dry_run:
@@ -310,12 +312,12 @@ def get_snapshots(lvs_command, snapshot_name=None):
         raise ValueError(f'Invalid {lvs_command} output: Missing key "{error}"')
 
 
-def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
+def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
-    Given an LVM configuration dict, a configuration dict, the borgmatic runtime directory, and
-    whether this is a dry run, unmount and delete any LVM snapshots created by borgmatic. If this is
-    a dry run or LVM isn't configured in borgmatic's configuration, then don't actually remove
-    anything.
+    Given an LVM configuration dict, a configuration dict, a log prefix, the borgmatic runtime
+    directory, and whether this is a dry run, unmount and delete any LVM snapshots created by
+    borgmatic. Use the log prefix in any log entries. If this is a dry run or LVM isn't configured
+    in borgmatic's configuration, then don't actually remove anything.
     '''
     if hook_config is None:
         return
@@ -326,10 +328,10 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
     try:
         logical_volumes = get_logical_volumes(hook_config.get('lsblk_command', 'lsblk'))
     except FileNotFoundError as error:
-        logger.debug(f'Could not find "{error.filename}" command')
+        logger.debug(f'{log_prefix}: Could not find "{error.filename}" command')
         return
     except subprocess.CalledProcessError as error:
-        logger.debug(error)
+        logger.debug(f'{log_prefix}: {error}')
         return
 
     snapshots_glob = os.path.join(
@@ -339,7 +341,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         'lvm_snapshots',
     )
     logger.debug(
-        f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
+        f'{log_prefix}: Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
     )
     umount_command = hook_config.get('umount_command', 'umount')
 
@@ -365,7 +367,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
                     continue
 
             logger.debug(
-                f'Unmounting LVM snapshot at {snapshot_mount_path}{dry_run_label}'
+                f'{log_prefix}: Unmounting LVM snapshot at {snapshot_mount_path}{dry_run_label}'
             )
 
             if dry_run:
@@ -374,10 +376,10 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
             try:
                 unmount_snapshot(umount_command, snapshot_mount_path)
             except FileNotFoundError:
-                logger.debug(f'Could not find "{umount_command}" command')
+                logger.debug(f'{log_prefix}: Could not find "{umount_command}" command')
                 return
             except subprocess.CalledProcessError as error:
-                logger.debug(error)
+                logger.debug(f'{log_prefix}: {error}')
                 return
 
         if not dry_run:
@@ -389,10 +391,10 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
     try:
         snapshots = get_snapshots(hook_config.get('lvs_command', 'lvs'))
     except FileNotFoundError as error:
-        logger.debug(f'Could not find "{error.filename}" command')
+        logger.debug(f'{log_prefix}: Could not find "{error.filename}" command')
         return
     except subprocess.CalledProcessError as error:
-        logger.debug(error)
+        logger.debug(f'{log_prefix}: {error}')
         return
 
     for snapshot in snapshots:
@@ -400,14 +402,14 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         if not snapshot.name.split('_')[-1].startswith(BORGMATIC_SNAPSHOT_PREFIX):
             continue
 
-        logger.debug(f'Deleting LVM snapshot {snapshot.name}{dry_run_label}')
+        logger.debug(f'{log_prefix}: Deleting LVM snapshot {snapshot.name}{dry_run_label}')
 
         if not dry_run:
             remove_snapshot(lvremove_command, snapshot.device_path)
 
 
 def make_data_source_dump_patterns(
-    hook_config, config, borgmatic_runtime_directory, name=None
+    hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
     Restores aren't implemented, because stored files can be extracted directly with "extract".
@@ -418,6 +420,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,

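The shape of this change repeats across every data source hook below: each hook function gains a log_prefix parameter and interpolates it into each of its own log calls, instead of relying on formatter-level state (compare the logger.py hunk near the end of this diff). A minimal sketch of the convention, using only the standard logging module and a trimmed-down signature rather than borgmatic's full hook interface:

import logging

logger = logging.getLogger(__name__)


def dump_data_sources(hook_config, config, log_prefix, dry_run):
    # Every message carries the prefix explicitly, so output stays
    # attributable to a repository without any formatter support.
    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
    logger.info(f'{log_prefix}: Snapshotting LVM logical volumes{dry_run_label}')

    # Snapshot-based hooks have no ongoing dump processes to return.
    return []
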
+ 31 - 25
borgmatic/hooks/data_source/mariadb.py

@@ -25,7 +25,7 @@ def make_dump_path(base_directory):  # pragma: no cover
 SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
 
 
-def database_names_to_dump(database, extra_environment, dry_run):
+def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
     '''
     Given a requested database config, return the corresponding sequence of database names to dump.
     In the case of "all", query for the names of databases on the configured host and return them,
@@ -49,7 +49,7 @@ def database_names_to_dump(database, extra_environment, dry_run):
         + ('--skip-column-names', '--batch')
         + ('--execute', 'show schemas')
     )
-    logger.debug(f'Querying for "all" MariaDB databases to dump')
+    logger.debug(f'{log_prefix}: Querying for "all" MariaDB databases to dump')
     show_output = execute_command_and_capture_output(
         show_command, extra_environment=extra_environment
     )
@@ -62,11 +62,12 @@ def database_names_to_dump(database, extra_environment, dry_run):
 
 
 def execute_dump_command(
-    database, dump_path, database_names, extra_environment, dry_run, dry_run_label
+    database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
 ):
     '''
     Kick off a dump for the given MariaDB database (provided as a configuration dict) to a named
-    pipe constructed from the given dump path and database name.
+    pipe constructed from the given dump path and database name. Use the given log prefix in any
+    log entries.
 
     Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
     this is a dry run, then don't actually dump anything and return None.
@@ -81,7 +82,7 @@ def execute_dump_command(
 
     if os.path.exists(dump_filename):
         logger.warning(
-            f'Skipping duplicate dump of MariaDB database "{database_name}" to {dump_filename}'
+            f'{log_prefix}: Skipping duplicate dump of MariaDB database "{database_name}" to {dump_filename}'
         )
         return None
 
@@ -103,7 +104,7 @@ def execute_dump_command(
     )
 
     logger.debug(
-        f'Dumping MariaDB database "{database_name}" to {dump_filename}{dry_run_label}'
+        f'{log_prefix}: Dumping MariaDB database "{database_name}" to {dump_filename}{dry_run_label}'
     )
     if dry_run:
         return None
@@ -117,14 +118,14 @@ def execute_dump_command(
     )
 
 
-def get_default_port(databases, config):  # pragma: no cover
+def get_default_port(databases, config, log_prefix):  # pragma: no cover
     return 3306
 
 
-def use_streaming(databases, config):
+def use_streaming(databases, config, log_prefix):
     '''
-    Given a sequence of MariaDB database configuration dicts, a configuration dict (ignored), return
-    whether streaming will be used during dumps.
+    Given a sequence of MariaDB database configuration dicts, a configuration dict (ignored), and a
+    log prefix (ignored), return whether streaming will be used during dumps.
     '''
     return any(databases)
 
@@ -132,6 +133,7 @@ def use_streaming(databases, config):
 def dump_data_sources(
     databases,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
@@ -140,7 +142,8 @@ def dump_data_sources(
     '''
     Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the given
-    borgmatic runtime directory to construct the destination path.
+    borgmatic runtime directory to construct the destination path and the given log prefix in any
+    log entries.
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -150,13 +153,13 @@ def dump_data_sources(
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info(f'Dumping MariaDB databases{dry_run_label}')
+    logger.info(f'{log_prefix}: Dumping MariaDB databases{dry_run_label}')
 
     for database in databases:
         dump_path = make_dump_path(borgmatic_runtime_directory)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
         dump_database_names = database_names_to_dump(
-            database, extra_environment, dry_run
+            database, extra_environment, log_prefix, dry_run
         )
 
         if not dump_database_names:
@@ -172,6 +175,7 @@ def dump_data_sources(
                 processes.append(
                     execute_dump_command(
                         renamed_database,
+                        log_prefix,
                         dump_path,
                         (dump_name,),
                         extra_environment,
@@ -183,6 +187,7 @@ def dump_data_sources(
             processes.append(
                 execute_dump_command(
                     database,
+                    log_prefix,
                     dump_path,
                     dump_database_names,
                     extra_environment,
@@ -202,25 +207,25 @@ def dump_data_sources(
 
 
 def remove_data_source_dumps(
-    databases, config, borgmatic_runtime_directory, dry_run
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
 ):  # pragma: no cover
     '''
     Remove all database dump files for this hook regardless of the given databases. Use the
-    borgmatic_runtime_directory to construct the destination path. If this is a dry run, then don't
-    actually remove anything.
+    borgmatic_runtime_directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
     dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'MariaDB', dry_run
+        make_dump_path(borgmatic_runtime_directory), 'MariaDB', log_prefix, dry_run
     )
 
 
 def make_data_source_dump_patterns(
-    databases, config, borgmatic_runtime_directory, name=None
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
-    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configuration dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
 
@@ -238,6 +243,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,
@@ -246,9 +252,9 @@ def restore_data_source_dump(
 ):
     '''
     Restore a database from the given extract stream. The database is supplied as a data source
-    configuration dict, but the given hook configuration is ignored. If this is a dry run, then
-    don't actually restore anything. Trigger the given active extract process (an instance of
-    subprocess.Popen) to produce output to consume.
+    configuration dict, but the given hook configuration is ignored. The given log prefix is used
+    for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
+    given active extract process (an instance of subprocess.Popen) to produce output to consume.
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
     hostname = connection_params['hostname'] or data_source.get(
@@ -282,7 +288,7 @@ def restore_data_source_dump(
     )
     extra_environment = {'MYSQL_PWD': password} if password else None
 
-    logger.debug(f"Restoring MariaDB database {data_source['name']}{dry_run_label}")
+    logger.debug(f"{log_prefix}: Restoring MariaDB database {data_source['name']}{dry_run_label}")
     if dry_run:
         return
 

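For the "all" database name, database_names_to_dump shells out to the client with --skip-column-names --batch --execute 'show schemas' and takes one database name per output line. A rough standalone approximation of that query step, with subprocess standing in for borgmatic's execute_command_and_capture_output and the client binary name assumed; the real code also threads host, port, and configured options into the command:

import logging
import subprocess

logger = logging.getLogger(__name__)


def database_names_to_dump(database, environment, log_prefix, dry_run):
    # dry_run kept for signature parity with the hook interface.
    # Any name other than "all" maps to exactly that one database.
    if database['name'] != 'all':
        return (database['name'],)

    show_command = ('mariadb', '--skip-column-names', '--batch', '--execute', 'show schemas')
    logger.debug(f'{log_prefix}: Querying for "all" MariaDB databases to dump')
    show_output = subprocess.check_output(show_command, env=environment, text=True)

    # One schema name per line; skip any trailing blank line.
    return tuple(name for name in show_output.splitlines() if name)
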
+ 22 - 19
borgmatic/hooks/data_source/mongodb.py

@@ -17,14 +17,14 @@ def make_dump_path(base_directory):  # pragma: no cover
     return dump.make_data_source_dump_path(base_directory, 'mongodb_databases')
 
 
-def get_default_port(databases, config):  # pragma: no cover
+def get_default_port(databases, config, log_prefix):  # pragma: no cover
     return 27017
 
 
-def use_streaming(databases, config):
+def use_streaming(databases, config, log_prefix):
     '''
-    Given a sequence of MongoDB database configuration dicts, a configuration dict (ignored), return
-    whether streaming will be used during dumps.
+    Given a sequence of MongoDB database configuration dicts, a configuration dict (ignored), and a
+    log prefix (ignored), return whether streaming will be used during dumps.
     '''
     return any(database.get('format') != 'directory' for database in databases)
 
@@ -32,6 +32,7 @@ def use_streaming(databases, config):
 def dump_data_sources(
     databases,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
@@ -40,7 +41,8 @@ def dump_data_sources(
     '''
     Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the borgmatic
-    runtime directory to construct the destination path (used for the directory format).
+    runtime directory to construct the destination path (used for the directory format) and the
+    given log prefix in any log entries.
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -49,7 +51,7 @@ def dump_data_sources(
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
 
-    logger.info(f'Dumping MongoDB databases{dry_run_label}')
+    logger.info(f'{log_prefix}: Dumping MongoDB databases{dry_run_label}')
 
     processes = []
     for database in databases:
@@ -63,7 +65,7 @@ def dump_data_sources(
         dump_format = database.get('format', 'archive')
 
         logger.debug(
-            f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
+            f'{log_prefix}: Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
         )
         if dry_run:
             continue
@@ -116,25 +118,25 @@ def build_dump_command(database, dump_filename, dump_format):
 
 
 def remove_data_source_dumps(
-    databases, config, borgmatic_runtime_directory, dry_run
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
 ):  # pragma: no cover
     '''
     Remove all database dump files for this hook regardless of the given databases. Use the
-    borgmatic_runtime_directory to construct the destination path. If this is a dry run, then don't
-    actually remove anything.
+    borgmatic_runtime_directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
     dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'MongoDB', dry_run
+        make_dump_path(borgmatic_runtime_directory), 'MongoDB', log_prefix, dry_run
     )
 
 
 def make_data_source_dump_patterns(
-    databases, config, borgmatic_runtime_directory, name=None
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
-    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configuration dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
 
@@ -152,6 +154,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,
@@ -161,9 +164,9 @@ def restore_data_source_dump(
     '''
     Restore a database from the given extract stream. The database is supplied as a data source
     configuration dict, but the given hook configuration is ignored. The given configuration dict is
-    used to construct the destination path. If this is a dry run, then don't actually restore
-    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
-    output to consume.
+    used to construct the destination path, and the given log prefix is used for any log entries. If
+    this is a dry run, then don't actually restore anything. Trigger the given active extract
+    process (an instance of subprocess.Popen) to produce output to consume.
 
     If the extract process is None, then restore the dump from the filesystem rather than from an
     extract stream.
@@ -178,7 +181,7 @@ def restore_data_source_dump(
         extract_process, data_source, dump_filename, connection_params
     )
 
-    logger.debug(f"Restoring MongoDB database {data_source['name']}{dry_run_label}")
+    logger.debug(f"{log_prefix}: Restoring MongoDB database {data_source['name']}{dry_run_label}")
     if dry_run:
         return
 

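MongoDB is the only hook in this commit whose streaming decision depends on per-database configuration: directory-format dumps go straight to disk, so only other formats stream through a named pipe. The predicate is small enough to exercise standalone; this sketch restates it with a usage check:

def use_streaming(databases, config, log_prefix):
    # Directory-format dumps are written to the filesystem, so only
    # archive-style dumps need a streaming pipe.
    return any(database.get('format') != 'directory' for database in databases)


# One archive-format database (the default) is enough to turn streaming on.
assert use_streaming([{'format': 'directory'}, {'name': 'app'}], config={}, log_prefix='repo')
assert not use_streaming([{'format': 'directory'}], config={}, log_prefix='repo')
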
+ 31 - 25
borgmatic/hooks/data_source/mysql.py

@@ -25,7 +25,7 @@ def make_dump_path(base_directory):  # pragma: no cover
 SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
 
 
-def database_names_to_dump(database, extra_environment, dry_run):
+def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
     '''
     Given a requested database config, return the corresponding sequence of database names to dump.
     In the case of "all", query for the names of databases on the configured host and return them,
@@ -49,7 +49,7 @@ def database_names_to_dump(database, extra_environment, dry_run):
         + ('--skip-column-names', '--batch')
         + ('--execute', 'show schemas')
     )
-    logger.debug(f'Querying for "all" MySQL databases to dump')
+    logger.debug(f'{log_prefix}: Querying for "all" MySQL databases to dump')
     show_output = execute_command_and_capture_output(
         show_command, extra_environment=extra_environment
     )
@@ -62,11 +62,12 @@ def database_names_to_dump(database, extra_environment, dry_run):
 
 
 def execute_dump_command(
-    database, dump_path, database_names, extra_environment, dry_run, dry_run_label
+    database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
 ):
     '''
     Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a
-    named pipe constructed from the given dump path and database name.
+    named pipe constructed from the given dump path and database name. Use the given log prefix in
+    any log entries.
 
     Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
     this is a dry run, then don't actually dump anything and return None.
@@ -81,7 +82,7 @@ def execute_dump_command(
 
     if os.path.exists(dump_filename):
         logger.warning(
-            f'Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
+            f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
         )
         return None
 
@@ -102,7 +103,7 @@ def execute_dump_command(
     )
 
     logger.debug(
-        f'Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
+        f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
     )
     if dry_run:
         return None
@@ -116,14 +117,14 @@ def execute_dump_command(
     )
 
 
-def get_default_port(databases, config):  # pragma: no cover
+def get_default_port(databases, config, log_prefix):  # pragma: no cover
     return 3306
 
 
-def use_streaming(databases, config):
+def use_streaming(databases, config, log_prefix):
     '''
-    Given a sequence of MySQL database configuration dicts, a configuration dict (ignored), return
-    whether streaming will be used during dumps.
+    Given a sequence of MySQL database configuration dicts, a configuration dict (ignored), and a
+    log prefix (ignored), return whether streaming will be used during dumps.
     '''
     return any(databases)
 
@@ -131,6 +132,7 @@ def use_streaming(databases, config):
 def dump_data_sources(
     databases,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
@@ -139,7 +141,8 @@ def dump_data_sources(
     '''
     Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
     of dicts, one dict describing each database as per the configuration schema. Use the given
-    borgmatic runtime directory to construct the destination path.
+    borgmatic runtime directory to construct the destination path and the given log prefix in any
+    log entries.
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -149,13 +152,13 @@ def dump_data_sources(
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info(f'Dumping MySQL databases{dry_run_label}')
+    logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
 
     for database in databases:
         dump_path = make_dump_path(borgmatic_runtime_directory)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
         dump_database_names = database_names_to_dump(
-            database, extra_environment, dry_run
+            database, extra_environment, log_prefix, dry_run
         )
 
         if not dump_database_names:
@@ -171,6 +174,7 @@ def dump_data_sources(
                 processes.append(
                     execute_dump_command(
                         renamed_database,
+                        log_prefix,
                         dump_path,
                         (dump_name,),
                         extra_environment,
@@ -182,6 +186,7 @@ def dump_data_sources(
             processes.append(
                 execute_dump_command(
                     database,
+                    log_prefix,
                     dump_path,
                     dump_database_names,
                     extra_environment,
@@ -201,25 +206,25 @@ def dump_data_sources(
 
 
 def remove_data_source_dumps(
-    databases, config, borgmatic_runtime_directory, dry_run
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
 ):  # pragma: no cover
     '''
     Remove all database dump files for this hook regardless of the given databases. Use the
-    borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
-    actually remove anything.
+    borgmatic runtime directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
     dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'MySQL', dry_run
+        make_dump_path(borgmatic_runtime_directory), 'MySQL', log_prefix, dry_run
     )
 
 
 def make_data_source_dump_patterns(
-    databases, config, borgmatic_runtime_directory, name=None
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
-    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configuration dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
 
@@ -237,6 +242,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,
@@ -245,9 +251,9 @@ def restore_data_source_dump(
 ):
     '''
     Restore a database from the given extract stream. The database is supplied as a data source
-    configuration dict, but the given hook configuration is ignored. If this is a dry run, then
-    don't actually restore anything. Trigger the given active extract process (an instance of
-    subprocess.Popen) to produce output to consume.
+    configuration dict, but the given hook configuration is ignored. The given log prefix is used
+    for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
+    given active extract process (an instance of subprocess.Popen) to produce output to consume.
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
     hostname = connection_params['hostname'] or data_source.get(
@@ -281,7 +287,7 @@ def restore_data_source_dump(
     )
     extra_environment = {'MYSQL_PWD': password} if password else None
 
-    logger.debug(f"Restoring MySQL database {data_source['name']}{dry_run_label}")
+    logger.debug(f"{log_prefix}: Restoring MySQL database {data_source['name']}{dry_run_label}")
     if dry_run:
         return
 

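execute_dump_command's duplicate check matters because two configured databases can resolve to the same dump filename; the first one wins and later ones are skipped with a warning rather than clobbering the pipe. A trimmed-down sketch of that flow, assuming a plain file dump via mysqldump's --result-file rather than borgmatic's real named-pipe setup and option handling:

import logging
import os
import subprocess

logger = logging.getLogger(__name__)


def execute_dump_command(database, log_prefix, dump_path, database_names,
                         environment, dry_run, dry_run_label):
    database_name = database['name']
    dump_filename = os.path.join(dump_path, database_name)

    # An existing path means another database already dumped (or will dump)
    # here, so skip this one rather than overwrite it.
    if os.path.exists(dump_filename):
        logger.warning(
            f'{log_prefix}: Skipping duplicate dump of MySQL database '
            f'"{database_name}" to {dump_filename}'
        )
        return None

    logger.debug(
        f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
    )
    if dry_run:
        return None

    dump_command = ('mysqldump', '--result-file', dump_filename) + tuple(database_names)
    return subprocess.Popen(dump_command, env=environment)
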
+ 27 - 23
borgmatic/hooks/data_source/postgresql.py

@@ -58,7 +58,7 @@ def make_extra_environment(database, restore_connection_params=None):
 EXCLUDED_DATABASE_NAMES = ('template0', 'template1')
 
 
-def database_names_to_dump(database, extra_environment, dry_run):
+def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
     '''
     Given a requested database config, return the corresponding sequence of database names to dump.
     In the case of "all" when a database format is given, query for the names of databases on the
@@ -85,7 +85,7 @@ def database_names_to_dump(database, extra_environment, dry_run):
         + (('--username', database['username']) if 'username' in database else ())
         + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
     )
-    logger.debug(f'Querying for "all" PostgreSQL databases to dump')
+    logger.debug(f'{log_prefix}: Querying for "all" PostgreSQL databases to dump')
     list_output = execute_command_and_capture_output(
         list_command, extra_environment=extra_environment
     )
@@ -97,14 +97,14 @@ def database_names_to_dump(database, extra_environment, dry_run):
     )
 
 
-def get_default_port(databases, config):  # pragma: no cover
+def get_default_port(databases, config, log_prefix):  # pragma: no cover
     return 5432
 
 
-def use_streaming(databases, config):
+def use_streaming(databases, config, log_prefix):
     '''
-    Given a sequence of PostgreSQL database configuration dicts, a configuration dict (ignored),
-    return whether streaming will be used during dumps.
+    Given a sequence of PostgreSQL database configuration dicts, a configuration dict (ignored), and
+    a log prefix (ignored), return whether streaming will be used during dumps.
     '''
     return any(database.get('format') != 'directory' for database in databases)
 
@@ -112,6 +112,7 @@ def use_streaming(databases, config):
 def dump_data_sources(
     databases,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
@@ -120,7 +121,8 @@ def dump_data_sources(
     '''
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the given
-    borgmatic runtime directory to construct the destination path.
+    borgmatic runtime directory to construct the destination path and the given log prefix in any
+    log entries.
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -132,13 +134,13 @@ def dump_data_sources(
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info(f'Dumping PostgreSQL databases{dry_run_label}')
+    logger.info(f'{log_prefix}: Dumping PostgreSQL databases{dry_run_label}')
 
     for database in databases:
         extra_environment = make_extra_environment(database)
         dump_path = make_dump_path(borgmatic_runtime_directory)
         dump_database_names = database_names_to_dump(
-            database, extra_environment, dry_run
+            database, extra_environment, log_prefix, dry_run
         )
 
         if not dump_database_names:
@@ -162,7 +164,7 @@ def dump_data_sources(
             )
             if os.path.exists(dump_filename):
                 logger.warning(
-                    f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
+                    f'{log_prefix}: Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                 )
                 continue
 
@@ -196,7 +198,7 @@ def dump_data_sources(
             )
 
             logger.debug(
-                f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
+                f'{log_prefix}: Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
             )
             if dry_run:
                 continue
@@ -230,25 +232,25 @@ def dump_data_sources(
 
 
 def remove_data_source_dumps(
-    databases, config, borgmatic_runtime_directory, dry_run
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
 ):  # pragma: no cover
     '''
     Remove all database dump files for this hook regardless of the given databases. Use the
-    borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
-    actually remove anything.
+    borgmatic runtime directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
     dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', dry_run
+        make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', log_prefix, dry_run
     )
 
 
 def make_data_source_dump_patterns(
-    databases, config, borgmatic_runtime_directory, name=None
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
-    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configuration dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
 
@@ -266,6 +268,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,
@@ -275,9 +278,10 @@ def restore_data_source_dump(
     '''
     Restore a database from the given extract stream. The database is supplied as a data source
     configuration dict, but the given hook configuration is ignored. The given borgmatic runtime
-    directory is used to construct the destination path (used for the directory format). If this is
-    a dry run, then don't actually restore anything. Trigger the given active extract process (an
-    instance of subprocess.Popen) to produce output to consume.
+    directory is used to construct the destination path (used for the directory format), and the
+    given log prefix is used for any log entries. If this is a dry run, then don't actually restore
+    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
+    output to consume.
 
     If the extract process is None, then restore the dump from the filesystem rather than from an
     extract stream.
@@ -351,7 +355,7 @@ def restore_data_source_dump(
     )
 
     logger.debug(
-        f"Restoring PostgreSQL database {data_source['name']}{dry_run_label}"
+        f"{log_prefix}: Restoring PostgreSQL database {data_source['name']}{dry_run_label}"
     )
     if dry_run:
         return

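PostgreSQL's "all" handling differs from MariaDB/MySQL in one respect visible here: template0 and template1 are excluded from the dump list. A small sketch of that filtering step; filter_listed_databases is a hypothetical helper, and it assumes the psql listing has already been reduced to one database name per line:

EXCLUDED_DATABASE_NAMES = ('template0', 'template1')


def filter_listed_databases(list_output):
    # Drop blank lines and the template databases PostgreSQL creates by default.
    return tuple(
        name.strip()
        for name in list_output.splitlines()
        if name.strip() and name.strip() not in EXCLUDED_DATABASE_NAMES
    )


assert filter_listed_databases('postgres\ntemplate0\ntemplate1\napp\n') == ('postgres', 'app')
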
+ 24 - 22
borgmatic/hooks/data_source/sqlite.py

@@ -17,14 +17,14 @@ def make_dump_path(base_directory):  # pragma: no cover
     return dump.make_data_source_dump_path(base_directory, 'sqlite_databases')
 
 
-def get_default_port(databases, config):  # pragma: no cover
+def get_default_port(databases, config, log_prefix):  # pragma: no cover
     return None  # SQLite doesn't use a port.
 
 
-def use_streaming(databases, config):
+def use_streaming(databases, config, log_prefix):
     '''
-    Given a sequence of SQLite database configuration dicts, a configuration dict (ignored), return
-    whether streaming will be used during dumps.
+    Given a sequence of SQLite database configuration dicts, a configuration dict (ignored), and a
+    log prefix (ignored), return whether streaming will be used during dumps.
     '''
     return any(databases)
 
@@ -32,6 +32,7 @@ def use_streaming(databases, config):
 def dump_data_sources(
     databases,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
@@ -40,7 +41,7 @@ def dump_data_sources(
     '''
     Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
     configuration dicts, as per the configuration schema. Use the given borgmatic runtime directory
-    to construct the destination path.
+    to construct the destination path and the given log prefix in any log entries.
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -50,7 +51,7 @@ def dump_data_sources(
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info(f'Dumping SQLite databases{dry_run_label}')
+    logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}')
 
     for database in databases:
         database_path = database['path']
@@ -59,7 +60,7 @@ def dump_data_sources(
             logger.warning('The "all" database name has no meaning for SQLite databases')
         if not os.path.exists(database_path):
             logger.warning(
-                f'No SQLite database at {database_path}; an empty database will be created and dumped'
+                f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
             )
 
         dump_path = make_dump_path(borgmatic_runtime_directory)
@@ -67,7 +68,7 @@ def dump_data_sources(
 
         if os.path.exists(dump_filename):
             logger.warning(
-                f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
+                f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
             )
             continue
 
@@ -79,7 +80,7 @@ def dump_data_sources(
             shlex.quote(dump_filename),
         )
         logger.debug(
-            f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
+            f'{log_prefix}: Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
         )
         if dry_run:
             continue
@@ -98,25 +99,25 @@ def dump_data_sources(
 
 
 def remove_data_source_dumps(
-    databases, config, borgmatic_runtime_directory, dry_run
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
 ):  # pragma: no cover
     '''
     Remove all database dump files for this hook regardless of the given databases. Use the
-    borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
-    actually remove anything.
+    borgmatic runtime directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
     dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'SQLite', dry_run
+        make_dump_path(borgmatic_runtime_directory), 'SQLite', log_prefix, dry_run
     )
 
 
 def make_data_source_dump_patterns(
-    databases, config, borgmatic_runtime_directory, name=None
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
-    Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configuration dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
 
@@ -134,6 +135,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,
@@ -142,22 +144,22 @@ def restore_data_source_dump(
 ):
     '''
     Restore a database from the given extract stream. The database is supplied as a data source
-    configuration dict, but the given hook configuration is ignored. If this is a dry run, then
-    don't actually restore anything. Trigger the given active extract process (an instance of
-    subprocess.Popen) to produce output to consume.
+    configuration dict, but the given hook configuration is ignored. The given log prefix is used
+    for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
+    given active extract process (an instance of subprocess.Popen) to produce output to consume.
     '''
     dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
     database_path = connection_params['restore_path'] or data_source.get(
         'restore_path', data_source.get('path')
     )
 
-    logger.debug(f'Restoring SQLite database at {database_path}{dry_run_label}')
+    logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')
     if dry_run:
         return
 
     try:
         os.remove(database_path)
-        logger.warning(f'Removed existing SQLite database at {database_path}')
+        logger.warning(f'{log_prefix}: Removed existing SQLite database at {database_path}')
     except FileNotFoundError:  # pragma: no cover
         pass
 

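The SQLite dump is the sqlite3 CLI's .dump command redirected into the dump file, which is why both paths pass through shlex.quote: the command ultimately runs through a shell. A simplified sketch of the command construction (the real hook writes to a named pipe via borgmatic's executor rather than a bare subprocess):

import shlex
import subprocess


def build_sqlite_dump_command(database_path, dump_filename):
    # Quote both paths because the shell redirect would otherwise mangle
    # spaces or metacharacters in them.
    return ' '.join(
        ('sqlite3', shlex.quote(database_path), '.dump', '>', shlex.quote(dump_filename))
    )


command = build_sqlite_dump_command('/var/db/app.sqlite', '/tmp/borgmatic/app_dump')
subprocess.Popen(command, shell=True)  # Dump runs concurrently; Borg reads the result.
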
+ 24 - 22
borgmatic/hooks/data_source/zfs.py

@@ -13,7 +13,7 @@ import borgmatic.hooks.data_source.snapshot
 logger = logging.getLogger(__name__)
 
 
-def use_streaming(hook_config, config):  # pragma: no cover
+def use_streaming(hook_config, config, log_prefix):  # pragma: no cover
     '''
     Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
     '''
@@ -189,25 +189,26 @@ def make_borg_snapshot_pattern(pattern, normalized_runtime_directory):
 def dump_data_sources(
     hook_config,
     config,
+    log_prefix,
     config_paths,
     borgmatic_runtime_directory,
     patterns,
     dry_run,
 ):
     '''
-    Given a ZFS configuration dict, a configuration dict, the borgmatic configuration file paths,
-    the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
-    auto-detect and snapshot any ZFS dataset mount points listed in the given patterns and any
+    Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic configuration
+    file paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry
+    run, auto-detect and snapshot any ZFS dataset mount points listed in the given patterns and any
     dataset with a borgmatic-specific user property. Also update those patterns, replacing dataset
     mount points with corresponding snapshot directories so they get stored in the Borg archive
-    instead.
+    instead. Use the log prefix in any log entries.
 
     Return an empty sequence, since there are no ongoing dump processes from this hook.
 
     If this is a dry run, then don't actually snapshot anything.
     '''
     dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-    logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
+    logger.info(f'{log_prefix}: Snapshotting ZFS datasets{dry_run_label}')
 
     # List ZFS datasets to get their mount points.
     zfs_command = hook_config.get('zfs_command', 'zfs')
@@ -218,12 +219,12 @@ def dump_data_sources(
     normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
 
     if not requested_datasets:
-        logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')
+        logger.warning(f'{log_prefix}: No ZFS datasets found to snapshot{dry_run_label}')
 
     for dataset in requested_datasets:
         full_snapshot_name = f'{dataset.name}@{snapshot_name}'
         logger.debug(
-            f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
+            f'{log_prefix}: Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
         )
 
         if not dry_run:
@@ -238,7 +239,7 @@ def dump_data_sources(
         )
 
         logger.debug(
-            f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
+            f'{log_prefix}: Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
         )
 
         if dry_run:
@@ -305,12 +306,12 @@ def get_all_snapshots(zfs_command):
     return tuple(line.rstrip() for line in list_output.splitlines())
 
 
-def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
+def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
-    Given a ZFS configuration dict, a configuration dict, the borgmatic runtime directory, and
-    whether this is a dry run, unmount and destroy any ZFS snapshots created by borgmatic. If this
-    is a dry run or ZFS isn't configured in borgmatic's configuration, then don't actually remove
-    anything.
+    Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic runtime
+    directory, and whether this is a dry run, unmount and destroy any ZFS snapshots created by
+    borgmatic. Use the log prefix in any log entries. If this is a dry run or ZFS isn't configured
+    in borgmatic's configuration, then don't actually remove anything.
     '''
     if hook_config is None:
         return
@@ -323,10 +324,10 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
     try:
         dataset_mount_points = get_all_dataset_mount_points(zfs_command)
     except FileNotFoundError:
-        logger.debug(f'Could not find "{zfs_command}" command')
+        logger.debug(f'{log_prefix}: Could not find "{zfs_command}" command')
         return
     except subprocess.CalledProcessError as error:
-        logger.debug(error)
+        logger.debug(f'{log_prefix}: {error}')
         return
 
     snapshots_glob = os.path.join(
@@ -336,7 +337,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         'zfs_snapshots',
     )
     logger.debug(
-        f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
+        f'{log_prefix}: Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
     )
     umount_command = hook_config.get('umount_command', 'umount')
 
@@ -363,17 +364,17 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
                     continue
 
             logger.debug(
-                f'Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}'
+                f'{log_prefix}: Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}'
             )
 
             if not dry_run:
                 try:
                     unmount_snapshot(umount_command, snapshot_mount_path)
                 except FileNotFoundError:
-                    logger.debug(f'Could not find "{umount_command}" command')
+                    logger.debug(f'{log_prefix}: Could not find "{umount_command}" command')
                     return
                 except subprocess.CalledProcessError as error:
-                    logger.debug(error)
+                    logger.debug(f'{log_prefix}: {error}')
                     return
 
         if not dry_run:
@@ -387,14 +388,14 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         if not full_snapshot_name.split('@')[-1].startswith(BORGMATIC_SNAPSHOT_PREFIX):
             continue
 
-        logger.debug(f'Destroying ZFS snapshot {full_snapshot_name}{dry_run_label}')
+        logger.debug(f'{log_prefix}: Destroying ZFS snapshot {full_snapshot_name}{dry_run_label}')
 
         if not dry_run:
             destroy_snapshot(zfs_command, full_snapshot_name)
 
 
 def make_data_source_dump_patterns(
-    hook_config, config, borgmatic_runtime_directory, name=None
+    hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
 ):  # pragma: no cover
     '''
     Restores aren't implemented, because stored files can be extracted directly with "extract".
@@ -405,6 +406,7 @@ def make_data_source_dump_patterns(
 def restore_data_source_dump(
     hook_config,
     config,
+    log_prefix,
     data_source,
     dry_run,
     extract_process,

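remove_data_source_dumps only destroys snapshots whose name carries borgmatic's own prefix, parsed from the part after the "@" in ZFS's pool/dataset@snapshot naming. A sketch of that filter; the prefix value and the borgmatic_snapshots helper name are illustrative assumptions:

BORGMATIC_SNAPSHOT_PREFIX = 'borgmatic-'  # Assumed value for illustration.


def borgmatic_snapshots(all_snapshot_names):
    # Only the snapshot component after "@" identifies borgmatic's snapshots,
    # so foreign snapshots on the same dataset are left alone.
    return tuple(
        full_name
        for full_name in all_snapshot_names
        if full_name.split('@')[-1].startswith(BORGMATIC_SNAPSHOT_PREFIX)
    )


names = ('tank/home@manual', 'tank/home@borgmatic-1234')
assert borgmatic_snapshots(names) == ('tank/home@borgmatic-1234',)
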
+ 19 - 18
borgmatic/hooks/dispatch.py

@@ -21,12 +21,12 @@ def get_submodule_names(parent_module):  # pragma: no cover
     return tuple(module_info.name for module_info in pkgutil.iter_modules(parent_module.__path__))
 
 
-def call_hook(function_name, config, hook_name, *args, **kwargs):
+def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
     '''
-    Given a configuration dict, call the requested function of the Python module corresponding to
-    the given hook name. Supply that call with the configuration for this hook (if any) and any
-    given args and kwargs. Return the return value of that call or None if the module in question is
-    not a hook.
+    Given a configuration dict and a prefix to use in log entries, call the requested function of
+    the Python module corresponding to the given hook name. Supply that call with the configuration
+    for this hook (if any), the log prefix, and any given args and kwargs. Return the return value
+    of that call or None if the module in question is not a hook.
 
     Raise ValueError if the hook name is unknown.
     Raise AttributeError if the function name is not found in the module.
@@ -54,16 +54,17 @@ def call_hook(function_name, config, hook_name, *args, **kwargs):
     else:
         raise ValueError(f'Unknown hook name: {hook_name}')
 
-    logger.debug(f'Calling {hook_name} hook function {function_name}')
+    logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
 
-    return getattr(module, function_name)(hook_config, config, *args, **kwargs)
+    return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
 
 
-def call_hooks(function_name, config, hook_type, *args, **kwargs):
+def call_hooks(function_name, config, log_prefix, hook_type, *args, **kwargs):
     '''
-    Given a configuration dict, call the requested function of the Python module corresponding to
-    each hook of the given hook type (either "data_source" or "monitoring"). Supply each call with
-    the configuration for that hook, and any given args and kwargs.
+    Given a configuration dict and a prefix to use in log entries, call the requested function of
+    the Python module corresponding to each hook of the given hook type (either "data_source" or
+    "monitoring"). Supply each call with the configuration for that hook, the log prefix, and any
+    given args and kwargs.
 
     Collect any return values into a dict from module name to return value. Note that the module
     name is the name of the hook module itself, which might be different from the hook configuration
@@ -77,7 +78,7 @@ def call_hooks(function_name, config, hook_type, *args, **kwargs):
     Raise anything else that a called function raises. An error stops calls to subsequent functions.
     '''
     return {
-        hook_name: call_hook(function_name, config, hook_name, *args, **kwargs)
+        hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
         for hook_name in get_submodule_names(
             importlib.import_module(f'borgmatic.hooks.{hook_type.value}')
         )
@@ -85,18 +86,18 @@ def call_hooks(function_name, config, hook_type, *args, **kwargs):
     }
 
 
-def call_hooks_even_if_unconfigured(function_name, config, hook_type, *args, **kwargs):
+def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_type, *args, **kwargs):
     '''
-    Given a configuration dict, call the requested function of the Python module corresponding to
-    each hook of the given hook type (either "data_source" or "monitoring"). Supply each call with
-    the configuration for that hook and any given args and kwargs. Collect any return values into a
-    dict from hook name to return value.
+    Given a configuration dict and a prefix to use in log entries, call the requested function of
+    the Python module corresponding to each hook of the given hook type (either "data_source" or
+    "monitoring"). Supply each call with the configuration for that hook, the log prefix, and any
+    given args and kwargs. Collect any return values into a dict from hook name to return value.
 
     Raise AttributeError if the function name is not found in the module.
     Raise anything else that a called function raises. An error stops calls to subsequent functions.
     '''
     return {
-        hook_name: call_hook(function_name, config, hook_name, *args, **kwargs)
+        hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
         for hook_name in get_submodule_names(
             importlib.import_module(f'borgmatic.hooks.{hook_type.value}')
         )

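call_hook is the chokepoint this commit threads log_prefix through: one extra positional argument here fans out to every hook function above. A stripped-down sketch of the dispatch pattern; it hardcodes the data_source package and omits the monitoring-hook lookup and the unknown-name ValueError of the real function:

import importlib
import logging

logger = logging.getLogger(__name__)


def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
    # Look up the hook's own configuration and resolve its module by name.
    hook_config = config.get(hook_name)
    module = importlib.import_module(f'borgmatic.hooks.data_source.{hook_name}')

    logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')

    # Every hook function receives (hook_config, config, log_prefix, ...).
    return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
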
+ 5 - 18
borgmatic/logger.py

@@ -88,8 +88,8 @@ class Multi_stream_handler(logging.Handler):
 
 
 class Console_no_color_formatter(logging.Formatter):
-    def __init__(self, *args, **kwargs):
-        super(Console_no_color_formatter, self).__init__('{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs)
+    def format(self, record):  # pragma: no cover
+        return record.msg
 
 
 class Color(enum.Enum):
@@ -102,9 +102,6 @@ class Color(enum.Enum):
 
 
 class Console_color_formatter(logging.Formatter):
-    def __init__(self, *args, **kwargs):
-        super(Console_color_formatter, self).__init__('{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs)
-
     def format(self, record):
         add_custom_log_levels()
 
@@ -121,7 +118,7 @@ class Console_color_formatter(logging.Formatter):
             .value
         )
 
-        return color_text(color, super(Console_color_formatter, self).format(record))
+        return color_text(color, record.msg)
 
 
 def ansi_escape_code(color):  # pragma: no cover
@@ -182,16 +179,6 @@ def add_custom_log_levels():  # pragma: no cover
     add_logging_level('DISABLED', DISABLED)
 
 
-def set_log_prefix(prefix):
-    '''
-    Given a prefix string, set it onto the formatter defaults for every logging handler so that it
-    shows up in every subsequent logging message. For this to work, this relies on each logging
-    formatter to be initialized with "{prefix}" somewhere in its logging format.
-    '''
-    for handler in logging.getLogger().handlers:
-        handler.formatter._style._defaults = {'prefix': f'{prefix}: ' if prefix else ''}
-
-
 def configure_logging(
     console_log_level,
     syslog_log_level=None,
@@ -255,7 +242,7 @@ def configure_logging(
         if syslog_path:
             syslog_handler = logging.handlers.SysLogHandler(address=syslog_path)
             syslog_handler.setFormatter(
-                logging.Formatter('borgmatic: {levelname} {prefix}{message}', style='{', defaults={'prefix': ''})  # noqa: FS003
+                logging.Formatter('borgmatic: {levelname} {message}', style='{')  # noqa: FS003
             )
             syslog_handler.setLevel(syslog_log_level)
             handlers.append(syslog_handler)
@@ -264,7 +251,7 @@ def configure_logging(
         file_handler = logging.handlers.WatchedFileHandler(log_file)
         file_handler.setFormatter(
             logging.Formatter(
-                log_file_format or '[{asctime}] {levelname}: {prefix}{message}', style='{', defaults={'prefix': ''}  # noqa: FS003
+                log_file_format or '[{asctime}] {levelname}: {message}', style='{'  # noqa: FS003
             )
         )
         file_handler.setLevel(log_file_log_level)

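For contrast with the per-message prefixes restored above, the mechanism removed here relied on logging.Formatter's defaults parameter (available since Python 3.10): format strings contained a {prefix} field, and set_log_prefix mutated each handler's formatter defaults so one call re-prefixed all subsequent messages. A minimal self-contained sketch of that reverted approach:

import logging


def configure(prefix=''):
    handler = logging.StreamHandler()
    # "{prefix}" is filled from formatter defaults, not from the log record.
    handler.setFormatter(
        logging.Formatter('{prefix}{message}', style='{', defaults={'prefix': prefix})
    )
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(logging.INFO)


configure(prefix='repo: ')
logging.getLogger(__name__).info('Creating archive')  # Emits "repo: Creating archive".
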
+ 1 - 1
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "borgmatic"
-version = "1.9.9.dev0"
+version = "1.9.8"
 authors = [
   { name="Dan Helfman", email="witten@torsion.org" },
 ]

+ 8 - 0
tests/unit/actions/test_check.py

@@ -931,6 +931,7 @@ def test_compare_spot_check_hashes_returns_paths_having_failing_hashes():
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('/foo', '/bar', '/baz', '/quux'),
     ) == ('/bar',)
 
@@ -971,6 +972,7 @@ def test_compare_spot_check_hashes_returns_relative_paths_having_failing_hashes(
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('foo', 'bar', 'baz', 'quux'),
     ) == ('bar',)
 
@@ -1011,6 +1013,7 @@ def test_compare_spot_check_hashes_handles_data_sample_percentage_above_100():
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('/foo', '/bar'),
     ) == ('/foo', '/bar')
 
@@ -1048,6 +1051,7 @@ def test_compare_spot_check_hashes_uses_xxh64sum_command_option():
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('/foo', '/bar', '/baz', '/quux'),
     ) == ('/bar',)
 
@@ -1084,6 +1088,7 @@ def test_compare_spot_check_hashes_considers_path_missing_from_archive_as_not_ma
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('/foo', '/bar', '/baz', '/quux'),
     ) == ('/bar',)
 
@@ -1119,6 +1124,7 @@ def test_compare_spot_check_hashes_considers_non_existent_path_as_not_matching()
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('/foo', '/bar', '/baz', '/quux'),
     ) == ('/bar',)
 
@@ -1165,6 +1171,7 @@ def test_compare_spot_check_hashes_with_too_many_paths_feeds_them_to_commands_in
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('/foo', '/bar', '/baz', '/quux'),
     ) == ('/quux',)
 
@@ -1207,6 +1214,7 @@ def test_compare_spot_check_hashes_uses_working_directory_to_access_source_paths
         global_arguments=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
+        log_prefix='repo',
         source_paths=('foo', 'bar', 'baz', 'quux'),
     ) == ('bar',)
 

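One behavior worth noting in the spot-check hunks above: test_compare_spot_check_hashes_with_too_many_paths_feeds_them_to_commands_in_chunks pins down that the hash command (xxh64sum by default, overridable per the command-option test) runs over bounded batches of paths rather than one oversized argv. The batching itself is the standard slicing idiom; here is a sketch assuming a fixed batch size (the real constant lives in borgmatic's check action):

    def chunks(sequence, size):
        # Yield consecutive slices of at most `size` items.
        for index in range(0, len(sequence), size):
            yield sequence[index:index + size]

    source_paths = ('/foo', '/bar', '/baz', '/quux')
    for batch in chunks(source_paths, 2):
        # Hypothetical use: one hash-command invocation per batch of paths.
        print(('xxh64sum',) + batch)
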
+ 16 - 0
tests/unit/actions/test_restore.py

@@ -194,6 +194,7 @@ def test_get_configured_data_source_matches_data_source_with_restore_dump():
             'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
         },
         restore_dump=module.Dump('postgresql_databases', 'bar'),
+        log_prefix='test',
     ) == {'name': 'bar'}
 
 
@@ -205,6 +206,7 @@ def test_get_configured_data_source_matches_nothing_when_nothing_configured():
         module.get_configured_data_source(
             config={},
             restore_dump=module.Dump('postgresql_databases', 'quux'),
+            log_prefix='test',
         )
         is None
     )
@@ -220,6 +222,7 @@ def test_get_configured_data_source_matches_nothing_when_restore_dump_does_not_m
                 'postgresql_databases': [{'name': 'foo'}],
             },
             restore_dump=module.Dump('postgresql_databases', 'quux'),
+            log_prefix='test',
         )
         is None
     )
@@ -247,6 +250,7 @@ def test_get_configured_data_source_with_multiple_matching_data_sources_errors()
                 ],
             },
             restore_dump=module.Dump('postgresql_databases', 'bar'),
+            log_prefix='test',
         )
 
 
@@ -287,6 +291,7 @@ def test_restore_single_dump_extracts_and_restores_single_file_dump():
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
         function_name='restore_data_source_dump',
         config=object,
+        log_prefix=object,
         hook_name=object,
         data_source=object,
         dry_run=object,
@@ -329,6 +334,7 @@ def test_restore_single_dump_extracts_and_restores_directory_dump():
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
         function_name='restore_data_source_dump',
         config=object,
+        log_prefix=object,
         hook_name=object,
         data_source=object,
         dry_run=object,
@@ -371,6 +377,7 @@ def test_restore_single_dump_with_directory_dump_error_cleans_up_temporary_direc
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
         function_name='restore_data_source_dump',
         config=object,
+        log_prefix=object,
         hook_name=object,
         data_source=object,
         dry_run=object,
@@ -412,6 +419,7 @@ def test_restore_single_dump_with_directory_dump_and_dry_run_skips_directory_mov
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
         function_name='restore_data_source_dump',
         config=object,
+        log_prefix=object,
         hook_name=object,
         data_source=object,
         dry_run=object,
@@ -1056,14 +1064,17 @@ def test_run_restore_restores_data_source_configured_with_all_name():
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='foo'),
+        log_prefix=object,
     ).and_return({'name': 'foo'})
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='bar'),
+        log_prefix=object,
     ).and_return(None)
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='all'),
+        log_prefix=object,
     ).and_return({'name': 'bar'})
     flexmock(module).should_receive('restore_single_dump').with_args(
         repository=object,
@@ -1137,14 +1148,17 @@ def test_run_restore_skips_missing_data_source():
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='foo'),
+        log_prefix=object,
     ).and_return({'name': 'foo'})
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='bar'),
+        log_prefix=object,
     ).and_return(None)
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='all'),
+        log_prefix=object,
     ).and_return(None)
     flexmock(module).should_receive('restore_single_dump').with_args(
         repository=object,
@@ -1218,10 +1232,12 @@ def test_run_restore_restores_data_sources_from_different_hooks():
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='foo'),
+        log_prefix=object,
     ).and_return({'name': 'foo'})
     flexmock(module).should_receive('get_configured_data_source').with_args(
         config=object,
         restore_dump=module.Dump(hook_name='mysql_databases', data_source_name='foo'),
+        log_prefix=object,
     ).and_return({'name': 'bar'})
     flexmock(module).should_receive('restore_single_dump').with_args(
         repository=object,

+ 3 - 1
tests/unit/borg/test_create.py

@@ -17,12 +17,13 @@ def test_write_patterns_file_writes_pattern_lines():
     module.write_patterns_file(
         [Pattern('/foo'), Pattern('/foo/bar', Pattern_type.INCLUDE, Pattern_style.SHELL)],
         borgmatic_runtime_directory='/run/user/0',
+        log_prefix='test.yaml',
     )
 
 
 def test_write_patterns_file_with_empty_exclude_patterns_does_not_raise():
     module.write_patterns_file(
-        [], borgmatic_runtime_directory='/run/user/0'
+        [], borgmatic_runtime_directory='/run/user/0', log_prefix='test.yaml'
     )
 
 
@@ -35,6 +36,7 @@ def test_write_patterns_file_appends_to_existing():
     module.write_patterns_file(
         [Pattern('/foo'), Pattern('/foo/bar', Pattern_type.INCLUDE)],
         borgmatic_runtime_directory='/run/user/0',
+        log_prefix='test.yaml',
         patterns_file=patterns_file,
     )
 

+ 7 - 0
tests/unit/hooks/data_source/test_bootstrap.py

@@ -24,6 +24,7 @@ def test_dump_data_sources_creates_manifest_file():
     module.dump_data_sources(
         hook_config=None,
         config={},
+        log_prefix='test',
         config_paths=('test.yaml',),
         borgmatic_runtime_directory='/run/borgmatic',
         patterns=[],
@@ -39,6 +40,7 @@ def test_dump_data_sources_with_store_config_files_false_does_not_create_manifes
     module.dump_data_sources(
         hook_config=hook_config,
         config={'bootstrap': hook_config},
+        log_prefix='test',
         config_paths=('test.yaml',),
         borgmatic_runtime_directory='/run/borgmatic',
         patterns=[],
@@ -53,6 +55,7 @@ def test_dump_data_sources_with_dry_run_does_not_create_manifest_file():
     module.dump_data_sources(
         hook_config=None,
         config={},
+        log_prefix='test',
         config_paths=('test.yaml',),
         borgmatic_runtime_directory='/run/borgmatic',
         patterns=[],
@@ -73,6 +76,7 @@ def test_remove_data_source_dumps_deletes_manifest_and_parent_directory():
     module.remove_data_source_dumps(
         hook_config=None,
         config={},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -89,6 +93,7 @@ def test_remove_data_source_dumps_with_dry_run_bails():
     module.remove_data_source_dumps(
         hook_config=None,
         config={},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=True,
     )
@@ -107,6 +112,7 @@ def test_remove_data_source_dumps_swallows_manifest_file_not_found_error():
     module.remove_data_source_dumps(
         hook_config=None,
         config={},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -127,6 +133,7 @@ def test_remove_data_source_dumps_swallows_manifest_parent_directory_not_found_e
     module.remove_data_source_dumps(
         hook_config=None,
         config={},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )

+ 15 - 0
tests/unit/hooks/data_source/test_btrfs.py

@@ -242,6 +242,7 @@ def test_dump_data_sources_snapshots_each_subvolume_and_updates_patterns():
         module.dump_data_sources(
             hook_config=config['btrfs'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -299,6 +300,7 @@ def test_dump_data_sources_uses_custom_btrfs_command_in_commands():
         module.dump_data_sources(
             hook_config=config['btrfs'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -354,6 +356,7 @@ def test_dump_data_sources_uses_custom_findmnt_command_in_commands():
         module.dump_data_sources(
             hook_config=config['btrfs'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -394,6 +397,7 @@ def test_dump_data_sources_with_dry_run_skips_snapshot_and_patterns_update():
         module.dump_data_sources(
             hook_config=config['btrfs'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -418,6 +422,7 @@ def test_dump_data_sources_without_matching_subvolumes_skips_snapshot_and_patter
         module.dump_data_sources(
             hook_config=config['btrfs'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -480,6 +485,7 @@ def test_dump_data_sources_snapshots_adds_to_existing_exclude_patterns():
         module.dump_data_sources(
             hook_config=config['btrfs'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -589,6 +595,7 @@ def test_remove_data_source_dumps_deletes_snapshots():
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -606,6 +613,7 @@ def test_remove_data_source_dumps_without_hook_configuration_bails():
     module.remove_data_source_dumps(
         hook_config=None,
         config={'source_directories': '/mnt/subvolume'},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -624,6 +632,7 @@ def test_remove_data_source_dumps_with_get_subvolumes_file_not_found_error_bails
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -644,6 +653,7 @@ def test_remove_data_source_dumps_with_get_subvolumes_called_process_error_bails
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -707,6 +717,7 @@ def test_remove_data_source_dumps_with_dry_run_skips_deletes():
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=True,
     )
@@ -725,6 +736,7 @@ def test_remove_data_source_dumps_without_subvolumes_skips_deletes():
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -768,6 +780,7 @@ def test_remove_data_source_without_snapshots_skips_deletes():
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -831,6 +844,7 @@ def test_remove_data_source_dumps_with_delete_snapshot_file_not_found_error_bail
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -896,6 +910,7 @@ def test_remove_data_source_dumps_with_delete_snapshot_called_process_error_bail
     module.remove_data_source_dumps(
         hook_config=config['btrfs'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )

+ 19 - 0
tests/unit/hooks/data_source/test_lvm.py

@@ -220,6 +220,7 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -245,6 +246,7 @@ def test_dump_data_sources_with_no_logical_volumes_skips_snapshots():
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -315,6 +317,7 @@ def test_dump_data_sources_uses_snapshot_size_for_snapshot():
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -395,6 +398,7 @@ def test_dump_data_sources_uses_custom_commands():
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -446,6 +450,7 @@ def test_dump_data_sources_with_dry_run_skips_snapshots_and_does_not_touch_patte
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -513,6 +518,7 @@ def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -566,6 +572,7 @@ def test_dump_data_sources_with_missing_snapshot_errors():
         module.dump_data_sources(
             hook_config=config['lvm'],
             config=config,
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -721,6 +728,7 @@ def test_remove_data_source_dumps_unmounts_and_remove_snapshots():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -737,6 +745,7 @@ def test_remove_data_source_dumps_bails_for_missing_lvm_configuration():
     module.remove_data_source_dumps(
         hook_config=None,
         config={'source_directories': '/mnt/lvolume'},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -754,6 +763,7 @@ def test_remove_data_source_dumps_bails_for_missing_lsblk_command():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -773,6 +783,7 @@ def test_remove_data_source_dumps_bails_for_lsblk_command_error():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -817,6 +828,7 @@ def test_remove_data_source_dumps_with_missing_snapshot_directory_skips_unmount(
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -874,6 +886,7 @@ def test_remove_data_source_dumps_with_missing_snapshot_mount_path_skips_unmount
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -931,6 +944,7 @@ def test_remove_data_source_dumps_with_successful_mount_point_removal_skips_unmo
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -974,6 +988,7 @@ def test_remove_data_source_dumps_bails_for_missing_umount_command():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -1017,6 +1032,7 @@ def test_remove_data_source_dumps_bails_for_umount_command_error():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -1060,6 +1076,7 @@ def test_remove_data_source_dumps_bails_for_missing_lvs_command():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -1105,6 +1122,7 @@ def test_remove_data_source_dumps_bails_for_lvs_command_error():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -1147,6 +1165,7 @@ def test_remove_data_source_with_dry_run_skips_snapshot_unmount_and_delete():
     module.remove_data_source_dumps(
         hook_config=config['lvm'],
         config=config,
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=True,
     )

+ 20 - 5
tests/unit/hooks/data_source/test_mariadb.py

@@ -8,9 +8,10 @@ from borgmatic.hooks.data_source import mariadb as module
 
 def test_database_names_to_dump_passes_through_name():
     extra_environment = flexmock()
+    log_prefix = ''
 
     names = module.database_names_to_dump(
-        {'name': 'foo'}, extra_environment, dry_run=False
+        {'name': 'foo'}, extra_environment, log_prefix, dry_run=False
     )
 
     assert names == ('foo',)
@@ -18,10 +19,11 @@ def test_database_names_to_dump_passes_through_name():
 
 def test_database_names_to_dump_bails_for_dry_run():
     extra_environment = flexmock()
+    log_prefix = ''
     flexmock(module).should_receive('execute_command_and_capture_output').never()
 
     names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=True
+        {'name': 'all'}, extra_environment, log_prefix, dry_run=True
     )
 
     assert names == ()
@@ -29,13 +31,14 @@ def test_database_names_to_dump_bails_for_dry_run():
 
 def test_database_names_to_dump_queries_mariadb_for_database_names():
     extra_environment = flexmock()
+    log_prefix = ''
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
         ('mariadb', '--skip-column-names', '--batch', '--execute', 'show schemas'),
         extra_environment=extra_environment,
     ).and_return('foo\nbar\nmysql\n').once()
 
     names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=False
+        {'name': 'all'}, extra_environment, log_prefix, dry_run=False
     )
 
     assert names == ('foo', 'bar')
@@ -43,12 +46,12 @@ def test_database_names_to_dump_queries_mariadb_for_database_names():
 
 def test_use_streaming_true_for_any_databases():
     assert module.use_streaming(
-        databases=[flexmock(), flexmock()], config=flexmock(),
+        databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
     )
 
 
 def test_use_streaming_false_for_no_databases():
-    assert not module.use_streaming(databases=[], config=flexmock())
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
 
 
 def test_dump_data_sources_dumps_each_database():
@@ -62,6 +65,7 @@ def test_dump_data_sources_dumps_each_database():
     for name, process in zip(('foo', 'bar'), processes):
         flexmock(module).should_receive('execute_dump_command').with_args(
             database={'name': name},
+            log_prefix=object,
             dump_path=object,
             database_names=(name,),
             extra_environment=object,
@@ -93,6 +97,7 @@ def test_dump_data_sources_dumps_with_password():
 
     flexmock(module).should_receive('execute_dump_command').with_args(
         database=database,
+        log_prefix=object,
         dump_path=object,
         database_names=('foo',),
         extra_environment={'MYSQL_PWD': 'trustsome1'},
@@ -118,6 +123,7 @@ def test_dump_data_sources_dumps_all_databases_at_once():
     flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar'))
     flexmock(module).should_receive('execute_dump_command').with_args(
         database={'name': 'all'},
+        log_prefix=object,
         dump_path=object,
         database_names=('foo', 'bar'),
         extra_environment=object,
@@ -145,6 +151,7 @@ def test_dump_data_sources_dumps_all_databases_separately_when_format_configured
     for name, process in zip(('foo', 'bar'), processes):
         flexmock(module).should_receive('execute_dump_command').with_args(
             database={'name': name, 'format': 'sql'},
+            log_prefix=object,
             dump_path=object,
             database_names=(name,),
             extra_environment=object,
@@ -226,6 +233,7 @@ def test_execute_dump_command_runs_mariadb_dump():
     assert (
         module.execute_dump_command(
             database={'name': 'foo'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -257,6 +265,7 @@ def test_execute_dump_command_runs_mariadb_dump_without_add_drop_database():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'add_drop_database': False},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -295,6 +304,7 @@ def test_execute_dump_command_runs_mariadb_dump_with_hostname_and_port():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'hostname': 'database.example.org', 'port': 5433},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -329,6 +339,7 @@ def test_execute_dump_command_runs_mariadb_dump_with_username_and_password():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'username': 'root', 'password': 'trustsome1'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment={'MYSQL_PWD': 'trustsome1'},
@@ -362,6 +373,7 @@ def test_execute_dump_command_runs_mariadb_dump_with_options():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'options': '--stuff=such'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -399,6 +411,7 @@ def test_execute_dump_command_runs_non_default_mariadb_dump_with_options():
                 'mariadb_dump_command': 'custom_mariadb_dump',
                 'options': '--stuff=such',
             },  # Custom MariaDB dump command specified
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -418,6 +431,7 @@ def test_execute_dump_command_with_duplicate_dump_skips_mariadb_dump():
     assert (
         module.execute_dump_command(
             database={'name': 'foo'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -438,6 +452,7 @@ def test_execute_dump_command_with_dry_run_skips_mariadb_dump():
     assert (
         module.execute_dump_command(
             database={'name': 'foo'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,

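Taken together, the database_names_to_dump fixtures above pin down its contract: an explicit name passes through unchanged, 'all' under a dry run returns nothing without touching the server, and 'all' otherwise runs 'show schemas' and filters out built-in schemas (the fixture shows at least 'mysql' being dropped). A rough sketch consistent with those expectations; the shipped implementation and its full system-schema list may differ:

    from borgmatic.execute import execute_command_and_capture_output

    def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
        if database['name'] != 'all':
            return (database['name'],)
        if dry_run:
            # Never query the server during a dry run.
            return ()
        show_output = execute_command_and_capture_output(
            ('mariadb', '--skip-column-names', '--batch', '--execute', 'show schemas'),
            extra_environment=extra_environment,
        )
        # Drop built-in schemas; 'mysql' is the one the fixture exercises.
        return tuple(
            name for name in show_output.splitlines() if name and name != 'mysql'
        )

(log_prefix is accepted here only for the logging these tests don't assert on, and execute_command_and_capture_output is borgmatic's own execution helper, exactly as mocked above.)
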
+ 3 - 1
tests/unit/hooks/data_source/test_mongodb.py

@@ -9,6 +9,7 @@ def test_use_streaming_true_for_any_non_directory_format_databases():
     assert module.use_streaming(
         databases=[{'format': 'stuff'}, {'format': 'directory'}, {}],
         config=flexmock(),
+        log_prefix=flexmock(),
     )
 
 
@@ -16,11 +17,12 @@ def test_use_streaming_false_for_all_directory_format_databases():
     assert not module.use_streaming(
         databases=[{'format': 'directory'}, {'format': 'directory'}],
         config=flexmock(),
+        log_prefix=flexmock(),
     )
 
 
 def test_use_streaming_false_for_no_databases():
-    assert not module.use_streaming(databases=[], config=flexmock())
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
 
 
 def test_dump_data_sources_runs_mongodump_for_each_database():

+ 20 - 5
tests/unit/hooks/data_source/test_mysql.py

@@ -8,9 +8,10 @@ from borgmatic.hooks.data_source import mysql as module
 
 def test_database_names_to_dump_passes_through_name():
     extra_environment = flexmock()
+    log_prefix = ''
 
     names = module.database_names_to_dump(
-        {'name': 'foo'}, extra_environment, dry_run=False
+        {'name': 'foo'}, extra_environment, log_prefix, dry_run=False
     )
 
     assert names == ('foo',)
@@ -18,10 +19,11 @@ def test_database_names_to_dump_passes_through_name():
 
 def test_database_names_to_dump_bails_for_dry_run():
     extra_environment = flexmock()
+    log_prefix = ''
     flexmock(module).should_receive('execute_command_and_capture_output').never()
 
     names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=True
+        {'name': 'all'}, extra_environment, log_prefix, dry_run=True
     )
 
     assert names == ()
@@ -29,13 +31,14 @@ def test_database_names_to_dump_bails_for_dry_run():
 
 def test_database_names_to_dump_queries_mysql_for_database_names():
     extra_environment = flexmock()
+    log_prefix = ''
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
         ('mysql', '--skip-column-names', '--batch', '--execute', 'show schemas'),
         extra_environment=extra_environment,
     ).and_return('foo\nbar\nmysql\n').once()
 
     names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=False
+        {'name': 'all'}, extra_environment, log_prefix, dry_run=False
     )
 
     assert names == ('foo', 'bar')
@@ -43,12 +46,12 @@ def test_database_names_to_dump_queries_mysql_for_database_names():
 
 def test_use_streaming_true_for_any_databases():
     assert module.use_streaming(
-        databases=[flexmock(), flexmock()], config=flexmock(),
+        databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
     )
 
 
 def test_use_streaming_false_for_no_databases():
-    assert not module.use_streaming(databases=[], config=flexmock())
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
 
 
 def test_dump_data_sources_dumps_each_database():
@@ -62,6 +65,7 @@ def test_dump_data_sources_dumps_each_database():
     for name, process in zip(('foo', 'bar'), processes):
         flexmock(module).should_receive('execute_dump_command').with_args(
             database={'name': name},
+            log_prefix=object,
             dump_path=object,
             database_names=(name,),
             extra_environment=object,
@@ -93,6 +97,7 @@ def test_dump_data_sources_dumps_with_password():
 
     flexmock(module).should_receive('execute_dump_command').with_args(
         database=database,
+        log_prefix=object,
         dump_path=object,
         database_names=('foo',),
         extra_environment={'MYSQL_PWD': 'trustsome1'},
@@ -118,6 +123,7 @@ def test_dump_data_sources_dumps_all_databases_at_once():
     flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar'))
     flexmock(module).should_receive('execute_dump_command').with_args(
         database={'name': 'all'},
+        log_prefix=object,
         dump_path=object,
         database_names=('foo', 'bar'),
         extra_environment=object,
@@ -145,6 +151,7 @@ def test_dump_data_sources_dumps_all_databases_separately_when_format_configured
     for name, process in zip(('foo', 'bar'), processes):
         flexmock(module).should_receive('execute_dump_command').with_args(
             database={'name': name, 'format': 'sql'},
+            log_prefix=object,
             dump_path=object,
             database_names=(name,),
             extra_environment=object,
@@ -226,6 +233,7 @@ def test_execute_dump_command_runs_mysqldump():
     assert (
         module.execute_dump_command(
             database={'name': 'foo'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -257,6 +265,7 @@ def test_execute_dump_command_runs_mysqldump_without_add_drop_database():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'add_drop_database': False},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -295,6 +304,7 @@ def test_execute_dump_command_runs_mysqldump_with_hostname_and_port():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'hostname': 'database.example.org', 'port': 5433},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -329,6 +339,7 @@ def test_execute_dump_command_runs_mysqldump_with_username_and_password():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'username': 'root', 'password': 'trustsome1'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment={'MYSQL_PWD': 'trustsome1'},
@@ -362,6 +373,7 @@ def test_execute_dump_command_runs_mysqldump_with_options():
     assert (
         module.execute_dump_command(
             database={'name': 'foo', 'options': '--stuff=such'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -397,6 +409,7 @@ def test_execute_dump_command_runs_non_default_mysqldump():
                 'name': 'foo',
                 'mysql_dump_command': 'custom_mysqldump',
             },  # Custom MySQL dump command specified
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -416,6 +429,7 @@ def test_execute_dump_command_with_duplicate_dump_skips_mysqldump():
     assert (
         module.execute_dump_command(
             database={'name': 'foo'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,
@@ -436,6 +450,7 @@ def test_execute_dump_command_with_dry_run_skips_mysqldump():
     assert (
         module.execute_dump_command(
             database={'name': 'foo'},
+            log_prefix='log',
             dump_path=flexmock(),
             database_names=('foo',),
             extra_environment=None,

+ 3 - 1
tests/unit/hooks/data_source/test_postgresql.py

@@ -203,6 +203,7 @@ def test_use_streaming_true_for_any_non_directory_format_databases():
     assert module.use_streaming(
         databases=[{'format': 'stuff'}, {'format': 'directory'}, {}],
         config=flexmock(),
+        log_prefix=flexmock(),
     )
 
 
@@ -210,11 +211,12 @@ def test_use_streaming_false_for_all_directory_format_databases():
     assert not module.use_streaming(
         databases=[{'format': 'directory'}, {'format': 'directory'}],
         config=flexmock(),
+        log_prefix=flexmock(),
     )
 
 
 def test_use_streaming_false_for_no_databases():
-    assert not module.use_streaming(databases=[], config=flexmock())
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
 
 
 def test_dump_data_sources_runs_pg_dump_for_each_database():

+ 2 - 2
tests/unit/hooks/data_source/test_sqlite.py

@@ -7,12 +7,12 @@ from borgmatic.hooks.data_source import sqlite as module
 
 def test_use_streaming_true_for_any_databases():
     assert module.use_streaming(
-        databases=[flexmock(), flexmock()], config=flexmock(),
+        databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
     )
 
 
 def test_use_streaming_false_for_no_databases():
-    assert not module.use_streaming(databases=[], config=flexmock())
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
 
 
 def test_dump_data_sources_logs_and_skips_if_dump_already_exists():

+ 16 - 0
tests/unit/hooks/data_source/test_zfs.py

@@ -154,6 +154,7 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
         module.dump_data_sources(
             hook_config={},
             config={'source_directories': '/mnt/dataset', 'zfs': {}},
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -176,6 +177,7 @@ def test_dump_data_sources_with_no_datasets_skips_snapshots():
         module.dump_data_sources(
             hook_config={},
             config={'patterns': flexmock(), 'zfs': {}},
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -225,6 +227,7 @@ def test_dump_data_sources_uses_custom_commands():
                 'patterns': flexmock(),
                 'zfs': hook_config,
             },
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -249,6 +252,7 @@ def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_patter
         module.dump_data_sources(
             hook_config={},
             config={'patterns': ('R /mnt/dataset',), 'zfs': {}},
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -291,6 +295,7 @@ def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained
         module.dump_data_sources(
             hook_config={},
             config={'patterns': ('R /mnt/dataset',), 'zfs': {}},
+            log_prefix='test',
             config_paths=('test.yaml',),
             borgmatic_runtime_directory='/run/borgmatic',
             patterns=patterns,
@@ -333,6 +338,7 @@ def test_remove_data_source_dumps_unmounts_and_destroys_snapshots():
     module.remove_data_source_dumps(
         hook_config={},
         config={'source_directories': '/mnt/dataset', 'zfs': {}},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -360,6 +366,7 @@ def test_remove_data_source_dumps_use_custom_commands():
     module.remove_data_source_dumps(
         hook_config=hook_config,
         config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -374,6 +381,7 @@ def test_remove_data_source_dumps_bails_for_missing_hook_configuration():
     module.remove_data_source_dumps(
         hook_config=None,
         config={'source_directories': '/mnt/dataset'},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -389,6 +397,7 @@ def test_remove_data_source_dumps_bails_for_missing_zfs_command():
     module.remove_data_source_dumps(
         hook_config=hook_config,
         config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -406,6 +415,7 @@ def test_remove_data_source_dumps_bails_for_zfs_command_error():
     module.remove_data_source_dumps(
         hook_config=hook_config,
         config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -429,6 +439,7 @@ def test_remove_data_source_dumps_bails_for_missing_umount_command():
     module.remove_data_source_dumps(
         hook_config=hook_config,
         config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -452,6 +463,7 @@ def test_remove_data_source_dumps_bails_for_umount_command_error():
     module.remove_data_source_dumps(
         hook_config=hook_config,
         config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -476,6 +488,7 @@ def test_remove_data_source_dumps_skips_unmount_snapshot_directories_that_are_no
     module.remove_data_source_dumps(
         hook_config={},
         config={'source_directories': '/mnt/dataset', 'zfs': {}},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -505,6 +518,7 @@ def test_remove_data_source_dumps_skips_unmount_snapshot_mount_paths_that_are_no
     module.remove_data_source_dumps(
         hook_config={},
         config={'source_directories': '/mnt/dataset', 'zfs': {}},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -534,6 +548,7 @@ def test_remove_data_source_dumps_skips_unmount_snapshot_mount_paths_after_rmtre
     module.remove_data_source_dumps(
         hook_config={},
         config={'source_directories': '/mnt/dataset', 'zfs': {}},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=False,
     )
@@ -556,6 +571,7 @@ def test_remove_data_source_dumps_with_dry_run_skips_unmount_and_destroy():
     module.remove_data_source_dumps(
         hook_config={},
         config={'source_directories': '/mnt/dataset', 'zfs': {}},
+        log_prefix='test',
         borgmatic_runtime_directory='/run/borgmatic',
         dry_run=True,
     )

+ 12 - 12
tests/unit/hooks/test_command.py

@@ -7,13 +7,13 @@ from borgmatic.hooks import command as module
 
 
 def test_interpolate_context_passes_through_command_without_variable():
-    assert module.interpolate_context('pre-backup', 'ls', {'foo': 'bar'}) == 'ls'
+    assert module.interpolate_context('test.yaml', 'pre-backup', 'ls', {'foo': 'bar'}) == 'ls'
 
 
 def test_interpolate_context_passes_through_command_with_unknown_variable():
     command = 'ls {baz}'  # noqa: FS003
 
-    assert module.interpolate_context('pre-backup', command, {'foo': 'bar'}) == command
+    assert module.interpolate_context('test.yaml', 'pre-backup', command, {'foo': 'bar'}) == command
 
 
 def test_interpolate_context_interpolates_variables():
@@ -21,7 +21,7 @@ def test_interpolate_context_interpolates_variables():
     context = {'foo': 'bar', 'baz': 'quux'}
 
     assert (
-        module.interpolate_context('pre-backup', command, context) == 'ls barquux quux'
+        module.interpolate_context('test.yaml', 'pre-backup', command, context) == 'ls barquux quux'
     )
 
 
@@ -30,7 +30,7 @@ def test_interpolate_context_escapes_interpolated_variables():
     context = {'foo': 'bar', 'inject': 'hi; naughty-command'}
 
     assert (
-        module.interpolate_context('pre-backup', command, context)
+        module.interpolate_context('test.yaml', 'pre-backup', command, context)
         == "ls bar 'hi; naughty-command'"
     )
 
@@ -53,7 +53,7 @@ def test_make_environment_with_pyinstaller_and_LD_LIBRARY_PATH_ORIG_copies_it_in
 
 def test_execute_hook_invokes_each_command():
     flexmock(module).should_receive('interpolate_context').replace_with(
-        lambda hook_description, command, context: command
+        lambda config_file, hook_description, command, context: command
     )
     flexmock(module).should_receive('make_environment').and_return({})
     flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
@@ -68,7 +68,7 @@ def test_execute_hook_invokes_each_command():
 
 def test_execute_hook_with_multiple_commands_invokes_each_command():
     flexmock(module).should_receive('interpolate_context').replace_with(
-        lambda hook_description, command, context: command
+        lambda config_file, hook_description, command, context: command
     )
     flexmock(module).should_receive('make_environment').and_return({})
     flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
@@ -89,7 +89,7 @@ def test_execute_hook_with_multiple_commands_invokes_each_command():
 
 def test_execute_hook_with_umask_sets_that_umask():
     flexmock(module).should_receive('interpolate_context').replace_with(
-        lambda hook_description, command, context: command
+        lambda config_file, hook_description, command, context: command
     )
     flexmock(module.os).should_receive('umask').with_args(0o77).and_return(0o22).once()
     flexmock(module.os).should_receive('umask').with_args(0o22).once()
@@ -106,7 +106,7 @@ def test_execute_hook_with_umask_sets_that_umask():
 
 def test_execute_hook_with_dry_run_skips_commands():
     flexmock(module).should_receive('interpolate_context').replace_with(
-        lambda hook_description, command, context: command
+        lambda config_file, hook_description, command, context: command
     )
     flexmock(module).should_receive('make_environment').and_return({})
     flexmock(module.borgmatic.execute).should_receive('execute_command').never()
@@ -120,7 +120,7 @@ def test_execute_hook_with_empty_commands_does_not_raise():
 
 def test_execute_hook_on_error_logs_as_error():
     flexmock(module).should_receive('interpolate_context').replace_with(
-        lambda hook_description, command, context: command
+        lambda config_file, hook_description, command, context: command
     )
     flexmock(module).should_receive('make_environment').and_return({})
     flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
@@ -136,14 +136,14 @@ def test_execute_hook_on_error_logs_as_error():
 def test_considered_soft_failure_treats_soft_fail_exit_code_as_soft_fail():
     error = subprocess.CalledProcessError(module.SOFT_FAIL_EXIT_CODE, 'try again')
 
-    assert module.considered_soft_failure(error)
+    assert module.considered_soft_failure('config.yaml', error)
 
 
 def test_considered_soft_failure_does_not_treat_other_exit_code_as_soft_fail():
     error = subprocess.CalledProcessError(1, 'error')
 
-    assert not module.considered_soft_failure(error)
+    assert not module.considered_soft_failure('config.yaml', error)
 
 
 def test_considered_soft_failure_does_not_treat_other_exception_type_as_soft_fail():
-    assert not module.considered_soft_failure(Exception())
+    assert not module.considered_soft_failure('config.yaml', Exception())

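The interpolate_context assertions above fully determine its observable behavior: each known {name} placeholder is replaced with a shell-escaped value, unknown placeholders pass through untouched, and the leading config-filename and hook-description arguments appear to feed only logging. A behavioral sketch that satisfies those assertions, though not necessarily the shipped implementation:

    import shlex

    def interpolate_context(config_filename, hook_description, command, context):
        for name, value in context.items():
            # Escape each value so 'hi; naughty-command' can't smuggle a second
            # shell command into the hook; unknown placeholders are left alone.
            command = command.replace('{%s}' % name, shlex.quote(str(value)))
        return command

    assert interpolate_context('test.yaml', 'pre-backup', 'ls {baz}', {'foo': 'bar'}) == 'ls {baz}'
    assert (
        interpolate_context(
            'test.yaml', 'pre-backup', 'ls {foo} {inject}',
            {'foo': 'bar', 'inject': 'hi; naughty-command'},
        )
        == "ls bar 'hi; naughty-command'"
    )
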
+ 1 - 1
tests/unit/hooks/test_dispatch.py

@@ -6,7 +6,7 @@ from flexmock import flexmock
 from borgmatic.hooks import dispatch as module
 
 
-def hook_function(hook_config, config, thing, value):
+def hook_function(hook_config, config, log_prefix, thing, value):
     '''
     This test function gets mocked out below.
     '''
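
hook_function() regaining its log_prefix parameter reflects the dispatcher's calling convention: resolve the named hook module, prepend that hook's slice of the configuration, and pass the remaining arguments through. A condensed sketch of that shape; the registry below is a stand-in, and the real dispatch.py resolves actual borgmatic modules:

    import types

    # Stand-in for a real hook module such as borgmatic's postgresql hook.
    postgresql = types.SimpleNamespace(
        restore_data_source_dump=lambda hook_config, config, log_prefix, thing, value: (
            f'{log_prefix}: restoring {thing}={value} from {hook_config}'
        )
    )
    HOOK_NAME_TO_MODULE = {'postgresql_databases': postgresql}

    def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
        module = HOOK_NAME_TO_MODULE[hook_name]
        hook_config = config.get(hook_name)
        return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)

    print(call_hook(
        'restore_data_source_dump',
        {'postgresql_databases': {'name': 'foo'}},
        'config.yaml', 'postgresql_databases', 'thing', 'value',
    ))

Under that convention every hook entry point receives the prefix explicitly, which is the plumbing this revert restores in place of the global formatter prefix removed from logger.py.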