瀏覽代碼

ZFS snapshots (#261).

Reviewed-on: https://projects.torsion.org/borgmatic-collective/borgmatic/pulls/944
Dan Helfman 7 月之前
父節點
當前提交
8de7094691
共有 39 個文件被更改,包括 1730 次插入和 749 次刪除
  1. 2 0
      NEWS
  2. 1 0
      README.md
  3. 13 3
      borgmatic/actions/check.py
  4. 145 1
      borgmatic/actions/create.py
  5. 15 156
      borgmatic/borg/create.py
  6. 2 2
      borgmatic/config/generate.py
  7. 36 2
      borgmatic/config/paths.py
  8. 23 1
      borgmatic/config/schema.yaml
  9. 4 2
      borgmatic/hooks/dispatch.py
  10. 1 0
      borgmatic/hooks/dump.py
  11. 12 1
      borgmatic/hooks/mariadb.py
  12. 13 1
      borgmatic/hooks/mongodb.py
  13. 12 1
      borgmatic/hooks/mysql.py
  14. 12 1
      borgmatic/hooks/postgresql.py
  15. 12 1
      borgmatic/hooks/sqlite.py
  16. 324 0
      borgmatic/hooks/zfs.py
  17. 1 1
      docs/Dockerfile
  18. 1 1
      docs/how-to/add-preparation-and-cleanup-steps-to-backups.md
  19. 1 1
      docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md
  20. 1 1
      docs/how-to/customize-warnings-and-errors.md
  21. 1 1
      docs/how-to/develop-on-borgmatic.md
  22. 3 3
      docs/how-to/inspect-your-backups.md
  23. 1 1
      docs/how-to/run-arbitrary-borg-commands.md
  24. 90 0
      docs/how-to/snapshot-your-filesystems.md
  25. 1 1
      docs/how-to/upgrade.md
  26. 二進制
      docs/static/openzfs.png
  27. 1 1
      pyproject.toml
  28. 30 10
      tests/unit/actions/test_check.py
  29. 272 50
      tests/unit/actions/test_create.py
  30. 49 466
      tests/unit/borg/test_create.py
  31. 20 0
      tests/unit/config/test_generate.py
  32. 30 0
      tests/unit/config/test_paths.py
  33. 5 3
      tests/unit/hooks/test_dispatch.py
  34. 36 6
      tests/unit/hooks/test_mariadb.py
  35. 42 7
      tests/unit/hooks/test_mongodb.py
  36. 36 6
      tests/unit/hooks/test_mysql.py
  37. 72 12
      tests/unit/hooks/test_postgresql.py
  38. 36 6
      tests/unit/hooks/test_sqlite.py
  39. 374 0
      tests/unit/hooks/test_zfs.py

+ 2 - 0
NEWS

@@ -1,4 +1,6 @@
 1.9.3.dev0
+ * #261 (beta): Add a ZFS hook for snapshotting and backing up ZFS datasets. See the documentation
+   for more information: https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/
  * Add a "--deleted" flag to the "repo-list" action for listing deleted archives that haven't
    yet been compacted (Borg 2 only).
 

+ 1 - 0
README.md

@@ -61,6 +61,7 @@ borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
 <a href="https://mariadb.com/"><img src="docs/static/mariadb.png" alt="MariaDB" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://www.mongodb.com/"><img src="docs/static/mongodb.png" alt="MongoDB" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://sqlite.org/"><img src="docs/static/sqlite.png" alt="SQLite" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
+<a href="https://openzfs.org/"><img src="docs/static/openzfs.png" alt="OpenZFS" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://healthchecks.io/"><img src="docs/static/healthchecks.png" alt="Healthchecks" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://uptime.kuma.pet/"><img src="docs/static/uptimekuma.png" alt="Uptime Kuma" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>

+ 13 - 3
borgmatic/actions/check.py

@@ -8,6 +8,7 @@ import pathlib
 import random
 import shutil
 
+import borgmatic.actions.create
 import borgmatic.borg.check
 import borgmatic.borg.create
 import borgmatic.borg.environment
@@ -345,7 +346,13 @@ def upgrade_check_times(config, borg_repository_id):
 
 
 def collect_spot_check_source_paths(
-    repository, config, local_borg_version, global_arguments, local_path, remote_path
+    repository,
+    config,
+    local_borg_version,
+    global_arguments,
+    local_path,
+    remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a repository configuration dict, a configuration dict, the local Borg version, global
@@ -366,10 +373,12 @@ def collect_spot_check_source_paths(
             dry_run=True,
             repository_path=repository['path'],
             config=config,
-            config_paths=(),
+            source_directories=borgmatic.actions.create.process_source_directories(
+                config, config_paths=()
+            ),
             local_borg_version=local_borg_version,
             global_arguments=global_arguments,
-            borgmatic_runtime_directories=(),
+            borgmatic_runtime_directory=borgmatic_runtime_directory,
             local_path=local_path,
             remote_path=remote_path,
             list_files=True,
@@ -585,6 +594,7 @@
         global_arguments,
         local_path,
         remote_path,
+        borgmatic_runtime_directory,
     )
     logger.debug(f'{log_prefix}: {len(source_paths)} total source paths for spot check')
 

+ 145 - 1
borgmatic/actions/create.py

@@ -1,7 +1,10 @@
+import glob
 import importlib.metadata
+import itertools
 import json
 import logging
 import os
+import pathlib
 
 import borgmatic.actions.json
 import borgmatic.borg.create
@@ -40,6 +43,138 @@ def create_borgmatic_manifest(config, config_paths, borgmatic_runtime_directory,
         )
 
 
+def expand_directory(directory, working_directory):
+    '''
+    Given a directory path, expand any tilde (representing a user's home directory) and any globs
+    therein. Return a list of one or more resulting paths.
+    '''
+    expanded_directory = os.path.join(working_directory or '', os.path.expanduser(directory))
+
+    return glob.glob(expanded_directory) or [expanded_directory]
+
+
+def expand_directories(directories, working_directory=None):
+    '''
+    Given a sequence of directory paths and an optional working directory, expand tildes and globs
+    in each one. Return all the resulting directories as a single flattened tuple.
+    '''
+    if directories is None:
+        return ()
+
+    return tuple(
+        itertools.chain.from_iterable(
+            expand_directory(directory, working_directory) for directory in directories
+        )
+    )
+
+
+def map_directories_to_devices(directories, working_directory=None):
+    '''
+    Given a sequence of directories and an optional working directory, return a map from directory
+    to an identifier for the device on which that directory resides or None if the path doesn't
+    exist.
+
+    This is handy for determining whether two different directories are on the same filesystem (have
+    the same device identifier).
+    '''
+    return {
+        directory: os.stat(full_directory).st_dev if os.path.exists(full_directory) else None
+        for directory in directories
+        for full_directory in (os.path.join(working_directory or '', directory),)
+    }
+
+
+def deduplicate_directories(directory_devices, additional_directory_devices):
+    '''
+    Given a map from directory to the identifier for the device on which that directory resides,
+    return the directories as a sorted sequence with all duplicate child directories removed. For
+    instance, if paths is ['/foo', '/foo/bar'], return just: ['/foo']
+
+    The one exception to this rule is if two paths are on different filesystems (devices). In that
+    case, they won't get de-duplicated in case they both need to be passed to Borg (e.g. the
+    location.one_file_system option is true).
+
+    The idea is that if Borg is given a parent directory, then it doesn't also need to be given
+    child directories, because it will naturally spider the contents of the parent directory. And
+    there are cases where Borg coming across the same file twice will result in duplicate reads and
+    even hangs, e.g. when a database hook is using a named pipe for streaming database dumps to
+    Borg.
+
+    If any additional directory devices are given, also deduplicate against them, but don't include
+    them in the returned directories.
+    '''
+    deduplicated = set()
+    directories = sorted(directory_devices.keys())
+    additional_directories = sorted(additional_directory_devices.keys())
+    all_devices = {**directory_devices, **additional_directory_devices}
+
+    for directory in directories:
+        deduplicated.add(directory)
+        parents = pathlib.PurePath(directory).parents
+
+        # If another directory in the given list (or the additional list) is a parent of current
+        # directory (even n levels up) and both are on the same filesystem, then the current
+        # directory is a duplicate.
+        for other_directory in directories + additional_directories:
+            for parent in parents:
+                if (
+                    pathlib.PurePath(other_directory) == parent
+                    and all_devices[directory] is not None
+                    and all_devices[other_directory] == all_devices[directory]
+                ):
+                    if directory in deduplicated:
+                        deduplicated.remove(directory)
+                    break
+
+    return sorted(deduplicated)
+
+
+ROOT_PATTERN_PREFIX = 'R '
+
+
+def pattern_root_directories(patterns=None):
+    '''
+    Given a sequence of patterns, parse out and return just the root directories.
+    '''
+    if not patterns:
+        return []
+
+    return [
+        pattern.split(ROOT_PATTERN_PREFIX, maxsplit=1)[1]
+        for pattern in patterns
+        if pattern.startswith(ROOT_PATTERN_PREFIX)
+    ]
+
+
+def process_source_directories(config, config_paths, source_directories=None):
+    '''
+    Given a sequence of source directories (either in the source_directories argument or, lacking
+    that, from config) and a sequence of config paths to append, expand and deduplicate the source
+    directories, returning the result.
+    '''
+    working_directory = borgmatic.config.paths.get_working_directory(config)
+
+    if source_directories is None:
+        source_directories = tuple(config.get('source_directories', ())) + (
+            tuple(config_paths) if config.get('store_config_files', True) else ()
+        )
+
+    return deduplicate_directories(
+        map_directories_to_devices(
+            expand_directories(
+                tuple(source_directories),
+                working_directory=working_directory,
+            )
+        ),
+        additional_directory_devices=map_directories_to_devices(
+            expand_directories(
+                pattern_root_directories(config.get('patterns')),
+                working_directory=working_directory,
+            )
+        ),
+    )
+
+
 def run_create(
     config_filename,
     repository,
@@ -86,14 +221,21 @@ def run_create(
             borgmatic_runtime_directory,
             global_arguments.dry_run,
         )
+        source_directories = process_source_directories(config, config_paths)
         active_dumps = borgmatic.hooks.dispatch.call_hooks(
             'dump_data_sources',
             config,
             repository['path'],
             borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
             borgmatic_runtime_directory,
+            source_directories,
             global_arguments.dry_run,
         )
+
+        # Process source directories again in case any data source hooks updated them. Without this
+        # step, we could end up with duplicate paths that cause Borg to hang when it tries to read
+        # from the same named pipe twice.
+        source_directories = process_source_directories(config, config_paths, source_directories)
         stream_processes = [process for processes in active_dumps.values() for process in processes]
 
         if config.get('store_config_files', True):
@@ -103,12 +245,14 @@ def run_create(
                 borgmatic_runtime_directory,
                 global_arguments.dry_run,
             )
+            if not global_arguments.dry_run:
+                source_directories.append(os.path.join(borgmatic_runtime_directory, 'bootstrap'))
 
         json_output = borgmatic.borg.create.create_archive(
             global_arguments.dry_run,
             repository['path'],
             config,
-            config_paths,
+            source_directories,
             local_borg_version,
             global_arguments,
             borgmatic_runtime_directory,

+ 15 - 156
borgmatic/borg/create.py

@@ -1,4 +1,3 @@
-import glob
 import itertools
 import logging
 import os
@@ -20,31 +19,6 @@ from borgmatic.execute import (
 logger = logging.getLogger(__name__)
 
 
-def expand_directory(directory, working_directory):
-    '''
-    Given a directory path, expand any tilde (representing a user's home directory) and any globs
-    therein. Return a list of one or more resulting paths.
-    '''
-    expanded_directory = os.path.join(working_directory or '', os.path.expanduser(directory))
-
-    return glob.glob(expanded_directory) or [expanded_directory]
-
-
-def expand_directories(directories, working_directory=None):
-    '''
-    Given a sequence of directory paths and an optional working directory, expand tildes and globs
-    in each one. Return all the resulting directories as a single flattened tuple.
-    '''
-    if directories is None:
-        return ()
-
-    return tuple(
-        itertools.chain.from_iterable(
-            expand_directory(directory, working_directory) for directory in directories
-        )
-    )
-
 def expand_home_directories(directories):
     '''
     Given a sequence of directory paths, expand tildes in each one. Do not perform any globbing.
@@ -56,67 +30,6 @@
     return tuple(os.path.expanduser(directory) for directory in directories)
 
 
-def map_directories_to_devices(directories, working_directory=None):
-    '''
-    Given a sequence of directories and an optional working directory, return a map from directory
-    to an identifier for the device on which that directory resides or None if the path doesn't
-    exist.
-
-    This is handy for determining whether two different directories are on the same filesystem (have
-    the same device identifier).
-    '''
-    return {
-        directory: os.stat(full_directory).st_dev if os.path.exists(full_directory) else None
-        for directory in directories
-        for full_directory in (os.path.join(working_directory or '', directory),)
-    }
-
-
-def deduplicate_directories(directory_devices, additional_directory_devices):
-    '''
-    Given a map from directory to the identifier for the device on which that directory resides,
-    return the directories as a sorted tuple with all duplicate child directories removed. For
-    instance, if paths is ('/foo', '/foo/bar'), return just: ('/foo',)
-
-    The one exception to this rule is if two paths are on different filesystems (devices). In that
-    case, they won't get de-duplicated in case they both need to be passed to Borg (e.g. the
-    location.one_file_system option is true).
-
-    The idea is that if Borg is given a parent directory, then it doesn't also need to be given
-    child directories, because it will naturally spider the contents of the parent directory. And
-    there are cases where Borg coming across the same file twice will result in duplicate reads and
-    even hangs, e.g. when a database hook is using a named pipe for streaming database dumps to
-    Borg.
-
-    If any additional directory devices are given, also deduplicate against them, but don't include
-    them in the returned directories.
-    '''
-    deduplicated = set()
-    directories = sorted(directory_devices.keys())
-    additional_directories = sorted(additional_directory_devices.keys())
-    all_devices = {**directory_devices, **additional_directory_devices}
-
-    for directory in directories:
-        deduplicated.add(directory)
-        parents = pathlib.PurePath(directory).parents
-
-        # If another directory in the given list (or the additional list) is a parent of current
-        # directory (even n levels up) and both are on the same filesystem, then the current
-        # directory is a duplicate.
-        for other_directory in directories + additional_directories:
-            for parent in parents:
-                if (
-                    pathlib.PurePath(other_directory) == parent
-                    and all_devices[directory] is not None
-                    and all_devices[other_directory] == all_devices[directory]
-                ):
-                    if directory in deduplicated:
-                        deduplicated.remove(directory)
-                    break
-
-    return tuple(sorted(deduplicated))
-
-
 def write_pattern_file(patterns=None, sources=None, pattern_file=None):
     '''
     Given a sequence of patterns and an optional sequence of source directories, write them to a
@@ -221,32 +134,6 @@
         return f'{base_flags}-'
 
 
-def collect_borgmatic_runtime_directories(borgmatic_runtime_directory):
-    '''
-    Return a list of borgmatic-specific runtime directories used for temporary runtime data like
-    streaming database dumps and bootstrap metadata. If no such directories exist, return an empty
-    list.
-    '''
-    return [borgmatic_runtime_directory] if os.path.exists(borgmatic_runtime_directory) else []
-
-
-ROOT_PATTERN_PREFIX = 'R '
-
-
-def pattern_root_directories(patterns=None):
-    '''
-    Given a sequence of patterns, parse out and return just the root directories.
-    '''
-    if not patterns:
-        return []
-
-    return [
-        pattern.split(ROOT_PATTERN_PREFIX, maxsplit=1)[1]
-        for pattern in patterns
-        if pattern.startswith(ROOT_PATTERN_PREFIX)
-    ]
-
-
 def special_file(path):
     '''
     Return whether the given path is a special file (character device, block device, or named pipe
@@ -307,21 +194,15 @@
     )
 
 
-def check_all_source_directories_exist(source_directories, working_directory=None):
+def check_all_source_directories_exist(source_directories):
     '''
-    Given a sequence of source directories and an optional working directory to serve as a prefix
-    for each (if it's a relative directory), check that the source directories all exist. If any do
+    Given a sequence of source directories, check that the source directories all exist. If any do
     not, raise an exception.
     '''
     missing_directories = [
         source_directory
         for source_directory in source_directories
-        if not all(
-            [
-                os.path.exists(os.path.join(working_directory or '', directory))
-                for directory in expand_directory(source_directory, working_directory)
-            ]
-        )
+        if not os.path.exists(source_directory)
     ]
     if missing_directories:
         raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}")
@@ -334,10 +215,10 @@
     dry_run,
     repository_path,
     config,
-    config_paths,
+    source_directories,
    local_borg_version,
     global_arguments,
-    borgmatic_runtime_directories,
+    borgmatic_runtime_directory,
     local_path='borg',
     remote_path=None,
     progress=False,
@@ -352,34 +233,13 @@
     (base Borg create command flags, Borg create command positional arguments, open pattern file
     handle, open exclude file handle).
     '''
-    working_directory = borgmatic.config.paths.get_working_directory(config)
-
     if config.get('source_directories_must_exist', False):
-        check_all_source_directories_exist(
-            config.get('source_directories'), working_directory=working_directory
-        )
-
-    sources = deduplicate_directories(
-        map_directories_to_devices(
-            expand_directories(
-                tuple(config.get('source_directories', ()))
-                + borgmatic_runtime_directories
-                + tuple(config_paths if config.get('store_config_files', True) else ()),
-                working_directory=working_directory,
-            )
-        ),
-        additional_directory_devices=map_directories_to_devices(
-            expand_directories(
-                pattern_root_directories(config.get('patterns')),
-                working_directory=working_directory,
-            )
-        ),
-    )
+        check_all_source_directories_exist(source_directories)
 
     ensure_files_readable(config.get('patterns_from'), config.get('exclude_from'))
 
     pattern_file = (
-        write_pattern_file(config.get('patterns'), sources)
+        write_pattern_file(config.get('patterns'), source_directories)
         if config.get('patterns') or config.get('patterns_from')
         else None
     )
@@ -457,7 +317,7 @@
 
     create_positional_arguments = flags.make_repository_archive_flags(
         repository_path, archive_name_format, local_borg_version
-    ) + (sources if not pattern_file else ())
+    ) + (tuple(source_directories) if not pattern_file else ())
 
     # If database hooks are enabled (as indicated by streaming processes), exclude files that might
     # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
@@ -466,6 +326,7 @@
             f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
         )
         borg_environment = environment.make_environment(config)
+        working_directory = borgmatic.config.paths.get_working_directory(config)
 
         logger.debug(f'{repository_path}: Collecting special file paths')
         special_file_paths = collect_special_file_paths(
@@ -474,7 +335,9 @@
             local_path,
             working_directory,
             borg_environment,
-            skip_directories=borgmatic_runtime_directories,
+            skip_directories=(
+                [borgmatic_runtime_directory] if os.path.exists(borgmatic_runtime_directory) else []
+            ),
         )
 
         if special_file_paths:
@@ -501,7 +364,7 @@
     dry_run,
     repository_path,
     config,
-    config_paths,
+    source_directories,
     local_borg_version,
     global_arguments,
     borgmatic_runtime_directory,
@@ -524,20 +387,16 @@
     borgmatic.logger.add_custom_log_levels()
 
     working_directory = borgmatic.config.paths.get_working_directory(config)
-    borgmatic_runtime_directories = expand_directories(
-        collect_borgmatic_runtime_directories(borgmatic_runtime_directory),
-        working_directory=working_directory,
-    )
 
     (create_flags, create_positional_arguments, pattern_file, exclude_file) = (
         make_base_create_command(
             dry_run,
             repository_path,
             config,
-            config_paths,
+            source_directories,
             local_borg_version,
             global_arguments,
-            borgmatic_runtime_directories,
+            borgmatic_runtime_directory,
             local_path,
             remote_path,
             progress,

+ 2 - 2
borgmatic/config/generate.py

@@ -44,12 +44,12 @@ def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
     if example is not None:
         return example
 
-    if schema_type == 'array':
+    if schema_type == 'array' or (isinstance(schema_type, list) and 'array' in schema_type):
         config = ruamel.yaml.comments.CommentedSeq(
             [schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
         )
         add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
-    elif schema_type == 'object':
+    elif schema_type == 'object' or (isinstance(schema_type, list) and 'object' in schema_type):
         config = ruamel.yaml.comments.CommentedMap(
             [
                 (field_name, schema_to_sample_configuration(sub_schema, level + 1))

+ 36 - 2
borgmatic/config/paths.py

@@ -33,6 +33,32 @@ def get_borgmatic_source_directory(config):
 TEMPORARY_DIRECTORY_PREFIX = 'borgmatic-'
 
 
+def replace_temporary_subdirectory_with_glob(path):
+    '''
+    Given an absolute temporary directory path, look for a subdirectory within it starting with the
+    temporary directory prefix and replace it with an appropriate glob. For instance, given:
+
+        /tmp/borgmatic-aet8kn93/borgmatic
+
+    ... replace it with:
+
+        /tmp/borgmatic-*/borgmatic
+
+    This is useful for finding previous temporary directories from prior borgmatic runs.
+    '''
+    return os.path.join(
+        '/',
+        *(
+            (
+                f'{TEMPORARY_DIRECTORY_PREFIX}*'
+                if subdirectory.startswith(TEMPORARY_DIRECTORY_PREFIX)
+                else subdirectory
+            )
+            for subdirectory in path.split(os.path.sep)
+        ),
+    )
+
+
 class Runtime_directory:
     '''
     A Python context manager for creating and cleaning up the borgmatic runtime directory used for
@@ -84,7 +110,9 @@ class Runtime_directory:
 
 
         self.runtime_path = expand_user_in_path(
         self.runtime_path = expand_user_in_path(
             os.path.join(
             os.path.join(
-                base_path if final_directory == 'borgmatic' else runtime_directory, '.', 'borgmatic'
+                base_path if final_directory == 'borgmatic' else runtime_directory,
+                '.',  # Borg 1.4+ "slashdot" hack.
+                'borgmatic',
             )
             )
         )
         )
         os.makedirs(self.runtime_path, mode=0o700, exist_ok=True)
         os.makedirs(self.runtime_path, mode=0o700, exist_ok=True)
@@ -102,7 +130,13 @@ class Runtime_directory:
         Delete any temporary directory that was created as part of initialization.
         Delete any temporary directory that was created as part of initialization.
         '''
         '''
         if self.temporary_directory:
         if self.temporary_directory:
-            self.temporary_directory.cleanup()
+            try:
+                self.temporary_directory.cleanup()
+            # The cleanup() call errors if, for instance, there's still a
+            # mounted filesystem within the temporary directory. There's
+            # nothing we can do about that here, so swallow the error.
+            except OSError:
+                pass
 
 
 
 
 def make_runtime_directory_glob(borgmatic_runtime_directory):
 def make_runtime_directory_glob(borgmatic_runtime_directory):

+ 23 - 1
borgmatic/config/schema.yaml

@@ -2255,7 +2255,29 @@ properties:
                     config: "__config"
                     config: "__config"
                     hostname: "__hostname"
                     hostname: "__hostname"
         description: |
         description: |
-            Configuration for a monitoring integration with Grafana loki. You
+            Configuration for a monitoring integration with Grafana Loki. You
             can send the logs to a self-hosted instance or create an account at
             can send the logs to a self-hosted instance or create an account at
             https://grafana.com/auth/sign-up/create-user. See borgmatic
             https://grafana.com/auth/sign-up/create-user. See borgmatic
             monitoring documentation for details.
             monitoring documentation for details.
+
+    zfs:
+        type: ["object", "null"]
+        additionalProperties: false
+        properties:
+            zfs_command:
+                type: string
+                description: |
+                    Command to use instead of "zfs".
+                example: /usr/local/bin/zfs
+            mount_command:
+                type: string
+                description: |
+                    Command to use instead of "mount".
+                example: /usr/local/bin/mount
+            umount_command:
+                type: string
+                description: |
+                    Command to use instead of "umount".
+                example: /usr/local/bin/umount
+        description: |
+            Configuration for integration with the ZFS filesystem.

+ 4 - 2
borgmatic/hooks/dispatch.py

@@ -16,6 +16,7 @@ from borgmatic.hooks import (
     sqlite,
     sqlite,
     uptimekuma,
     uptimekuma,
     zabbix,
     zabbix,
+    zfs,
 )
 )
 
 
 logger = logging.getLogger(__name__)
 logger = logging.getLogger(__name__)
@@ -36,6 +37,7 @@ HOOK_NAME_TO_MODULE = {
     'sqlite_databases': sqlite,
     'sqlite_databases': sqlite,
     'uptime_kuma': uptimekuma,
     'uptime_kuma': uptimekuma,
     'zabbix': zabbix,
     'zabbix': zabbix,
+    'zfs': zfs,
 }
 }
 
 
 
 
@@ -49,7 +51,7 @@ def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
     Raise AttributeError if the function name is not found in the module.
     Raise AttributeError if the function name is not found in the module.
     Raise anything else that the called function raises.
     Raise anything else that the called function raises.
     '''
     '''
-    hook_config = config.get(hook_name, {})
+    hook_config = config.get(hook_name) or {}
 
 
     try:
     try:
         module = HOOK_NAME_TO_MODULE[hook_name]
         module = HOOK_NAME_TO_MODULE[hook_name]
@@ -77,7 +79,7 @@ def call_hooks(function_name, config, log_prefix, hook_names, *args, **kwargs):
     return {
     return {
         hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
         hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
         for hook_name in hook_names
         for hook_name in hook_names
-        if config.get(hook_name)
+        if hook_name in config
     }
     }
 
 
 
 

+ 1 - 0
borgmatic/hooks/dump.py

@@ -11,6 +11,7 @@ DATA_SOURCE_HOOK_NAMES = (
     'mongodb_databases',
     'mongodb_databases',
     'postgresql_databases',
     'postgresql_databases',
     'sqlite_databases',
     'sqlite_databases',
+    'zfs',
 )
 )
 
 
 
 

+ 12 - 1
borgmatic/hooks/mariadb.py

@@ -122,7 +122,14 @@ def use_streaming(databases, config, log_prefix):
     return any(databases)
     return any(databases)
 
 
 
 
-def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
+def dump_data_sources(
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
+):
     '''
     '''
     Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
     Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the given
     dicts, one dict describing each database as per the configuration schema. Use the given
@@ -131,6 +138,7 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
 
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
     processes = []
@@ -178,6 +186,9 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
                 )
                 )
             )
             )
 
 
+    if not dry_run:
+        source_directories.append(os.path.join(borgmatic_runtime_directory, 'mariadb_databases'))
+
     return [process for process in processes if process]
     return [process for process in processes if process]
 
 
 
 

+ 13 - 1
borgmatic/hooks/mongodb.py

@@ -1,4 +1,5 @@
 import logging
 import logging
+import os
 import shlex
 import shlex
 
 
 import borgmatic.config.paths
 import borgmatic.config.paths
@@ -23,7 +24,14 @@ def use_streaming(databases, config, log_prefix):
     return any(database.get('format') != 'directory' for database in databases)
     return any(database.get('format') != 'directory' for database in databases)
 
 
 
 
-def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
+def dump_data_sources(
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
+):
     '''
     '''
     Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
     Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the borgmatic
     dicts, one dict describing each database as per the configuration schema. Use the borgmatic
@@ -32,6 +40,7 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
 
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
 
 
@@ -60,6 +69,9 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
             dump.create_named_pipe_for_dump(dump_filename)
             dump.create_named_pipe_for_dump(dump_filename)
             processes.append(execute_command(command, shell=True, run_to_completion=False))
             processes.append(execute_command(command, shell=True, run_to_completion=False))
 
 
+    if not dry_run:
+        source_directories.append(os.path.join(borgmatic_runtime_directory, 'mongodb_databases'))
+
     return processes
     return processes
 
 
 
 

+ 12 - 1
borgmatic/hooks/mysql.py

@@ -121,7 +121,14 @@ def use_streaming(databases, config, log_prefix):
     return any(databases)
     return any(databases)
 
 
 
 
-def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
+def dump_data_sources(
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
+):
     '''
     '''
     Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
     Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
     of dicts, one dict describing each database as per the configuration schema. Use the given
     of dicts, one dict describing each database as per the configuration schema. Use the given
@@ -130,6 +137,7 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
 
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
     processes = []
@@ -177,6 +185,9 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
                 )
                 )
             )
             )
 
 
+    if not dry_run:
+        source_directories.append(os.path.join(borgmatic_runtime_directory, 'mysql_databases'))
+
     return [process for process in processes if process]
     return [process for process in processes if process]
 
 
 
 

+ 12 - 1
borgmatic/hooks/postgresql.py

@@ -104,7 +104,14 @@ def use_streaming(databases, config, log_prefix):
     return any(database.get('format') != 'directory' for database in databases)
     return any(database.get('format') != 'directory' for database in databases)
 
 
 
 
-def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
+def dump_data_sources(
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
+):
     '''
     '''
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the given
     dicts, one dict describing each database as per the configuration schema. Use the given
@@ -113,6 +120,7 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
 
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
 
 
     Raise ValueError if the databases to dump cannot be determined.
     Raise ValueError if the databases to dump cannot be determined.
     '''
     '''
@@ -203,6 +211,9 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
                     )
                     )
                 )
                 )
 
 
+    if not dry_run:
+        source_directories.append(os.path.join(borgmatic_runtime_directory, 'postgresql_databases'))
+
     return processes
     return processes
 
 
 
 

+ 12 - 1
borgmatic/hooks/sqlite.py

@@ -24,7 +24,14 @@ def use_streaming(databases, config, log_prefix):
     return any(databases)
     return any(databases)
 
 
 
 
-def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
+def dump_data_sources(
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
+):
     '''
     '''
     Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
     Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
     configuration dicts, as per the configuration schema. Use the given borgmatic runtime directory
     configuration dicts, as per the configuration schema. Use the given borgmatic runtime directory
@@ -32,6 +39,7 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
 
 
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
     processes = []
@@ -73,6 +81,9 @@ def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory
         dump.create_named_pipe_for_dump(dump_filename)
         dump.create_named_pipe_for_dump(dump_filename)
         processes.append(execute_command(command, shell=True, run_to_completion=False))
         processes.append(execute_command(command, shell=True, run_to_completion=False))
 
 
+    if not dry_run:
+        source_directories.append(os.path.join(borgmatic_runtime_directory, 'sqlite_databases'))
+
     return processes
     return processes
 
 
 
 

+ 324 - 0
borgmatic/hooks/zfs.py

@@ -0,0 +1,324 @@
+import glob
+import logging
+import os
+import shutil
+import subprocess
+
+import borgmatic.config.paths
+import borgmatic.execute
+
+logger = logging.getLogger(__name__)
+
+
def use_streaming(hook_config, config, log_prefix):  # pragma: no cover
    '''
    Return whether this hook streams dumps to Borg. The ZFS hook never streams; it backs up
    mounted snapshot files directly, so always return False.
    '''
    return False
+
+
# Prefix for the names of snapshots that borgmatic creates, so cleanup can distinguish them from
# snapshots made by the user or by other tools.
BORGMATIC_SNAPSHOT_PREFIX = 'borgmatic-'
# ZFS user property that, when set to "auto" on a dataset, opts that dataset into borgmatic
# backups even if its mount point isn't listed in source_directories.
BORGMATIC_USER_PROPERTY = 'org.torsion.borgmatic:backup'
+
+
def get_datasets_to_backup(zfs_command, source_directories):
    '''
    Given a ZFS command to run and a sequence of configured source directories, find the
    intersection between the current ZFS dataset mount points and the configured borgmatic source
    directories. The idea is that these are the requested datasets to snapshot. But also include any
    datasets tagged with a borgmatic-specific user property, whether or not they appear in source
    directories.

    Return the result as a sequence of (dataset name, mount point) pairs.

    Raise ValueError if the "zfs list" output cannot be parsed.
    '''
    list_output = borgmatic.execute.execute_command_and_capture_output(
        (
            zfs_command,
            'list',
            '-H',  # Tab-delimited output with no headers, for reliable parsing.
            '-t',
            'filesystem',
            '-o',
            f'name,mountpoint,{BORGMATIC_USER_PROPERTY}',
        )
    )
    # Use a set for O(1) mount point membership tests below.
    source_directories_set = set(source_directories)

    try:
        return tuple(
            (dataset_name, mount_point)
            for line in list_output.splitlines()
            # Unpack the three tab-separated fields; a malformed line raises ValueError.
            for (dataset_name, mount_point, user_property_value) in (line.rstrip().split('\t'),)
            if mount_point in source_directories_set or user_property_value == 'auto'
        )
    except ValueError:
        # Bug fix: this was previously a plain string literal, so the error message contained
        # the literal text "{zfs_command}" instead of the actual command name.
        raise ValueError(f'Invalid {zfs_command} list output')
+
+
def get_all_datasets(zfs_command):
    '''
    Given a ZFS command to run, return all ZFS datasets as a sequence of (dataset name, mount point)
    pairs.

    Raise ValueError if the "zfs list" output cannot be parsed.
    '''
    list_output = borgmatic.execute.execute_command_and_capture_output(
        (
            zfs_command,
            'list',
            '-H',  # Tab-delimited output with no headers, for reliable parsing.
            '-t',
            'filesystem',
            '-o',
            'name,mountpoint',
        )
    )

    try:
        return tuple(
            (dataset_name, mount_point)
            for line in list_output.splitlines()
            # Unpack the two tab-separated fields; a malformed line raises ValueError.
            for (dataset_name, mount_point) in (line.rstrip().split('\t'),)
        )
    except ValueError:
        # Bug fix: this was previously a plain string literal, so the error message contained
        # the literal text "{zfs_command}" instead of the actual command name.
        raise ValueError(f'Invalid {zfs_command} list output')
+
+
def snapshot_dataset(zfs_command, full_snapshot_name):  # pragma: no cover
    '''
    Given a ZFS command to run and a snapshot name of the form "dataset@snapshot", create a new
    recursive ZFS snapshot by that name.
    '''
    # "-r" recursively snapshots any child datasets as well.
    snapshot_command = (zfs_command, 'snapshot', '-r', full_snapshot_name)

    borgmatic.execute.execute_command(snapshot_command, output_log_level=logging.DEBUG)
+
+
def mount_snapshot(mount_command, full_snapshot_name, snapshot_mount_path):  # pragma: no cover
    '''
    Given a mount command to run, an existing snapshot name of the form "dataset@snapshot", and the
    path where the snapshot should be mounted, mount the snapshot (making any necessary directories
    first).
    '''
    # Restrict the mount point's permissions (0700), since snapshot contents may be sensitive.
    os.makedirs(snapshot_mount_path, mode=0o700, exist_ok=True)

    mount_snapshot_command = (mount_command, '-t', 'zfs', full_snapshot_name, snapshot_mount_path)

    borgmatic.execute.execute_command(mount_snapshot_command, output_log_level=logging.DEBUG)
+
+
def dump_data_sources(
    hook_config,
    config,
    log_prefix,
    borgmatic_runtime_directory,
    source_directories,
    dry_run,
):
    '''
    Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic runtime
    directory, the configured source directories, and whether this is a dry run, auto-detect and
    snapshot any ZFS dataset mount points listed in the given source directories and any dataset
    with a borgmatic-specific user property. Also update those source directories, replacing dataset
    mount points with corresponding snapshot directories so they get stored in the Borg archive
    instead of the dataset mount points. Use the log prefix in any log entries.

    Return an empty sequence, since there are no ongoing dump processes from this hook.

    If this is a dry run, then don't actually snapshot anything.
    '''
    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
    logger.info(f'{log_prefix}: Snapshotting ZFS datasets{dry_run_label}')

    zfs_command = hook_config.get('zfs_command', 'zfs')
    mount_command = hook_config.get('mount_command', 'mount')

    # Embed the current process ID in the snapshot name so cleanup can identify borgmatic's own
    # snapshots later.
    snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'

    for (dataset_name, mount_point) in get_datasets_to_backup(zfs_command, source_directories):
        full_snapshot_name = f'{dataset_name}@{snapshot_name}'
        logger.debug(f'{log_prefix}: Creating ZFS snapshot {full_snapshot_name}{dry_run_label}')

        if not dry_run:
            snapshot_dataset(zfs_command, full_snapshot_name)

        # Mount the snapshot into a particular named temporary directory so that the snapshot ends
        # up in the Borg archive at the "original" dataset mount point path.
        snapshot_mount_path_for_borg = os.path.join(
            os.path.normpath(borgmatic_runtime_directory),
            'zfs_snapshots',
            '.',  # Borg 1.4+ "slashdot" hack.
            mount_point.lstrip(os.path.sep),
        )
        # The actual filesystem mount happens at the normalized path (without the "/./").
        snapshot_mount_path = os.path.normpath(snapshot_mount_path_for_borg)
        logger.debug(
            f'{log_prefix}: Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
        )

        if dry_run:
            continue

        mount_snapshot(mount_command, full_snapshot_name, snapshot_mount_path)

        # Swap the live mount point out of the source directories in favor of the (read-only)
        # snapshot path, so Borg backs up the stable snapshot contents.
        if mount_point in source_directories:
            source_directories.remove(mount_point)

        source_directories.append(snapshot_mount_path_for_borg)

    return []
+
+
def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover
    '''
    Given a umount command to run and the mount path of a snapshot, unmount it.
    '''
    unmount_command_tuple = (umount_command, snapshot_mount_path)

    borgmatic.execute.execute_command(unmount_command_tuple, output_log_level=logging.DEBUG)
+
+
def destroy_snapshot(zfs_command, full_snapshot_name):  # pragma: no cover
    '''
    Given a ZFS command to run and the name of a snapshot in the form "dataset@snapshot", destroy
    it (and, via "-r", any identically named snapshots of child datasets).
    '''
    destroy_command = (zfs_command, 'destroy', '-r', full_snapshot_name)

    borgmatic.execute.execute_command(destroy_command, output_log_level=logging.DEBUG)
+
+
def get_all_snapshots(zfs_command):
    '''
    Given a ZFS command to run, return all ZFS snapshots as a sequence of full snapshot names of the
    form "dataset@snapshot".
    '''
    # "-H" produces headerless output, one snapshot name per line.
    list_output = borgmatic.execute.execute_command_and_capture_output(
        (zfs_command, 'list', '-H', '-t', 'snapshot', '-o', 'name')
    )

    return tuple(snapshot_line.rstrip() for snapshot_line in list_output.splitlines())
+
+
def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
    '''
    Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic runtime
    directory, and whether this is a dry run, unmount and destroy any ZFS snapshots created by
    borgmatic. Use the log prefix in any log entries. If this is a dry run, then don't actually
    remove anything.

    Cleanup happens in two phases: first unmount (and delete) any snapshot mount directories
    within the runtime directory, then destroy any borgmatic-created snapshots themselves.
    '''
    dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''

    # Unmount snapshots.
    zfs_command = hook_config.get('zfs_command', 'zfs')

    try:
        datasets = get_all_datasets(zfs_command)
    except FileNotFoundError:
        # ZFS not being installed isn't an error here—it just means there's nothing to clean up.
        logger.debug(f'{log_prefix}: Could not find "{zfs_command}" command')
        return
    except subprocess.CalledProcessError as error:
        logger.debug(f'{log_prefix}: {error}')
        return

    # The runtime directory can contain a randomly named temporary component from a prior
    # borgmatic run, so glob over that component to find snapshot mounts left by previous runs.
    snapshots_glob = os.path.join(
        borgmatic.config.paths.replace_temporary_subdirectory_with_glob(
            os.path.normpath(borgmatic_runtime_directory),
        ),
        'zfs_snapshots',
    )
    logger.debug(
        f'{log_prefix}: Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
    )
    umount_command = hook_config.get('umount_command', 'umount')

    for snapshots_directory in glob.glob(snapshots_glob):
        if not os.path.isdir(snapshots_directory):
            continue

        # This might fail if the directory is already mounted, but we swallow errors here since
        # we'll try again below. The point of doing it here is that we don't want to try to unmount
        # a non-mounted directory (which *will* fail), and probing for whether a directory is
        # mounted is tough to do in a cross-platform way.
        if not dry_run:
            shutil.rmtree(snapshots_directory, ignore_errors=True)

        for _, mount_point in datasets:
            snapshot_mount_path = os.path.join(snapshots_directory, mount_point.lstrip(os.path.sep))
            # Any directory that survived the rmtree above is presumably still mounted, so it's
            # the one that needs unmounting; everything else can be skipped.
            if not os.path.isdir(snapshot_mount_path):
                continue

            logger.debug(
                f'{log_prefix}: Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}'
            )

            if not dry_run:
                try:
                    unmount_snapshot(umount_command, snapshot_mount_path)
                except FileNotFoundError:
                    logger.debug(f'{log_prefix}: Could not find "{umount_command}" command')
                    return
                except subprocess.CalledProcessError as error:
                    logger.debug(f'{log_prefix}: {error}')
                    return

        # Retry the removal now that snapshots have been unmounted; this time, let any error
        # propagate rather than ignoring it.
        if not dry_run:
            shutil.rmtree(snapshots_directory)

    # Destroy snapshots.
    full_snapshot_names = get_all_snapshots(zfs_command)

    for full_snapshot_name in full_snapshot_names:
        # Only destroy snapshots that borgmatic actually created!
        if not full_snapshot_name.split('@')[-1].startswith(BORGMATIC_SNAPSHOT_PREFIX):
            continue

        logger.debug(f'{log_prefix}: Destroying ZFS snapshot {full_snapshot_name}{dry_run_label}')

        if not dry_run:
            destroy_snapshot(zfs_command, full_snapshot_name)
+
+
def make_data_source_dump_patterns(hook_config, config, log_prefix, name=None):  # pragma: no cover
    '''
    Restores aren't implemented, because stored files can be extracted directly with "extract".
    '''
    raise NotImplementedError()
+
+
def restore_data_source_dump(
    hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):  # pragma: no cover
    '''
    Restores aren't implemented, because stored files can be extracted directly with "extract".
    '''
    raise NotImplementedError()

+ 1 - 1
docs/Dockerfile

@@ -2,7 +2,7 @@ FROM docker.io/alpine:3.20.1 AS borgmatic
 
 
 COPY . /app
 COPY . /app
 RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
 RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
-RUN pip install --break-system-packages --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml
+RUN pip install --break-system-packages --no-cache /app && borgmatic config generate && chmod +r /etc/borgmatic/config.yaml
 RUN borgmatic --help > /command-line.txt \
 RUN borgmatic --help > /command-line.txt \
     && for action in repo-create transfer create prune compact check delete extract config "config bootstrap" "config generate" "config validate" export-tar mount umount repo-delete restore repo-list list repo-info info break-lock "key export" "key change-passphrase" borg; do \
     && for action in repo-create transfer create prune compact check delete extract config "config bootstrap" "config generate" "config validate" export-tar mount umount repo-delete restore repo-list list repo-info info break-lock "key export" "key change-passphrase" borg; do \
            echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
            echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \

+ 1 - 1
docs/how-to/add-preparation-and-cleanup-steps-to-backups.md

@@ -3,7 +3,7 @@ title: How to add preparation and cleanup steps to backups
 eleventyNavigation:
 eleventyNavigation:
   key: 🧹 Add preparation and cleanup steps
   key: 🧹 Add preparation and cleanup steps
   parent: How-to guides
   parent: How-to guides
-  order: 9
+  order: 10
 ---
 ---
 ## Preparation and cleanup hooks
 ## Preparation and cleanup hooks
 
 

+ 1 - 1
docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md

@@ -3,7 +3,7 @@ title: How to backup to a removable drive or an intermittent server
 eleventyNavigation:
 eleventyNavigation:
   key: 💾 Backup to a removable drive/server
   key: 💾 Backup to a removable drive/server
   parent: How-to guides
   parent: How-to guides
-  order: 10
+  order: 11
 ---
 ---
 ## Occasional backups
 ## Occasional backups
 
 

+ 1 - 1
docs/how-to/customize-warnings-and-errors.md

@@ -3,7 +3,7 @@ title: How to customize warnings and errors
 eleventyNavigation:
 eleventyNavigation:
   key: 💥 Customize warnings/errors
   key: 💥 Customize warnings/errors
   parent: How-to guides
   parent: How-to guides
-  order: 12
+  order: 13
 ---
 ---
 ## When things go wrong
 ## When things go wrong
 
 

+ 1 - 1
docs/how-to/develop-on-borgmatic.md

@@ -3,7 +3,7 @@ title: How to develop on borgmatic
 eleventyNavigation:
 eleventyNavigation:
   key: 🏗️ Develop on borgmatic
   key: 🏗️ Develop on borgmatic
   parent: How-to guides
   parent: How-to guides
-  order: 14
+  order: 15
 ---
 ---
 ## Source code
 ## Source code
 
 

+ 3 - 3
docs/how-to/inspect-your-backups.md

@@ -119,10 +119,10 @@ archive, regardless of the user who performs the backup. (Note that Borg
 doesn't store the leading `/`.)
 doesn't store the leading `/`.)
 
 
 <span class="minilink minilink-addedin">With Borg version 1.2 and
 <span class="minilink minilink-addedin">With Borg version 1.2 and
-earlier</span>Database dump files are stored at a path dependent on the
-[runtime
+earlier</span>Database dump files are stored at a path dependent on the [runtime
 directory](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#runtime-directory)
 directory](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#runtime-directory)
-in use at the time the archive was created.
+in use at the time the archive was created, as Borg 1.2 and earlier do not
+support path rewriting.
 
 
 <span class="minilink minilink-addedin">Prior to borgmatic version
 <span class="minilink minilink-addedin">Prior to borgmatic version
 1.9.0</span>Database dump files were instead stored at `~/.borgmatic` within
 1.9.0</span>Database dump files were instead stored at `~/.borgmatic` within

+ 1 - 1
docs/how-to/run-arbitrary-borg-commands.md

@@ -3,7 +3,7 @@ title: How to run arbitrary Borg commands
 eleventyNavigation:
 eleventyNavigation:
   key: 🔧 Run arbitrary Borg commands
   key: 🔧 Run arbitrary Borg commands
   parent: How-to guides
   parent: How-to guides
-  order: 11
+  order: 12
 ---
 ---
 ## Running Borg with borgmatic
 ## Running Borg with borgmatic
 
 

+ 90 - 0
docs/how-to/snapshot-your-filesystems.md

@@ -0,0 +1,90 @@
+---
+title: How to snapshot your filesystems
+eleventyNavigation:
+  key: 📸 Snapshot your filesystems
+  parent: How-to guides
+  order: 9
+---
+## Filesystem hooks
+
+Many filesystems support taking snapshots—point-in-time, read-only "copies" of
+your data, ideal for backing up files that may change during the backup. These
+snapshots initially don't use any additional storage space and can be made
+almost instantly.
+
+To help automate backup of these filesystems, borgmatic can use them to take
+snapshots.
+
+
+### ZFS
+
+<span class="minilink minilink-addedin">New in version 1.9.3</span> <span
+class="minilink minilink-addedin">Beta feature</span> borgmatic supports
+taking snapshots with the [ZFS filesystem](https://openzfs.org/) and sending
+those snapshots to Borg for backup.
+
+To use this feature, first you need one or more mounted ZFS datasets. Then,
+enable ZFS within borgmatic by adding the following line to your configuration
+file:
+
+```yaml
+zfs:
+```
+
+No other options are necessary to enable ZFS support, but if desired you can
+override some of the commands used by the ZFS hook. For instance:
+
+```yaml
+zfs:
+    zfs_command: /usr/local/bin/zfs
+    mount_command: /usr/local/bin/mount
+    umount_command: /usr/local/bin/umount
+```
+
+As long as the ZFS hook is in beta, it may be subject to breaking changes
+and/or may not work well for your use cases. But feel free to use it in
+production if you're okay with these caveats, and please [provide any
+feedback](https://torsion.org/borgmatic/#issues) you have on this feature.
+
+
+#### Dataset discovery
+
+You have a couple of options for borgmatic to find and backup your ZFS datasets:
+
+ * For any dataset you'd like backed up, add its mount point to borgmatic's
+   `source_directories`.
+ * Or set the borgmatic-specific user property
+   `org.torsion.borgmatic:backup=auto` onto your dataset, e.g. by running `zfs
+   set org.torsion.borgmatic:backup=auto datasetname`. Then borgmatic can find
+   and backup these datasets.
+
+If you have multiple borgmatic configuration files with ZFS enabled, and you'd
+like particular datasets to be backed up only for particular configuration
+files, use the `source_directories` option instead of the user property.
+
+During a backup, borgmatic automatically snapshots these discovered datasets,
+temporary mounts the snapshots within its [runtime
+directory](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#runtime-directory),
+and includes the snapshotted files in the files sent to Borg. borgmatic is
+also responsible for cleaning up (destroying) these snapshots after a backup
+completes.
+
+Additionally, borgmatic rewrites the snapshot file paths so that they appear
+at their original dataset locations in a Borg archive. For instance, if your
+dataset is mounted at `/mnt/dataset`, then the snapshotted files will appear
+in an archive at `/mnt/dataset` as well.
+
+<span class="minilink minilink-addedin">With Borg version 1.2 and
+earlier</span>Snapshotted files are instead stored at a path dependent on the
+[runtime
+directory](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#runtime-directory)
+in use at the time the archive was created, as Borg 1.2 and earlier do not
+support path rewriting.
+
+
+#### Extract a dataset
+
+Filesystem snapshots are stored in a Borg archive as normal files, so
+you can use the standard
+[extract action](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/) to
+extract them.

+ 1 - 1
docs/how-to/upgrade.md

@@ -3,7 +3,7 @@ title: How to upgrade borgmatic and Borg
 eleventyNavigation:
 eleventyNavigation:
   key: 📦 Upgrade borgmatic/Borg
   key: 📦 Upgrade borgmatic/Borg
   parent: How-to guides
   parent: How-to guides
-  order: 13
+  order: 14
 ---
 ---
 ## Upgrading borgmatic
 ## Upgrading borgmatic
 
 

二進制
docs/static/openzfs.png


+ 1 - 1
pyproject.toml

@@ -49,7 +49,7 @@ skip-string-normalization = true
 
 
 [tool.pytest.ini_options]
 [tool.pytest.ini_options]
 testpaths = "tests"
 testpaths = "tests"
-addopts = "--cov-report term-missing:skip-covered --cov=borgmatic --ignore=tests/end-to-end"
+addopts = "--cov-report term-missing:skip-covered --cov=borgmatic --no-cov-on-fail --cov-fail-under=100 --ignore=tests/end-to-end"
 
 
 [tool.isort]
 [tool.isort]
 profile = "black"
 profile = "black"

+ 30 - 10
tests/unit/actions/test_check.py

@@ -557,14 +557,17 @@ def test_collect_spot_check_source_paths_parses_borg_output():
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
         {'hook1': False, 'hook2': True}
         {'hook1': False, 'hook2': True}
     )
     )
+    flexmock(module.borgmatic.actions.create).should_receive(
+        'process_source_directories'
+    ).and_return(['foo', 'bar'])
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
         dry_run=True,
         dry_run=True,
         repository_path='repo',
         repository_path='repo',
         config=object,
         config=object,
-        config_paths=(),
+        source_directories=['foo', 'bar'],
         local_borg_version=object,
         local_borg_version=object,
         global_arguments=object,
         global_arguments=object,
-        borgmatic_runtime_directories=(),
+        borgmatic_runtime_directory='/run/borgmatic',
         local_path=object,
         local_path=object,
         remote_path=object,
         remote_path=object,
         list_files=True,
         list_files=True,
@@ -588,6 +591,7 @@ def test_collect_spot_check_source_paths_parses_borg_output():
         global_arguments=flexmock(),
         global_arguments=flexmock(),
         local_path=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
         remote_path=flexmock(),
+        borgmatic_runtime_directory='/run/borgmatic',
     ) == ('/etc/path', '/etc/other')
     ) == ('/etc/path', '/etc/other')
 
 
 
 
@@ -595,14 +599,17 @@ def test_collect_spot_check_source_paths_passes_through_stream_processes_false()
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
         {'hook1': False, 'hook2': False}
         {'hook1': False, 'hook2': False}
     )
     )
+    flexmock(module.borgmatic.actions.create).should_receive(
+        'process_source_directories'
+    ).and_return(['foo', 'bar'])
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
         dry_run=True,
         dry_run=True,
         repository_path='repo',
         repository_path='repo',
         config=object,
         config=object,
-        config_paths=(),
+        source_directories=['foo', 'bar'],
         local_borg_version=object,
         local_borg_version=object,
         global_arguments=object,
         global_arguments=object,
-        borgmatic_runtime_directories=(),
+        borgmatic_runtime_directory='/run/borgmatic',
         local_path=object,
         local_path=object,
         remote_path=object,
         remote_path=object,
         list_files=True,
         list_files=True,
@@ -626,6 +633,7 @@ def test_collect_spot_check_source_paths_passes_through_stream_processes_false()
         global_arguments=flexmock(),
         global_arguments=flexmock(),
         local_path=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
         remote_path=flexmock(),
+        borgmatic_runtime_directory='/run/borgmatic',
     ) == ('/etc/path', '/etc/other')
     ) == ('/etc/path', '/etc/other')
 
 
 
 
@@ -633,14 +641,17 @@ def test_collect_spot_check_source_paths_without_working_directory_parses_borg_o
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
         {'hook1': False, 'hook2': True}
         {'hook1': False, 'hook2': True}
     )
     )
+    flexmock(module.borgmatic.actions.create).should_receive(
+        'process_source_directories'
+    ).and_return(['foo', 'bar'])
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
         dry_run=True,
         dry_run=True,
         repository_path='repo',
         repository_path='repo',
         config=object,
         config=object,
-        config_paths=(),
+        source_directories=['foo', 'bar'],
         local_borg_version=object,
         local_borg_version=object,
         global_arguments=object,
         global_arguments=object,
-        borgmatic_runtime_directories=(),
+        borgmatic_runtime_directory='/run/borgmatic',
         local_path=object,
         local_path=object,
         remote_path=object,
         remote_path=object,
         list_files=True,
         list_files=True,
@@ -664,6 +675,7 @@ def test_collect_spot_check_source_paths_without_working_directory_parses_borg_o
         global_arguments=flexmock(),
         global_arguments=flexmock(),
         local_path=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
         remote_path=flexmock(),
+        borgmatic_runtime_directory='/run/borgmatic',
     ) == ('/etc/path', '/etc/other')
     ) == ('/etc/path', '/etc/other')
 
 
 
 
@@ -671,14 +683,17 @@ def test_collect_spot_check_source_paths_skips_directories():
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
         {'hook1': False, 'hook2': True}
         {'hook1': False, 'hook2': True}
     )
     )
+    flexmock(module.borgmatic.actions.create).should_receive(
+        'process_source_directories'
+    ).and_return(['foo', 'bar'])
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
         dry_run=True,
         dry_run=True,
         repository_path='repo',
         repository_path='repo',
         config=object,
         config=object,
-        config_paths=(),
+        source_directories=['foo', 'bar'],
         local_borg_version=object,
         local_borg_version=object,
         global_arguments=object,
         global_arguments=object,
-        borgmatic_runtime_directories=(),
+        borgmatic_runtime_directory='/run/borgmatic',
         local_path=object,
         local_path=object,
         remote_path=object,
         remote_path=object,
         list_files=True,
         list_files=True,
@@ -704,6 +719,7 @@ def test_collect_spot_check_source_paths_skips_directories():
             global_arguments=flexmock(),
             global_arguments=flexmock(),
             local_path=flexmock(),
             local_path=flexmock(),
             remote_path=flexmock(),
             remote_path=flexmock(),
+            borgmatic_runtime_directory='/run/borgmatic',
         )
         )
         == ()
         == ()
     )
     )
@@ -806,14 +822,17 @@ def test_collect_spot_check_source_paths_uses_working_directory():
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
     flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return(
         {'hook1': False, 'hook2': True}
         {'hook1': False, 'hook2': True}
     )
     )
+    flexmock(module.borgmatic.actions.create).should_receive(
+        'process_source_directories'
+    ).and_return(['foo', 'bar'])
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
     flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args(
         dry_run=True,
         dry_run=True,
         repository_path='repo',
         repository_path='repo',
         config=object,
         config=object,
-        config_paths=(),
+        source_directories=['foo', 'bar'],
         local_borg_version=object,
         local_borg_version=object,
         global_arguments=object,
         global_arguments=object,
-        borgmatic_runtime_directories=(),
+        borgmatic_runtime_directory='/run/borgmatic',
         local_path=object,
         local_path=object,
         remote_path=object,
         remote_path=object,
         list_files=True,
         list_files=True,
@@ -840,6 +859,7 @@ def test_collect_spot_check_source_paths_uses_working_directory():
         global_arguments=flexmock(),
         global_arguments=flexmock(),
         local_path=flexmock(),
         local_path=flexmock(),
         remote_path=flexmock(),
         remote_path=flexmock(),
+        borgmatic_runtime_directory='/run/borgmatic',
     ) == ('foo', 'bar')
     ) == ('foo', 'bar')
 
 
 
 

+ 272 - 50
tests/unit/actions/test_create.py

@@ -1,10 +1,274 @@
 import sys
 import sys
 
 
+import pytest
 from flexmock import flexmock
 from flexmock import flexmock
 
 
 from borgmatic.actions import create as module
 from borgmatic.actions import create as module
 
 
 
 
+def test_create_borgmatic_manifest_creates_manifest_file():
+    flexmock(module.os.path).should_receive('join').with_args(
+        '/run/borgmatic', 'bootstrap', 'manifest.json'
+    ).and_return('/run/borgmatic/bootstrap/manifest.json')
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module.os).should_receive('makedirs').and_return(True)
+
+    flexmock(module.importlib.metadata).should_receive('version').and_return('1.0.0')
+    flexmock(sys.modules['builtins']).should_receive('open').with_args(
+        '/run/borgmatic/bootstrap/manifest.json', 'w'
+    ).and_return(
+        flexmock(
+            __enter__=lambda *args: flexmock(write=lambda *args: None, close=lambda *args: None),
+            __exit__=lambda *args: None,
+        )
+    )
+    flexmock(module.json).should_receive('dump').and_return(True).once()
+
+    module.create_borgmatic_manifest({}, 'test.yaml', '/run/borgmatic', False)
+
+
+def test_create_borgmatic_manifest_creates_manifest_file_with_custom_borgmatic_runtime_directory():
+    flexmock(module.os.path).should_receive('join').with_args(
+        '/run/borgmatic', 'bootstrap', 'manifest.json'
+    ).and_return('/run/borgmatic/bootstrap/manifest.json')
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module.os).should_receive('makedirs').and_return(True)
+
+    flexmock(module.importlib.metadata).should_receive('version').and_return('1.0.0')
+    flexmock(sys.modules['builtins']).should_receive('open').with_args(
+        '/run/borgmatic/bootstrap/manifest.json', 'w'
+    ).and_return(
+        flexmock(
+            __enter__=lambda *args: flexmock(write=lambda *args: None, close=lambda *args: None),
+            __exit__=lambda *args: None,
+        )
+    )
+    flexmock(module.json).should_receive('dump').and_return(True).once()
+
+    module.create_borgmatic_manifest(
+        {'borgmatic_runtime_directory': '/borgmatic'}, 'test.yaml', '/run/borgmatic', False
+    )
+
+
+def test_create_borgmatic_manifest_does_not_create_manifest_file_on_dry_run():
+    flexmock(module.json).should_receive('dump').never()
+
+    module.create_borgmatic_manifest({}, 'test.yaml', '/run/borgmatic', True)
+
+
+def test_expand_directory_with_basic_path_passes_it_through():
+    flexmock(module.os.path).should_receive('expanduser').and_return('foo')
+    flexmock(module.glob).should_receive('glob').and_return([])
+
+    paths = module.expand_directory('foo', None)
+
+    assert paths == ['foo']
+
+
+def test_expand_directory_with_glob_expands():
+    flexmock(module.os.path).should_receive('expanduser').and_return('foo*')
+    flexmock(module.glob).should_receive('glob').and_return(['foo', 'food'])
+
+    paths = module.expand_directory('foo*', None)
+
+    assert paths == ['foo', 'food']
+
+
+def test_expand_directory_with_working_directory_passes_it_through():
+    flexmock(module.os.path).should_receive('expanduser').and_return('foo')
+    flexmock(module.glob).should_receive('glob').with_args('/working/dir/foo').and_return([]).once()
+
+    paths = module.expand_directory('foo', working_directory='/working/dir')
+
+    assert paths == ['/working/dir/foo']
+
+
+def test_expand_directory_with_glob_passes_through_working_directory():
+    flexmock(module.os.path).should_receive('expanduser').and_return('foo*')
+    flexmock(module.glob).should_receive('glob').with_args('/working/dir/foo*').and_return(
+        ['/working/dir/foo', '/working/dir/food']
+    ).once()
+
+    paths = module.expand_directory('foo*', working_directory='/working/dir')
+
+    assert paths == ['/working/dir/foo', '/working/dir/food']
+
+
+def test_expand_directories_flattens_expanded_directories():
+    flexmock(module).should_receive('expand_directory').with_args('~/foo', None).and_return(
+        ['/root/foo']
+    )
+    flexmock(module).should_receive('expand_directory').with_args('bar*', None).and_return(
+        ['bar', 'barf']
+    )
+
+    paths = module.expand_directories(('~/foo', 'bar*'))
+
+    assert paths == ('/root/foo', 'bar', 'barf')
+
+
+def test_expand_directories_with_working_directory_passes_it_through():
+    flexmock(module).should_receive('expand_directory').with_args('foo', '/working/dir').and_return(
+        ['/working/dir/foo']
+    )
+
+    paths = module.expand_directories(('foo',), working_directory='/working/dir')
+
+    assert paths == ('/working/dir/foo',)
+
+
+def test_expand_directories_considers_none_as_no_directories():
+    paths = module.expand_directories(None, None)
+
+    assert paths == ()
+
+
+def test_map_directories_to_devices_gives_device_id_per_path():
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    flexmock(module.os).should_receive('stat').with_args('/foo').and_return(flexmock(st_dev=55))
+    flexmock(module.os).should_receive('stat').with_args('/bar').and_return(flexmock(st_dev=66))
+
+    device_map = module.map_directories_to_devices(('/foo', '/bar'))
+
+    assert device_map == {
+        '/foo': 55,
+        '/bar': 66,
+    }
+
+
+def test_map_directories_to_devices_with_missing_path_does_not_error():
+    flexmock(module.os.path).should_receive('exists').and_return(True).and_return(False)
+    flexmock(module.os).should_receive('stat').with_args('/foo').and_return(flexmock(st_dev=55))
+    flexmock(module.os).should_receive('stat').with_args('/bar').never()
+
+    device_map = module.map_directories_to_devices(('/foo', '/bar'))
+
+    assert device_map == {
+        '/foo': 55,
+        '/bar': None,
+    }
+
+
+def test_map_directories_to_devices_uses_working_directory_to_construct_path():
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    flexmock(module.os).should_receive('stat').with_args('/foo').and_return(flexmock(st_dev=55))
+    flexmock(module.os).should_receive('stat').with_args('/working/dir/bar').and_return(
+        flexmock(st_dev=66)
+    )
+
+    device_map = module.map_directories_to_devices(
+        ('/foo', 'bar'), working_directory='/working/dir'
+    )
+
+    assert device_map == {
+        '/foo': 55,
+        'bar': 66,
+    }
+
+
+@pytest.mark.parametrize(
+    'directories,additional_directories,expected_directories',
+    (
+        ({'/': 1, '/root': 1}, {}, ['/']),
+        ({'/': 1, '/root/': 1}, {}, ['/']),
+        ({'/': 1, '/root': 2}, {}, ['/', '/root']),
+        ({'/root': 1, '/': 1}, {}, ['/']),
+        ({'/root': 1, '/root/foo': 1}, {}, ['/root']),
+        ({'/root/': 1, '/root/foo': 1}, {}, ['/root/']),
+        ({'/root': 1, '/root/foo/': 1}, {}, ['/root']),
+        ({'/root': 1, '/root/foo': 2}, {}, ['/root', '/root/foo']),
+        ({'/root/foo': 1, '/root': 1}, {}, ['/root']),
+        ({'/root': None, '/root/foo': None}, {}, ['/root', '/root/foo']),
+        ({'/root': 1, '/etc': 1, '/root/foo/bar': 1}, {}, ['/etc', '/root']),
+        ({'/root': 1, '/root/foo': 1, '/root/foo/bar': 1}, {}, ['/root']),
+        ({'/dup': 1, '/dup': 1}, {}, ['/dup']),
+        ({'/foo': 1, '/bar': 1}, {}, ['/bar', '/foo']),
+        ({'/foo': 1, '/bar': 2}, {}, ['/bar', '/foo']),
+        ({'/root/foo': 1}, {'/root': 1}, []),
+        ({'/root/foo': 1}, {'/root': 2}, ['/root/foo']),
+        ({'/root/foo': 1}, {}, ['/root/foo']),
+    ),
+)
+def test_deduplicate_directories_removes_child_paths_on_the_same_filesystem(
+    directories, additional_directories, expected_directories
+):
+    assert (
+        module.deduplicate_directories(directories, additional_directories) == expected_directories
+    )
+
+
+def test_pattern_root_directories_deals_with_none_patterns():
+    assert module.pattern_root_directories(patterns=None) == []
+
+
+def test_pattern_root_directories_parses_roots_and_ignores_others():
+    assert module.pattern_root_directories(
+        ['R /root', '+ /root/foo', '- /root/foo/bar', 'R /baz']
+    ) == ['/root', '/baz']
+
+
+def test_process_source_directories_includes_source_directories_and_config_paths():
+    flexmock(module.borgmatic.config.paths).should_receive('get_working_directory').and_return(
+        '/working'
+    )
+    flexmock(module).should_receive('deduplicate_directories').and_return(
+        ('foo', 'bar', 'test.yaml')
+    )
+    flexmock(module).should_receive('map_directories_to_devices').and_return({})
+    flexmock(module).should_receive('expand_directories').with_args(
+        ('foo', 'bar', 'test.yaml'), working_directory='/working'
+    ).and_return(()).once()
+    flexmock(module).should_receive('pattern_root_directories').and_return(())
+    flexmock(module).should_receive('expand_directories').with_args(
+        (), working_directory='/working'
+    ).and_return(())
+
+    assert module.process_source_directories(
+        config={'source_directories': ['foo', 'bar']}, config_paths=('test.yaml',)
+    ) == ('foo', 'bar', 'test.yaml')
+
+
+def test_process_source_directories_does_not_include_config_paths_when_store_config_files_is_false():
+    flexmock(module.borgmatic.config.paths).should_receive('get_working_directory').and_return(
+        '/working'
+    )
+    flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
+    flexmock(module).should_receive('map_directories_to_devices').and_return({})
+    flexmock(module).should_receive('expand_directories').with_args(
+        ('foo', 'bar'), working_directory='/working'
+    ).and_return(()).once()
+    flexmock(module).should_receive('pattern_root_directories').and_return(())
+    flexmock(module).should_receive('expand_directories').with_args(
+        (), working_directory='/working'
+    ).and_return(())
+
+    assert module.process_source_directories(
+        config={'source_directories': ['foo', 'bar'], 'store_config_files': False},
+        config_paths=('test.yaml',),
+    ) == ('foo', 'bar')
+
+
+def test_process_source_directories_prefers_source_directory_argument_to_config():
+    flexmock(module.borgmatic.config.paths).should_receive('get_working_directory').and_return(
+        '/working'
+    )
+    flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
+    flexmock(module).should_receive('map_directories_to_devices').and_return({})
+    flexmock(module).should_receive('expand_directories').with_args(
+        ('foo', 'bar'), working_directory='/working'
+    ).and_return(()).once()
+    flexmock(module).should_receive('pattern_root_directories').and_return(())
+    flexmock(module).should_receive('expand_directories').with_args(
+        (), working_directory='/working'
+    ).and_return(())
+
+    assert module.process_source_directories(
+        config={'source_directories': ['nope']},
+        config_paths=('test.yaml',),
+        source_directories=['foo', 'bar'],
+    ) == ('foo', 'bar')
+
+
 def test_run_create_executes_and_calls_hooks_for_configured_repository():
 def test_run_create_executes_and_calls_hooks_for_configured_repository():
     flexmock(module.logger).answer = lambda message: None
     flexmock(module.logger).answer = lambda message: None
     flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never()
     flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never()
@@ -18,6 +282,8 @@ def test_run_create_executes_and_calls_hooks_for_configured_repository():
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
         'call_hooks_even_if_unconfigured'
         'call_hooks_even_if_unconfigured'
     ).and_return({})
     ).and_return({})
+    flexmock(module).should_receive('process_source_directories').and_return([])
+    flexmock(module.os.path).should_receive('join').and_return('/run/borgmatic/bootstrap')
     create_arguments = flexmock(
     create_arguments = flexmock(
         repository=None,
         repository=None,
         progress=flexmock(),
         progress=flexmock(),
@@ -57,6 +323,8 @@ def test_run_create_with_store_config_files_false_does_not_create_borgmatic_mani
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
         'call_hooks_even_if_unconfigured'
         'call_hooks_even_if_unconfigured'
     ).and_return({})
     ).and_return({})
+    flexmock(module).should_receive('process_source_directories').and_return([])
+    flexmock(module.os.path).should_receive('join').and_return('/run/borgmatic/bootstrap')
     create_arguments = flexmock(
     create_arguments = flexmock(
         repository=None,
         repository=None,
         progress=flexmock(),
         progress=flexmock(),
@@ -98,6 +366,8 @@ def test_run_create_runs_with_selected_repository():
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
         'call_hooks_even_if_unconfigured'
         'call_hooks_even_if_unconfigured'
     ).and_return({})
     ).and_return({})
+    flexmock(module).should_receive('process_source_directories').and_return([])
+    flexmock(module.os.path).should_receive('join').and_return('/run/borgmatic/bootstrap')
     create_arguments = flexmock(
     create_arguments = flexmock(
         repository=flexmock(),
         repository=flexmock(),
         progress=flexmock(),
         progress=flexmock(),
@@ -177,6 +447,8 @@ def test_run_create_produces_json():
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
     flexmock(module.borgmatic.hooks.dispatch).should_receive(
         'call_hooks_even_if_unconfigured'
         'call_hooks_even_if_unconfigured'
     ).and_return({})
     ).and_return({})
+    flexmock(module).should_receive('process_source_directories').and_return([])
+    flexmock(module.os.path).should_receive('join').and_return('/run/borgmatic/bootstrap')
     create_arguments = flexmock(
     create_arguments = flexmock(
         repository=flexmock(),
         repository=flexmock(),
         progress=flexmock(),
         progress=flexmock(),
@@ -201,53 +473,3 @@ def test_run_create_produces_json():
             remote_path=None,
             remote_path=None,
         )
         )
     ) == [parsed_json]
     ) == [parsed_json]
-
-
-def test_create_borgmatic_manifest_creates_manifest_file():
-    flexmock(module.os.path).should_receive('join').with_args(
-        '/run/borgmatic', 'bootstrap', 'manifest.json'
-    ).and_return('/run/borgmatic/bootstrap/manifest.json')
-    flexmock(module.os.path).should_receive('exists').and_return(False)
-    flexmock(module.os).should_receive('makedirs').and_return(True)
-
-    flexmock(module.importlib.metadata).should_receive('version').and_return('1.0.0')
-    flexmock(sys.modules['builtins']).should_receive('open').with_args(
-        '/run/borgmatic/bootstrap/manifest.json', 'w'
-    ).and_return(
-        flexmock(
-            __enter__=lambda *args: flexmock(write=lambda *args: None, close=lambda *args: None),
-            __exit__=lambda *args: None,
-        )
-    )
-    flexmock(module.json).should_receive('dump').and_return(True).once()
-
-    module.create_borgmatic_manifest({}, 'test.yaml', '/run/borgmatic', False)
-
-
-def test_create_borgmatic_manifest_creates_manifest_file_with_custom_borgmatic_runtime_directory():
-    flexmock(module.os.path).should_receive('join').with_args(
-        '/run/borgmatic', 'bootstrap', 'manifest.json'
-    ).and_return('/run/borgmatic/bootstrap/manifest.json')
-    flexmock(module.os.path).should_receive('exists').and_return(False)
-    flexmock(module.os).should_receive('makedirs').and_return(True)
-
-    flexmock(module.importlib.metadata).should_receive('version').and_return('1.0.0')
-    flexmock(sys.modules['builtins']).should_receive('open').with_args(
-        '/run/borgmatic/bootstrap/manifest.json', 'w'
-    ).and_return(
-        flexmock(
-            __enter__=lambda *args: flexmock(write=lambda *args: None, close=lambda *args: None),
-            __exit__=lambda *args: None,
-        )
-    )
-    flexmock(module.json).should_receive('dump').and_return(True).once()
-
-    module.create_borgmatic_manifest(
-        {'borgmatic_runtime_directory': '/borgmatic'}, 'test.yaml', '/run/borgmatic', False
-    )
-
-
-def test_create_borgmatic_manifest_does_not_create_manifest_file_on_dry_run():
-    flexmock(module.json).should_receive('dump').never()
-
-    module.create_borgmatic_manifest({}, 'test.yaml', '/run/borgmatic', True)

文件差異過大導致無法顯示
+ 49 - 466
tests/unit/borg/test_create.py


+ 20 - 0
tests/unit/config/test_generate.py

@@ -103,6 +103,26 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_e
     assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])]
     assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])]
 
 
 
 
+def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_multiple_types():
+    schema = {
+        'type': 'array',
+        'items': {
+            'type': ['object', 'null'],
+            'properties': OrderedDict(
+                [('field1', {'example': 'Example 1'}), ('field2', {'example': 'Example 2'})]
+            ),
+        },
+    }
+    flexmock(module).should_receive('get_properties').and_return(schema['items']['properties'])
+    flexmock(module.ruamel.yaml.comments).should_receive('CommentedSeq').replace_with(list)
+    flexmock(module).should_receive('add_comments_to_configuration_sequence')
+    flexmock(module).should_receive('add_comments_to_configuration_object')
+
+    config = module.schema_to_sample_configuration(schema)
+
+    assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])]
+
+
 def test_schema_to_sample_configuration_with_unsupported_schema_raises():
 def test_schema_to_sample_configuration_with_unsupported_schema_raises():
     schema = {'gobbledygook': [{'type': 'not-your'}]}
     schema = {'gobbledygook': [{'type': 'not-your'}]}
 
 

+ 30 - 0
tests/unit/config/test_paths.py

@@ -22,6 +22,10 @@ def test_expand_user_in_path_handles_none_directory():
     assert module.expand_user_in_path(None) is None
     assert module.expand_user_in_path(None) is None
 
 
 
 
+def test_expand_user_in_path_handles_incorrectly_typed_directory():
+    assert module.expand_user_in_path(3) is None
+
+
 def test_get_borgmatic_source_directory_uses_config_option():
 def test_get_borgmatic_source_directory_uses_config_option():
     flexmock(module).should_receive('expand_user_in_path').replace_with(lambda path: path)
     flexmock(module).should_receive('expand_user_in_path').replace_with(lambda path: path)
 
 
@@ -34,6 +38,13 @@ def test_get_borgmatic_source_directory_without_config_option_uses_default():
     assert module.get_borgmatic_source_directory({}) == '~/.borgmatic'
     assert module.get_borgmatic_source_directory({}) == '~/.borgmatic'
 
 
 
 
+def test_replace_temporary_subdirectory_with_glob_transforms_path():
+    assert (
+        module.replace_temporary_subdirectory_with_glob('/tmp/borgmatic-aet8kn93/borgmatic')
+        == '/tmp/borgmatic-*/borgmatic'
+    )
+
+
 def test_runtime_directory_uses_config_option():
 def test_runtime_directory_uses_config_option():
     flexmock(module).should_receive('expand_user_in_path').replace_with(lambda path: path)
     flexmock(module).should_receive('expand_user_in_path').replace_with(lambda path: path)
     flexmock(module.os).should_receive('makedirs')
     flexmock(module.os).should_receive('makedirs')
@@ -154,6 +165,25 @@ def test_runtime_directory_falls_back_to_hard_coded_tmp_path_and_adds_temporary_
         assert borgmatic_runtime_directory == '/tmp/borgmatic-1234/./borgmatic'
         assert borgmatic_runtime_directory == '/tmp/borgmatic-1234/./borgmatic'
 
 
 
 
+def test_runtime_directory_with_erroring_cleanup_does_not_raise():
+    flexmock(module).should_receive('expand_user_in_path').replace_with(lambda path: path)
+    flexmock(module.os.environ).should_receive('get').with_args('XDG_RUNTIME_DIR').and_return(None)
+    flexmock(module.os.environ).should_receive('get').with_args('RUNTIME_DIRECTORY').and_return(
+        None
+    )
+    flexmock(module.os.environ).should_receive('get').with_args('TMPDIR').and_return(None)
+    flexmock(module.os.environ).should_receive('get').with_args('TEMP').and_return(None)
+    temporary_directory = flexmock(name='/tmp/borgmatic-1234')
+    temporary_directory.should_receive('cleanup').and_raise(OSError).once()
+    flexmock(module.tempfile).should_receive('TemporaryDirectory').with_args(
+        prefix='borgmatic-', dir='/tmp'
+    ).and_return(temporary_directory)
+    flexmock(module.os).should_receive('makedirs')
+
+    with module.Runtime_directory({}, 'prefix') as borgmatic_runtime_directory:
+        assert borgmatic_runtime_directory == '/tmp/borgmatic-1234/./borgmatic'
+
+
 @pytest.mark.parametrize(
 @pytest.mark.parametrize(
     'borgmatic_runtime_directory,expected_glob',
     'borgmatic_runtime_directory,expected_glob',
     (
     (

+ 5 - 3
tests/unit/hooks/test_dispatch.py

@@ -77,10 +77,12 @@ def test_call_hooks_calls_skips_return_values_for_missing_hooks():
     assert return_values == expected_return_values
     assert return_values == expected_return_values
 
 
 
 
-def test_call_hooks_calls_skips_return_values_for_null_hooks():
+def test_call_hooks_calls_treats_null_hook_as_optionless():
     config = {'super_hook': flexmock(), 'other_hook': None}
     config = {'super_hook': flexmock(), 'other_hook': None}
-    expected_return_values = {'super_hook': flexmock()}
-    flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook'])
+    expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()}
+    flexmock(module).should_receive('call_hook').and_return(
+        expected_return_values['super_hook']
+    ).and_return(expected_return_values['other_hook'])
 
 
     return_values = module.call_hooks(
     return_values = module.call_hooks(
         'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55
         'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55

+ 36 - 6
tests/unit/hooks/test_mariadb.py

@@ -75,7 +75,12 @@ def test_dump_data_sources_dumps_each_database():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -100,7 +105,12 @@ def test_dump_data_sources_dumps_with_password():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        [database], {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        [database],
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -120,7 +130,12 @@ def test_dump_data_sources_dumps_all_databases_at_once():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -143,7 +158,12 @@ def test_dump_data_sources_dumps_all_databases_separately_when_format_configured
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -449,7 +469,12 @@ def test_dump_data_sources_errors_for_missing_all_databases():
 
 
     with pytest.raises(ValueError):
     with pytest.raises(ValueError):
         assert module.dump_data_sources(
         assert module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
 
 
 
 
@@ -463,7 +488,12 @@ def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=True
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=True,
         )
         )
         == []
         == []
     )
     )

+ 42 - 7
tests/unit/hooks/test_mongodb.py

@@ -43,7 +43,12 @@ def test_dump_data_sources_runs_mongodump_for_each_database():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -60,7 +65,12 @@ def test_dump_data_sources_with_dry_run_skips_mongodump():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=True
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=True,
         )
         )
         == []
         == []
     )
     )
@@ -93,7 +103,12 @@ def test_dump_data_sources_runs_mongodump_with_hostname_and_port():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -133,7 +148,12 @@ def test_dump_data_sources_runs_mongodump_with_username_and_password():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -153,7 +173,12 @@ def test_dump_data_sources_runs_mongodump_with_directory_format():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == []
         == []
     )
     )
@@ -183,7 +208,12 @@ def test_dump_data_sources_runs_mongodump_with_options():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -203,7 +233,12 @@ def test_dump_data_sources_runs_mongodumpall_for_all_databases():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 

+ 36 - 6
tests/unit/hooks/test_mysql.py

@@ -75,7 +75,12 @@ def test_dump_data_sources_dumps_each_database():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -100,7 +105,12 @@ def test_dump_data_sources_dumps_with_password():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        [database], {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        [database],
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -120,7 +130,12 @@ def test_dump_data_sources_dumps_all_databases_at_once():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -143,7 +158,12 @@ def test_dump_data_sources_dumps_all_databases_separately_when_format_configured
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -447,7 +467,12 @@ def test_dump_data_sources_errors_for_missing_all_databases():
 
 
     with pytest.raises(ValueError):
     with pytest.raises(ValueError):
         assert module.dump_data_sources(
         assert module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
 
 
 
 
@@ -461,7 +486,12 @@ def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=True
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=True,
         )
         )
         == []
         == []
     )
     )

+ 72 - 12
tests/unit/hooks/test_postgresql.py

@@ -253,7 +253,12 @@ def test_dump_data_sources_runs_pg_dump_for_each_database():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -267,7 +272,12 @@ def test_dump_data_sources_raises_when_no_database_names_to_dump():
 
 
     with pytest.raises(ValueError):
     with pytest.raises(ValueError):
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
 
 
 
 
@@ -278,7 +288,12 @@ def test_dump_data_sources_does_not_raise_when_no_database_names_to_dump():
     flexmock(module).should_receive('database_names_to_dump').and_return(())
     flexmock(module).should_receive('database_names_to_dump').and_return(())
 
 
     module.dump_data_sources(
     module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=True
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=True,
     ) == []
     ) == []
 
 
 
 
@@ -298,7 +313,12 @@ def test_dump_data_sources_with_duplicate_dump_skips_pg_dump():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == []
         == []
     )
     )
@@ -320,7 +340,12 @@ def test_dump_data_sources_with_dry_run_skips_pg_dump():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=True
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=True,
         )
         )
         == []
         == []
     )
     )
@@ -360,7 +385,12 @@ def test_dump_data_sources_runs_pg_dump_with_hostname_and_port():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -398,7 +428,12 @@ def test_dump_data_sources_runs_pg_dump_with_username_and_password():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -436,7 +471,12 @@ def test_dump_data_sources_with_username_injection_attack_gets_escaped():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -470,7 +510,12 @@ def test_dump_data_sources_runs_pg_dump_with_directory_format():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == []
         == []
     )
     )
@@ -507,7 +552,12 @@ def test_dump_data_sources_runs_pg_dump_with_options():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -531,7 +581,12 @@ def test_dump_data_sources_runs_pg_dumpall_for_all_databases():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 
@@ -567,7 +622,12 @@ def test_dump_data_sources_runs_non_default_pg_dump():
     ).and_return(process).once()
     ).and_return(process).once()
 
 
     assert module.dump_data_sources(
     assert module.dump_data_sources(
-        databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+        databases,
+        {},
+        'test.yaml',
+        borgmatic_runtime_directory='/run/borgmatic',
+        source_directories=[],
+        dry_run=False,
     ) == [process]
     ) == [process]
 
 
 
 

+ 36 - 6
tests/unit/hooks/test_sqlite.py

@@ -28,7 +28,12 @@ def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == []
         == []
     )
     )
@@ -53,7 +58,12 @@ def test_dump_data_sources_dumps_each_database():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -85,7 +95,12 @@ def test_dump_data_sources_with_path_injection_attack_gets_escaped():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -108,7 +123,12 @@ def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -133,7 +153,12 @@ def test_dump_data_sources_with_name_all_warns_and_dumps_all_databases():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=False
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=False,
         )
         )
         == processes
         == processes
     )
     )
@@ -152,7 +177,12 @@ def test_dump_data_sources_does_not_dump_if_dry_run():
 
 
     assert (
     assert (
         module.dump_data_sources(
         module.dump_data_sources(
-            databases, {}, 'test.yaml', borgmatic_runtime_directory='/run/borgmatic', dry_run=True
+            databases,
+            {},
+            'test.yaml',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=[],
+            dry_run=True,
         )
         )
         == []
         == []
     )
     )

+ 374 - 0
tests/unit/hooks/test_zfs.py

@@ -0,0 +1,374 @@
+import pytest
+from flexmock import flexmock
+
+import borgmatic.execute
+from borgmatic.hooks import zfs as module
+
+
+def test_get_datasets_to_backup_filters_datasets_by_source_directories():
+    flexmock(borgmatic.execute).should_receive('execute_command_and_capture_output').and_return(
+        'dataset\t/dataset\t-\nother\t/other\t-',
+    )
+
+    assert module.get_datasets_to_backup(
+        'zfs', source_directories=('/foo', '/dataset', '/bar')
+    ) == (('dataset', '/dataset'),)
+
+
+def test_get_datasets_to_backup_filters_datasets_by_user_property():
+    flexmock(borgmatic.execute).should_receive('execute_command_and_capture_output').and_return(
+        'dataset\t/dataset\tauto\nother\t/other\t-',
+    )
+
+    assert module.get_datasets_to_backup('zfs', source_directories=('/foo', '/bar')) == (
+        ('dataset', '/dataset'),
+    )
+
+
+def test_get_datasets_to_backup_with_invalid_list_output_raises():
+    flexmock(borgmatic.execute).should_receive('execute_command_and_capture_output').and_return(
+        'dataset',
+    )
+
+    with pytest.raises(ValueError, match='zfs'):
+        module.get_datasets_to_backup('zfs', source_directories=('/foo', '/bar'))
+
+
+def test_get_get_all_datasets_does_not_filter_datasets():
+    flexmock(borgmatic.execute).should_receive('execute_command_and_capture_output').and_return(
+        'dataset\t/dataset\nother\t/other',
+    )
+
+    assert module.get_all_datasets('zfs') == (
+        ('dataset', '/dataset'),
+        ('other', '/other'),
+    )
+
+
+def test_get_all_datasets_with_invalid_list_output_raises():
+    flexmock(borgmatic.execute).should_receive('execute_command_and_capture_output').and_return(
+        'dataset',
+    )
+
+    with pytest.raises(ValueError, match='zfs'):
+        module.get_all_datasets('zfs')
+
+
+def test_dump_data_sources_snapshots_and_mounts_and_updates_source_directories():
+    flexmock(module).should_receive('get_datasets_to_backup').and_return(
+        (('dataset', '/mnt/dataset'),)
+    )
+    flexmock(module.os).should_receive('getpid').and_return(1234)
+    full_snapshot_name = 'dataset@borgmatic-1234'
+    flexmock(module).should_receive('snapshot_dataset').with_args(
+        'zfs',
+        full_snapshot_name,
+    ).once()
+    snapshot_mount_path = '/run/borgmatic/zfs_snapshots/./mnt/dataset'
+    flexmock(module).should_receive('mount_snapshot').with_args(
+        'mount',
+        full_snapshot_name,
+        module.os.path.normpath(snapshot_mount_path),
+    ).once()
+    source_directories = ['/mnt/dataset']
+
+    assert (
+        module.dump_data_sources(
+            hook_config={},
+            config={'source_directories': '/mnt/dataset', 'zfs': {}},
+            log_prefix='test',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=source_directories,
+            dry_run=False,
+        )
+        == []
+    )
+
+    assert source_directories == [snapshot_mount_path]
+
+
+def test_dump_data_sources_uses_custom_commands():
+    flexmock(module).should_receive('get_datasets_to_backup').and_return(
+        (('dataset', '/mnt/dataset'),)
+    )
+    flexmock(module.os).should_receive('getpid').and_return(1234)
+    full_snapshot_name = 'dataset@borgmatic-1234'
+    flexmock(module).should_receive('snapshot_dataset').with_args(
+        '/usr/local/bin/zfs',
+        full_snapshot_name,
+    ).once()
+    snapshot_mount_path = '/run/borgmatic/zfs_snapshots/./mnt/dataset'
+    flexmock(module).should_receive('mount_snapshot').with_args(
+        '/usr/local/bin/mount',
+        full_snapshot_name,
+        module.os.path.normpath(snapshot_mount_path),
+    ).once()
+    source_directories = ['/mnt/dataset']
+    hook_config = {
+        'zfs_command': '/usr/local/bin/zfs',
+        'mount_command': '/usr/local/bin/mount',
+    }
+
+    assert (
+        module.dump_data_sources(
+            hook_config=hook_config,
+            config={
+                'source_directories': source_directories,
+                'zfs': hook_config,
+            },
+            log_prefix='test',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=source_directories,
+            dry_run=False,
+        )
+        == []
+    )
+
+    assert source_directories == [snapshot_mount_path]
+
+
+def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_source_directories():
+    flexmock(module).should_receive('get_datasets_to_backup').and_return(
+        (('dataset', '/mnt/dataset'),)
+    )
+    flexmock(module.os).should_receive('getpid').and_return(1234)
+    flexmock(module).should_receive('snapshot_dataset').never()
+    flexmock(module).should_receive('mount_snapshot').never()
+    source_directories = ['/mnt/dataset']
+
+    assert (
+        module.dump_data_sources(
+            hook_config={},
+            config={'source_directories': '/mnt/dataset', 'zfs': {}},
+            log_prefix='test',
+            borgmatic_runtime_directory='/run/borgmatic',
+            source_directories=source_directories,
+            dry_run=True,
+        )
+        == []
+    )
+
+    assert source_directories == ['/mnt/dataset']
+
+
+def test_get_all_snapshots_parses_list_output():
+    flexmock(borgmatic.execute).should_receive('execute_command_and_capture_output').and_return(
+        'dataset1@borgmatic-1234\ndataset2@borgmatic-4567',
+    )
+
+    assert module.get_all_snapshots('zfs') == ('dataset1@borgmatic-1234', 'dataset2@borgmatic-4567')
+
+
+def test_remove_data_source_dumps_unmounts_and_destroys_snapshots():
+    flexmock(module).should_receive('get_all_datasets').and_return((('dataset', '/mnt/dataset'),))
+    flexmock(module.borgmatic.config.paths).should_receive(
+        'replace_temporary_subdirectory_with_glob'
+    ).and_return('/run/borgmatic')
+    flexmock(module.glob).should_receive('glob').replace_with(lambda path: [path])
+    flexmock(module.os.path).should_receive('isdir').and_return(True)
+    flexmock(module.shutil).should_receive('rmtree')
+    flexmock(module).should_receive('unmount_snapshot').with_args(
+        'umount', '/run/borgmatic/zfs_snapshots/mnt/dataset'
+    ).once()
+    flexmock(module).should_receive('get_all_snapshots').and_return(
+        ('dataset@borgmatic-1234', 'dataset@other', 'other@other', 'invalid')
+    )
+    flexmock(module).should_receive('destroy_snapshot').with_args(
+        'zfs', 'dataset@borgmatic-1234'
+    ).once()
+
+    module.remove_data_source_dumps(
+        hook_config={},
+        config={'source_directories': '/mnt/dataset', 'zfs': {}},
+        log_prefix='test',
+        borgmatic_runtime_directory='/run/borgmatic',
+        dry_run=False,
+    )
+
+
def test_remove_data_source_dumps_use_custom_commands():
    # Configured zfs_command/umount_command overrides should get passed through to
    # unmount_snapshot() and destroy_snapshot() in place of the default commands.
    custom_commands = {
        'zfs_command': '/usr/local/bin/zfs',
        'umount_command': '/usr/local/bin/umount',
    }
    flexmock(module).should_receive('get_all_datasets').and_return(
        (('dataset', '/mnt/dataset'),)
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).and_return('/run/borgmatic')
    flexmock(module.glob).should_receive('glob').replace_with(lambda pattern: [pattern])
    flexmock(module.os.path).should_receive('isdir').and_return(True)
    flexmock(module.shutil).should_receive('rmtree')
    flexmock(module).should_receive('unmount_snapshot').with_args(
        '/usr/local/bin/umount', '/run/borgmatic/zfs_snapshots/mnt/dataset'
    ).once()
    flexmock(module).should_receive('get_all_snapshots').and_return(
        ('dataset@borgmatic-1234', 'dataset@other', 'other@other', 'invalid')
    )
    flexmock(module).should_receive('destroy_snapshot').with_args(
        '/usr/local/bin/zfs', 'dataset@borgmatic-1234'
    ).once()

    module.remove_data_source_dumps(
        hook_config=custom_commands,
        config={'source_directories': '/mnt/dataset', 'zfs': custom_commands},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_bails_for_missing_zfs_command():
    # A missing zfs executable (FileNotFoundError from get_all_datasets) should
    # abort cleanup before any snapshot directories get globbed.
    broken_config = {'zfs_command': 'wtf'}
    flexmock(module).should_receive('get_all_datasets').and_raise(FileNotFoundError)
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).never()

    module.remove_data_source_dumps(
        hook_config=broken_config,
        config={'source_directories': '/mnt/dataset', 'zfs': broken_config},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_bails_for_zfs_command_error():
    # A failing zfs command (CalledProcessError from get_all_datasets) should
    # abort cleanup before any snapshot directories get globbed.
    broken_config = {'zfs_command': 'wtf'}
    flexmock(module).should_receive('get_all_datasets').and_raise(
        module.subprocess.CalledProcessError(1, 'wtf')
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).never()

    module.remove_data_source_dumps(
        hook_config=broken_config,
        config={'source_directories': '/mnt/dataset', 'zfs': broken_config},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_bails_for_missing_umount_command():
    # If the configured umount command is missing (FileNotFoundError), bail out
    # before listing or destroying any snapshots.
    custom_commands = {
        'zfs_command': '/usr/local/bin/zfs',
        'umount_command': '/usr/local/bin/umount',
    }
    flexmock(module).should_receive('get_all_datasets').and_return(
        (('dataset', '/mnt/dataset'),)
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).and_return('/run/borgmatic')
    flexmock(module.glob).should_receive('glob').replace_with(lambda pattern: [pattern])
    flexmock(module.os.path).should_receive('isdir').and_return(True)
    flexmock(module.shutil).should_receive('rmtree')
    flexmock(module).should_receive('unmount_snapshot').with_args(
        '/usr/local/bin/umount', '/run/borgmatic/zfs_snapshots/mnt/dataset'
    ).and_raise(FileNotFoundError)
    flexmock(module).should_receive('get_all_snapshots').never()
    flexmock(module).should_receive('destroy_snapshot').never()

    module.remove_data_source_dumps(
        hook_config=custom_commands,
        config={'source_directories': '/mnt/dataset', 'zfs': custom_commands},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_bails_for_umount_command_error():
    # If unmounting fails (CalledProcessError), bail out before listing or
    # destroying any snapshots.
    custom_commands = {
        'zfs_command': '/usr/local/bin/zfs',
        'umount_command': '/usr/local/bin/umount',
    }
    flexmock(module).should_receive('get_all_datasets').and_return(
        (('dataset', '/mnt/dataset'),)
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).and_return('/run/borgmatic')
    flexmock(module.glob).should_receive('glob').replace_with(lambda pattern: [pattern])
    flexmock(module.os.path).should_receive('isdir').and_return(True)
    flexmock(module.shutil).should_receive('rmtree')
    flexmock(module).should_receive('unmount_snapshot').with_args(
        '/usr/local/bin/umount', '/run/borgmatic/zfs_snapshots/mnt/dataset'
    ).and_raise(module.subprocess.CalledProcessError(1, 'wtf'))
    flexmock(module).should_receive('get_all_snapshots').never()
    flexmock(module).should_receive('destroy_snapshot').never()

    module.remove_data_source_dumps(
        hook_config=custom_commands,
        config={'source_directories': '/mnt/dataset', 'zfs': custom_commands},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_skips_unmount_snapshot_directories_that_are_not_actually_directories():
    # When the snapshots directory itself isn't a directory, skip the rmtree and
    # unmount steps entirely but still destroy matching borgmatic snapshots.
    flexmock(module).should_receive('get_all_datasets').and_return(
        (('dataset', '/mnt/dataset'),)
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).and_return('/run/borgmatic')
    flexmock(module.glob).should_receive('glob').replace_with(lambda pattern: [pattern])
    flexmock(module.os.path).should_receive('isdir').and_return(False)
    flexmock(module.shutil).should_receive('rmtree').never()
    flexmock(module).should_receive('unmount_snapshot').never()
    flexmock(module).should_receive('get_all_snapshots').and_return(
        ('dataset@borgmatic-1234', 'dataset@other', 'other@other', 'invalid')
    )
    flexmock(module).should_receive('destroy_snapshot').with_args(
        'zfs', 'dataset@borgmatic-1234'
    ).once()

    module.remove_data_source_dumps(
        hook_config={},
        config={'source_directories': '/mnt/dataset', 'zfs': {}},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_skips_unmount_snapshot_mount_paths_that_are_not_actually_directories():
    # When the snapshots directory exists but a dataset's mount path within it
    # isn't a directory, skip unmounting that path but still destroy matching
    # borgmatic snapshots.
    flexmock(module).should_receive('get_all_datasets').and_return(
        (('dataset', '/mnt/dataset'),)
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).and_return('/run/borgmatic')
    flexmock(module.glob).should_receive('glob').replace_with(lambda pattern: [pattern])
    # The parent snapshots directory is real ...
    flexmock(module.os.path).should_receive('isdir').with_args(
        '/run/borgmatic/zfs_snapshots'
    ).and_return(True)
    # ... but the per-dataset mount path inside it is not.
    flexmock(module.os.path).should_receive('isdir').with_args(
        '/run/borgmatic/zfs_snapshots/mnt/dataset'
    ).and_return(False)
    flexmock(module.shutil).should_receive('rmtree')
    flexmock(module).should_receive('unmount_snapshot').never()
    flexmock(module).should_receive('get_all_snapshots').and_return(
        ('dataset@borgmatic-1234', 'dataset@other', 'other@other', 'invalid')
    )
    flexmock(module).should_receive('destroy_snapshot').with_args(
        'zfs', 'dataset@borgmatic-1234'
    ).once()

    module.remove_data_source_dumps(
        hook_config={},
        config={'source_directories': '/mnt/dataset', 'zfs': {}},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=False,
    )
+
+
def test_remove_data_source_dumps_with_dry_run_skips_unmount_and_destroy():
    # In dry-run mode, nothing destructive happens: no rmtree, no unmount, and
    # no snapshot destruction — even though snapshots are still enumerated.
    flexmock(module).should_receive('get_all_datasets').and_return(
        (('dataset', '/mnt/dataset'),)
    )
    flexmock(module.borgmatic.config.paths).should_receive(
        'replace_temporary_subdirectory_with_glob'
    ).and_return('/run/borgmatic')
    flexmock(module.glob).should_receive('glob').replace_with(lambda pattern: [pattern])
    flexmock(module.os.path).should_receive('isdir').and_return(True)
    flexmock(module.shutil).should_receive('rmtree').never()
    flexmock(module).should_receive('unmount_snapshot').never()
    flexmock(module).should_receive('get_all_snapshots').and_return(
        ('dataset@borgmatic-1234', 'dataset@other', 'other@other', 'invalid')
    )
    flexmock(module).should_receive('destroy_snapshot').never()

    module.remove_data_source_dumps(
        hook_config={},
        config={'source_directories': '/mnt/dataset', 'zfs': {}},
        log_prefix='test',
        borgmatic_runtime_directory='/run/borgmatic',
        dry_run=True,
    )

部分文件因文件數量過多而無法顯示