Merge branch 'main' of ssh://projects.torsion.org:3022/borgmatic-collective/borgmatic

Dan Helfman 1 day ago
parent
commit
126b08b1ac

+ 2 - 0
NEWS

@@ -1,4 +1,6 @@
 2.0.14.dev0
+ * #1208: Fix for the "restore" action incorrectly extracting more database dumps than the
+   "--database" flag specifies.
  * #1210: Fix an error when running the "spot" check or "extract" action with the "progress" option
    or "--progress" flag.
  * #1212: Fix an error when restoring multiple directory-format database dumps at once.

+ 43 - 40
borgmatic/actions/restore.py

@@ -200,6 +200,10 @@ def restore_single_dump(
         borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
         borgmatic_runtime_directory,
         data_source['name'],
+        data_source.get('hostname'),
+        data_source.get('port'),
+        data_source.get('container'),
+        data_source.get('label'),
     )[hook_name.split('_databases', 1)[0]]

     destination_path = (
@@ -267,9 +271,9 @@ def collect_dumps_from_archive(
     Given a local or remote repository path, a resolved archive name, a configuration dict, the
     local Borg version, global arguments an argparse.Namespace, local and remote Borg paths, and the
     borgmatic runtime directory, query the archive for the names of data sources dumps it contains
-    and return them as a set of Dump instances.
+    and return them as a tuple of Dump instances.
     '''
-    dumps_from_archive = set()
+    dumps_from_archive = {}  # Use a dict as an ordered set.

     # There is (at most) one dump metadata file per data source hook. Load each.
     for dumps_metadata_path in borgmatic.borg.list.capture_archive_listing(
@@ -298,33 +302,30 @@ def collect_dumps_from_archive(
         if not dumps_metadata_path:
             continue

-        dumps_from_archive.update(
-            set(
-                borgmatic.hooks.data_source.dump.parse_data_source_dumps_metadata(
-                    borgmatic.borg.extract.extract_archive(
-                        global_arguments.dry_run,
-                        repository,
-                        archive,
-                        [dumps_metadata_path],
-                        config,
-                        local_borg_version,
-                        global_arguments,
-                        local_path=local_path,
-                        remote_path=remote_path,
-                        extract_to_stdout=True,
-                    )
-                    .stdout.read()
-                    .decode(),
-                    dumps_metadata_path,
-                )
+        for dump in borgmatic.hooks.data_source.dump.parse_data_source_dumps_metadata(
+            borgmatic.borg.extract.extract_archive(
+                global_arguments.dry_run,
+                repository,
+                archive,
+                [dumps_metadata_path],
+                config,
+                local_borg_version,
+                global_arguments,
+                local_path=local_path,
+                remote_path=remote_path,
+                extract_to_stdout=True,
             )
-        )
+            .stdout.read()
+            .decode(),
+            dumps_metadata_path,
+        ):
+            dumps_from_archive[dump] = None

     # If we've successfully loaded any dumps metadata, we're done.
     if dumps_from_archive:
         logger.debug('Collecting database dumps from archive data source dumps metadata files')

-        return dumps_from_archive
+        return tuple(dumps_from_archive.keys())

     # No dumps metadata files were found, so for backwards compatibility, fall back to parsing the
     # paths of dumps found in the archive to get their respective dump metadata.
@@ -388,11 +389,11 @@ def collect_dumps_from_archive(
             except (ValueError, TypeError):
                 port = None

-            dumps_from_archive.add(
+            dumps_from_archive[
                 Dump(
                     hook_name, data_source_name, None if hostname == 'localhost' else hostname, port
                 )
-            )
+            ] = None

             # We've successfully parsed the dump path, so need to probe any further.
             break
@@ -401,20 +402,21 @@ def collect_dumps_from_archive(
                 f'Ignoring invalid data source dump path "{dump_path}" in archive {archive}',
             )

-    return dumps_from_archive
+    return tuple(dumps_from_archive.keys())


 def get_dumps_to_restore(restore_arguments, dumps_from_archive):
     '''
     Given restore arguments as an argparse.Namespace instance indicating which dumps to restore and
-    a set of Dump instances representing the dumps found in an archive, return a set of specific
+    a tuple of Dump instances representing the dumps found in an archive, return a tuple of specific
     Dump instances from the archive to restore. As part of this, replace any Dump having a data
     source name of "all" with multiple named Dump instances as appropriate.

     Raise ValueError if any of the requested data source names cannot be found in the archive or if
     there are multiple archive dump matches for a given requested dump.
     '''
-    requested_dumps = (
+    requested_dumps = tuple(
+        # Use a dict comprehension as an ordered set.
         {
             Dump(
                 hook_name=(
@@ -431,16 +433,16 @@ def get_dumps_to_restore(restore_arguments, dumps_from_archive):
                 port=restore_arguments.original_port,
                 label=restore_arguments.original_label or UNSPECIFIED,
                 container=restore_arguments.original_container or UNSPECIFIED,
-            )
+            ): None
             for name in restore_arguments.data_sources or (UNSPECIFIED,)
-        }
+        }.keys()
         if restore_arguments.hook
         or restore_arguments.data_sources
         or restore_arguments.original_hostname
         or restore_arguments.original_port
         or restore_arguments.original_label
         or restore_arguments.original_container
-        else {
+        else (
             Dump(
                 hook_name=UNSPECIFIED,
                 data_source_name='all',
@@ -449,14 +451,15 @@ def get_dumps_to_restore(restore_arguments, dumps_from_archive):
                 label=UNSPECIFIED,
                 container=UNSPECIFIED,
             ),
-        }
+        )
     )
     missing_dumps = set()
-    dumps_to_restore = set()
+    dumps_to_restore = {}  # Use a dict as an ordered set.

     # If there's a requested "all" dump, add every dump from the archive to the dumps to restore.
     if any(dump for dump in requested_dumps if dump.data_source_name == 'all'):
-        dumps_to_restore.update(dumps_from_archive)
+        for dump in dumps_from_archive:
+            dumps_to_restore[dump] = None

     # If any archive dump matches a requested dump, add the archive dump to the dumps to restore.
     for requested_dump in requested_dumps:
@@ -472,7 +475,7 @@ def get_dumps_to_restore(restore_arguments, dumps_from_archive):
         if len(matching_dumps) == 0:
             missing_dumps.add(requested_dump)
         elif len(matching_dumps) == 1:
-            dumps_to_restore.add(matching_dumps[0])
+            dumps_to_restore[matching_dumps[0]] = None
         else:
             raise ValueError(
                 f'Cannot restore data source {render_dump_metadata(requested_dump)} because there are multiple matching dumps in the archive. Try adding flags to disambiguate.',
@@ -487,20 +490,20 @@ def get_dumps_to_restore(restore_arguments, dumps_from_archive):
             f"Cannot restore data source dump{'s' if len(missing_dumps) > 1 else ''} {rendered_dumps} missing from archive",
         )

-    return dumps_to_restore
+    return tuple(dumps_to_restore.keys())


 def ensure_requested_dumps_restored(dumps_to_restore, dumps_actually_restored):
     '''
-    Given a set of requested dumps to restore and a set of dumps actually restored, raise ValueError
-    if any requested dumps to restore weren't restored, indicating that they were missing from the
-    configuration.
+    Given a tuple of requested dumps to restore and a set of dumps actually restored, raise
+    ValueError if any requested dumps to restore weren't restored, indicating that they were missing
+    from the configuration.
     '''
     if not dumps_to_restore:
         raise ValueError('No data source dumps were found to restore')

     missing_dumps = sorted(
-        dumps_to_restore - dumps_actually_restored,
+        set(dumps_to_restore) - dumps_actually_restored,
         key=lambda dump: dump.data_source_name,
     )
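
Reviewer note: the restore.py changes above replace set() accumulators with plain dicts used as
ordered sets and return tuples instead of sets, so the collected dumps stay deduplicated but keep
a stable, insertion-based order; that's why the docstrings and tests now speak of tuples. A
minimal, self-contained sketch of the pattern, with hypothetical string values standing in for the
real Dump instances:

    def collect_unique_in_order(items):
        # Dicts preserve insertion order (Python 3.7+), so the keys act as an ordered set.
        ordered = {}

        for item in items:
            ordered[item] = None

        return tuple(ordered.keys())

    assert collect_unique_in_order(['foo', 'bar', 'foo']) == ('foo', 'bar')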
 
 

+ 4 - 0
borgmatic/hooks/data_source/bootstrap.py

@@ -116,6 +116,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Restores are implemented via the separate, purpose-specific "bootstrap" action rather than the

+ 4 - 0
borgmatic/hooks/data_source/btrfs.py

@@ -412,6 +412,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Restores aren't implemented, because stored files can be extracted directly with "extract".

+ 23 - 5
borgmatic/hooks/data_source/dump.py

@@ -2,6 +2,7 @@ import fnmatch
 import json
 import logging
 import os
+import re
 import shutil

 import borgmatic.actions.restore
@@ -114,16 +115,33 @@ def remove_data_source_dumps(dump_path, data_source_type_name, dry_run):
         shutil.rmtree(dump_path)


+END_OF_STRING_PATTTERN = re.compile(r'\\z', flags=re.IGNORECASE)
+
+
 def convert_glob_patterns_to_borg_pattern(patterns):
     '''
     Convert a sequence of shell glob patterns like "/etc/*", "/tmp/*" to the corresponding Borg
     regular expression archive pattern as a single string like "re:etc/.*|tmp/.*".
     '''
-    # Remove the "\z" or "\Z" generated by fnmatch.translate() because we don't want the pattern to
-    # match only at the end of a path, as directory format dumps require extracting files with paths
-    # longer than the pattern. E.g., a pattern of "borgmatic/*/foo_databases/test" should also match
-    # paths like "borgmatic/*/foo_databases/test/toc.dat"
+    # This deserves some explanation. Here's what this code is doing for each shell glob pattern:
+    #
+    #  * Strip off the leading slash, since Borg doesn't store leading slashes in archives.
+    #  * Use fnmatch.translate() to convert the pattern to a Borg pattern.
+    #  * Remove the "\z" or \"Z" end-of-string special character generated by fnmatch.translate(),
+    #    because Borg doesn't like it. Replace it with a "$" end-of-string special character
+    #    instead. And yes, this is using a regular expression to modify a regular expression.
+    #  * Do the above for each of:
+    #    * The plain pattern. This supports the use case of a standard database where its name is a
+    #      filename in the path. Example: borgmatic/foo_databases/hostname/test
+    #    * The pattern with "/*" tacked onto the end of it. This supports the use case of a
+    #      directory-format database where its name is a directory name in the path. Example:
+    #      borgmatic/foo_databases/hostname/test/toc.dat
+    #
+    # Join the resulting transformed patterns together with "|" and return them as a string.
     return 're:' + '|'.join(
-        fnmatch.translate(pattern.lstrip('/')).replace('\\z', '').replace('\\Z', '')
+        re.sub(END_OF_STRING_PATTTERN, '$', fnmatch.translate(stripped))
+        + '|'
+        + re.sub(END_OF_STRING_PATTTERN, '$', fnmatch.translate(stripped + '/*'))
         for pattern in patterns
+        for stripped in (pattern.lstrip('/'),)
     )
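
Reviewer note: the easiest way to see what the rewritten convert_glob_patterns_to_borg_pattern()
produces is to call it, mirroring the updated unit test at the end of this commit. The exact
"(?s:...)" spelling comes from fnmatch.translate() and can differ slightly between Python versions,
so treat the expected string below as illustrative rather than exact:

    from borgmatic.hooks.data_source.dump import convert_glob_patterns_to_borg_pattern

    # Each glob now contributes two "$"-anchored alternatives: the path itself and the path with
    # "/*" appended, so directory-format dumps (e.g. ".../test/toc.dat") still match.
    pattern = convert_glob_patterns_to_borg_pattern(('/etc/foo/bar',))
    # With this commit applied, this should be: 're:(?s:etc/foo/bar)$|(?s:etc/foo/bar/.*)$'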

+ 4 - 0
borgmatic/hooks/data_source/lvm.py

@@ -452,6 +452,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Restores aren't implemented, because stored files can be extracted directly with "extract".

+ 15 - 3
borgmatic/hooks/data_source/mariadb.py

@@ -410,6 +410,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Given a sequence of configurations dicts, a configuration dict, the borgmatic runtime directory,
@@ -419,16 +423,24 @@ def make_data_source_dump_patterns(
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
-        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, label='*'),
+        dump.make_data_source_dump_filename(
+            make_dump_path('borgmatic'), name, hostname, port, container, label
+        ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_runtime_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_source_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
     )
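
Reviewer note: make_data_source_dump_patterns() in each database hook now receives the restore's
hostname, port, container, and label (threaded through from restore.py above, via
data_source.get('hostname') and friends) and forwards them to dump.make_data_source_dump_filename()
in place of the old label='*' wildcard, so the resulting extract pattern only matches the requested
dump, which is what the #1208 NEWS entry describes. The same change is repeated below for
mongodb.py, mysql.py, postgresql.py, and sqlite.py, while the non-database hooks (bootstrap, btrfs,
lvm, zfs) merely gain the extra unused keyword arguments. A hedged sketch of the difference, using
hypothetical values (the dump filename format itself isn't shown in this diff):

    from borgmatic.hooks.data_source import dump

    # Hypothetical dump path and restore flags; real values come from configuration and from the
    # "restore" action's --original-hostname/--original-port/etc. flags.
    dump_path = '/run/user/1000/borgmatic/postgresql_databases'

    # Before this commit: a label wildcard that matched every dump named "test".
    old_pattern = dump.make_data_source_dump_filename(dump_path, 'test', label='*')

    # After: the requested hostname and port narrow the pattern to a single dump.
    new_pattern = dump.make_data_source_dump_filename(
        dump_path, 'test', 'db1.example.org', 5432, None, None
    )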
 
 

+ 15 - 3
borgmatic/hooks/data_source/mongodb.py

@@ -212,6 +212,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Given a sequence of configurations dicts, a configuration dict, the borgmatic runtime directory,
@@ -221,16 +225,24 @@ def make_data_source_dump_patterns(
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
-        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, label='*'),
+        dump.make_data_source_dump_filename(
+            make_dump_path('borgmatic'), name, hostname, port, container, label
+        ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_runtime_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_source_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
     )
 
 

+ 15 - 3
borgmatic/hooks/data_source/mysql.py

@@ -341,6 +341,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Given a sequence of configurations dicts, a configuration dict, the borgmatic runtime directory,
@@ -350,16 +354,24 @@ def make_data_source_dump_patterns(
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
-        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, label='*'),
+        dump.make_data_source_dump_filename(
+            make_dump_path('borgmatic'), name, hostname, port, container, label
+        ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_runtime_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_source_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
     )
 
 

+ 15 - 3
borgmatic/hooks/data_source/postgresql.py

@@ -302,6 +302,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Given a sequence of configurations dicts, a configuration dict, the borgmatic runtime directory,
@@ -311,16 +315,24 @@ def make_data_source_dump_patterns(
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
-        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, label='*'),
+        dump.make_data_source_dump_filename(
+            make_dump_path('borgmatic'), name, hostname, port, container, label
+        ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_runtime_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_source_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
     )
 
 

+ 15 - 3
borgmatic/hooks/data_source/sqlite.py

@@ -144,6 +144,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Given a sequence of configurations dicts, a configuration dict, the borgmatic runtime directory,
@@ -153,16 +157,24 @@ def make_data_source_dump_patterns(
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
-        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, label='*'),
+        dump.make_data_source_dump_filename(
+            make_dump_path('borgmatic'), name, hostname, port, container, label
+        ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_runtime_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
         dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_source_directory),
             name,
-            label='*',
+            hostname,
+            port,
+            container,
+            label,
         ),
     )
 
 

+ 4 - 0
borgmatic/hooks/data_source/zfs.py

@@ -453,6 +453,10 @@ def make_data_source_dump_patterns(
     config,
     borgmatic_runtime_directory,
     name=None,
+    hostname=None,
+    port=None,
+    container=None,
+    label=None,
 ):  # pragma: no cover
     '''
     Restores aren't implemented, because stored files can be extracted directly with "extract".

+ 52 - 48
tests/unit/actions/test_restore.py

@@ -316,6 +316,10 @@ def test_restore_single_dump_extracts_and_restores_single_file_dump():
         object,
         object,
         object,
+        object,
+        object,
+        object,
+        object,
     ).and_return({'postgresql': flexmock()})
     flexmock(module.tempfile).should_receive('mkdtemp').never()
     flexmock(module.borgmatic.hooks.data_source.dump).should_receive(
@@ -360,6 +364,10 @@ def test_restore_single_dump_extracts_and_restores_directory_dump():
         object,
         object,
         object,
+        object,
+        object,
+        object,
+        object,
     ).and_return({'postgresql': flexmock()})
     flexmock(module.tempfile).should_receive('mkdtemp').once().and_return(
         '/run/user/0/borgmatic/tmp1234',
@@ -406,6 +414,10 @@ def test_restore_single_dump_with_directory_dump_error_cleans_up_temporary_direc
         object,
         object,
         object,
+        object,
+        object,
+        object,
+        object,
     ).and_return({'postgresql': flexmock()})
     flexmock(module.tempfile).should_receive('mkdtemp').once().and_return(
         '/run/user/0/borgmatic/tmp1234',
@@ -453,6 +465,10 @@ def test_restore_single_dump_with_directory_dump_and_dry_run_skips_directory_mov
         object,
         object,
         object,
+        object,
+        object,
+        object,
+        object,
     ).and_return({'postgresql': flexmock()})
     flexmock(module.tempfile).should_receive('mkdtemp').once().and_return('/run/borgmatic/tmp1234')
     flexmock(module.borgmatic.hooks.data_source.dump).should_receive(
@@ -526,7 +542,7 @@ def test_collect_dumps_from_archive_with_dumps_metadata_parses_it():
         borgmatic_runtime_directory='/run/borgmatic',
     )

-    assert archive_dumps == set(dumps_metadata)
+    assert archive_dumps == tuple(dumps_metadata)


 def test_collect_dumps_from_archive_with_empty_dumps_metadata_path_falls_back_to_parsing_archive_paths():
@@ -564,11 +580,11 @@ def test_collect_dumps_from_archive_with_empty_dumps_metadata_path_falls_back_to
         borgmatic_runtime_directory='/run/borgmatic',
     )

-    assert archive_dumps == {
+    assert archive_dumps == (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar', 'host', 1234),
         module.Dump('mysql_databases', 'quux'),
-    }
+    )


 def test_collect_dumps_from_archive_without_dumps_metadata_falls_back_to_parsing_archive_paths():
@@ -606,11 +622,11 @@ def test_collect_dumps_from_archive_without_dumps_metadata_falls_back_to_parsing
         borgmatic_runtime_directory='/run/borgmatic',
     )

-    assert archive_dumps == {
+    assert archive_dumps == (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar', 'host', 1234),
         module.Dump('mysql_databases', 'quux'),
-    }
+    )


 def test_collect_dumps_from_archive_parses_archive_paths_with_different_base_directories():
@@ -649,12 +665,12 @@ def test_collect_dumps_from_archive_parses_archive_paths_with_different_base_dir
         borgmatic_runtime_directory='/run/borgmatic',
     )

-    assert archive_dumps == {
+    assert archive_dumps == (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
         module.Dump('postgresql_databases', 'baz'),
         module.Dump('mysql_databases', 'quux'),
-    }
+    )


 def test_collect_dumps_from_archive_parses_directory_format_archive_paths():
@@ -691,9 +707,7 @@ def test_collect_dumps_from_archive_parses_directory_format_archive_paths():
         borgmatic_runtime_directory='/run/borgmatic',
     )

-    assert archive_dumps == {
-        module.Dump('postgresql_databases', 'foo'),
-    }
+    assert archive_dumps == (module.Dump('postgresql_databases', 'foo'),)


 def test_collect_dumps_from_archive_skips_bad_archive_paths_or_bad_path_components():
@@ -733,18 +747,18 @@ def test_collect_dumps_from_archive_skips_bad_archive_paths_or_bad_path_componen
         borgmatic_runtime_directory='/run/borgmatic',
     )

-    assert archive_dumps == {
+    assert archive_dumps == (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )


 def test_get_dumps_to_restore_gets_requested_dumps_found_in_archive():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
         module.Dump('postgresql_databases', 'baz'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -777,16 +791,14 @@ def test_get_dumps_to_restore_gets_requested_dumps_found_in_archive():
             original_container=None,
         ),
         dumps_from_archive=dumps_from_archive,
-    ) == {
+    ) == (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )


 def test_get_dumps_to_restore_raises_for_requested_dumps_missing_from_archive():
-    dumps_from_archive = {
-        module.Dump('postgresql_databases', 'foo'),
-    }
+    dumps_from_archive = (module.Dump('postgresql_databases', 'foo'),)
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -816,10 +828,10 @@ def test_get_dumps_to_restore_raises_for_requested_dumps_missing_from_archive():


 def test_get_dumps_to_restore_without_requested_dumps_finds_all_archive_dumps():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)

     assert (
@@ -839,10 +851,10 @@ def test_get_dumps_to_restore_without_requested_dumps_finds_all_archive_dumps():


 def test_get_dumps_to_restore_with_all_in_requested_dumps_finds_all_archive_dumps():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -882,10 +894,10 @@ def test_get_dumps_to_restore_with_all_in_requested_dumps_finds_all_archive_dump


 def test_get_dumps_to_restore_with_all_in_requested_dumps_plus_additional_requested_dumps_omits_duplicates():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -958,10 +970,10 @@ def test_get_dumps_to_restore_raises_for_multiple_matching_dumps_in_archive():
                 original_label=None,
                 original_container=None,
             ),
-            dumps_from_archive={
+            dumps_from_archive=(
                 module.Dump('postgresql_databases', 'foo'),
                 module.Dump('mariadb_databases', 'foo'),
-            },
+            ),
         )
     assert 'Try adding flags to disambiguate.' in str(exc_info.value)

@@ -990,17 +1002,17 @@ def test_get_dumps_to_restore_raises_for_all_in_requested_dumps_and_requested_du
                 original_label=None,
                 original_container=None,
             ),
-            dumps_from_archive={module.Dump('postgresql_databases', 'foo')},
+            dumps_from_archive=(module.Dump('postgresql_databases', 'foo'),),
         )
     assert 'dump test missing from archive' in str(exc_info.value)


 def test_get_dumps_to_restore_with_requested_hook_name_filters_dumps_found_in_archive():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('mariadb_databases', 'foo'),
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('sqlite_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -1023,17 +1035,15 @@ def test_get_dumps_to_restore_with_requested_hook_name_filters_dumps_found_in_ar
             original_container=None,
         ),
         dumps_from_archive=dumps_from_archive,
-    ) == {
-        module.Dump('postgresql_databases', 'foo'),
-    }
+    ) == (module.Dump('postgresql_databases', 'foo'),)


 def test_get_dumps_to_restore_with_requested_shortened_hook_name_filters_dumps_found_in_archive():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('mariadb_databases', 'foo'),
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('sqlite_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -1056,17 +1066,15 @@ def test_get_dumps_to_restore_with_requested_shortened_hook_name_filters_dumps_f
             original_container=None,
         ),
         dumps_from_archive=dumps_from_archive,
-    ) == {
-        module.Dump('postgresql_databases', 'foo'),
-    }
+    ) == (module.Dump('postgresql_databases', 'foo'),)


 def test_get_dumps_to_restore_with_requested_hostname_filters_dumps_found_in_archive():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('postgresql_databases', 'foo'),
         module.Dump('postgresql_databases', 'foo', 'host'),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -1089,17 +1097,15 @@ def test_get_dumps_to_restore_with_requested_hostname_filters_dumps_found_in_arc
             original_container=None,
         ),
         dumps_from_archive=dumps_from_archive,
-    ) == {
-        module.Dump('postgresql_databases', 'foo', 'host'),
-    }
+    ) == (module.Dump('postgresql_databases', 'foo', 'host'),)


 def test_get_dumps_to_restore_with_requested_port_filters_dumps_found_in_archive():
-    dumps_from_archive = {
+    dumps_from_archive = (
         module.Dump('postgresql_databases', 'foo', 'host'),
         module.Dump('postgresql_databases', 'foo', 'host', 1234),
         module.Dump('postgresql_databases', 'bar'),
-    }
+    )
     flexmock(module).should_receive('dumps_match').and_return(False)
     flexmock(module).should_receive('dumps_match').with_args(
         module.Dump(
@@ -1123,9 +1129,7 @@ def test_get_dumps_to_restore_with_requested_port_filters_dumps_found_in_archive
             original_container=None,
         ),
         dumps_from_archive=dumps_from_archive,
-    ) == {
-        module.Dump('postgresql_databases', 'foo', 'host', 1234),
-    }
+    ) == (module.Dump('postgresql_databases', 'foo', 'host', 1234),)


 def test_ensure_requested_dumps_restored_with_all_dumps_restored_does_not_raise():

+ 2 - 2
tests/unit/hooks/data_source/test_dump.py

@@ -157,6 +157,6 @@ def test_remove_data_source_dumps_without_dump_path_present_skips_removal():

 def test_convert_glob_patterns_to_borg_pattern_makes_multipart_regular_expression():
     assert (
-        module.convert_glob_patterns_to_borg_pattern(('/etc/foo/bar', '/bar/*/baz'))
-        == 're:(?s:etc/foo/bar)|(?s:bar/.*/baz)'
+        module.convert_glob_patterns_to_borg_pattern(('/etc/foo/bar', '/bar/baz/quux'))
+        == 're:(?s:etc/foo/bar)$|(?s:etc/foo/bar/.*)$|(?s:bar/baz/quux)$|(?s:bar/baz/quux/.*)$'
     )