
Code formatting (#635).

Dan Helfman 4 months ago
commit 8c5db19490
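
The changes below are purely mechanical reformatting: calls that were wrapped across several lines are collapsed wherever the whole expression fits on one line, and over-long expressions are wrapped instead. A minimal before/after sketch of the pattern, assuming a Black-style formatter with an extended line-length limit (the exact tool and settings are not shown in this commit):

import logging

logger = logging.getLogger(__name__)
results = []  # placeholder list of results, for illustration only

# Before: a short call wrapped across several lines even though it fits on one.
logger.info(
    'Running arbitrary Borg command'
)

# After: the formatter collapses the call because it fits within the line-length limit.
logger.info('Running arbitrary Borg command')

# Conversely, an expression that would exceed the limit is wrapped across lines instead:
error_logs = tuple(
    result for result in results if isinstance(result, logging.LogRecord)
)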

+ 1 - 3
borgmatic/actions/borg.py

@@ -22,9 +22,7 @@ def run_borg(
     if borg_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, borg_arguments.repository
     ):
-        logger.info(
-            'Running arbitrary Borg command'
-        )
+        logger.info('Running arbitrary Borg command')
         archive_name = borgmatic.borg.repo_list.resolve_archive_name(
             repository['path'],
             borg_arguments.archive,

+ 1 - 3
borgmatic/actions/break_lock.py

@@ -21,9 +21,7 @@ def run_break_lock(
     if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, break_lock_arguments.repository
     ):
-        logger.info(
-            'Breaking repository and cache locks'
-        )
+        logger.info('Breaking repository and cache locks')
         borgmatic.borg.break_lock.break_lock(
             repository['path'],
             config,

+ 1 - 3
borgmatic/actions/change_passphrase.py

@@ -24,9 +24,7 @@ def run_change_passphrase(
             repository, change_passphrase_arguments.repository
         )
     ):
-        logger.info(
-            'Changing repository passphrase'
-        )
+        logger.info('Changing repository passphrase')
         borgmatic.borg.change_passphrase.change_passphrase(
             repository['path'],
             config,

+ 1 - 3
borgmatic/actions/check.py

@@ -762,9 +762,7 @@ def run_check(
         write_check_time(make_check_time_path(config, repository_id, 'extract'))

     if 'spot' in checks:
-        with borgmatic.config.paths.Runtime_directory(
-            config
-        ) as borgmatic_runtime_directory:
+        with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
             spot_check(
                 repository,
                 config,

+ 2 - 6
borgmatic/actions/compact.py

@@ -37,9 +37,7 @@ def run_compact(
         **hook_context,
     )
     if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
-        logger.info(
-            f'Compacting segments{dry_run_label}'
-        )
+        logger.info(f'Compacting segments{dry_run_label}')
         borgmatic.borg.compact.compact_segments(
             global_arguments.dry_run,
             repository['path'],
@@ -53,9 +51,7 @@ def run_compact(
             threshold=compact_arguments.threshold,
         )
     else:  # pragma: nocover
-        logger.info(
-            'Skipping compact (only available/needed in Borg 1.2+)'
-        )
+        logger.info('Skipping compact (only available/needed in Borg 1.2+)')
 
 
     borgmatic.hooks.command.execute_hook(
         config.get('after_compact'),

+ 1 - 3
borgmatic/actions/create.py

@@ -286,9 +286,7 @@ def run_create(
     logger.info(f'Creating archive{dry_run_label}')
     working_directory = borgmatic.config.paths.get_working_directory(config)
 
 
-    with borgmatic.config.paths.Runtime_directory(
-        config
-    ) as borgmatic_runtime_directory:
+    with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
         borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
             'remove_data_source_dumps',
             config,

+ 1 - 3
borgmatic/actions/export_tar.py

@@ -22,9 +22,7 @@ def run_export_tar(
     if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, export_tar_arguments.repository
     ):
-        logger.info(
-            f'Exporting archive {export_tar_arguments.archive} as tar file'
-        )
+        logger.info(f'Exporting archive {export_tar_arguments.archive} as tar file')
         borgmatic.borg.export_tar.export_tar_archive(
             global_arguments.dry_run,
             repository['path'],

+ 1 - 3
borgmatic/actions/extract.py

@@ -33,9 +33,7 @@ def run_extract(
     if extract_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, extract_arguments.repository
     ):
-        logger.info(
-            f'Extracting archive {extract_arguments.archive}'
-        )
+        logger.info(f'Extracting archive {extract_arguments.archive}')
         borgmatic.borg.extract.extract_archive(
             global_arguments.dry_run,
             repository['path'],

+ 1 - 3
borgmatic/actions/info.py

@@ -27,9 +27,7 @@ def run_info(
         repository, info_arguments.repository
     ):
         if not info_arguments.json:
-            logger.answer(
-                'Displaying archive summary information'
-            )
+            logger.answer('Displaying archive summary information')
         archive_name = borgmatic.borg.repo_list.resolve_archive_name(
             repository['path'],
             info_arguments.archive,

+ 1 - 3
borgmatic/actions/mount.py

@@ -23,9 +23,7 @@ def run_mount(
         repository, mount_arguments.repository
     ):
         if mount_arguments.archive:
-            logger.info(
-                f'Mounting archive {mount_arguments.archive}'
-            )
+            logger.info(f'Mounting archive {mount_arguments.archive}')
         else:  # pragma: nocover
             logger.info('Mounting repository')
 
 

+ 1 - 2
borgmatic/actions/repo_delete.py

@@ -21,8 +21,7 @@ def run_repo_delete(
         repository, repo_delete_arguments.repository
     ):
         logger.answer(
-            'Deleting repository'
-            + (' cache' if repo_delete_arguments.cache_only else '')
+            'Deleting repository' + (' cache' if repo_delete_arguments.cache_only else '')
         )

         borgmatic.borg.repo_delete.delete_repository(

+ 1 - 3
borgmatic/actions/repo_info.py

@@ -25,9 +25,7 @@ def run_repo_info(
         repository, repo_info_arguments.repository
     ):
         if not repo_info_arguments.json:
-            logger.answer(
-                'Displaying repository summary information'
-            )
+            logger.answer('Displaying repository summary information')
 
 
         json_output = borgmatic.borg.repo_info.display_repository_info(
             repository['path'],

+ 2 - 6
borgmatic/actions/restore.py

@@ -172,9 +172,7 @@ def restore_single_dump(
         Dump(hook_name, data_source['name'], data_source.get('hostname'), data_source.get('port'))
     )
 
 
-    logger.info(
-        f'Restoring data source {dump_metadata}'
-    )
+    logger.info(f'Restoring data source {dump_metadata}')
 
 
     dump_patterns = borgmatic.hooks.dispatch.call_hooks(
         'make_data_source_dump_patterns',
@@ -443,9 +441,7 @@ def run_restore(
 
 
     logger.info(f'Restoring data sources from archive {restore_arguments.archive}')
 
 
-    with borgmatic.config.paths.Runtime_directory(
-        config
-    ) as borgmatic_runtime_directory:
+    with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
         borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
             'remove_data_source_dumps',
             config,

+ 1 - 3
borgmatic/actions/transfer.py

@@ -17,9 +17,7 @@ def run_transfer(
     '''
     Run the "transfer" action for the given repository.
     '''
-    logger.info(
-        'Transferring archives to repository'
-    )
+    logger.info('Transferring archives to repository')
     borgmatic.borg.transfer.transfer_archives(
         global_arguments.dry_run,
         repository['path'],

+ 16 - 14
borgmatic/commands/borgmatic.py

@@ -39,7 +39,13 @@ from borgmatic.commands.arguments import parse_arguments
 from borgmatic.config import checks, collect, validate
 from borgmatic.hooks import command, dispatch
 from borgmatic.hooks.monitoring import monitor
-from borgmatic.logger import DISABLED, add_custom_log_levels, configure_logging, should_do_markup, set_log_prefix
+from borgmatic.logger import (
+    DISABLED,
+    add_custom_log_levels,
+    configure_logging,
+    set_log_prefix,
+    should_do_markup,
+)
 from borgmatic.signals import configure_signals
 from borgmatic.verbosity import verbosity_to_log_level
 
 
@@ -131,15 +137,11 @@ def run_configuration(config_filename, config, config_paths, arguments):
         try:
             while not repo_queue.empty():
                 repository, retry_num = repo_queue.get()
-                set_log_prefix(repository.get('label', repository['path'])) 
-                logger.debug(
-                    'Running actions for repository'
-                )
+                set_log_prefix(repository.get('label', repository['path']))
+                logger.debug('Running actions for repository')
                 timeout = retry_num * retry_wait
                 if timeout:
-                    logger.warning(
-                        f'Sleeping {timeout}s before next retry'
-                    )
+                    logger.warning(f'Sleeping {timeout}s before next retry')
                     time.sleep(timeout)
                 try:
                     yield from run_actions(
@@ -165,9 +167,7 @@ def run_configuration(config_filename, config, config_paths, arguments):
                                 log_command_error_output=True,
                             )
                         )
-                        logger.warning(
-                            f'Retrying... attempt {retry_num + 1}/{retries}'
-                        )
+                        logger.warning(f'Retrying... attempt {retry_num + 1}/{retries}')
                         continue

                     if command.considered_soft_failure(error):
@@ -819,9 +819,11 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
 
 
     try:
         for config_filename, config in configs.items():
-            set_log_prefix(config_filename) 
+            set_log_prefix(config_filename)
             results = list(run_configuration(config_filename, config, config_paths, arguments))
-            error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
+            error_logs = tuple(
+                result for result in results if isinstance(result, logging.LogRecord)
+            )
 
 
             if error_logs:
                 yield from log_error_records('An error occurred')
@@ -837,7 +839,7 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
                 if results:
                     json_results.extend(results)
     finally:
-        set_log_prefix(None) 
+        set_log_prefix(None)
 
 
     if 'umount' in arguments:
         logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")

+ 1 - 3
borgmatic/hooks/command.py

@@ -60,9 +60,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
     dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''

     context['configuration_filename'] = config_filename
-    commands = [
-        interpolate_context(description, command, context) for command in commands
-    ]
+    commands = [interpolate_context(description, command, context) for command in commands]
 
 
     if len(commands) == 1:
         logger.info(f'Running command for {description} hook{dry_run_label}')

+ 1 - 3
borgmatic/hooks/data_source/bootstrap.py

@@ -83,9 +83,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
 
 
     for manifest_directory in glob.glob(manifest_glob):
         manifest_file_path = os.path.join(manifest_directory, 'manifest.json')
-        logger.debug(
-            f'Removing bootstrap manifest at {manifest_file_path}{dry_run_label}'
-        )
+        logger.debug(f'Removing bootstrap manifest at {manifest_file_path}{dry_run_label}')
 
 
         if dry_run:
             continue

+ 2 - 6
borgmatic/hooks/data_source/lvm.py

@@ -338,9 +338,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         ),
         'lvm_snapshots',
     )
-    logger.debug(
-        f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
-    )
+    logger.debug(f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}')
     umount_command = hook_config.get('umount_command', 'umount')

     for snapshots_directory in glob.glob(snapshots_glob):
@@ -364,9 +362,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
                 if not os.path.isdir(snapshot_mount_path):
                     continue
 
 
-            logger.debug(
-                f'Unmounting LVM snapshot at {snapshot_mount_path}{dry_run_label}'
-            )
+            logger.debug(f'Unmounting LVM snapshot at {snapshot_mount_path}{dry_run_label}')
 
 
             if dry_run:
                 continue

+ 3 - 9
borgmatic/hooks/data_source/mariadb.py

@@ -102,9 +102,7 @@ def execute_dump_command(
         + ('--result-file', dump_filename)
     )
 
 
-    logger.debug(
-        f'Dumping MariaDB database "{database_name}" to {dump_filename}{dry_run_label}'
-    )
+    logger.debug(f'Dumping MariaDB database "{database_name}" to {dump_filename}{dry_run_label}')
     if dry_run:
         return None
 
 
@@ -155,9 +153,7 @@ def dump_data_sources(
     for database in databases:
         dump_path = make_dump_path(borgmatic_runtime_directory)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
-        dump_database_names = database_names_to_dump(
-            database, extra_environment, dry_run
-        )
+        dump_database_names = database_names_to_dump(database, extra_environment, dry_run)
 
 
         if not dump_database_names:
             if dry_run:
@@ -209,9 +205,7 @@ def remove_data_source_dumps(
     borgmatic_runtime_directory to construct the destination path. If this is a dry run, then don't
     actually remove anything.
     '''
-    dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'MariaDB', dry_run
-    )
+    dump.remove_data_source_dumps(make_dump_path(borgmatic_runtime_directory), 'MariaDB', dry_run)


 def make_data_source_dump_patterns(

+ 1 - 3
borgmatic/hooks/data_source/mongodb.py

@@ -123,9 +123,7 @@ def remove_data_source_dumps(
     borgmatic_runtime_directory to construct the destination path. If this is a dry run, then don't
     actually remove anything.
     '''
-    dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'MongoDB', dry_run
-    )
+    dump.remove_data_source_dumps(make_dump_path(borgmatic_runtime_directory), 'MongoDB', dry_run)


 def make_data_source_dump_patterns(

+ 3 - 9
borgmatic/hooks/data_source/mysql.py

@@ -101,9 +101,7 @@ def execute_dump_command(
         + ('--result-file', dump_filename)
     )
 
 
-    logger.debug(
-        f'Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
-    )
+    logger.debug(f'Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}')
     if dry_run:
         return None
 
 
@@ -154,9 +152,7 @@ def dump_data_sources(
     for database in databases:
         dump_path = make_dump_path(borgmatic_runtime_directory)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
-        dump_database_names = database_names_to_dump(
-            database, extra_environment, dry_run
-        )
+        dump_database_names = database_names_to_dump(database, extra_environment, dry_run)
 
 
         if not dump_database_names:
             if dry_run:
@@ -208,9 +204,7 @@ def remove_data_source_dumps(
     borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
     actually remove anything.
     '''
-    dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'MySQL', dry_run
-    )
+    dump.remove_data_source_dumps(make_dump_path(borgmatic_runtime_directory), 'MySQL', dry_run)


 def make_data_source_dump_patterns(

+ 2 - 6
borgmatic/hooks/data_source/postgresql.py

@@ -137,9 +137,7 @@ def dump_data_sources(
     for database in databases:
         extra_environment = make_extra_environment(database)
         dump_path = make_dump_path(borgmatic_runtime_directory)
-        dump_database_names = database_names_to_dump(
-            database, extra_environment, dry_run
-        )
+        dump_database_names = database_names_to_dump(database, extra_environment, dry_run)
 
 
         if not dump_database_names:
             if dry_run:
@@ -350,9 +348,7 @@ def restore_data_source_dump(
         data_source, restore_connection_params=connection_params
     )
 
 
-    logger.debug(
-        f"Restoring PostgreSQL database {data_source['name']}{dry_run_label}"
-    )
+    logger.debug(f"Restoring PostgreSQL database {data_source['name']}{dry_run_label}")
     if dry_run:
         return
 
 

+ 1 - 3
borgmatic/hooks/data_source/sqlite.py

@@ -105,9 +105,7 @@ def remove_data_source_dumps(
     borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
     actually remove anything.
     '''
-    dump.remove_data_source_dumps(
-        make_dump_path(borgmatic_runtime_directory), 'SQLite', dry_run
-    )
+    dump.remove_data_source_dumps(make_dump_path(borgmatic_runtime_directory), 'SQLite', dry_run)


 def make_data_source_dump_patterns(

+ 2 - 6
borgmatic/hooks/data_source/zfs.py

@@ -335,9 +335,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
         ),
         'zfs_snapshots',
     )
-    logger.debug(
-        f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
-    )
+    logger.debug(f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}')
     umount_command = hook_config.get('umount_command', 'umount')

     for snapshots_directory in glob.glob(snapshots_glob):
@@ -362,9 +360,7 @@ def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, d
                 if not os.path.isdir(snapshot_mount_path):
                     continue
 
 
-            logger.debug(
-                f'Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}'
-            )
+            logger.debug(f'Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}')
 
 
             if not dry_run:
                 try:

+ 12 - 4
borgmatic/logger.py

@@ -89,7 +89,9 @@ class Multi_stream_handler(logging.Handler):
 
 
 class Console_no_color_formatter(logging.Formatter):
     def __init__(self, *args, **kwargs):
-        super(Console_no_color_formatter, self).__init__('{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs)
+        super(Console_no_color_formatter, self).__init__(
+            '{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs
+        )


 class Color(enum.Enum):
@@ -103,7 +105,9 @@ class Color(enum.Enum):
 
 
 class Console_color_formatter(logging.Formatter):
     def __init__(self, *args, **kwargs):
-        super(Console_color_formatter, self).__init__('{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs)
+        super(Console_color_formatter, self).__init__(
+            '{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs
+        )
 
 
     def format(self, record):
         add_custom_log_levels()
@@ -255,7 +259,9 @@ def configure_logging(
         if syslog_path:
             syslog_handler = logging.handlers.SysLogHandler(address=syslog_path)
             syslog_handler.setFormatter(
-                logging.Formatter('borgmatic: {levelname} {prefix}{message}', style='{', defaults={'prefix': ''})  # noqa: FS003
+                logging.Formatter(
+                    'borgmatic: {levelname} {prefix}{message}', style='{', defaults={'prefix': ''}
+                )  # noqa: FS003
             )
             syslog_handler.setLevel(syslog_log_level)
             handlers.append(syslog_handler)
@@ -264,7 +270,9 @@ def configure_logging(
         file_handler = logging.handlers.WatchedFileHandler(log_file)
         file_handler.setFormatter(
             logging.Formatter(
-                log_file_format or '[{asctime}] {levelname}: {prefix}{message}', style='{', defaults={'prefix': ''}  # noqa: FS003
+                log_file_format or '[{asctime}] {levelname}: {prefix}{message}',
+                style='{',
+                defaults={'prefix': ''},  # noqa: FS003
             )
             )
         )
         file_handler.setLevel(log_file_log_level)

+ 1 - 3
tests/unit/borg/test_create.py

@@ -21,9 +21,7 @@ def test_write_patterns_file_writes_pattern_lines():


 def test_write_patterns_file_with_empty_exclude_patterns_does_not_raise():
-    module.write_patterns_file(
-        [], borgmatic_runtime_directory='/run/user/0'
-    )
+    module.write_patterns_file([], borgmatic_runtime_directory='/run/user/0')


 def test_write_patterns_file_appends_to_existing():

+ 5 - 10
tests/unit/hooks/data_source/test_mariadb.py

@@ -9,9 +9,7 @@ from borgmatic.hooks.data_source import mariadb as module
 def test_database_names_to_dump_passes_through_name():
     extra_environment = flexmock()
 
 
-    names = module.database_names_to_dump(
-        {'name': 'foo'}, extra_environment, dry_run=False
-    )
+    names = module.database_names_to_dump({'name': 'foo'}, extra_environment, dry_run=False)
 
 
     assert names == ('foo',)
 
 
@@ -20,9 +18,7 @@ def test_database_names_to_dump_bails_for_dry_run():
     extra_environment = flexmock()
     flexmock(module).should_receive('execute_command_and_capture_output').never()
 
 
-    names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=True
-    )
+    names = module.database_names_to_dump({'name': 'all'}, extra_environment, dry_run=True)
 
 
     assert names == ()
 
 
@@ -34,16 +30,15 @@ def test_database_names_to_dump_queries_mariadb_for_database_names():
         extra_environment=extra_environment,
     ).and_return('foo\nbar\nmysql\n').once()
 
 
-    names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=False
-    )
+    names = module.database_names_to_dump({'name': 'all'}, extra_environment, dry_run=False)
 
 
     assert names == ('foo', 'bar')


 def test_use_streaming_true_for_any_databases():
     assert module.use_streaming(
-        databases=[flexmock(), flexmock()], config=flexmock(),
+        databases=[flexmock(), flexmock()],
+        config=flexmock(),
     )
 
 
 
 

+ 5 - 10
tests/unit/hooks/data_source/test_mysql.py

@@ -9,9 +9,7 @@ from borgmatic.hooks.data_source import mysql as module
 def test_database_names_to_dump_passes_through_name():
     extra_environment = flexmock()
 
 
-    names = module.database_names_to_dump(
-        {'name': 'foo'}, extra_environment, dry_run=False
-    )
+    names = module.database_names_to_dump({'name': 'foo'}, extra_environment, dry_run=False)
 
 
     assert names == ('foo',)
 
 
@@ -20,9 +18,7 @@ def test_database_names_to_dump_bails_for_dry_run():
     extra_environment = flexmock()
     flexmock(module).should_receive('execute_command_and_capture_output').never()
 
 
-    names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=True
-    )
+    names = module.database_names_to_dump({'name': 'all'}, extra_environment, dry_run=True)
 
 
     assert names == ()
 
 
@@ -34,16 +30,15 @@ def test_database_names_to_dump_queries_mysql_for_database_names():
         extra_environment=extra_environment,
     ).and_return('foo\nbar\nmysql\n').once()
 
 
-    names = module.database_names_to_dump(
-        {'name': 'all'}, extra_environment, dry_run=False
-    )
+    names = module.database_names_to_dump({'name': 'all'}, extra_environment, dry_run=False)
 
 
     assert names == ('foo', 'bar')


 def test_use_streaming_true_for_any_databases():
     assert module.use_streaming(
-        databases=[flexmock(), flexmock()], config=flexmock(),
+        databases=[flexmock(), flexmock()],
+        config=flexmock(),
     )
 
 
 
 

+ 2 - 1
tests/unit/hooks/data_source/test_sqlite.py

@@ -7,7 +7,8 @@ from borgmatic.hooks.data_source import sqlite as module
 
 
 def test_use_streaming_true_for_any_databases():
     assert module.use_streaming(
-        databases=[flexmock(), flexmock()], config=flexmock(),
+        databases=[flexmock(), flexmock()],
+        config=flexmock(),
     )
 
 
 
 

+ 2 - 5
tests/unit/hooks/test_command.py

@@ -20,9 +20,7 @@ def test_interpolate_context_interpolates_variables():
     command = 'ls {foo}{baz} {baz}'  # noqa: FS003
     context = {'foo': 'bar', 'baz': 'quux'}
 
 
-    assert (
-        module.interpolate_context('pre-backup', command, context) == 'ls barquux quux'
-    )
+    assert module.interpolate_context('pre-backup', command, context) == 'ls barquux quux'


 def test_interpolate_context_escapes_interpolated_variables():
@@ -30,8 +28,7 @@ def test_interpolate_context_escapes_interpolated_variables():
     context = {'foo': 'bar', 'inject': 'hi; naughty-command'}

     assert (
-        module.interpolate_context('pre-backup', command, context)
-        == "ls bar 'hi; naughty-command'"
+        module.interpolate_context('pre-backup', command, context) == "ls bar 'hi; naughty-command'"
     )