
Add a flake8 plugin that enforces a blank line after if/for/while/with/try blocks.

Dan Helfman 2 days ago
parent
commit 34f677dba1
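
To illustrate the style the plugin enforces, here is a minimal sketch — hypothetical code, loosely modeled on the coerce_scalar change below, not taken verbatim from the repository:

# Flagged by the plugin: the next statement follows the if block directly.
def coerce_flag_before(value):
    if value in ('true', 'True'):
        return True
    return False


# Clean: a blank line separates the if block from the following statement,
# which is exactly the kind of blank line this commit adds throughout.
def coerce_flag_after(value):
    if value in ('true', 'True'):
        return True

    return False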

+ 4 - 0
borgmatic/actions/check.py

@@ -57,10 +57,12 @@ def parse_checks(config, only_checks=None):
         logger.warning(
             'The "disabled" value for the "checks" option is deprecated and will be removed from a future release; use "skip_actions" instead'
         )
+
         if len(checks) > 1:
             logger.warning(
                 'Multiple checks are configured, but one of them is "disabled"; not running any checks'
             )
+
         return ()
 
     return checks
@@ -164,6 +166,7 @@ def filter_checks_on_frequency(
                     **dict.fromkeys(day for day in days if day != 'weekday'),
                     **dict.fromkeys(WEEKDAY_DAYS),
                 }
+
             if 'weekend' in days:
                 days = {
                     **dict.fromkeys(day for day in days if day != 'weekend'),
@@ -794,4 +797,5 @@ def run_check(
                 remote_path,
                 borgmatic_runtime_directory,
             )
+
         write_check_time(make_check_time_path(config, repository_id, 'spot'))

+ 1 - 0
borgmatic/actions/info.py

@@ -28,6 +28,7 @@ def run_info(
     ):
         if not info_arguments.json:
             logger.answer('Displaying archive summary information')
+
         archive_name = borgmatic.borg.repo_list.resolve_archive_name(
             repository['path'],
             info_arguments.archive,

+ 1 - 0
borgmatic/actions/restore.py

@@ -40,6 +40,7 @@ def dumps_match(first, second, default_port=None):
         if default_port is not None and field_name == 'port':
             if first_value == default_port and second_value is None:
                 continue
+
             if second_value == default_port and first_value is None:
                 continue
 

+ 3 - 0
borgmatic/borg/check.py

@@ -44,6 +44,7 @@ def make_archive_filter_flags(local_borg_version, config, checks, check_argument
         logger.warning(
             'Ignoring check_last option, as "archives" or "data" are not in consistency checks'
         )
+
     if prefix:
         logger.warning(
             'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks'
@@ -127,8 +128,10 @@ def check_archives(
     extra_borg_options = config.get('extra_borg_options', {}).get('check', '')
 
     verbosity_flags = ()
+
     if logger.isEnabledFor(logging.INFO):
         verbosity_flags = ('--info',)
+
     if logger.isEnabledFor(logging.DEBUG):
         verbosity_flags = ('--debug', '--show-rc')
 

+ 6 - 0
borgmatic/commands/borgmatic.py

@@ -241,9 +241,11 @@ def run_configuration(config_filename, config, config_paths, arguments):
                     with Log_prefix(repository.get('label', repository['path'])):
                         logger.debug('Running actions for repository')
                         timeout = retry_num * retry_wait
+
                         if timeout:
                             logger.warning(f'Sleeping {timeout}s before next retry')
                             time.sleep(timeout)
+
                         try:
                             yield from run_actions(
                                 arguments=arguments,
@@ -1060,17 +1062,21 @@ def main(extra_summary_logs=[]):  # pragma: no cover
     except SystemExit as error:
         if error.code == 0:
             raise error
+
         configure_logging(logging.CRITICAL)
         logger.critical(f"Error parsing arguments: {' '.join(sys.argv)}")
         exit_with_help_link()
 
     global_arguments = arguments['global']
+
     if global_arguments.version:
         print(importlib.metadata.version('borgmatic'))
         sys.exit(0)
+
     if global_arguments.bash_completion:
         print(borgmatic.commands.completion.bash.bash_completion())
         sys.exit(0)
+
     if global_arguments.fish_completion:
         print(borgmatic.commands.completion.fish.fish_completion())
         sys.exit(0)

+ 1 - 0
borgmatic/config/constants.py

@@ -13,6 +13,7 @@ def coerce_scalar(value):
 
     if value == 'true' or value == 'True':
         return True
+
     if value == 'false' or value == 'False':
         return False
 

+ 1 - 0
borgmatic/config/generate.py

@@ -267,6 +267,7 @@ def merge_source_configuration_into_destination(destination_config, source_confi
     '''
     if not source_config:
         return destination_config
+
     if not destination_config or not isinstance(source_config, collections.abc.Mapping):
         return source_config
 

+ 1 - 0
borgmatic/config/normalize.py

@@ -341,6 +341,7 @@ def normalize(config_filename, config):
                         )
                     )
                 )
+
             if ':' in repository_path:
                 if repository_path.startswith('file://'):
                     updated_repository_path = os.path.abspath(

+ 1 - 0
borgmatic/config/validate.py

@@ -194,6 +194,7 @@ def repositories_match(first, second):
     '''
     if isinstance(first, str):
         first = {'path': first, 'label': first}
+
     if isinstance(second, str):
         second = {'path': second, 'label': second}
 

+ 3 - 0
borgmatic/execute.py

@@ -31,6 +31,7 @@ def interpret_exit_code(command, exit_code, borg_local_path=None, borg_exit_code
     '''
     if exit_code is None:
         return Exit_status.STILL_RUNNING
+
     if exit_code == 0:
         return Exit_status.SUCCESS
 
@@ -374,6 +375,7 @@ def execute_command_and_capture_output(
             == Exit_status.ERROR
         ):
             raise
+
         output = error.output
 
     return output.decode() if output is not None else None
@@ -435,6 +437,7 @@ def execute_command_with_processes(
             if process.poll() is None:
                 process.stdout.read(0)
                 process.kill()
+
         raise
 
     with borgmatic.logger.Log_prefix(None):  # Log command output without any prefix.

+ 1 - 0
borgmatic/hooks/data_source/mariadb.py

@@ -113,6 +113,7 @@ def database_names_to_dump(database, config, username, password, environment, dr
     '''
     if database['name'] != 'all':
         return (database['name'],)
+
     if dry_run:
         return ()
 

+ 10 - 0
borgmatic/hooks/data_source/mongodb.py

@@ -80,6 +80,7 @@ def dump_data_sources(
             processes.append(
                 execute_command(command, shell=True, run_to_completion=False)  # noqa: S604
             )
+
     if not dry_run:
         patterns.append(
             borgmatic.borg.pattern.Pattern(
@@ -259,24 +260,33 @@ def build_restore_command(extract_process, database, config, dump_filename, conn
         shlex.quote(part)
         for part in shlex.split(database.get('mongorestore_command') or 'mongorestore')
     )
+
     if extract_process:
         command.append('--archive')
     else:
         command.extend(('--dir', dump_filename))
+
     if database['name'] != 'all':
         command.extend(('--drop',))
+
     if hostname:
         command.extend(('--host', hostname))
+
     if port:
         command.extend(('--port', str(port)))
+
     if username:
         command.extend(('--username', username))
+
     if password:
         command.extend(('--config', make_password_config_file(password)))
+
     if 'authentication_database' in database:
         command.extend(('--authenticationDatabase', database['authentication_database']))
+
     if 'restore_options' in database:
         command.extend(database['restore_options'].split(' '))
+
     if database.get('schemas'):
         for schema in database['schemas']:
             command.extend(('--nsInclude', schema))

+ 1 - 0
borgmatic/hooks/data_source/mysql.py

@@ -36,6 +36,7 @@ def database_names_to_dump(database, config, username, password, environment, dr
     '''
     if database['name'] != 'all':
         return (database['name'],)
+
     if dry_run:
         return ()
 

+ 6 - 0
borgmatic/hooks/data_source/postgresql.py

@@ -50,12 +50,16 @@ def make_environment(database, config, restore_connection_params=None):
 
     if 'ssl_mode' in database:
         environment['PGSSLMODE'] = database['ssl_mode']
+
     if 'ssl_cert' in database:
         environment['PGSSLCERT'] = database['ssl_cert']
+
     if 'ssl_key' in database:
         environment['PGSSLKEY'] = database['ssl_key']
+
     if 'ssl_root_cert' in database:
         environment['PGSSLROOTCERT'] = database['ssl_root_cert']
+
     if 'ssl_crl' in database:
         environment['PGSSLCRL'] = database['ssl_crl']
 
@@ -76,8 +80,10 @@ def database_names_to_dump(database, config, environment, dry_run):
 
     if requested_name != 'all':
         return (requested_name,)
+
     if not database.get('format'):
         return ('all',)
+
     if dry_run:
         return ()
 

+ 1 - 0
borgmatic/hooks/data_source/sqlite.py

@@ -57,6 +57,7 @@ def dump_data_sources(
 
         if database['name'] == 'all':
             logger.warning('The "all" database name has no meaning for SQLite databases')
+
         if not os.path.exists(database_path):
             logger.warning(
                 f'No SQLite database at {database_path}; an empty database will be created and dumped'

+ 1 - 0
borgmatic/hooks/monitoring/ntfy.py

@@ -73,6 +73,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev
                 logger.warning(
                     'ntfy access_token is set but so is username/password, only using access_token'
                 )
+
             auth = requests.auth.HTTPBasicAuth('', access_token)
         elif (username and password) is not None:
             auth = requests.auth.HTTPBasicAuth(username, password)

+ 1 - 0
borgmatic/hooks/monitoring/pushover.py

@@ -50,6 +50,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev
         if 'expire' not in state_config:
             logger.info('Setting expire to default (10 min)')
             state_config['expire'] = 600
+
         if 'retry' not in state_config:
             logger.info('Setting retry to default (30 sec)')
             state_config['retry'] = 30

+ 2 - 0
borgmatic/logger.py

@@ -344,8 +344,10 @@ def configure_logging(
 
     if syslog_log_level is None:
         syslog_log_level = logging.DISABLED
+
     if log_file_log_level is None:
         log_file_log_level = console_log_level
+
     if monitoring_log_level is None:
         monitoring_log_level = console_log_level
 

+ 1 - 0
test_requirements.txt

@@ -10,6 +10,7 @@ codespell==2.4.1
 coverage==7.9.1
 flake8==7.3.0
 flake8-bandit==4.1.1
+flake8-clean-block==0.1.2
 flake8-eradicate==1.5.0
 flake8-quotes==3.4.0
 flake8-use-fstring==1.4
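
With the pinned plugin installed, a plain flake8 run should pick it up automatically, since flake8 discovers plugins through their entry points; this commit adds no extra configuration for it. As a hedged sketch (the function and names here are hypothetical), code like the following would then be reported for the missing blank line after the try/except block:

def parse_port(raw):
    try:
        port = int(raw)
    except ValueError:
        port = None
    return port  # flagged: no blank line between the except block and this return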

+ 12 - 0
tests/end-to-end/hooks/data_source/test_database.py

@@ -295,12 +295,16 @@ def create_test_tables(config, use_restore_options=False):
 
     if 'postgresql_databases' in config:
         run_postgresql_command(command.format(id=1), config, use_restore_options)
+
     if 'mariadb_databases' in config:
         run_mariadb_command(command.format(id=2), config, use_restore_options)
+
     if 'mysql_databases' in config:
         run_mariadb_command(command.format(id=3), config, use_restore_options, binary_name='mysql')
+
     if 'mongodb_databases' in config:
         get_mongodb_database_client(config, use_restore_options)['test4'].insert_one({'thing': 1})
+
     if 'sqlite_databases' in config:
         run_sqlite_command(command.format(id=5), config, use_restore_options)
 
@@ -313,12 +317,16 @@ def drop_test_tables(config, use_restore_options=False):
 
     if 'postgresql_databases' in config:
         run_postgresql_command(command.format(id=1), config, use_restore_options)
+
     if 'mariadb_databases' in config:
         run_mariadb_command(command.format(id=2), config, use_restore_options)
+
     if 'mysql_databases' in config:
         run_mariadb_command(command.format(id=3), config, use_restore_options, binary_name='mysql')
+
     if 'mongodb_databases' in config:
         get_mongodb_database_client(config, use_restore_options)['test4'].drop()
+
     if 'sqlite_databases' in config:
         run_sqlite_command(command.format(id=5), config, use_restore_options)
 
@@ -334,10 +342,13 @@ def select_test_tables(config, use_restore_options=False):
 
     if 'postgresql_databases' in config:
         run_postgresql_command(command.format(id=1), config, use_restore_options)
+
     if 'mariadb_databases' in config:
         run_mariadb_command(command.format(id=2), config, use_restore_options)
+
     if 'mysql_databases' in config:
         run_mariadb_command(command.format(id=3), config, use_restore_options, binary_name='mysql')
+
     if 'mongodb_databases' in config:
         assert (
             get_mongodb_database_client(config, use_restore_options)['test4'].count_documents(
@@ -345,6 +356,7 @@ def select_test_tables(config, use_restore_options=False):
             )
             > 0
         )
+
     if 'sqlite_databases' in config:
         run_sqlite_command(command.format(id=5), config, use_restore_options)