
do not return the rc from Archiver methods

this is not needed and getting rid of it makes
the code / behaviour simpler to understand:

if a fatal error is detected, we throw an exception.

if we encounter something warning worthy, we emit and collect the warning.

in a few cases, we directly call set_ec to set the
exit code as needed, e.g. if passing it through
from a subprocess.

also:
- get rid of Archiver.exit_code
- assert that the return value of archiver methods is None
- fix a print_warning call to use the correct formatting method
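
For illustration, here is a minimal, self-contained sketch of this convention. The names (set_ec, print_warning, CommandError) follow the diff below, but the simplified bodies and the do_example() function are assumptions for illustration, not borg's actual code:

    EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR = 0, 1, 2

    exit_code = EXIT_SUCCESS  # collected module-globally, as in borg.helpers


    class CommandError(Exception):
        """Fatal problem: raised; the top-level handler maps it to an error exit code (2)."""


    def set_ec(ec):
        """Remember the worst exit code seen so far (higher wins)."""
        global exit_code
        exit_code = max(exit_code, ec)


    def print_warning(msg, *args):
        """Emit a warning and collect EXIT_WARNING for the final exit code."""
        print('warning:', msg.format(*args))
        set_ec(EXIT_WARNING)


    def do_example(path, subprocess_rc):
        if path is None:
            raise CommandError('no input given')                    # fatal: raise an exception
        if subprocess_rc != 0:
            print_warning('subprocess returned {}', subprocess_rc)  # warning: emit and collect
            set_ec(subprocess_rc)                                    # or pass the rc through directly
        # no return value: archiver methods now return None
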
Thomas Waldmann, 1 year ago
commit 4bb42d2a00
4 changed files with 57 additions and 88 deletions
  1. src/borg/archiver.py (+29 -78)
  2. src/borg/helpers/__init__.py (+1 -2)
  3. src/borg/helpers/fs.py (+5 -2)
  4. src/borg/testsuite/archiver.py (+22 -6)

+ 29 - 78
src/borg/archiver.py

@@ -236,7 +236,6 @@ class Highlander(argparse.Action):
 class Archiver:

     def __init__(self, lock_wait=None, prog=None):
-        self.exit_code = EXIT_SUCCESS
         self.lock_wait = lock_wait
         self.prog = prog
         self.last_checkpoint = time.monotonic()
@@ -288,7 +287,6 @@ class Archiver:
             append_only=args.append_only,
             storage_quota=args.storage_quota,
         ).serve()
-        return EXIT_SUCCESS

     @with_repository(create=True, exclusive=True, manifest=False)
     def do_init(self, args, repository):
@@ -299,7 +297,7 @@ class Archiver:
             key = key_creator(repository, args)
         except (EOFError, KeyboardInterrupt):
             repository.destroy()
-            return EXIT_WARNING
+            raise CancelledByUser()
         manifest = Manifest(key, repository)
         manifest.key = key
         manifest.write()
@@ -327,7 +325,6 @@ class Archiver:
                 'If you used a repokey mode, the key is stored in the repo, but you should back it up separately.\n'
                 'Use "borg key export" to export the key, optionally in printable format.\n'
                 'Write down the passphrase. Store both at safe place(s).\n')
-        return self.exit_code

     @with_repository(exclusive=True, manifest=False)
     def do_check(self, args, repository):
@@ -355,15 +352,16 @@ class Archiver:
             raise CommandError("--repository-only is required for --max-duration support.")
         if not args.archives_only:
             if not repository.check(repair=args.repair, save_space=args.save_space, max_duration=args.max_duration):
-                return EXIT_WARNING
+                set_ec(EXIT_WARNING)
+                return
         if args.prefix is not None:
             args.glob_archives = args.prefix + '*'
         if not args.repo_only and not ArchiveChecker().check(
                 repository, repair=args.repair, archive=args.location.archive,
                 first=args.first, last=args.last, sort_by=args.sort_by or 'ts', glob=args.glob_archives,
                 verify_data=args.verify_data, save_space=args.save_space):
-            return EXIT_WARNING
-        return EXIT_SUCCESS
+            set_ec(EXIT_WARNING)
+            return

     @with_repository(compatibility=(Manifest.Operation.CHECK,))
     def do_change_passphrase(self, args, repository, manifest, key):
@@ -375,7 +373,6 @@ class Archiver:
         if hasattr(key, 'find_key'):
             # print key location to make backing it up easier
             logger.info('Key location: %s', key.find_key())
-        return EXIT_SUCCESS

     @with_repository(lock=False, exclusive=False, manifest=False, cache=False)
     def do_key_export(self, args, repository):
@@ -392,7 +389,6 @@ class Archiver:
                     manager.export(args.path)
             except IsADirectoryError:
                 raise CommandError(f"'{args.path}' must be a file, not a directory")
-        return EXIT_SUCCESS

     @with_repository(lock=False, exclusive=False, manifest=False, cache=False)
     def do_key_import(self, args, repository):
@@ -408,7 +404,6 @@ class Archiver:
             if args.path != '-' and not os.path.exists(args.path):
                 raise CommandError("input file does not exist: " + args.path)
             manager.import_keyfile(args)
-        return EXIT_SUCCESS

     @with_repository(manifest=False)
     def do_migrate_to_repokey(self, args, repository):
@@ -424,7 +419,6 @@ class Archiver:
         key_new.chunk_seed = key_old.chunk_seed
         key_new.change_passphrase()  # option to change key protection passphrase, save
         logger.info('Key updated')
-        return EXIT_SUCCESS

     def do_benchmark_crud(self, args):
         """Benchmark Create, Read, Update, Delete for archives."""
@@ -433,30 +427,30 @@ class Archiver:
             compression = '--compression=none'
             # measure create perf (without files cache to always have it chunking)
             t_start = time.monotonic()
-            rc = self.do_create(self.parse_args(['create', compression, '--files-cache=disabled', archive + '1', path]))
+            rc = get_ec(self.do_create(self.parse_args(['create', compression, '--files-cache=disabled', archive + '1', path])))
             t_end = time.monotonic()
             dt_create = t_end - t_start
             assert rc == 0
             # now build files cache
-            rc1 = self.do_create(self.parse_args(['create', compression, archive + '2', path]))
-            rc2 = self.do_delete(self.parse_args(['delete', archive + '2']))
+            rc1 = get_ec(self.do_create(self.parse_args(['create', compression, archive + '2', path])))
+            rc2 = get_ec(self.do_delete(self.parse_args(['delete', archive + '2'])))
             assert rc1 == rc2 == 0
             # measure a no-change update (archive1 is still present)
             t_start = time.monotonic()
-            rc1 = self.do_create(self.parse_args(['create', compression, archive + '3', path]))
+            rc1 = get_ec(self.do_create(self.parse_args(['create', compression, archive + '3', path])))
             t_end = time.monotonic()
             dt_update = t_end - t_start
-            rc2 = self.do_delete(self.parse_args(['delete', archive + '3']))
+            rc2 = get_ec(self.do_delete(self.parse_args(['delete', archive + '3'])))
             assert rc1 == rc2 == 0
             # measure extraction (dry-run: without writing result to disk)
             t_start = time.monotonic()
-            rc = self.do_extract(self.parse_args(['extract', '--dry-run', archive + '1']))
+            rc = get_ec(self.do_extract(self.parse_args(['extract', '--dry-run', archive + '1'])))
             t_end = time.monotonic()
             dt_extract = t_end - t_start
             assert rc == 0
             # measure archive deletion (of LAST present archive with the data)
             t_start = time.monotonic()
-            rc = self.do_delete(self.parse_args(['delete', archive + '1']))
+            rc = get_ec(self.do_delete(self.parse_args(['delete', archive + '1'])))
             t_end = time.monotonic()
             dt_delete = t_end - t_start
             assert rc == 0
@@ -504,8 +498,6 @@ class Archiver:
             print(fmt % ('U', msg, total_size_MB / dt_update, count, file_size_formatted, content, dt_update))
             print(fmt % ('D', msg, total_size_MB / dt_delete, count, file_size_formatted, content, dt_delete))

-        return 0
-
     @with_repository(fake='dry_run', exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     def do_create(self, args, repository, manifest=None, key=None):
         """Create new archive"""
@@ -670,7 +662,6 @@ class Archiver:
                 create_inner(archive, cache, fso)
         else:
             create_inner(None, None, None)
-        return self.exit_code

     def _process_any(self, *, path, parent_fd, name, st, fso, cache, read_special, dry_run):
         """
@@ -931,7 +922,6 @@ class Archiver:
         if pi:
             # clear progress output
             pi.finish()
-        return self.exit_code

     @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
@@ -965,8 +955,6 @@ class Archiver:
         with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=False) as _stream:
             self._export_tar(args, archive, _stream)

-        return self.exit_code
-
     def _export_tar(self, args, archive, tarstream):
         matcher = self.build_matcher(args.patterns, args.paths)

@@ -1102,7 +1090,6 @@ class Archiver:

         for pattern in matcher.get_unmatched_include_patterns():
             self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
-        return self.exit_code

     @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
@@ -1152,8 +1139,6 @@ class Archiver:
         for pattern in matcher.get_unmatched_include_patterns():
             self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))

-        return self.exit_code
-
     @with_repository(exclusive=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
     @with_archive
     def do_rename(self, args, repository, manifest, key, cache, archive):
@@ -1162,7 +1147,6 @@ class Archiver:
         manifest.write()
         repository.commit(compact=False)
         cache.commit()
-        return self.exit_code

     def maybe_checkpoint(self, *, checkpoint_func, checkpoint_interval):
         checkpointed = False
@@ -1187,9 +1171,9 @@ class Archiver:
         if archive_filter_specified and explicit_archives_specified:
             raise CommandError('Mixing archive filters and explicitly named archives is not supported.')
         if archive_filter_specified or explicit_archives_specified:
-            return self._delete_archives(args, repository)
+            self._delete_archives(args, repository)
         else:
-            return self._delete_repository(args, repository)
+            self._delete_repository(args, repository)

     def _delete_archives(self, args, repository):
         """Delete archives"""
@@ -1206,7 +1190,7 @@ class Archiver:
             args.consider_checkpoints = True
             archive_names = tuple(x.name for x in manifest.archives.list_considering(args))
             if not archive_names:
-                return self.exit_code
+                return

         if args.forced == 2:
             deleted = False
@@ -1231,7 +1215,7 @@ class Archiver:
                 self.print_warning('Done. Run "borg check --repair" to clean up the mess.', wc=None)
             else:
                 self.print_warning('Aborted.', wc=None)
-            return self.exit_code
+            return

         stats = Statistics(iec=args.iec)
         with Cache(repository, key, manifest, progress=args.progress, lock_wait=self.lock_wait, iec=args.iec) as cache:
@@ -1250,7 +1234,7 @@ class Archiver:
                 try:
                     archive_info = manifest.archives[archive_name]
                 except KeyError:
-                    self.print_warning(msg_not_found, archive_name, i, len(archive_names))
+                    self.print_warning(msg_not_found, archive_name, i, len(archive_names), wt="curly")
                 else:
                     if self.output_list:
                         logger_list.info(msg_delete.format(format_archive(archive_info), i, len(archive_names)))
@@ -1274,8 +1258,6 @@ class Archiver:
                           str(cache),
                           DASHES, logger=logging.getLogger('borg.output.stats'))

-        return self.exit_code
-
     def _delete_repository(self, args, repository):
         """Delete a repository"""
         dry_run = args.dry_run
@@ -1333,7 +1315,6 @@ class Archiver:
             logger.info("Cache deleted.")
         else:
             logger.info("Would delete cache.")
-        return self.exit_code

     def do_mount(self, args):
         """Mount archive or an entire repository as a FUSE filesystem"""
@@ -1346,7 +1327,7 @@ class Archiver:
         if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
             raise RTError('%s: Mount point must be a writable directory' % args.mountpoint)

-        return self._do_mount(args)
+        self._do_mount(args)

     @with_repository(compatibility=(Manifest.Operation.READ,))
     def _do_mount(self, args, repository, manifest, key):
@@ -1360,11 +1341,10 @@ class Archiver:
             except RuntimeError:
                 # Relevant error message already printed to stderr by FUSE
                 raise RTError("FUSE mount failed")
-        return self.exit_code

     def do_umount(self, args):
         """un-mount the FUSE filesystem"""
-        return umount(args.mountpoint)
+        umount(args.mountpoint)

     @with_repository(compatibility=(Manifest.Operation.READ,))
     def do_list(self, args, repository, manifest, key):
@@ -1372,11 +1352,11 @@ class Archiver:
         if args.location.archive:
             if args.json:
                 raise CommandError('The --json option is only valid for listing archives, not archive contents.')
-            return self._list_archive(args, repository, manifest, key)
+            self._list_archive(args, repository, manifest, key)
         else:
             if args.json_lines:
                 raise CommandError('The --json-lines option is only valid for listing archive contents, not archives.')
-            return self._list_repository(args, repository, manifest, key)
+            self._list_repository(args, repository, manifest, key)

     def _list_archive(self, args, repository, manifest, key):
         matcher = self.build_matcher(args.patterns, args.paths)
@@ -1402,8 +1382,6 @@ class Archiver:
         else:
             _list_inner(cache=None)

-        return self.exit_code
-
     def _list_repository(self, args, repository, manifest, key):
         if args.format is not None:
             format = args.format
@@ -1426,15 +1404,13 @@ class Archiver:
                 'archives': output_data
             }))

-        return self.exit_code
-
     @with_repository(cache=True, compatibility=(Manifest.Operation.READ,))
     def do_info(self, args, repository, manifest, key, cache):
         """Show archive details such as disk space used"""
         if any((args.location.archive, args.first, args.last, args.prefix is not None, args.glob_archives)):
-            return self._info_archives(args, repository, manifest, key, cache)
+            self._info_archives(args, repository, manifest, key, cache)
         else:
-            return self._info_repository(args, repository, manifest, key, cache)
+            self._info_repository(args, repository, manifest, key, cache)

     def _info_archives(self, args, repository, manifest, key, cache):
         def format_cmdline(cmdline):
@@ -1474,8 +1450,6 @@ class Archiver:
                 This archive:   {stats[original_size]:>20s} {stats[compressed_size]:>20s} {stats[deduplicated_size]:>20s}
                 {cache}
                 """).strip().format(cache=cache, **info))
-            if self.exit_code:
-                break
             if not args.json and len(archive_names) - i:
                 print()

@@ -1483,7 +1457,6 @@ class Archiver:
             json_print(basic_json_data(manifest, cache=cache, extra={
                 'archives': output_data,
             }))
-        return self.exit_code

     def _info_repository(self, args, repository, manifest, key, cache):
         info = basic_json_data(manifest, cache=cache, extra={
@@ -1515,7 +1488,6 @@ class Archiver:
             print(DASHES)
             print(STATS_HEADER)
             print(str(cache))
-        return self.exit_code

     @with_repository(exclusive=True, compatibility=(Manifest.Operation.DELETE,))
     def do_prune(self, args, repository, manifest, key):
@@ -1612,7 +1584,6 @@ class Archiver:
                           stats.summary.format(label='Deleted data:', stats=stats),
                           str(cache),
                           DASHES, logger=logging.getLogger('borg.output.stats'))
-        return self.exit_code

     @with_repository(fake=('tam', 'disable_tam', 'archives_tam'), invert_fake=True, manifest=False, exclusive=True)
     def do_upgrade(self, args, repository, manifest=None, key=None):
@@ -1687,7 +1658,6 @@ class Archiver:
                 repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
             except NotImplementedError as e:
                 print("warning: %s" % e)
-        return self.exit_code

     @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.CHECK,))
     def do_recreate(self, args, repository, manifest, key, cache):
@@ -1728,7 +1698,6 @@ class Archiver:
             manifest.write()
             repository.commit(compact=False)
             cache.commit()
-        return self.exit_code

     @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     def do_import_tar(self, args, repository, manifest, key, cache):
@@ -1744,8 +1713,6 @@ class Archiver:
         with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=True) as _stream:
             self._import_tar(args, repository, manifest, key, cache, _stream)

-        return self.exit_code
-
     def _import_tar(self, args, repository, manifest, key, cache, tarstream):
         t0 = utcnow()
         t0_monotonic = time.monotonic()
@@ -1836,7 +1803,8 @@ class Archiver:
         env = prepare_subprocess_env(system=True)
         try:
             # we exit with the return code we get from the subprocess
-            return subprocess.call([args.command] + args.args, env=env)
+            rc = subprocess.call([args.command] + args.args, env=env)
+            set_ec(rc)
         finally:
             # we need to commit the "no change" operation we did to the manifest
             # because it created a new segment file in the repository. if we would
@@ -1854,7 +1822,6 @@ class Archiver:
         repository.put(Manifest.MANIFEST_ID, data)
         threshold = args.threshold / 100
         repository.commit(compact=True, threshold=threshold, cleanup_commits=args.cleanup_commits)
-        return EXIT_SUCCESS

     @with_repository(exclusive=True, manifest=False)
     def do_config(self, args, repository):
@@ -1973,9 +1940,7 @@ class Archiver:
                 try:
                     print(config.get(section, name))
                 except (configparser.NoOptionError, configparser.NoSectionError) as e:
-                    print(e, file=sys.stderr)
-                    return EXIT_WARNING
-            return EXIT_SUCCESS
+                    raise Error(e)
         finally:
             if args.cache:
                 cache.close()
@@ -1987,7 +1952,6 @@ class Archiver:
         # Additional debug information
         print('CRC implementation:', crc32.__name__)
         print('Process ID:', get_process_id())
-        return EXIT_SUCCESS

     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
@@ -2001,7 +1965,6 @@ class Archiver:
             with open(filename, 'wb') as fd:
                 fd.write(data)
         print('Done.')
-        return EXIT_SUCCESS

     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive(self, args, repository, manifest, key):
@@ -2051,7 +2014,6 @@

         with dash_open(args.path, 'w') as fd:
             output(fd)
-        return EXIT_SUCCESS

     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_manifest(self, args, repository, manifest, key):
@@ -2063,7 +2025,6 @@

         with dash_open(args.path, 'w') as fd:
             json.dump(meta, fd, indent=4)
-        return EXIT_SUCCESS

     @with_repository(manifest=False)
     def do_debug_dump_repo_objs(self, args, repository):
@@ -2119,7 +2080,6 @@ class Archiver:
                     decrypt_dump(i, id, cdata)
                     i += 1
         print('Done.')
-        return EXIT_SUCCESS

     @with_repository(manifest=False)
     def do_debug_search_repo_objs(self, args, repository):
@@ -2185,7 +2145,6 @@ class Archiver:
                 if i % 10000 == 0:
                     print('%d objects processed.' % i)
         print('Done.')
-        return EXIT_SUCCESS

     @with_repository(manifest=False)
     def do_debug_get_obj(self, args, repository):
@@ -2204,7 +2163,6 @@ class Archiver:
         with open(args.path, "wb") as f:
             f.write(data)
         print("object %s fetched." % hex_id)
-        return EXIT_SUCCESS

     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_id_hash(self, args, repository, manifest, key):
@@ -2213,7 +2171,6 @@ class Archiver:
             data = f.read()
         id = key.id_hash(data)
         print(id.hex())
-        return EXIT_SUCCESS

     @with_repository(manifest=False, exclusive=True)
     def do_debug_put_obj(self, args, repository):
@@ -2230,7 +2187,6 @@ class Archiver:
         repository.put(id, data)
         print("object %s put." % hex_id)
         repository.commit(compact=False)
-        return EXIT_SUCCESS

     @with_repository(manifest=False, exclusive=True)
     def do_debug_delete_obj(self, args, repository):
@@ -2251,7 +2207,6 @@ class Archiver:
         if modified:
             repository.commit(compact=False)
         print('Done.')
-        return EXIT_SUCCESS

     @with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_refcount_obj(self, args, repository, manifest, key, cache):
@@ -2267,7 +2222,6 @@ class Archiver:
                     print("object %s has %d referrers [info from chunks cache]." % (hex_id, refcount))
                 except KeyError:
                     print("object %s not found [info from chunks cache]." % hex_id)
-        return EXIT_SUCCESS

     @with_repository(manifest=False, exclusive=True)
     def do_debug_dump_hints(self, args, repository):
@@ -2285,21 +2239,18 @@ class Archiver:
                 json.dump(hints, fd, indent=4)
         finally:
             repository.rollback()
-        return EXIT_SUCCESS

     def do_debug_convert_profile(self, args):
         """convert Borg profile to Python profile"""
         import marshal
         with args.output, args.input:
             marshal.dump(msgpack.unpack(args.input, use_list=False, raw=False), args.output)
-        return EXIT_SUCCESS

     @with_repository(lock=False, manifest=False)
     def do_break_lock(self, args, repository):
         """Break the repository lock (e.g. in case it was left by a dead borg."""
         repository.break_lock()
         Cache.break_lock(repository)
-        return self.exit_code

     helptext = collections.OrderedDict()
     helptext['patterns'] = textwrap.dedent('''
@@ -2730,12 +2681,10 @@ class Archiver:
             msg_lines += ['    Commands: %s' % ', '.join(sorted(commands.keys()))]
             msg_lines += ['    Topics: %s' % ', '.join(sorted(self.helptext.keys()))]
             parser.error('\n'.join(msg_lines))
-        return self.exit_code

     def do_subcommand_help(self, parser, args):
         """display infos about subcommand"""
         parser.print_help()
-        return EXIT_SUCCESS

     do_maincommand_help = do_subcommand_help

@@ -5241,7 +5190,9 @@ class Archiver:
                         # it compatible (see above).
                         msgpack.pack(profiler.stats, fd, use_bin_type=True)
         else:
-            return get_ec(func(args))
+            rc = func(args)
+            assert rc is None
+            return get_ec(rc)


 def sig_info_handler(sig_no, stack):  # pragma: no cover

+ 1 - 2
src/borg/helpers/__init__.py

@@ -49,8 +49,7 @@ def add_warning(msg, *args, **kwargs):

 """
 The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
-warning or error occurred during their operation. This is different from archiver.exit_code, which is only accessible
-from the archiver object.
+warning or error occurred during their operation.

 Note: keep this in helpers/__init__.py as the code expects to be able to assign to helpers.exit_code.
 """

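For orientation, here is a minimal sketch of how such a module-global collector could look; the set_ec()/get_ec() names follow the calls in the diff, while the bodies below are assumptions, not borg's actual implementation:

    EXIT_SUCCESS, EXIT_WARNING = 0, 1

    exit_code = EXIT_SUCCESS  # the module attribute other code assigns to / bumps


    def set_ec(ec):
        """Raise the collected exit code to ec if ec is worse (i.e. higher)."""
        global exit_code
        exit_code = max(exit_code, ec)
        return exit_code


    def get_ec(rc=None):
        """Fold an explicit rc (or None, for archiver methods) into the collected code."""
        if rc is not None:
            set_ec(rc)
        return exit_code

Under this model, rc = get_ec(self.do_create(...)) in the benchmark code above still works although do_create() now returns None, and warnings collected anywhere end up in the final exit code.
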
+ 5 - 2
src/borg/helpers/fs.py

@@ -332,11 +332,14 @@ def os_stat(*, path=None, parent_fd=None, name=None, follow_symlinks=False):


 def umount(mountpoint):
+    from . import set_ec
+
     env = prepare_subprocess_env(system=True)
     try:
-        return subprocess.call(['fusermount', '-u', mountpoint], env=env)
+        rc = subprocess.call(['fusermount', '-u', mountpoint], env=env)
     except FileNotFoundError:
-        return subprocess.call(['umount', mountpoint], env=env)
+        rc = subprocess.call(['umount', mountpoint], env=env)
+    set_ec(rc)


 # below is a slightly modified tempfile.mkstemp that has an additional mode parameter.

+ 22 - 6
src/borg/testsuite/archiver.py

@@ -96,7 +96,6 @@ def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b'', binary_outpu
             if archiver is None:
                 archiver = Archiver()
             archiver.prerun_checks = lambda *args: None
-            archiver.exit_code = EXIT_SUCCESS
             helpers.exit_code = EXIT_SUCCESS
             helpers.warnings_list = []
             try:
@@ -2979,7 +2978,11 @@ class ArchiverTestCase(ArchiverTestCaseBase):
             raise EOFError

         with patch.object(KeyfileKeyBase, 'create', raise_eof):
-            self.cmd('init', '--encryption=repokey', self.repository_location, exit_code=1)
+            if self.FORK_DEFAULT:
+                self.cmd('init', '--encryption=repokey', self.repository_location, exit_code=2)
+            else:
+                with pytest.raises(CancelledByUser):
+                    self.cmd('init', '--encryption=repokey', self.repository_location)
         assert not os.path.exists(self.repository_location)

     def test_init_requires_encryption_option(self):
@@ -3516,8 +3519,13 @@ id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
         self.assert_in('id', output)
         self.assert_not_in('last_segment_checked', output)

-        output = self.cmd('config', self.repository_location, 'last_segment_checked', exit_code=1)
-        self.assert_in('No option ', output)
+        if self.FORK_DEFAULT:
+            output = self.cmd('config', self.repository_location, 'last_segment_checked', exit_code=2)
+            self.assert_in('No option ', output)
+        else:
+            with pytest.raises(Error):
+                self.cmd('config', self.repository_location, 'last_segment_checked')
+
         self.cmd('config', self.repository_location, 'last_segment_checked', '123')
         output = self.cmd('config', self.repository_location, 'last_segment_checked')
         assert output == '123' + '\n'
@@ -3535,7 +3543,11 @@ id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
             output = self.cmd('config', self.repository_location, cfg_key)
             assert output == cfg_value + '\n'
             self.cmd('config', '--delete', self.repository_location, cfg_key)
-            self.cmd('config', self.repository_location, cfg_key, exit_code=1)
+            if self.FORK_DEFAULT:
+                self.cmd('config', self.repository_location, cfg_key, exit_code=2)
+            else:
+                with pytest.raises(Error):
+                    self.cmd('config', self.repository_location, cfg_key)

         self.cmd('config', '--list', '--delete', self.repository_location, exit_code=2)
         if self.FORK_DEFAULT:
@@ -3543,7 +3555,11 @@ id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
         else:
             with pytest.raises(CommandError):
                 self.cmd('config', self.repository_location)
-        self.cmd('config', self.repository_location, 'invalid-option', exit_code=1)
+        if self.FORK_DEFAULT:
+            self.cmd('config', self.repository_location, 'invalid-option', exit_code=2)
+        else:
+            with pytest.raises(Error):
+                self.cmd('config', self.repository_location, 'invalid-option')

     requires_gnutar = pytest.mark.skipif(not have_gnutar(), reason='GNU tar must be installed for this test.')
     requires_gzip = pytest.mark.skipif(not shutil.which('gzip'), reason='gzip must be installed for this test.')