Merge pull request #7976 from ThomasWaldmann/new-rc-1.4

optional more specific return codes
TW 1 year ago
commit de5bfdbd7a

+ 152 - 66
docs/internals/frontends.rst

@@ -538,92 +538,178 @@ Message IDs are strings that essentially give a log message or operation a name,
 full text, since texts change more frequently. Message IDs are unambiguous and reduce the need to parse
 log messages.

-Assigned message IDs are:
+Assigned message IDs and related error RCs (exit codes) are:

 .. See scripts/errorlist.py; this is slightly edited.

 Errors
-    Archive.AlreadyExists
+    Error rc: 2 traceback: no
+        Error: {}
+    ErrorWithTraceback rc: 2 traceback: yes
+        Error: {}
+
+    Buffer.MemoryLimitExceeded rc: 2 traceback: no
+        Requested buffer size {} is above the limit of {}.
+    EfficientCollectionQueue.SizeUnderflow rc: 2 traceback: no
+        Could not pop_front first {} elements, collection only has {} elements..
+    RTError rc: 2 traceback: no
+        Runtime Error: {}
+
+    CancelledByUser rc: 3 traceback: no
+        Cancelled by user.
+
+    CommandError rc: 4 traceback: no
+        Command Error: {}
+    PlaceholderError rc: 5 traceback: no
+        Formatting Error: "{}".format({}): {}({})
+    InvalidPlaceholder rc: 6 traceback: no
+        Invalid placeholder "{}" in string: {}
+
+    Repository.AlreadyExists rc: 10 traceback: no
+        A repository already exists at {}.
+    Repository.AtticRepository rc: 11 traceback: no
+        Attic repository detected. Please run "borg upgrade {}".
+    Repository.CheckNeeded rc: 12 traceback: yes
+        Inconsistency detected. Please run "borg check {}".
+    Repository.DoesNotExist rc: 13 traceback: no
+        Repository {} does not exist.
+    Repository.InsufficientFreeSpaceError rc: 14 traceback: no
+        Insufficient free space to complete transaction (required: {}, available: {}).
+    Repository.InvalidRepository rc: 15 traceback: no
+        {} is not a valid repository. Check repo config.
+    Repository.InvalidRepositoryConfig rc: 16 traceback: no
+        {} does not have a valid configuration. Check repo config [{}].
+    Repository.ObjectNotFound rc: 17 traceback: yes
+        Object with key {} not found in repository {}.
+    Repository.ParentPathDoesNotExist rc: 18 traceback: no
+        The parent path of the repo directory [{}] does not exist.
+    Repository.PathAlreadyExists rc: 19 traceback: no
+        There is already something at {}.
+    Repository.StorageQuotaExceeded rc: 20 traceback: no
+        The storage quota ({}) has been exceeded ({}). Try deleting some archives.
+
+    MandatoryFeatureUnsupported rc: 25 traceback: no
+        Unsupported repository feature(s) {}. A newer version of borg is required to access this repository.
+    NoManifestError rc: 26 traceback: no
+        Repository has no manifest.
+    UnsupportedManifestError rc: 27 traceback: no
+        Unsupported manifest envelope. A newer version is required to access this repository.
+
+    Archive.AlreadyExists rc: 30 traceback: no
         Archive {} already exists
-    Archive.DoesNotExist
+    Archive.DoesNotExist rc: 31 traceback: no
         Archive {} does not exist
-    Archive.IncompatibleFilesystemEncodingError
+    Archive.IncompatibleFilesystemEncodingError rc: 32 traceback: no
         Failed to encode filename "{}" into file system encoding "{}". Consider configuring the LANG environment variable.
-    Cache.CacheInitAbortedError
-        Cache initialization aborted
-    Cache.EncryptionMethodMismatch
-        Repository encryption method changed since last access, refusing to continue
-    Cache.RepositoryAccessAborted
-        Repository access aborted
-    Cache.RepositoryIDNotUnique
-        Cache is newer than repository - do you have multiple, independently updated repos with same ID?
-    Cache.RepositoryReplay
-        Cache is newer than repository - this is either an attack or unsafe (multiple repos with same ID)
-    Buffer.MemoryLimitExceeded
-        Requested buffer size {} is above the limit of {}.
-    ExtensionModuleError
-        The Borg binary extension modules do not seem to be properly installed
-    IntegrityError
-        Data integrity error: {}
-    NoManifestError
-        Repository has no manifest.
-    PlaceholderError
-        Formatting Error: "{}".format({}): {}({})
-    KeyfileInvalidError
+
+    KeyfileInvalidError rc: 40 traceback: no
         Invalid key file for repository {} found in {}.
-    KeyfileMismatchError
+    KeyfileMismatchError rc: 41 traceback: no
         Mismatch between repository {} and key file {}.
-    KeyfileNotFoundError
+    KeyfileNotFoundError rc: 42 traceback: no
         No key file for repository {} found in {}.
-    PassphraseWrong
-        passphrase supplied in BORG_PASSPHRASE is incorrect
-    PasswordRetriesExceeded
-        exceeded the maximum password retries
-    RepoKeyNotFoundError
-        No key entry found in the config of repository {}.
-    UnsupportedManifestError
-        Unsupported manifest envelope. A newer version is required to access this repository.
-    UnsupportedPayloadError
-        Unsupported payload type {}. A newer version is required to access this repository.
-    NotABorgKeyFile
+    NotABorgKeyFile rc: 43 traceback: no
         This file is not a borg key backup, aborting.
-    RepoIdMismatch
+    RepoKeyNotFoundError rc: 44 traceback: no
+        No key entry found in the config of repository {}.
+    RepoIdMismatch rc: 45 traceback: no
         This key backup seems to be for a different backup repository, aborting.
-    UnencryptedRepo
-        Keymanagement not available for unencrypted repositories.
-    UnknownKeyType
-        Keytype {0} is unknown.
-    LockError
+    UnencryptedRepo rc: 46 traceback: no
+        Key management not available for unencrypted repositories.
+    UnknownKeyType rc: 47 traceback: no
+        Key type {0} is unknown.
+    UnsupportedPayloadError rc: 48 traceback: no
+        Unsupported payload type {}. A newer version is required to access this repository.
+
+    NoPassphraseFailure rc: 50 traceback: no
+        can not acquire a passphrase: {}
+    PasscommandFailure rc: 51 traceback: no
+        passcommand supplied in BORG_PASSCOMMAND failed: {}
+    PassphraseWrong rc: 52 traceback: no
+        passphrase supplied in BORG_PASSPHRASE, by BORG_PASSCOMMAND or via BORG_PASSPHRASE_FD is incorrect.
+    PasswordRetriesExceeded rc: 53 traceback: no
+        exceeded the maximum password retries
+
+    Cache.CacheInitAbortedError rc: 60 traceback: no
+        Cache initialization aborted
+    Cache.EncryptionMethodMismatch rc: 61 traceback: no
+        Repository encryption method changed since last access, refusing to continue
+    Cache.RepositoryAccessAborted rc: 62 traceback: no
+        Repository access aborted
+    Cache.RepositoryIDNotUnique rc: 63 traceback: no
+        Cache is newer than repository - do you have multiple, independently updated repos with same ID?
+    Cache.RepositoryReplay rc: 64 traceback: no
+        Cache, or information obtained from the security directory is newer than repository - this is either an attack or unsafe (multiple repos with same ID)
+
+    LockError rc: 70 traceback: no
         Failed to acquire the lock {}.
-    LockErrorT
+    LockErrorT rc: 71 traceback: yes
         Failed to acquire the lock {}.
-    ConnectionClosed
+    LockFailed rc: 72 traceback: yes
+        Failed to create/acquire the lock {} ({}).
+    LockTimeout rc: 73 traceback: no
+        Failed to create/acquire the lock {} (timeout).
+    NotLocked rc: 74 traceback: yes
+        Failed to release the lock {} (was not locked).
+    NotMyLock rc: 75 traceback: yes
+        Failed to release the lock {} (was/is locked, but not by me).
+
+    ConnectionClosed rc: 80 traceback: no
         Connection closed by remote host
-    InvalidRPCMethod
+    ConnectionClosedWithHint rc: 81 traceback: no
+        Connection closed by remote host. {}
+    InvalidRPCMethod rc: 82 traceback: no
         RPC method {} is not valid
-    PathNotAllowed
-        Repository path not allowed
-    RemoteRepository.RPCServerOutdated
+    PathNotAllowed rc: 83 traceback: no
+        Repository path not allowed: {}
+    RemoteRepository.RPCServerOutdated rc: 84 traceback: no
         Borg server is too old for {}. Required version {}
-    UnexpectedRPCDataFormatFromClient
+    UnexpectedRPCDataFormatFromClient rc: 85 traceback: no
         Borg {}: Got unexpected RPC data format from client.
-    UnexpectedRPCDataFormatFromServer
+    UnexpectedRPCDataFormatFromServer rc: 86 traceback: no
         Got unexpected RPC data format from server:
         {}
-    Repository.AlreadyExists
-        Repository {} already exists.
-    Repository.CheckNeeded
-        Inconsistency detected. Please run "borg check {}".
-    Repository.DoesNotExist
-        Repository {} does not exist.
-    Repository.InsufficientFreeSpaceError
-        Insufficient free space to complete transaction (required: {}, available: {}).
-    Repository.InvalidRepository
-        {} is not a valid repository. Check repo config.
-    Repository.AtticRepository
-        Attic repository detected. Please run "borg upgrade {}".
-    Repository.ObjectNotFound
-        Object with key {} not found in repository {}.
+
+    IntegrityError rc: 90 traceback: yes
+        Data integrity error: {}
+    FileIntegrityError rc: 91 traceback: yes
+        File failed integrity check: {}
+    DecompressionError rc: 92 traceback: yes
+        Decompression error: {}
+
+    ArchiveTAMInvalid rc: 95 traceback: yes
+        Data integrity error: {}
+    ArchiveTAMRequiredError rc: 96 traceback: yes
+        Archive '{}' is unauthenticated, but it is required for this repository.
+    TAMInvalid rc: 97 traceback: yes
+        Data integrity error: {}
+    TAMRequiredError rc: 98 traceback: yes
+        Manifest is unauthenticated, but it is required for this repository.
+    TAMUnsupportedSuiteError rc: 99 traceback: yes
+        Could not verify manifest: Unsupported suite {!r}; a newer version is needed.
+
+Warnings
+    BorgWarning rc: 1
+        Warning: {}
+    BackupWarning rc: 1
+        {}: {}
+
+    FileChangedWarning rc: 100
+        {}: file changed while we backed it up
+    IncludePatternNeverMatchedWarning rc: 101
+        Include pattern '{}' never matched.
+    BackupError rc: 102
+        {}: backup error
+    BackupRaceConditionError rc: 103
+        {}: file type or inode changed while we backed it up (race condition, skipped file)
+    BackupOSError rc: 104
+        {}: {}
+    BackupPermissionError rc: 105
+        {}: {}
+    BackupIOError rc: 106
+        {}: {}
+    BackupFileNotFoundError rc: 107
+        {}: {}

 Operations
     - cache.begin_transaction
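A note on consuming the error and warning list above: the message IDs surface to frontends via borg's ``--log-json`` output, where each ``log_message`` record carries a ``msgid`` field when one is assigned, and the related rc is what the process exits with. A minimal Python sketch (the repository path is a placeholder and error handling is reduced to the essentials):

    import json
    import subprocess

    # Run a borg command with JSON logging; stderr carries one JSON object per line.
    proc = subprocess.Popen(
        ["borg", "--log-json", "list", "/path/to/repo"],
        stderr=subprocess.PIPE, text=True,
    )
    for line in proc.stderr:
        try:
            record = json.loads(line)
        except ValueError:
            continue  # skip any non-JSON output
        if record.get("type") == "log_message":
            # msgid matches the class names listed above, e.g. "Repository.DoesNotExist"
            print(record.get("levelname"), record.get("msgid"), record.get("message"))
    rc = proc.wait()
    print("return code:", rc)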

+ 3 - 0
docs/usage/general/environment.rst.inc

@@ -35,6 +35,9 @@ General:
         Main usecase for this is to fully automate ``borg change-passphrase``.
     BORG_DISPLAY_PASSPHRASE
         When set, use the value to answer the "display the passphrase for verification" question when defining a new passphrase for encrypted repositories.
+    BORG_EXIT_CODES
+        When set to "modern", the borg process will return more specific exit codes (rc).
+        Default is "legacy" and returns rc 2 for all errors, 1 for all warnings, 0 for success.
     BORG_HOST_ID
         Borg usually computes a host id from the FQDN plus the results of ``uuid.getnode()`` (which usually returns
         a unique id based on the MAC address of the network interface. Except if that MAC happens to be all-zero - in
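As a quick illustration of the BORG_EXIT_CODES knob documented above (a hedged sketch only; repository path, archive name and source directory are placeholders):

    import os
    import subprocess

    # Opt a single borg invocation into the more specific ("modern") return codes.
    env = dict(os.environ, BORG_EXIT_CODES="modern")
    result = subprocess.run(["borg", "create", "/path/to/repo::archive-1", "/data"], env=env)
    # In modern mode a missing repository yields e.g. rc 13 (Repository.DoesNotExist)
    # instead of the legacy catch-all rc 2.
    print("borg returned rc", result.returncode)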

+ 4 - 2
docs/usage/general/return-codes.rst.inc

@@ -7,10 +7,12 @@ Borg can exit with the following return codes (rc):
 Return code Meaning
 =========== =======
 0           success (logged as INFO)
-1           warning (operation reached its normal end, but there were warnings --
+1           generic warning (operation reached its normal end, but there were warnings --
             you should check the log, logged as WARNING)
-2           error (like a fatal error, a local or remote exception, the operation
+2           generic error (like a fatal error, a local or remote exception, the operation
             did not reach its normal end, logged as ERROR)
+3..99       specific error (enabled by BORG_EXIT_CODES=modern)
+100..127    specific warning (enabled by BORG_EXIT_CODES=modern)
 128+N       killed by signal N (e.g. 137 == kill -9)
 =========== =======
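The table above partitions the rc space into generic codes, specific errors, specific warnings and signals. A small helper that classifies a return code accordingly might look like this (illustrative sketch; the example codes are taken from the list earlier in this diff):

    def classify_rc(rc: int) -> str:
        if rc == 0:
            return "success"
        if rc == 1 or 100 <= rc <= 127:
            return "warning"          # 1 = generic, 100..127 = specific (modern mode)
        if rc == 2 or 3 <= rc <= 99:
            return "error"            # 2 = generic, 3..99 = specific (modern mode)
        if rc >= 128:
            return f"killed by signal {rc - 128}"
        return "unknown"

    assert classify_rc(107) == "warning"   # BackupFileNotFoundError
    assert classify_rc(13) == "error"      # Repository.DoesNotExist
    assert classify_rc(137) == "killed by signal 9"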
 
 

+ 56 - 7
scripts/errorlist.py

@@ -1,14 +1,63 @@
 #!/usr/bin/env python3
+# this script automatically generates the error list for the docs by
+# looking at the "Error" class and its subclasses.

 from textwrap import indent

-import borg.archiver  # noqa: F401 - need import to get Error and ErrorWithTraceback subclasses.
-from borg.helpers import Error, ErrorWithTraceback
+import borg.archiver  # noqa: F401 - need import to get Error subclasses.
+from borg.constants import *  # NOQA
+from borg.helpers import Error, BackupError, BorgWarning

-classes = Error.__subclasses__() + ErrorWithTraceback.__subclasses__()

-for cls in sorted(classes, key=lambda cls: (cls.__module__, cls.__qualname__)):
-    if cls is ErrorWithTraceback:
-        continue
-    print('   ', cls.__qualname__)
+def subclasses(cls):
+    direct_subclasses = cls.__subclasses__()
+    return set(direct_subclasses) | set(s for c in direct_subclasses for s in subclasses(c))
+
+
+# 0, 1, 2 are used for success, generic warning, generic error
+# 3..99 are available for specific errors
+# 100..127 are available for specific warnings
+# 128+ are reserved for signals
+free_error_rcs = set(range(EXIT_ERROR_BASE, EXIT_WARNING_BASE))  # 3 .. 99
+free_warning_rcs = set(range(EXIT_WARNING_BASE, EXIT_SIGNAL_BASE))  # 100 .. 127
+
+# these classes map to rc 2
+generic_error_rc_classes = set()
+generic_warning_rc_classes = set()
+
+error_classes = {Error} | subclasses(Error)
+
+for cls in sorted(error_classes, key=lambda cls: (cls.__module__, cls.__qualname__)):
+    traceback = "yes" if cls.traceback else "no"
+    rc = cls.exit_mcode
+    print('   ', cls.__qualname__, 'rc:', rc, 'traceback:', traceback)
     print(indent(cls.__doc__, ' ' * 8))
+    if rc in free_error_rcs:
+        free_error_rcs.remove(rc)
+    elif rc == 2:
+        generic_error_rc_classes.add(cls.__qualname__)
+    else:  # rc != 2
+        # if we did not intentionally map this to the generic error rc, this might be an issue:
+        print(f'ERROR: {rc} is not a free/available RC, but either duplicate or invalid')
+
+print()
+print('free error RCs:', sorted(free_error_rcs))
+print('generic errors:', sorted(generic_error_rc_classes))
+
+warning_classes = {BorgWarning} | subclasses(BorgWarning) | {BackupError} | subclasses(BackupError)
+
+for cls in sorted(warning_classes, key=lambda cls: (cls.__module__, cls.__qualname__)):
+    rc = cls.exit_mcode
+    print('   ', cls.__qualname__, 'rc:', rc)
+    print(indent(cls.__doc__, ' ' * 8))
+    if rc in free_warning_rcs:
+        free_warning_rcs.remove(rc)
+    elif rc == 1:
+        generic_warning_rc_classes.add(cls.__qualname__)
+    else:  # rc != 1
+        # if we did not intentionally map this to the generic warning rc, this might be an issue:
+        print(f'ERROR: {rc} is not a free/available RC, but either duplicate or invalid')
+
+print("\n")
+print('free warning RCs:', sorted(free_warning_rcs))
+print('generic warnings:', sorted(generic_warning_rc_classes))
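The updated errorlist.py relies on each exception class carrying its message template in the docstring plus an ``exit_mcode`` class attribute. A simplified stand-in showing that pattern (not the actual borg.helpers definitions, just an illustration of what the script enumerates):

    # Simplified stand-ins for illustration; the real classes live in borg.helpers.
    class Error(Exception):
        """Error: {}"""
        exit_mcode = 2      # rc used when BORG_EXIT_CODES=modern; legacy mode maps all errors to 2
        traceback = False   # whether this error should be reported with a traceback

        def get_message(self):
            return type(self).__doc__.format(*self.args)


    class DoesNotExist(Error):
        """Archive {} does not exist"""
        exit_mcode = 31


    err = DoesNotExist("my-archive")
    print(err.get_message(), "| rc:", err.exit_mcode, "| traceback:", "yes" if err.traceback else "no")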

+ 19 - 36
src/borg/archive.py

@@ -1,3 +1,4 @@
+import errno
 import json
 import os
 import socket
@@ -25,6 +26,8 @@ from .crypto.key import key_factory, UnsupportedPayloadError
 from .compress import Compressor, CompressionSpec
 from .constants import *  # NOQA
 from .crypto.low_level import IntegrityError as IntegrityErrorBase
+from .helpers import BackupError, BackupRaceConditionError
+from .helpers import BackupOSError, BackupPermissionError, BackupFileNotFoundError, BackupIOError
 from .hashindex import ChunkIndex, ChunkIndexEntry, CacheSynchronizer
 from .helpers import Manifest
 from .helpers import hardlinkable
@@ -177,36 +180,6 @@ def is_special(mode):
     return stat.S_ISBLK(mode) or stat.S_ISCHR(mode) or stat.S_ISFIFO(mode)


-class BackupError(Exception):
-    """
-    Exception raised for non-OSError-based exceptions while accessing backup files.
-    """
-
-
-class BackupOSError(Exception):
-    """
-    Wrapper for OSError raised while accessing backup files.
-
-    Borg does different kinds of IO, and IO failures have different consequences.
-    This wrapper represents failures of input file or extraction IO.
-    These are non-critical and are only reported (exit code = 1, warning).
-
-    Any unwrapped IO error is critical and aborts execution (for example repository IO failure).
-    """
-    def __init__(self, op, os_error):
-        self.op = op
-        self.os_error = os_error
-        self.errno = os_error.errno
-        self.strerror = os_error.strerror
-        self.filename = os_error.filename
-
-    def __str__(self):
-        if self.op:
-            return f'{self.op}: {self.os_error}'
-        else:
-            return str(self.os_error)
-
-
 class BackupIO:
     op = ''

@@ -219,7 +192,14 @@ class BackupIO:

     def __exit__(self, exc_type, exc_val, exc_tb):
         if exc_type and issubclass(exc_type, OSError):
-            raise BackupOSError(self.op, exc_val) from exc_val
+            E_MAP = {
+                errno.EPERM: BackupPermissionError,
+                errno.EACCES: BackupPermissionError,
+                errno.ENOENT: BackupFileNotFoundError,
+                errno.EIO: BackupIOError,
+            }
+            e_cls = E_MAP.get(exc_val.errno, BackupOSError)
+            raise e_cls(self.op, exc_val) from exc_val


 backup_io = BackupIO()
@@ -254,10 +234,10 @@ def stat_update_check(st_old, st_curr):
     # are not duplicate in a short timeframe, this check is redundant and solved by the ino check:
     if stat.S_IFMT(st_old.st_mode) != stat.S_IFMT(st_curr.st_mode):
         # in this case, we dispatched to wrong handler - abort
-        raise BackupError('file type changed (race condition), skipping file')
+        raise BackupRaceConditionError('file type changed (race condition), skipping file')
     if st_old.st_ino != st_curr.st_ino:
         # in this case, the hardlinks-related code in create_helper has the wrong inode - abort!
-        raise BackupError('file inode changed (race condition), skipping file')
+        raise BackupRaceConditionError('file inode changed (race condition), skipping file')
     # looks ok, we are still dealing with the same thing - return current stat:
     return st_curr

@@ -424,14 +404,17 @@ def get_item_uid_gid(item, *, numeric, uid_forced=None, gid_forced=None, uid_def

 class Archive:

-    class DoesNotExist(Error):
-        """Archive {} does not exist"""
-
     class AlreadyExists(Error):
         """Archive {} already exists"""
+        exit_mcode = 30
+
+    class DoesNotExist(Error):
+        """Archive {} does not exist"""
+        exit_mcode = 31

     class IncompatibleFilesystemEncodingError(Error):
         """Failed to encode filename "{}" into file system encoding "{}". Consider configuring the LANG environment variable."""
+        exit_mcode = 32

     def __init__(self, repository, key, manifest, name, cache=None, create=False,
                  checkpoint_interval=1800, numeric_ids=False, noatime=False, noctime=False,
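The archiver changes further down depend on the exception hierarchy hinted at by the new imports above: the OSError wrappers now derive from BackupError, so a single "except BackupError" replaces the old "except (BackupOSError, BackupError)". Illustrative stand-ins only, not the actual borg.helpers definitions:

    # Sketch of the hierarchy assumed by this diff (class names taken from the imports above).
    class BackupError(Exception):
        """{}: backup error"""

    class BackupRaceConditionError(BackupError):
        """{}: file type or inode changed while we backed it up (race condition, skipped file)"""

    class BackupOSError(BackupError):
        """{}: {}"""
        def __init__(self, op, os_error):
            self.op = op
            self.os_error = os_error

        def __str__(self):
            return f"{self.op}: {self.os_error}" if self.op else str(self.os_error)

    class BackupPermissionError(BackupOSError):
        """{}: {}"""

    class BackupFileNotFoundError(BackupOSError):
        """{}: {}"""

    class BackupIOError(BackupOSError):
        """{}: {}"""

With that shape, the errno-to-class mapping in BackupIO.__exit__ above raises the most specific subclass while callers keep catching the common BackupError base.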

+ 131 - 195
src/borg/archiver.py

@@ -37,7 +37,7 @@ try:
     from . import helpers
     from .algorithms.checksums import crc32
     from .archive import Archive, ArchiveChecker, ArchiveRecreater, Statistics, is_special
-    from .archive import BackupError, BackupOSError, backup_io, OsOpen, stat_update_check
+    from .archive import BackupError, BackupRaceConditionError, BackupOSError, backup_io, OsOpen, stat_update_check
     from .archive import FilesystemObjectProcessors, TarfileObjectProcessors, MetadataCollector, ChunksProcessor
     from .archive import has_link
     from .cache import Cache, assert_secure, SecurityManager
@@ -45,8 +45,10 @@ try:
     from .compress import CompressionSpec
     from .crypto.key import key_creator, key_argument_names, tam_required_file, tam_required, RepoKey, PassphraseKey
     from .crypto.keymanager import KeyManager
-    from .helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE
-    from .helpers import Error, NoManifestError, set_ec
+    from .helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, EXIT_SIGNAL_BASE, classify_ec
+    from .helpers import Error, NoManifestError, CancelledByUser, RTError, CommandError
+    from .helpers import modern_ec, set_ec, get_ec, get_reset_ec
+    from .helpers import add_warning, BorgWarning, FileChangedWarning, BackupWarning, IncludePatternNeverMatchedWarning
     from .helpers import positive_int_validator, location_validator, archivename_validator, ChunkerParams, Location
     from .helpers import PrefixSpec, GlobSpec, CommentSpec, PathSpec, SortBySpec, FilesCacheMode
     from .helpers import BaseFormatter, ItemFormatter, ArchiveFormatter
@@ -235,20 +237,29 @@ class Highlander(argparse.Action):
 class Archiver:

     def __init__(self, lock_wait=None, prog=None):
-        self.exit_code = EXIT_SUCCESS
         self.lock_wait = lock_wait
         self.prog = prog
         self.last_checkpoint = time.monotonic()

-    def print_error(self, msg, *args):
-        msg = args and msg % args or msg
-        self.exit_code = EXIT_ERROR
-        logger.error(msg)
-
-    def print_warning(self, msg, *args):
-        msg = args and msg % args or msg
-        self.exit_code = EXIT_WARNING  # we do not terminate here, so it is a warning
-        logger.warning(msg)
+    def print_warning(self, msg, *args, **kw):
+        warning_code = kw.get("wc", EXIT_WARNING)  # note: wc=None can be used to not influence exit code
+        warning_type = kw.get("wt", "percent")
+        assert warning_type in ("percent", "curly")
+        warning_msgid = kw.get("msgid")
+        if warning_code is not None:
+            add_warning(msg, *args, wc=warning_code, wt=warning_type)
+        if warning_type == "percent":
+            output = args and msg % args or msg
+        else:  # == "curly"
+            output = args and msg.format(*args) or msg
+        logger.warning(output, msgid=warning_msgid) if warning_msgid else logger.warning(output)
+
+    def print_warning_instance(self, warning):
+        assert isinstance(warning, BorgWarning)
+        # if it is a BackupWarning, use the wrapped BackupError exception instance:
+        cls = type(warning.args[1]) if isinstance(warning, BackupWarning) else type(warning)
+        msg, msgid, args, wc = cls.__doc__, cls.__qualname__, warning.args, warning.exit_code
+        self.print_warning(msg, *args, wc=wc, wt="curly", msgid=msgid)

     def print_file_status(self, status, path):
         # if we get called with status == None, the final file status was already printed
@@ -277,7 +288,6 @@ class Archiver:
             append_only=args.append_only,
             storage_quota=args.storage_quota,
         ).serve()
-        return EXIT_SUCCESS

     @with_repository(create=True, exclusive=True, manifest=False)
     def do_init(self, args, repository):
@@ -288,7 +298,7 @@ class Archiver:
             key = key_creator(repository, args)
         except (EOFError, KeyboardInterrupt):
             repository.destroy()
-            return EXIT_WARNING
+            raise CancelledByUser()
         manifest = Manifest(key, repository)
         manifest.key = key
         manifest.write()
@@ -316,7 +326,6 @@ class Archiver:
                 'If you used a repokey mode, the key is stored in the repo, but you should back it up separately.\n'
                 'Use "borg key export" to export the key, optionally in printable format.\n'
                 'Write down the passphrase. Store both at safe place(s).\n')
-        return self.exit_code

     @with_repository(exclusive=True, manifest=False)
     def do_check(self, args, repository):
@@ -330,45 +339,41 @@ class Archiver:
             if not yes(msg, false_msg="Aborting.", invalid_msg="Invalid answer, aborting.",
                        truish=('YES', ), retry=False,
                        env_var_override='BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
-                return EXIT_ERROR
+                raise CancelledByUser()
         if args.repo_only and any(
            (args.verify_data, args.first, args.last, args.prefix is not None, args.glob_archives)):
-            self.print_error("--repository-only contradicts --first, --last, --glob-archives, --prefix and --verify-data arguments.")
-            return EXIT_ERROR
+            raise CommandError("--repository-only contradicts --first, --last, --glob-archives, --prefix and --verify-data arguments.")
         if args.repair and args.max_duration:
-            self.print_error("--repair does not allow --max-duration argument.")
-            return EXIT_ERROR
+            raise CommandError("--repair does not allow --max-duration argument.")
         if args.max_duration and not args.repo_only:
             # when doing a partial repo check, we can only check crc32 checksums in segment files,
             # we can't build a fresh repo index in memory to verify the on-disk index against it.
             # thus, we should not do an archives check based on a unknown-quality on-disk repo index.
             # also, there is no max_duration support in the archives check code anyway.
-            self.print_error("--repository-only is required for --max-duration support.")
-            return EXIT_ERROR
+            raise CommandError("--repository-only is required for --max-duration support.")
         if not args.archives_only:
             if not repository.check(repair=args.repair, save_space=args.save_space, max_duration=args.max_duration):
-                return EXIT_WARNING
+                set_ec(EXIT_WARNING)
+                return
         if args.prefix is not None:
             args.glob_archives = args.prefix + '*'
         if not args.repo_only and not ArchiveChecker().check(
                 repository, repair=args.repair, archive=args.location.archive,
                 first=args.first, last=args.last, sort_by=args.sort_by or 'ts', glob=args.glob_archives,
                 verify_data=args.verify_data, save_space=args.save_space):
-            return EXIT_WARNING
-        return EXIT_SUCCESS
+            set_ec(EXIT_WARNING)
+            return

     @with_repository(compatibility=(Manifest.Operation.CHECK,))
     def do_change_passphrase(self, args, repository, manifest, key):
         """Change repository key file passphrase"""
         if not hasattr(key, 'change_passphrase'):
-            print('This repository is not encrypted, cannot change the passphrase.')
-            return EXIT_ERROR
+            raise CommandError('This repository is not encrypted, cannot change the passphrase.')
         key.change_passphrase()
         logger.info('Key updated')
         if hasattr(key, 'find_key'):
             # print key location to make backing it up easier
             logger.info('Key location: %s', key.find_key())
-        return EXIT_SUCCESS

     @with_repository(lock=False, exclusive=False, manifest=False, cache=False)
     def do_key_export(self, args, repository):
@@ -384,9 +389,7 @@ class Archiver:
                 else:
                     manager.export(args.path)
             except IsADirectoryError:
-                self.print_error(f"'{args.path}' must be a file, not a directory")
-                return EXIT_ERROR
-        return EXIT_SUCCESS
+                raise CommandError(f"'{args.path}' must be a file, not a directory")

     @with_repository(lock=False, exclusive=False, manifest=False, cache=False)
     def do_key_import(self, args, repository):
@@ -394,18 +397,14 @@ class Archiver:
         manager = KeyManager(repository)
         if args.paper:
             if args.path:
-                self.print_error("with --paper import from file is not supported")
-                return EXIT_ERROR
+                raise CommandError("with --paper import from file is not supported")
             manager.import_paperkey(args)
         else:
             if not args.path:
-                self.print_error("input file to import key from expected")
-                return EXIT_ERROR
+                raise CommandError("expected input file to import key from")
             if args.path != '-' and not os.path.exists(args.path):
-                self.print_error("input file does not exist: " + args.path)
-                return EXIT_ERROR
+                raise CommandError("input file does not exist: " + args.path)
             manager.import_keyfile(args)
-        return EXIT_SUCCESS

     @with_repository(manifest=False)
     def do_migrate_to_repokey(self, args, repository):
@@ -421,7 +420,6 @@ class Archiver:
         key_new.chunk_seed = key_old.chunk_seed
         key_new.change_passphrase()  # option to change key protection passphrase, save
         logger.info('Key updated')
-        return EXIT_SUCCESS

     def do_benchmark_crud(self, args):
         """Benchmark Create, Read, Update, Delete for archives."""
@@ -430,30 +428,30 @@ class Archiver:
             compression = '--compression=none'
             # measure create perf (without files cache to always have it chunking)
             t_start = time.monotonic()
-            rc = self.do_create(self.parse_args(['create', compression, '--files-cache=disabled', archive + '1', path]))
+            rc = get_reset_ec(self.do_create(self.parse_args(['create', compression, '--files-cache=disabled', archive + '1', path])))
             t_end = time.monotonic()
             dt_create = t_end - t_start
             assert rc == 0
             # now build files cache
-            rc1 = self.do_create(self.parse_args(['create', compression, archive + '2', path]))
-            rc2 = self.do_delete(self.parse_args(['delete', archive + '2']))
+            rc1 = get_reset_ec(self.do_create(self.parse_args(['create', compression, archive + '2', path])))
+            rc2 = get_reset_ec(self.do_delete(self.parse_args(['delete', archive + '2'])))
             assert rc1 == rc2 == 0
             # measure a no-change update (archive1 is still present)
             t_start = time.monotonic()
-            rc1 = self.do_create(self.parse_args(['create', compression, archive + '3', path]))
+            rc1 = get_reset_ec(self.do_create(self.parse_args(['create', compression, archive + '3', path])))
             t_end = time.monotonic()
             dt_update = t_end - t_start
-            rc2 = self.do_delete(self.parse_args(['delete', archive + '3']))
+            rc2 = get_reset_ec(self.do_delete(self.parse_args(['delete', archive + '3'])))
             assert rc1 == rc2 == 0
             # measure extraction (dry-run: without writing result to disk)
             t_start = time.monotonic()
-            rc = self.do_extract(self.parse_args(['extract', '--dry-run', archive + '1']))
+            rc = get_reset_ec(self.do_extract(self.parse_args(['extract', '--dry-run', archive + '1'])))
             t_end = time.monotonic()
             dt_extract = t_end - t_start
             assert rc == 0
             # measure archive deletion (of LAST present archive with the data)
             t_start = time.monotonic()
-            rc = self.do_delete(self.parse_args(['delete', archive + '1']))
+            rc = get_reset_ec(self.do_delete(self.parse_args(['delete', archive + '1'])))
             t_end = time.monotonic()
             dt_delete = t_end - t_start
             assert rc == 0
@@ -501,8 +499,6 @@ class Archiver:
             print(fmt % ('U', msg, total_size_MB / dt_update, count, file_size_formatted, content, dt_update))
             print(fmt % ('D', msg, total_size_MB / dt_delete, count, file_size_formatted, content, dt_delete))

-        return 0
-
     @with_repository(fake='dry_run', exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     def do_create(self, args, repository, manifest=None, key=None):
         """Create new archive"""
@@ -536,16 +532,13 @@ class Archiver:
                             env = prepare_subprocess_env(system=True)
                             proc = subprocess.Popen(args.paths, stdout=subprocess.PIPE, env=env, preexec_fn=ignore_sigint)
                         except (FileNotFoundError, PermissionError) as e:
-                            self.print_error('Failed to execute command: %s', e)
-                            return self.exit_code
+                            raise CommandError('Failed to execute command: %s', e)
                         status = fso.process_pipe(path=path, cache=cache, fd=proc.stdout, mode=mode, user=user, group=group)
                         rc = proc.wait()
                         if rc != 0:
-                            self.print_error('Command %r exited with status %d', args.paths[0], rc)
-                            return self.exit_code
-                    except BackupOSError as e:
-                        self.print_error('%s: %s', path, e)
-                        return self.exit_code
+                            raise CommandError('Command %r exited with status %d', args.paths[0], rc)
+                    except BackupError as e:
+                        raise Error('%s: %s', path, e)
                 else:
                     status = '-'
                 self.print_file_status(status, path)
@@ -556,8 +549,7 @@ class Archiver:
                         env = prepare_subprocess_env(system=True)
                         proc = subprocess.Popen(args.paths, stdout=subprocess.PIPE, env=env, preexec_fn=ignore_sigint)
                     except (FileNotFoundError, PermissionError) as e:
-                        self.print_error('Failed to execute command: %s', e)
-                        return self.exit_code
+                        raise CommandError('Failed to execute command: %s', e)
                     pipe_bin = proc.stdout
                 else:  # args.paths_from_stdin == True
                     pipe_bin = sys.stdin.buffer
@@ -569,17 +561,16 @@ class Archiver:
                             st = os_stat(path=path, parent_fd=None, name=None, follow_symlinks=False)
                         status = self._process_any(path=path, parent_fd=None, name=None, st=st, fso=fso,
                                                    cache=cache, read_special=args.read_special, dry_run=dry_run)
-                    except (BackupOSError, BackupError) as e:
-                        self.print_warning('%s: %s', path, e)
+                    except BackupError as e:
+                        self.print_warning_instance(BackupWarning(path, e))
                         status = 'E'
                     if status == 'C':
-                        self.print_warning('%s: file changed while we backed it up', path)
+                        self.print_warning_instance(FileChangedWarning(path))
                     self.print_file_status(status, path)
                 if args.paths_from_command:
                     rc = proc.wait()
                     if rc != 0:
-                        self.print_error('Command %r exited with status %d', args.paths[0], rc)
-                        return self.exit_code
+                        raise CommandError('Command %r exited with status %d', args.paths[0], rc)
             else:
                 for path in args.paths:
                     if path == '-':  # stdin
@@ -590,9 +581,9 @@ class Archiver:
                         if not dry_run:
                             try:
                                 status = fso.process_pipe(path=path, cache=cache, fd=sys.stdin.buffer, mode=mode, user=user, group=group)
-                            except BackupOSError as e:
+                            except BackupError as e:
+                                self.print_warning_instance(BackupWarning(path, e))
                                 status = 'E'
-                                self.print_warning('%s: %s', path, e)
                         else:
                             status = '-'
                         self.print_file_status(status, path)
@@ -610,9 +601,9 @@ class Archiver:
                         # if we get back here, we've finished recursing into <path>,
                         # we do not ever want to get back in there (even if path is given twice as recursion root)
                         skip_inodes.add((st.st_ino, st.st_dev))
-                    except (BackupOSError, BackupError) as e:
+                    except BackupError as e:
                         # this comes from os.stat, self._rec_walk has own exception handler
-                        self.print_warning('%s: %s', path, e)
+                        self.print_warning_instance(BackupWarning(path, e))
                         continue
             if not dry_run:
                 if args.progress:
@@ -621,7 +612,7 @@ class Archiver:
                 if sig_int:
                     # do not save the archive if the user ctrl-c-ed - it is valid, but incomplete.
                     # we already have a checkpoint archive in this case.
-                    self.print_error("Got Ctrl-C / SIGINT.")
+                    raise Error("Got Ctrl-C / SIGINT.")
                 else:
                     archive.save(comment=args.comment, timestamp=args.timestamp)
                     args.stats |= args.json
@@ -672,7 +663,6 @@ class Archiver:
                 create_inner(archive, cache, fso)
         else:
             create_inner(None, None, None)
-        return self.exit_code

     def _process_any(self, *, path, parent_fd, name, st, fso, cache, read_special, dry_run):
         """
@@ -822,12 +812,11 @@ class Archiver:
                                     exclude_caches=exclude_caches, exclude_if_present=exclude_if_present,
                                     keep_exclude_tags=keep_exclude_tags, skip_inodes=skip_inodes, restrict_dev=restrict_dev,
                                     read_special=read_special, dry_run=dry_run)
-
-        except (BackupOSError, BackupError) as e:
-            self.print_warning('%s: %s', path, e)
+        except BackupError as e:
+            self.print_warning_instance(BackupWarning(path, e))
             status = 'E'
         if status == 'C':
-            self.print_warning('%s: file changed while we backed it up', path)
+            self.print_warning_instance(FileChangedWarning(path))
         if not recurse_excluded_dir:
             self.print_file_status(status, path)

@@ -899,8 +888,8 @@ class Archiver:
                     dir_item = dirs.pop(-1)
                     try:
                         archive.extract_item(dir_item, stdout=stdout)
-                    except BackupOSError as e:
-                        self.print_warning('%s: %s', remove_surrogates(dir_item.path), e)
+                    except BackupError as e:
+                        self.print_warning_instance(BackupWarning(remove_surrogates(dir_item.path), e))
             if output_list:
                 logging.getLogger('borg.output.list').info(remove_surrogates(item.path))
             try:
@@ -913,9 +902,8 @@ class Archiver:
                     else:
                         archive.extract_item(item, stdout=stdout, sparse=sparse, hardlink_masters=hardlink_masters,
                                              stripped_components=strip_components, original_path=orig_path, pi=pi)
-            except (BackupOSError, BackupError) as e:
-                self.print_warning('%s: %s', remove_surrogates(orig_path), e)
-
+            except BackupError as e:
+                self.print_warning_instance(BackupWarning(remove_surrogates(orig_path), e))
         if pi:
             pi.finish()

@@ -927,14 +915,14 @@ class Archiver:
                 dir_item = dirs.pop(-1)
                 try:
                     archive.extract_item(dir_item, stdout=stdout)
-                except BackupOSError as e:
-                    self.print_warning('%s: %s', remove_surrogates(dir_item.path), e)
+                except BackupError as e:
+                    self.print_warning_instance(BackupWarning(remove_surrogates(dir_item.path), e))
+
         for pattern in matcher.get_unmatched_include_patterns():
-            self.print_warning("Include pattern '%s' never matched.", pattern)
+            self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
         if pi:
             # clear progress output
             pi.finish()
-        return self.exit_code

     @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
@@ -968,8 +956,6 @@ class Archiver:
         with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=False) as _stream:
             self._export_tar(args, archive, _stream)

-        return self.exit_code
-
     def _export_tar(self, args, archive, tarstream):
         matcher = self.build_matcher(args.patterns, args.paths)

@@ -1083,7 +1069,6 @@ class Archiver:
                 tarinfo.type = tarfile.FIFOTYPE
             else:
                 self.print_warning('%s: unsupported file type %o for tar export', remove_surrogates(item.path), modebits)
-                set_ec(EXIT_WARNING)
                 return None, stream
             return tarinfo, stream

@@ -1105,8 +1090,7 @@ class Archiver:
         tar.close()

         for pattern in matcher.get_unmatched_include_patterns():
-            self.print_warning("Include pattern '%s' never matched.", pattern)
-        return self.exit_code
+            self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))

     @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_archive
@@ -1133,13 +1117,13 @@ class Archiver:
             # we know chunker params of both archives
             # we know chunker params of both archives
             can_compare_chunk_ids = normalize_chunker_params(cp1) == normalize_chunker_params(cp2)
             can_compare_chunk_ids = normalize_chunker_params(cp1) == normalize_chunker_params(cp2)
             if not can_compare_chunk_ids:
             if not can_compare_chunk_ids:
-                self.print_warning('--chunker-params are different between archives, diff will be slow.')
+                self.print_warning('--chunker-params are different between archives, diff will be slow.', wc=None)
         else:
         else:
             # we do not know chunker params of at least one of the archives
             # we do not know chunker params of at least one of the archives
             can_compare_chunk_ids = False
             can_compare_chunk_ids = False
             self.print_warning('--chunker-params might be different between archives, diff will be slow.\n'
             self.print_warning('--chunker-params might be different between archives, diff will be slow.\n'
                                'If you know for certain that they are the same, pass --same-chunker-params '
                                'If you know for certain that they are the same, pass --same-chunker-params '
-                               'to override this check.')
+                               'to override this check.', wc=None)
 
 
         matcher = self.build_matcher(args.patterns, args.paths)
         matcher = self.build_matcher(args.patterns, args.paths)
 
 
@@ -1154,9 +1138,7 @@ class Archiver:
             print_output(diff, remove_surrogates(path))
             print_output(diff, remove_surrogates(path))
 
 
         for pattern in matcher.get_unmatched_include_patterns():
         for pattern in matcher.get_unmatched_include_patterns():
-            self.print_warning("Include pattern '%s' never matched.", pattern)
-
-        return self.exit_code
+            self.print_warning_instance(IncludePatternNeverMatchedWarning(pattern))
 
 
     @with_repository(exclusive=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
     @with_repository(exclusive=True, cache=True, compatibility=(Manifest.Operation.CHECK,))
     @with_archive
     @with_archive
@@ -1166,7 +1148,6 @@ class Archiver:
         manifest.write()
         manifest.write()
         repository.commit(compact=False)
         repository.commit(compact=False)
         cache.commit()
         cache.commit()
-        return self.exit_code
 
 
     def maybe_checkpoint(self, *, checkpoint_func, checkpoint_interval):
     def maybe_checkpoint(self, *, checkpoint_func, checkpoint_interval):
         checkpointed = False
         checkpointed = False
@@ -1189,12 +1170,11 @@ class Archiver:
         explicit_archives_specified = args.location.archive or args.archives
         explicit_archives_specified = args.location.archive or args.archives
         self.output_list = args.output_list
         self.output_list = args.output_list
         if archive_filter_specified and explicit_archives_specified:
         if archive_filter_specified and explicit_archives_specified:
-            self.print_error('Mixing archive filters and explicitly named archives is not supported.')
-            return self.exit_code
+            raise CommandError('Mixing archive filters and explicitly named archives is not supported.')
         if archive_filter_specified or explicit_archives_specified:
         if archive_filter_specified or explicit_archives_specified:
-            return self._delete_archives(args, repository)
+            self._delete_archives(args, repository)
         else:
         else:
-            return self._delete_repository(args, repository)
+            self._delete_repository(args, repository)
 
 
     def _delete_archives(self, args, repository):
     def _delete_archives(self, args, repository):
         """Delete archives"""
         """Delete archives"""
@@ -1211,7 +1191,7 @@ class Archiver:
             args.consider_checkpoints = True
             args.consider_checkpoints = True
             archive_names = tuple(x.name for x in manifest.archives.list_considering(args))
             archive_names = tuple(x.name for x in manifest.archives.list_considering(args))
             if not archive_names:
             if not archive_names:
-                return self.exit_code
+                return
 
 
         if args.forced == 2:
         if args.forced == 2:
             deleted = False
             deleted = False
@@ -1220,8 +1200,7 @@ class Archiver:
                 try:
                 try:
                     current_archive = manifest.archives.pop(archive_name)
                     current_archive = manifest.archives.pop(archive_name)
                 except KeyError:
                 except KeyError:
-                    self.exit_code = EXIT_WARNING
-                    logger.warning(f'Archive {archive_name} not found ({i}/{len(archive_names)}).')
+                    self.print_warning('Archive %s not found (%d/%d).', archive_name, i, len(archive_names))
                 else:
                 else:
                     deleted = True
                     deleted = True
                     if self.output_list:
                     if self.output_list:
@@ -1234,10 +1213,10 @@ class Archiver:
                 manifest.write()
                 manifest.write()
                 # note: might crash in compact() after committing the repo
                 # note: might crash in compact() after committing the repo
                 repository.commit(compact=False)
                 repository.commit(compact=False)
-                logger.warning('Done. Run "borg check --repair" to clean up the mess.')
+                self.print_warning('Done. Run "borg check --repair" to clean up the mess.', wc=None)
             else:
             else:
-                logger.warning('Aborted.')
-            return self.exit_code
+                self.print_warning('Aborted.', wc=None)
+            return
 
 
         stats = Statistics(iec=args.iec)
         stats = Statistics(iec=args.iec)
         with Cache(repository, key, manifest, progress=args.progress, lock_wait=self.lock_wait, iec=args.iec) as cache:
         with Cache(repository, key, manifest, progress=args.progress, lock_wait=self.lock_wait, iec=args.iec) as cache:
@@ -1256,7 +1235,7 @@ class Archiver:
                 try:
                 try:
                     archive_info = manifest.archives[archive_name]
                     archive_info = manifest.archives[archive_name]
                 except KeyError:
                 except KeyError:
-                    logger.warning(msg_not_found.format(archive_name, i, len(archive_names)))
+                    self.print_warning(msg_not_found, archive_name, i, len(archive_names), wt="curly")
                 else:
                 else:
                     if self.output_list:
                     if self.output_list:
                         logger_list.info(msg_delete.format(format_archive(archive_info), i, len(archive_names)))
                         logger_list.info(msg_delete.format(format_archive(archive_info), i, len(archive_names)))
@@ -1270,7 +1249,7 @@ class Archiver:
                         uncommitted_deletes = 0 if checkpointed else (uncommitted_deletes + 1)
                         uncommitted_deletes = 0 if checkpointed else (uncommitted_deletes + 1)
             if sig_int:
             if sig_int:
                 # Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
                 # Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
-                self.print_error("Got Ctrl-C / SIGINT.")
+                raise Error("Got Ctrl-C / SIGINT.")
             elif uncommitted_deletes > 0:
             elif uncommitted_deletes > 0:
                 checkpoint_func()
                 checkpoint_func()
             if args.stats:
             if args.stats:
@@ -1280,8 +1259,6 @@ class Archiver:
                           str(cache),
                           str(cache),
                           DASHES, logger=logging.getLogger('borg.output.stats'))
                           DASHES, logger=logging.getLogger('borg.output.stats'))
 
 
-        return self.exit_code
-
     def _delete_repository(self, args, repository):
     def _delete_repository(self, args, repository):
         """Delete a repository"""
         """Delete a repository"""
         dry_run = args.dry_run
         dry_run = args.dry_run
@@ -1325,8 +1302,7 @@ class Archiver:
                 msg = '\n'.join(msg)
                 msg = '\n'.join(msg)
                 if not yes(msg, false_msg="Aborting.", invalid_msg='Invalid answer, aborting.', truish=('YES',),
                 if not yes(msg, false_msg="Aborting.", invalid_msg='Invalid answer, aborting.', truish=('YES',),
                            retry=False, env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
                            retry=False, env_var_override='BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'):
-                    self.exit_code = EXIT_ERROR
-                    return self.exit_code
+                    raise CancelledByUser()
             if not dry_run:
             if not dry_run:
                 repository.destroy()
                 repository.destroy()
                 logger.info("Repository deleted.")
                 logger.info("Repository deleted.")
@@ -1340,7 +1316,6 @@ class Archiver:
             logger.info("Cache deleted.")
             logger.info("Cache deleted.")
         else:
         else:
             logger.info("Would delete cache.")
             logger.info("Would delete cache.")
-        return self.exit_code
 
 
     def do_mount(self, args):
     def do_mount(self, args):
         """Mount archive or an entire repository as a FUSE filesystem"""
         """Mount archive or an entire repository as a FUSE filesystem"""
@@ -1348,14 +1323,12 @@ class Archiver:
 
 
         from .fuse_impl import llfuse, BORG_FUSE_IMPL
         from .fuse_impl import llfuse, BORG_FUSE_IMPL
         if llfuse is None:
         if llfuse is None:
-            self.print_error('borg mount not available: no FUSE support, BORG_FUSE_IMPL=%s.' % BORG_FUSE_IMPL)
-            return self.exit_code
+            raise RTError('borg mount not available: no FUSE support, BORG_FUSE_IMPL=%s.' % BORG_FUSE_IMPL)
 
 
         if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
         if not os.path.isdir(args.mountpoint) or not os.access(args.mountpoint, os.R_OK | os.W_OK | os.X_OK):
-            self.print_error('%s: Mountpoint must be a writable directory' % args.mountpoint)
-            return self.exit_code
+            raise RTError('%s: Mount point must be a writable directory' % args.mountpoint)
 
 
-        return self._do_mount(args)
+        self._do_mount(args)
 
 
     @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_repository(compatibility=(Manifest.Operation.READ,))
     def _do_mount(self, args, repository, manifest, key):
     def _do_mount(self, args, repository, manifest, key):
@@ -1368,26 +1341,23 @@ class Archiver:
                 operations.mount(args.mountpoint, args.options, args.foreground)
                 operations.mount(args.mountpoint, args.options, args.foreground)
             except RuntimeError:
             except RuntimeError:
                 # Relevant error message already printed to stderr by FUSE
                 # Relevant error message already printed to stderr by FUSE
-                self.exit_code = EXIT_ERROR
-        return self.exit_code
+                raise RTError("FUSE mount failed")
 
 
     def do_umount(self, args):
     def do_umount(self, args):
         """un-mount the FUSE filesystem"""
         """un-mount the FUSE filesystem"""
-        return umount(args.mountpoint)
+        umount(args.mountpoint)
 
 
     @with_repository(compatibility=(Manifest.Operation.READ,))
     @with_repository(compatibility=(Manifest.Operation.READ,))
     def do_list(self, args, repository, manifest, key):
     def do_list(self, args, repository, manifest, key):
         """List archive or repository contents"""
         """List archive or repository contents"""
         if args.location.archive:
         if args.location.archive:
             if args.json:
             if args.json:
-                self.print_error('The --json option is only valid for listing archives, not archive contents.')
-                return self.exit_code
-            return self._list_archive(args, repository, manifest, key)
+                raise CommandError('The --json option is only valid for listing archives, not archive contents.')
+            self._list_archive(args, repository, manifest, key)
         else:
         else:
             if args.json_lines:
             if args.json_lines:
-                self.print_error('The --json-lines option is only valid for listing archive contents, not archives.')
-                return self.exit_code
-            return self._list_repository(args, repository, manifest, key)
+                raise CommandError('The --json-lines option is only valid for listing archive contents, not archives.')
+            self._list_repository(args, repository, manifest, key)
 
 
     def _list_archive(self, args, repository, manifest, key):
     def _list_archive(self, args, repository, manifest, key):
         matcher = self.build_matcher(args.patterns, args.paths)
         matcher = self.build_matcher(args.patterns, args.paths)
@@ -1413,8 +1383,6 @@ class Archiver:
         else:
         else:
             _list_inner(cache=None)
             _list_inner(cache=None)
 
 
-        return self.exit_code
-
     def _list_repository(self, args, repository, manifest, key):
     def _list_repository(self, args, repository, manifest, key):
         if args.format is not None:
         if args.format is not None:
             format = args.format
             format = args.format
@@ -1437,15 +1405,13 @@ class Archiver:
                 'archives': output_data
                 'archives': output_data
             }))
             }))
 
 
-        return self.exit_code
-
     @with_repository(cache=True, compatibility=(Manifest.Operation.READ,))
     @with_repository(cache=True, compatibility=(Manifest.Operation.READ,))
     def do_info(self, args, repository, manifest, key, cache):
     def do_info(self, args, repository, manifest, key, cache):
         """Show archive details such as disk space used"""
         """Show archive details such as disk space used"""
         if any((args.location.archive, args.first, args.last, args.prefix is not None, args.glob_archives)):
         if any((args.location.archive, args.first, args.last, args.prefix is not None, args.glob_archives)):
-            return self._info_archives(args, repository, manifest, key, cache)
+            self._info_archives(args, repository, manifest, key, cache)
         else:
         else:
-            return self._info_repository(args, repository, manifest, key, cache)
+            self._info_repository(args, repository, manifest, key, cache)
 
 
     def _info_archives(self, args, repository, manifest, key, cache):
     def _info_archives(self, args, repository, manifest, key, cache):
         def format_cmdline(cmdline):
         def format_cmdline(cmdline):
@@ -1485,8 +1451,6 @@ class Archiver:
                 This archive:   {stats[original_size]:>20s} {stats[compressed_size]:>20s} {stats[deduplicated_size]:>20s}
                 This archive:   {stats[original_size]:>20s} {stats[compressed_size]:>20s} {stats[deduplicated_size]:>20s}
                 {cache}
                 {cache}
                 """).strip().format(cache=cache, **info))
                 """).strip().format(cache=cache, **info))
-            if self.exit_code:
-                break
             if not args.json and len(archive_names) - i:
             if not args.json and len(archive_names) - i:
                 print()
                 print()
 
 
@@ -1494,7 +1458,6 @@ class Archiver:
             json_print(basic_json_data(manifest, cache=cache, extra={
             json_print(basic_json_data(manifest, cache=cache, extra={
                 'archives': output_data,
                 'archives': output_data,
             }))
             }))
-        return self.exit_code
 
 
     def _info_repository(self, args, repository, manifest, key, cache):
     def _info_repository(self, args, repository, manifest, key, cache):
         info = basic_json_data(manifest, cache=cache, extra={
         info = basic_json_data(manifest, cache=cache, extra={
@@ -1526,17 +1489,15 @@ class Archiver:
             print(DASHES)
             print(DASHES)
             print(STATS_HEADER)
             print(STATS_HEADER)
             print(str(cache))
             print(str(cache))
-        return self.exit_code
 
 
     @with_repository(exclusive=True, compatibility=(Manifest.Operation.DELETE,))
     @with_repository(exclusive=True, compatibility=(Manifest.Operation.DELETE,))
     def do_prune(self, args, repository, manifest, key):
     def do_prune(self, args, repository, manifest, key):
         """Prune repository archives according to specified rules"""
         """Prune repository archives according to specified rules"""
         if not any((args.secondly, args.minutely, args.hourly, args.daily,
         if not any((args.secondly, args.minutely, args.hourly, args.daily,
                     args.weekly, args.monthly, args.yearly, args.within)):
                     args.weekly, args.monthly, args.yearly, args.within)):
-            self.print_error('At least one of the "keep-within", "keep-last", '
-                             '"keep-secondly", "keep-minutely", "keep-hourly", "keep-daily", '
-                             '"keep-weekly", "keep-monthly" or "keep-yearly" settings must be specified.')
-            return self.exit_code
+            raise CommandError('At least one of the "keep-within", "keep-last", '
+                               '"keep-secondly", "keep-minutely", "keep-hourly", "keep-daily", '
+                               '"keep-weekly", "keep-monthly" or "keep-yearly" settings must be specified.')
         if args.prefix is not None:
         if args.prefix is not None:
             args.glob_archives = args.prefix + '*'
             args.glob_archives = args.prefix + '*'
         checkpoint_re = r'\.checkpoint(\.\d+)?'
         checkpoint_re = r'\.checkpoint(\.\d+)?'
@@ -1615,7 +1576,7 @@ class Archiver:
             pi.finish()
             pi.finish()
             if sig_int:
             if sig_int:
                 # Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
                 # Ctrl-C / SIGINT: do not checkpoint (commit) again, we already have a checkpoint in this case.
-                self.print_error("Got Ctrl-C / SIGINT.")
+                raise Error("Got Ctrl-C / SIGINT.")
             elif uncommitted_deletes > 0:
             elif uncommitted_deletes > 0:
                 checkpoint_func()
                 checkpoint_func()
             if args.stats:
             if args.stats:
@@ -1624,7 +1585,6 @@ class Archiver:
                           stats.summary.format(label='Deleted data:', stats=stats),
                           stats.summary.format(label='Deleted data:', stats=stats),
                           str(cache),
                           str(cache),
                           DASHES, logger=logging.getLogger('borg.output.stats'))
                           DASHES, logger=logging.getLogger('borg.output.stats'))
-        return self.exit_code
 
 
     @with_repository(fake=('tam', 'disable_tam', 'archives_tam'), invert_fake=True, manifest=False, exclusive=True)
     @with_repository(fake=('tam', 'disable_tam', 'archives_tam'), invert_fake=True, manifest=False, exclusive=True)
     def do_upgrade(self, args, repository, manifest=None, key=None):
     def do_upgrade(self, args, repository, manifest=None, key=None):
@@ -1699,7 +1659,6 @@ class Archiver:
                 repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
                 repo.upgrade(args.dry_run, inplace=args.inplace, progress=args.progress)
             except NotImplementedError as e:
             except NotImplementedError as e:
                 print("warning: %s" % e)
                 print("warning: %s" % e)
-        return self.exit_code
 
 
     @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.CHECK,))
     @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.CHECK,))
     def do_recreate(self, args, repository, manifest, key, cache):
     def do_recreate(self, args, repository, manifest, key, cache):
@@ -1722,15 +1681,13 @@ class Archiver:
         if args.location.archive:
         if args.location.archive:
             name = args.location.archive
             name = args.location.archive
             if recreater.is_temporary_archive(name):
             if recreater.is_temporary_archive(name):
-                self.print_error('Refusing to work on temporary archive of prior recreate: %s', name)
-                return self.exit_code
+                raise CommandError('Refusing to work on temporary archive of prior recreate: %s', name)
             if not recreater.recreate(name, args.comment, args.target):
             if not recreater.recreate(name, args.comment, args.target):
-                self.print_error('Nothing to do. Archive was not processed.\n'
-                                 'Specify at least one pattern, PATH, --comment, re-compression or re-chunking option.')
+                raise CommandError('Nothing to do. Archive was not processed.\n'
+                                   'Specify at least one pattern, PATH, --comment, re-compression or re-chunking option.')
         else:
         else:
             if args.target is not None:
             if args.target is not None:
-                self.print_error('--target: Need to specify single archive')
-                return self.exit_code
+                raise CommandError('--target: Need to specify single archive')
             for archive in manifest.archives.list(sort_by=['ts']):
             for archive in manifest.archives.list(sort_by=['ts']):
                 name = archive.name
                 name = archive.name
                 if recreater.is_temporary_archive(name):
                 if recreater.is_temporary_archive(name):
@@ -1742,7 +1699,6 @@ class Archiver:
             manifest.write()
             manifest.write()
             repository.commit(compact=False)
             repository.commit(compact=False)
             cache.commit()
             cache.commit()
-        return self.exit_code
 
 
     @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     @with_repository(cache=True, exclusive=True, compatibility=(Manifest.Operation.WRITE,))
     def do_import_tar(self, args, repository, manifest, key, cache):
     def do_import_tar(self, args, repository, manifest, key, cache):
@@ -1758,8 +1714,6 @@ class Archiver:
         with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=True) as _stream:
         with create_filter_process(filter, stream=tarstream, stream_close=tarstream_close, inbound=True) as _stream:
             self._import_tar(args, repository, manifest, key, cache, _stream)
             self._import_tar(args, repository, manifest, key, cache, _stream)
 
 
-        return self.exit_code
-
     def _import_tar(self, args, repository, manifest, key, cache, tarstream):
     def _import_tar(self, args, repository, manifest, key, cache, tarstream):
         t0 = utcnow()
         t0 = utcnow()
         t0_monotonic = time.monotonic()
         t0_monotonic = time.monotonic()
@@ -1850,7 +1804,8 @@ class Archiver:
         env = prepare_subprocess_env(system=True)
         env = prepare_subprocess_env(system=True)
         try:
         try:
             # we exit with the return code we get from the subprocess
             # we exit with the return code we get from the subprocess
-            return subprocess.call([args.command] + args.args, env=env)
+            rc = subprocess.call([args.command] + args.args, env=env)
+            set_ec(rc)
         finally:
         finally:
             # we need to commit the "no change" operation we did to the manifest
             # we need to commit the "no change" operation we did to the manifest
             # because it created a new segment file in the repository. if we would
             # because it created a new segment file in the repository. if we would
@@ -1868,7 +1823,6 @@ class Archiver:
         repository.put(Manifest.MANIFEST_ID, data)
         repository.put(Manifest.MANIFEST_ID, data)
         threshold = args.threshold / 100
         threshold = args.threshold / 100
         repository.commit(compact=True, threshold=threshold, cleanup_commits=args.cleanup_commits)
         repository.commit(compact=True, threshold=threshold, cleanup_commits=args.cleanup_commits)
-        return EXIT_SUCCESS
 
 
     @with_repository(exclusive=True, manifest=False)
     @with_repository(exclusive=True, manifest=False)
     def do_config(self, args, repository):
     def do_config(self, args, repository):
@@ -1945,8 +1899,7 @@ class Archiver:
 
 
         if not args.list:
         if not args.list:
             if args.name is None:
             if args.name is None:
-                self.print_error('No config key name was provided.')
-                return self.exit_code
+                raise CommandError('No config key name was provided.')
 
 
             try:
             try:
                 section, name = args.name.split('.')
                 section, name = args.name.split('.')
@@ -1988,9 +1941,7 @@ class Archiver:
                 try:
                 try:
                     print(config.get(section, name))
                     print(config.get(section, name))
                 except (configparser.NoOptionError, configparser.NoSectionError) as e:
                 except (configparser.NoOptionError, configparser.NoSectionError) as e:
-                    print(e, file=sys.stderr)
-                    return EXIT_WARNING
-            return EXIT_SUCCESS
+                    raise Error(e)
         finally:
         finally:
             if args.cache:
             if args.cache:
                 cache.close()
                 cache.close()
@@ -2002,7 +1953,6 @@ class Archiver:
         # Additional debug information
         # Additional debug information
         print('CRC implementation:', crc32.__name__)
         print('CRC implementation:', crc32.__name__)
         print('Process ID:', get_process_id())
         print('Process ID:', get_process_id())
-        return EXIT_SUCCESS
 
 
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
     def do_debug_dump_archive_items(self, args, repository, manifest, key):
@@ -2016,7 +1966,6 @@ class Archiver:
             with open(filename, 'wb') as fd:
             with open(filename, 'wb') as fd:
                 fd.write(data)
                 fd.write(data)
         print('Done.')
         print('Done.')
-        return EXIT_SUCCESS
 
 
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_archive(self, args, repository, manifest, key):
     def do_debug_dump_archive(self, args, repository, manifest, key):
@@ -2066,7 +2015,6 @@ class Archiver:
 
 
         with dash_open(args.path, 'w') as fd:
         with dash_open(args.path, 'w') as fd:
             output(fd)
             output(fd)
-        return EXIT_SUCCESS
 
 
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_dump_manifest(self, args, repository, manifest, key):
     def do_debug_dump_manifest(self, args, repository, manifest, key):
@@ -2078,7 +2026,6 @@ class Archiver:
 
 
         with dash_open(args.path, 'w') as fd:
         with dash_open(args.path, 'w') as fd:
             json.dump(meta, fd, indent=4)
             json.dump(meta, fd, indent=4)
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False)
     @with_repository(manifest=False)
     def do_debug_dump_repo_objs(self, args, repository):
     def do_debug_dump_repo_objs(self, args, repository):
@@ -2134,7 +2081,6 @@ class Archiver:
                     decrypt_dump(i, id, cdata)
                     decrypt_dump(i, id, cdata)
                     i += 1
                     i += 1
         print('Done.')
         print('Done.')
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False)
     @with_repository(manifest=False)
     def do_debug_search_repo_objs(self, args, repository):
     def do_debug_search_repo_objs(self, args, repository):
@@ -2158,8 +2104,7 @@ class Archiver:
         except (ValueError, UnicodeEncodeError):
         except (ValueError, UnicodeEncodeError):
             wanted = None
             wanted = None
         if not wanted:
         if not wanted:
-            self.print_error('search term needs to be hex:123abc or str:foobar style')
-            return EXIT_ERROR
+            raise CommandError('search term needs to be hex:123abc or str:foobar style')
 
 
         from .crypto.key import key_factory
         from .crypto.key import key_factory
         # set up the key without depending on a manifest obj
         # set up the key without depending on a manifest obj
@@ -2201,7 +2146,6 @@ class Archiver:
                 if i % 10000 == 0:
                 if i % 10000 == 0:
                     print('%d objects processed.' % i)
                     print('%d objects processed.' % i)
         print('Done.')
         print('Done.')
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False)
     @with_repository(manifest=False)
     def do_debug_get_obj(self, args, repository):
     def do_debug_get_obj(self, args, repository):
@@ -2212,17 +2156,14 @@ class Archiver:
             if len(id) != 32:  # 256bit
             if len(id) != 32:  # 256bit
                 raise ValueError("id must be 256bits or 64 hex digits")
                 raise ValueError("id must be 256bits or 64 hex digits")
         except ValueError as err:
         except ValueError as err:
-            print(f"object id {hex_id} is invalid [{str(err)}].")
-            return EXIT_ERROR
+            raise CommandError(f"object id {hex_id} is invalid [{str(err)}].")
         try:
         try:
             data = repository.get(id)
             data = repository.get(id)
         except Repository.ObjectNotFound:
         except Repository.ObjectNotFound:
-            print("object %s not found." % hex_id)
-            return EXIT_ERROR
+            raise RTError("object %s not found." % hex_id)
         with open(args.path, "wb") as f:
         with open(args.path, "wb") as f:
             f.write(data)
             f.write(data)
         print("object %s fetched." % hex_id)
         print("object %s fetched." % hex_id)
-        return EXIT_SUCCESS
 
 
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     @with_repository(compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_id_hash(self, args, repository, manifest, key):
     def do_debug_id_hash(self, args, repository, manifest, key):
@@ -2231,7 +2172,6 @@ class Archiver:
             data = f.read()
             data = f.read()
         id = key.id_hash(data)
         id = key.id_hash(data)
         print(id.hex())
         print(id.hex())
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False, exclusive=True)
     @with_repository(manifest=False, exclusive=True)
     def do_debug_put_obj(self, args, repository):
     def do_debug_put_obj(self, args, repository):
@@ -2244,12 +2184,10 @@ class Archiver:
             if len(id) != 32:  # 256bit
             if len(id) != 32:  # 256bit
                 raise ValueError("id must be 256bits or 64 hex digits")
                 raise ValueError("id must be 256bits or 64 hex digits")
         except ValueError as err:
         except ValueError as err:
-            print(f"object id {hex_id} is invalid [{str(err)}].")
-            return EXIT_ERROR
+            raise CommandError(f"object id {hex_id} is invalid [{str(err)}].")
         repository.put(id, data)
         repository.put(id, data)
         print("object %s put." % hex_id)
         print("object %s put." % hex_id)
         repository.commit(compact=False)
         repository.commit(compact=False)
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False, exclusive=True)
     @with_repository(manifest=False, exclusive=True)
     def do_debug_delete_obj(self, args, repository):
     def do_debug_delete_obj(self, args, repository):
@@ -2270,7 +2208,6 @@ class Archiver:
         if modified:
         if modified:
             repository.commit(compact=False)
             repository.commit(compact=False)
         print('Done.')
         print('Done.')
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
     @with_repository(manifest=False, exclusive=True, cache=True, compatibility=Manifest.NO_OPERATION_CHECK)
     def do_debug_refcount_obj(self, args, repository, manifest, key, cache):
     def do_debug_refcount_obj(self, args, repository, manifest, key, cache):
@@ -2286,7 +2223,6 @@ class Archiver:
                     print("object %s has %d referrers [info from chunks cache]." % (hex_id, refcount))
                     print("object %s has %d referrers [info from chunks cache]." % (hex_id, refcount))
                 except KeyError:
                 except KeyError:
                     print("object %s not found [info from chunks cache]." % hex_id)
                     print("object %s not found [info from chunks cache]." % hex_id)
-        return EXIT_SUCCESS
 
 
     @with_repository(manifest=False, exclusive=True)
     @with_repository(manifest=False, exclusive=True)
     def do_debug_dump_hints(self, args, repository):
     def do_debug_dump_hints(self, args, repository):
@@ -2304,21 +2240,18 @@ class Archiver:
                 json.dump(hints, fd, indent=4)
                 json.dump(hints, fd, indent=4)
         finally:
         finally:
             repository.rollback()
             repository.rollback()
-        return EXIT_SUCCESS
 
 
     def do_debug_convert_profile(self, args):
     def do_debug_convert_profile(self, args):
         """convert Borg profile to Python profile"""
         """convert Borg profile to Python profile"""
         import marshal
         import marshal
         with args.output, args.input:
         with args.output, args.input:
             marshal.dump(msgpack.unpack(args.input, use_list=False, raw=False), args.output)
             marshal.dump(msgpack.unpack(args.input, use_list=False, raw=False), args.output)
-        return EXIT_SUCCESS
 
 
     @with_repository(lock=False, manifest=False)
     @with_repository(lock=False, manifest=False)
     def do_break_lock(self, args, repository):
     def do_break_lock(self, args, repository):
         """Break the repository lock (e.g. in case it was left by a dead borg."""
         """Break the repository lock (e.g. in case it was left by a dead borg."""
         repository.break_lock()
         repository.break_lock()
         Cache.break_lock(repository)
         Cache.break_lock(repository)
-        return self.exit_code
 
 
     helptext = collections.OrderedDict()
     helptext = collections.OrderedDict()
     helptext['patterns'] = textwrap.dedent('''
     helptext['patterns'] = textwrap.dedent('''
@@ -2749,12 +2682,10 @@ class Archiver:
             msg_lines += ['    Commands: %s' % ', '.join(sorted(commands.keys()))]
             msg_lines += ['    Commands: %s' % ', '.join(sorted(commands.keys()))]
             msg_lines += ['    Topics: %s' % ', '.join(sorted(self.helptext.keys()))]
             msg_lines += ['    Topics: %s' % ', '.join(sorted(self.helptext.keys()))]
             parser.error('\n'.join(msg_lines))
             parser.error('\n'.join(msg_lines))
-        return self.exit_code
 
 
     def do_subcommand_help(self, parser, args):
     def do_subcommand_help(self, parser, args):
         """display infos about subcommand"""
         """display infos about subcommand"""
         parser.print_help()
         parser.print_help()
-        return EXIT_SUCCESS
 
 
     do_maincommand_help = do_subcommand_help
     do_maincommand_help = do_subcommand_help
 
 
@@ -5228,7 +5159,7 @@ class Archiver:
             logger.error("You do not have a supported version of the msgpack python package installed. Terminating.")
             logger.error("You do not have a supported version of the msgpack python package installed. Terminating.")
             logger.error("This should never happen as specific, supported versions are required by our setup.py.")
             logger.error("This should never happen as specific, supported versions are required by our setup.py.")
             logger.error("Do not contact borgbackup support about this.")
             logger.error("Do not contact borgbackup support about this.")
-            return set_ec(EXIT_ERROR)
+            raise Error("unsupported msgpack version")
         if is_slow_msgpack():
         if is_slow_msgpack():
             logger.warning(PURE_PYTHON_MSGPACK_WARNING)
             logger.warning(PURE_PYTHON_MSGPACK_WARNING)
         if args.debug_profile:
         if args.debug_profile:
@@ -5243,7 +5174,7 @@ class Archiver:
                 variables = dict(locals())
                 variables = dict(locals())
                 profiler.enable()
                 profiler.enable()
                 try:
                 try:
-                    return set_ec(func(args))
+                    return get_ec(func(args))
                 finally:
                 finally:
                     profiler.disable()
                     profiler.disable()
                     profiler.snapshot_stats()
                     profiler.snapshot_stats()
@@ -5260,7 +5191,9 @@ class Archiver:
                         # it compatible (see above).
                         # it compatible (see above).
                         msgpack.pack(profiler.stats, fd, use_bin_type=True)
                         msgpack.pack(profiler.stats, fd, use_bin_type=True)
         else:
         else:
-            return set_ec(func(args))
+            rc = func(args)
+            assert rc is None
+            return get_ec(rc)
 
 
 
 
 def sig_info_handler(sig_no, stack):  # pragma: no cover
 def sig_info_handler(sig_no, stack):  # pragma: no cover
@@ -5330,7 +5263,7 @@ def main():  # pragma: no cover
         except argparse.ArgumentTypeError as e:
         except argparse.ArgumentTypeError as e:
             # we might not have logging setup yet, so get out quickly
             # we might not have logging setup yet, so get out quickly
             print(str(e), file=sys.stderr)
             print(str(e), file=sys.stderr)
-            sys.exit(EXIT_ERROR)
+            sys.exit(CommandError.exit_mcode if modern_ec else EXIT_ERROR)
         except Exception:
         except Exception:
             msg = 'Local Exception'
             msg = 'Local Exception'
             tb = f'{traceback.format_exc()}\n{sysinfo()}'
             tb = f'{traceback.format_exc()}\n{sysinfo()}'
@@ -5348,7 +5281,7 @@ def main():  # pragma: no cover
             tb = f"{traceback.format_exc()}\n{sysinfo()}"
             tb = f"{traceback.format_exc()}\n{sysinfo()}"
             exit_code = e.exit_code
             exit_code = e.exit_code
         except RemoteRepository.RPCError as e:
         except RemoteRepository.RPCError as e:
-            important = e.exception_class not in ('LockTimeout', ) and e.traceback
+            important = e.traceback
             msgid = e.exception_class
             msgid = e.exception_class
             tb_log_level = logging.ERROR if important else logging.DEBUG
             tb_log_level = logging.ERROR if important else logging.DEBUG
             if important:
             if important:
@@ -5386,16 +5319,19 @@ def main():  # pragma: no cover
         if args.show_rc:
         if args.show_rc:
             rc_logger = logging.getLogger('borg.output.show-rc')
             rc_logger = logging.getLogger('borg.output.show-rc')
             exit_msg = 'terminating with %s status, rc %d'
             exit_msg = 'terminating with %s status, rc %d'
-            if exit_code == EXIT_SUCCESS:
-                rc_logger.info(exit_msg % ('success', exit_code))
-            elif exit_code == EXIT_WARNING:
-                rc_logger.warning(exit_msg % ('warning', exit_code))
-            elif exit_code == EXIT_ERROR:
-                rc_logger.error(exit_msg % ('error', exit_code))
-            elif exit_code >= EXIT_SIGNAL_BASE:
-                rc_logger.error(exit_msg % ('signal', exit_code))
-            else:
+            try:
+                ec_class = classify_ec(exit_code)
+            except ValueError:
                 rc_logger.error(exit_msg % ('abnormal', exit_code or 666))
                 rc_logger.error(exit_msg % ('abnormal', exit_code or 666))
+            else:
+                if ec_class == "success":
+                    rc_logger.info(exit_msg % (ec_class, exit_code))
+                elif ec_class == "warning":
+                    rc_logger.warning(exit_msg % (ec_class, exit_code))
+                elif ec_class == "error":
+                    rc_logger.error(exit_msg % (ec_class, exit_code))
+                elif ec_class == "signal":
+                    rc_logger.error(exit_msg % (ec_class, exit_code))
         sys.exit(exit_code)
         sys.exit(exit_code)
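
For illustration, a minimal sketch (simplified, not part of this changeset) of the control flow the archiver hunks above converge on: command handlers raise Error subclasses such as CommandError or RTError instead of returning self.exit_code, and the top-level handler turns the exception into the process return code via its exit_code attribute:

    import sys

    class Error(Exception):
        exit_code = 2          # generic error; a more specific code may apply
                               # when BORG_EXIT_CODES=modern is set

    class CommandError(Error):
        exit_code = 4          # illustrative specific code for a usage error

    def do_delete(args):       # hypothetical, heavily trimmed command handler
        if args.filters and args.named_archives:
            raise CommandError("Mixing archive filters and explicitly named archives is not supported.")
        # ... perform the deletion, return None on success ...

    def run(args):
        try:
            do_delete(args)
            rc = 0             # the real code computes success/warning rcs via get_ec()
        except Error as e:
            rc = e.exit_code
        sys.exit(rc)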
 
 
 
 

+ 13 - 8
src/borg/cache.py

@@ -341,20 +341,25 @@ class CacheConfig:
 class Cache:
 class Cache:
     """Client Side cache
     """Client Side cache
     """
     """
-    class RepositoryIDNotUnique(Error):
-        """Cache is newer than repository - do you have multiple, independently updated repos with same ID?"""
-
-    class RepositoryReplay(Error):
-        """Cache, or information obtained from the security directory is newer than repository - this is either an attack or unsafe (multiple repos with same ID)"""
-
     class CacheInitAbortedError(Error):
     class CacheInitAbortedError(Error):
         """Cache initialization aborted"""
         """Cache initialization aborted"""
+        exit_mcode = 60
+
+    class EncryptionMethodMismatch(Error):
+        """Repository encryption method changed since last access, refusing to continue"""
+        exit_mcode = 61
 
 
     class RepositoryAccessAborted(Error):
     class RepositoryAccessAborted(Error):
         """Repository access aborted"""
         """Repository access aborted"""
+        exit_mcode = 62
 
 
-    class EncryptionMethodMismatch(Error):
-        """Repository encryption method changed since last access, refusing to continue"""
+    class RepositoryIDNotUnique(Error):
+        """Cache is newer than repository - do you have multiple, independently updated repos with same ID?"""
+        exit_mcode = 63
+
+    class RepositoryReplay(Error):
+        """Cache, or information obtained from the security directory is newer than repository - this is either an attack or unsafe (multiple repos with same ID)"""
+        exit_mcode = 64
 
 
     @staticmethod
     @staticmethod
     def break_lock(repository, path=None):
     def break_lock(repository, path=None):
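
The reordered Cache error classes above each gain an exit_mcode in the 60..64 range; a small, hedged illustration of what that exposes (the legacy/modern resolution itself lives in the Error base class in src/borg/helpers/errors.py, shown only partially further below):

    from borg.cache import Cache   # assumes an installed/importable borg

    print(Cache.CacheInitAbortedError.exit_mcode)   # 60
    print(Cache.RepositoryReplay.exit_mcode)        # 64
    # With BORG_EXIT_CODES=modern, these specific codes are expected to become the
    # process return code instead of the generic EXIT_ERROR (2).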

+ 4 - 3
src/borg/constants.py

@@ -93,10 +93,11 @@ FILES_CACHE_MODE_UI_DEFAULT = 'ctime,size,inode'  # default for "borg create" co
 FILES_CACHE_MODE_DISABLED = 'd'  # most borg commands do not use the files cache at all (disable)
 FILES_CACHE_MODE_DISABLED = 'd'  # most borg commands do not use the files cache at all (disable)
 
 
 # return codes returned by borg command
 # return codes returned by borg command
-# when borg is killed by signal N, rc = 128 + N
 EXIT_SUCCESS = 0  # everything done, no problems
 EXIT_SUCCESS = 0  # everything done, no problems
-EXIT_WARNING = 1  # reached normal end of operation, but there were issues
-EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
+EXIT_WARNING = 1  # reached normal end of operation, but there were issues (generic warning)
+EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation (generic error)
+EXIT_ERROR_BASE = 3  # specific error codes are 3..99 (enabled by BORG_EXIT_CODES=modern)
+EXIT_WARNING_BASE = 100  # specific warning codes are 100..127 (enabled by BORG_EXIT_CODES=modern)
 EXIT_SIGNAL_BASE = 128  # terminated due to signal, rc = 128 + sig_no
 EXIT_SIGNAL_BASE = 128  # terminated due to signal, rc = 128 + sig_no
 
 
 # never use datetime.isoformat(), it is evil. always use one of these:
 # never use datetime.isoformat(), it is evil. always use one of these:
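
The constants above split the return-code space into bands: 0 success, 1 generic warning, 2 generic error, 3..99 specific errors, 100..127 specific warnings, 128+ signals. As a hedged sketch (the helper name and wrapper are illustrative, mirroring the classify_ec() helper added in src/borg/helpers/__init__.py further below), a caller could bucket borg's rc like this:

    EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR = 0, 1, 2
    EXIT_ERROR_BASE, EXIT_WARNING_BASE, EXIT_SIGNAL_BASE = 3, 100, 128

    def bucket(rc: int) -> str:
        if rc == EXIT_SUCCESS:
            return "success"
        if rc == EXIT_WARNING or EXIT_WARNING_BASE <= rc < EXIT_SIGNAL_BASE:
            return "warning"
        if rc == EXIT_ERROR or EXIT_ERROR_BASE <= rc < EXIT_WARNING_BASE:
            return "error"
        if rc >= EXIT_SIGNAL_BASE:
            return "signal"    # terminated by signal N, rc = 128 + N
        return "unknown"

    assert bucket(0) == "success" and bucket(105) == "warning" and bucket(17) == "error"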

+ 1 - 0
src/borg/crypto/file_integrity.py

@@ -119,6 +119,7 @@ SUPPORTED_ALGORITHMS = {
 
 
 class FileIntegrityError(IntegrityError):
 class FileIntegrityError(IntegrityError):
     """File failed integrity check: {}"""
     """File failed integrity check: {}"""
+    exit_mcode = 91
 
 
 
 
 class IntegrityCheckedFile(FileLikeWrapper):
 class IntegrityCheckedFile(FileLikeWrapper):

+ 19 - 9
src/borg/crypto/key.py

@@ -39,54 +39,60 @@ AUTHENTICATED_NO_KEY = 'authenticated_no_key' in workarounds
 
 
 class NoPassphraseFailure(Error):
 class NoPassphraseFailure(Error):
     """can not acquire a passphrase: {}"""
     """can not acquire a passphrase: {}"""
-
-
-class PassphraseWrong(Error):
-    """passphrase supplied in BORG_PASSPHRASE, by BORG_PASSCOMMAND or via BORG_PASSPHRASE_FD is incorrect."""
+    exit_mcode = 50
 
 
 
 
 class PasscommandFailure(Error):
 class PasscommandFailure(Error):
     """passcommand supplied in BORG_PASSCOMMAND failed: {}"""
     """passcommand supplied in BORG_PASSCOMMAND failed: {}"""
+    exit_mcode = 51
+
+
+class PassphraseWrong(Error):
+    """passphrase supplied in BORG_PASSPHRASE, by BORG_PASSCOMMAND or via BORG_PASSPHRASE_FD is incorrect."""
+    exit_mcode = 52
 
 
 
 
 class PasswordRetriesExceeded(Error):
 class PasswordRetriesExceeded(Error):
     """exceeded the maximum password retries"""
     """exceeded the maximum password retries"""
+    exit_mcode = 53
 
 
 
 
 class UnsupportedPayloadError(Error):
 class UnsupportedPayloadError(Error):
     """Unsupported payload type {}. A newer version is required to access this repository."""
     """Unsupported payload type {}. A newer version is required to access this repository."""
+    exit_mcode = 48
 
 
 
 
 class UnsupportedManifestError(Error):
 class UnsupportedManifestError(Error):
     """Unsupported manifest envelope. A newer version is required to access this repository."""
     """Unsupported manifest envelope. A newer version is required to access this repository."""
+    exit_mcode = 27
 
 
 
 
 class KeyfileNotFoundError(Error):
 class KeyfileNotFoundError(Error):
     """No key file for repository {} found in {}."""
     """No key file for repository {} found in {}."""
+    exit_mcode = 42
 
 
 
 
 class KeyfileInvalidError(Error):
 class KeyfileInvalidError(Error):
     """Invalid key file for repository {} found in {}."""
     """Invalid key file for repository {} found in {}."""
+    exit_mcode = 40
 
 
 
 
 class KeyfileMismatchError(Error):
 class KeyfileMismatchError(Error):
     """Mismatch between repository {} and key file {}."""
     """Mismatch between repository {} and key file {}."""
+    exit_mcode = 41
 
 
 
 
 class RepoKeyNotFoundError(Error):
 class RepoKeyNotFoundError(Error):
     """No key entry found in the config of repository {}."""
     """No key entry found in the config of repository {}."""
+    exit_mcode = 44
 
 
 
 
 class TAMRequiredError(IntegrityError):
 class TAMRequiredError(IntegrityError):
     __doc__ = textwrap.dedent("""
     __doc__ = textwrap.dedent("""
     Manifest is unauthenticated, but it is required for this repository.
     Manifest is unauthenticated, but it is required for this repository.
-
-    This either means that you are under attack, or that you modified this repository
-    with a Borg version older than 1.0.9 after TAM authentication was enabled.
-
-    In the latter case, use "borg upgrade --tam --force '{}'" to re-authenticate the manifest.
     """).strip()
     """).strip()
     traceback = True
     traceback = True
+    exit_mcode = 98
 
 
 
 
 class ArchiveTAMRequiredError(TAMRequiredError):
 class ArchiveTAMRequiredError(TAMRequiredError):
@@ -94,11 +100,13 @@ class ArchiveTAMRequiredError(TAMRequiredError):
     Archive '{}' is unauthenticated, but it is required for this repository.
     Archive '{}' is unauthenticated, but it is required for this repository.
     """).strip()
     """).strip()
     traceback = True
     traceback = True
+    exit_mcode = 96
 
 
 
 
 class TAMInvalid(IntegrityError):
 class TAMInvalid(IntegrityError):
     __doc__ = IntegrityError.__doc__
     __doc__ = IntegrityError.__doc__
     traceback = True
     traceback = True
+    exit_mcode = 97
 
 
     def __init__(self):
     def __init__(self):
         # Error message becomes: "Data integrity error: Manifest authentication did not verify"
         # Error message becomes: "Data integrity error: Manifest authentication did not verify"
@@ -108,6 +116,7 @@ class TAMInvalid(IntegrityError):
 class ArchiveTAMInvalid(IntegrityError):
 class ArchiveTAMInvalid(IntegrityError):
     __doc__ = IntegrityError.__doc__
     __doc__ = IntegrityError.__doc__
     traceback = True
     traceback = True
+    exit_mcode = 95
 
 
     def __init__(self):
     def __init__(self):
         # Error message becomes: "Data integrity error: Archive authentication did not verify"
         # Error message becomes: "Data integrity error: Archive authentication did not verify"
@@ -117,6 +126,7 @@ class ArchiveTAMInvalid(IntegrityError):
 class TAMUnsupportedSuiteError(IntegrityError):
 class TAMUnsupportedSuiteError(IntegrityError):
     """Could not verify manifest: Unsupported suite {!r}; a newer version is needed."""
     """Could not verify manifest: Unsupported suite {!r}; a newer version is needed."""
     traceback = True
     traceback = True
+    exit_mcode = 99
 
 
 
 
 class KeyBlobStorage:
 class KeyBlobStorage:
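
With the specific codes assigned above (for example PassphraseWrong -> exit_mcode 52), a calling script no longer has to parse stderr to tell failure causes apart. A hedged, hypothetical example (the wrapper is not part of this changeset):

    import os
    import subprocess

    repo = "/path/to/repo"   # placeholder
    env = dict(os.environ, BORG_EXIT_CODES="modern")
    rc = subprocess.call(["borg", "list", repo], env=env)
    if rc == 52:             # PassphraseWrong
        print("wrong passphrase supplied via BORG_PASSPHRASE / BORG_PASSCOMMAND, asking interactively ...")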

+ 12 - 8
src/borg/crypto/keymanager.py

@@ -10,20 +10,24 @@ from ..repository import Repository
 from .key import KeyfileKey, KeyfileNotFoundError, RepoKeyNotFoundError, KeyBlobStorage, identify_key
 from .key import KeyfileKey, KeyfileNotFoundError, RepoKeyNotFoundError, KeyBlobStorage, identify_key
 
 
 
 
-class UnencryptedRepo(Error):
-    """Keymanagement not available for unencrypted repositories."""
-
-
-class UnknownKeyType(Error):
-    """Keytype {0} is unknown."""
+class NotABorgKeyFile(Error):
+    """This file is not a borg key backup, aborting."""
+    exit_mcode = 43
 
 
 
 
 class RepoIdMismatch(Error):
 class RepoIdMismatch(Error):
     """This key backup seems to be for a different backup repository, aborting."""
     """This key backup seems to be for a different backup repository, aborting."""
+    exit_mcode = 45
 
 
 
 
-class NotABorgKeyFile(Error):
-    """This file is not a borg key backup, aborting."""
+class UnencryptedRepo(Error):
+    """Key management not available for unencrypted repositories."""
+    exit_mcode = 46
+
+
+class UnknownKeyType(Error):
+    """Key type {0} is unknown."""
+    exit_mcode = 47
 
 
 
 
 def sha256_truncated(data, num):
 def sha256_truncated(data, num):

+ 104 - 11
src/borg/helpers/__init__.py

@@ -26,23 +26,116 @@ from . import msgpack
 # see the docs for a list of known workaround strings.
 # see the docs for a list of known workaround strings.
 workarounds = tuple(os.environ.get('BORG_WORKAROUNDS', '').split(','))
 workarounds = tuple(os.environ.get('BORG_WORKAROUNDS', '').split(','))
 
 
+
+# element data type for warnings_list:
+warning_info = namedtuple("warning_info", "wc,msg,args,wt")
+
 """
 """
-The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
-warning or error occurred during their operation. This is different from archiver.exit_code, which is only accessible
-from the archiver object.
+The global warnings_list variable is used to collect warning_info elements while borg is running.
+"""
+_warnings_list = []
+
+
+def add_warning(msg, *args, **kwargs):
+    global _warnings_list
+    warning_code = kwargs.get("wc", EXIT_WARNING)
+    assert isinstance(warning_code, int)
+    warning_type = kwargs.get("wt", "percent")
+    assert warning_type in ("percent", "curly")
+    _warnings_list.append(warning_info(warning_code, msg, args, warning_type))
 
 
-Note: keep this in helpers/__init__.py as the code expects to be able to assign to helpers.exit_code.
+
+"""
+The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
+warning or error occurred during their operation.
 """
 """
-exit_code = EXIT_SUCCESS
+_exit_code = EXIT_SUCCESS
+
+
+def classify_ec(ec):
+    if not isinstance(ec, int):
+        raise TypeError("ec must be of type int")
+    if EXIT_SIGNAL_BASE <= ec <= 255:
+        return "signal"
+    elif ec == EXIT_ERROR or EXIT_ERROR_BASE <= ec < EXIT_WARNING_BASE:
+        return "error"
+    elif ec == EXIT_WARNING or EXIT_WARNING_BASE <= ec < EXIT_SIGNAL_BASE:
+        return "warning"
+    elif ec == EXIT_SUCCESS:
+        return "success"
+    else:
+        raise ValueError(f"invalid error code: {ec}")
+
+
+def max_ec(ec1, ec2):
+    """return the more severe error code of ec1 and ec2"""
+    # note: usually, there can be only 1 error-class ec, the other ec is then either success or warning.
+    ec1_class = classify_ec(ec1)
+    ec2_class = classify_ec(ec2)
+    if ec1_class == "signal":
+        return ec1
+    if ec2_class == "signal":
+        return ec2
+    if ec1_class == "error":
+        return ec1
+    if ec2_class == "error":
+        return ec2
+    if ec1_class == "warning":
+        return ec1
+    if ec2_class == "warning":
+        return ec2
+    assert ec1 == ec2 == EXIT_SUCCESS
+    return EXIT_SUCCESS
 
 
 
 
 def set_ec(ec):
 def set_ec(ec):
     """
     """
-    Sets the exit code of the program, if an exit code higher or equal than this is set, this does nothing. This
-    makes EXIT_ERROR override EXIT_WARNING, etc..
+    Sets the exit code of the program to ec IF ec is more severe than the current exit code.
+    """
+    global _exit_code
+    _exit_code = max_ec(_exit_code, ec)
+
 
 
-    ec: exit code to set
+def init_ec_warnings(ec=EXIT_SUCCESS, warnings=None):
     """
     """
-    global exit_code
-    exit_code = max(exit_code, ec)
-    return exit_code
+    (Re-)Init the globals for the exit code and the warnings list.
+    """
+    global _exit_code, _warnings_list
+    _exit_code = ec
+    warnings = [] if warnings is None else warnings
+    assert isinstance(warnings, list)
+    _warnings_list = warnings
+
+
+def get_ec(ec=None):
+    """
+    compute the final return code of the borg process
+    """
+    if ec is not None:
+        set_ec(ec)
+
+    global _exit_code
+    exit_code_class = classify_ec(_exit_code)
+    if exit_code_class in ("signal", "error", "warning"):
+        # there was a signal/error/warning, return its exit code
+        return _exit_code
+    assert exit_code_class == "success"
+    global _warnings_list
+    if not _warnings_list:
+        # we do not have any warnings in warnings list, return success exit code
+        return _exit_code
+    # looks like we have some warning(s)
+    rcs = sorted(set(w_info.wc for w_info in _warnings_list))
+    logger.debug(f"rcs: {rcs!r}")
+    if len(rcs) == 1:
+        # easy: there was only one kind of warning, so we can be specific
+        return rcs[0]
+    # there were different kinds of warnings
+    return EXIT_WARNING  # generic warning rc, user has to look into the logs
+
+
+def get_reset_ec(ec=None):
+    """Like get_ec, but re-initialize ec/warnings afterwards."""
+    rc = get_ec(ec)
+    init_ec_warnings()
+    return rc
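
Taken together, the helpers above replace the single global exit code with a small, testable policy: classify_ec() buckets a return code, max_ec() picks the more severe of two codes, set_ec() folds a code into the global state, and get_ec()/get_reset_ec() compute the final process rc, falling back to the generic EXIT_WARNING only when warnings of different kinds were collected. A minimal sketch of how they compose (assuming a borg checkout that contains this branch, so borg.helpers already exports these names):

    # minimal sketch, assuming a borg checkout containing this branch
    from borg.helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
    from borg.helpers import init_ec_warnings, set_ec, get_ec, max_ec

    init_ec_warnings()                     # start every command from a clean state
    set_ec(EXIT_WARNING)                   # one module reports a warning ...
    set_ec(EXIT_ERROR)                     # ... another one reports an error
    assert max_ec(EXIT_WARNING, EXIT_ERROR) == EXIT_ERROR
    assert get_ec() == EXIT_ERROR          # the more severe code wins
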

+ 9 - 16
src/borg/helpers/checks.py

@@ -1,39 +1,32 @@
 import os
 import os
 import sys
 import sys
 
 
-from .errors import Error
+from .errors import RTError
 from ..platformflags import is_win32, is_linux, is_freebsd, is_darwin
 from ..platformflags import is_win32, is_linux, is_freebsd, is_darwin
 
 
 
 
-class PythonLibcTooOld(Error):
-    """FATAL: this Python was compiled for a too old (g)libc and misses required functionality."""
-
-
 def check_python():
 def check_python():
     if is_win32:
     if is_win32:
         required_funcs = {os.stat}
         required_funcs = {os.stat}
     else:
     else:
         required_funcs = {os.stat, os.utime, os.chown}
         required_funcs = {os.stat, os.utime, os.chown}
     if not os.supports_follow_symlinks.issuperset(required_funcs):
     if not os.supports_follow_symlinks.issuperset(required_funcs):
-        raise PythonLibcTooOld
-
-
-class ExtensionModuleError(Error):
-    """The Borg binary extension modules do not seem to be properly installed."""
+        raise RTError("""FATAL: this Python was compiled for a too old (g)libc and misses required functionality.""")
 
 
 
 
 def check_extension_modules():
 def check_extension_modules():
     import borg.crypto.low_level
     import borg.crypto.low_level
     from .. import platform, compress, item, chunker, hashindex
     from .. import platform, compress, item, chunker, hashindex
+    msg = """The Borg binary extension modules do not seem to be properly installed."""
     if hashindex.API_VERSION != '1.2_01':
     if hashindex.API_VERSION != '1.2_01':
-        raise ExtensionModuleError
+        raise RTError(msg)
     if chunker.API_VERSION != '1.2_01':
     if chunker.API_VERSION != '1.2_01':
-        raise ExtensionModuleError
+        raise RTError(msg)
     if compress.API_VERSION != '1.2_02':
     if compress.API_VERSION != '1.2_02':
-        raise ExtensionModuleError
+        raise RTError(msg)
     if borg.crypto.low_level.API_VERSION != '1.2_01':
     if borg.crypto.low_level.API_VERSION != '1.2_01':
-        raise ExtensionModuleError
+        raise RTError(msg)
     if item.API_VERSION != '1.2_01':
     if item.API_VERSION != '1.2_01':
-        raise ExtensionModuleError
+        raise RTError(msg)
     if platform.API_VERSION != platform.OS_API_VERSION or platform.API_VERSION != '1.2_05':
     if platform.API_VERSION != platform.OS_API_VERSION or platform.API_VERSION != '1.2_05':
-        raise ExtensionModuleError
+        raise RTError(msg)
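
With the dedicated PythonLibcTooOld and ExtensionModuleError classes gone, callers of these checks now simply catch RTError, which keeps rc 2 in both legacy and modern mode. A sketch of the calling side (assuming a checkout of this branch):

    from borg.helpers.checks import check_python, check_extension_modules
    from borg.helpers.errors import RTError

    try:
        check_python()
        check_extension_modules()
    except RTError as exc:
        # both startup checks now raise the generic runtime error (rc 2)
        print(f"startup check failed: {exc} (rc {exc.exit_code})")
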

+ 134 - 4
src/borg/helpers/errors.py

@@ -1,16 +1,22 @@
+import os
+
 from ..constants import *  # NOQA
 from ..constants import *  # NOQA
 
 
 import borg.crypto.low_level
 import borg.crypto.low_level
 
 
 
 
-class Error(Exception):
-    """Error: {}"""
+modern_ec = os.environ.get("BORG_EXIT_CODES", "legacy") == "modern"
+
+
+class ErrorBase(Exception):
+    """ErrorBase: {}"""
     # Error base class
     # Error base class
 
 
     # if we raise such an Error and it is only caught by the uppermost
     # if we raise such an Error and it is only caught by the uppermost
     # exception handler (that exits short after with the given exit_code),
     # exception handler (that exits short after with the given exit_code),
-    # it is always a (fatal and abrupt) EXIT_ERROR, never just a warning.
-    exit_code = EXIT_ERROR
+    # it is always a (fatal and abrupt) error, never just a warning.
+    exit_mcode = EXIT_ERROR  # modern, more specific exit code (defaults to EXIT_ERROR)
+
     # show a traceback?
     # show a traceback?
     traceback = False
     traceback = False
 
 
@@ -23,6 +29,16 @@ class Error(Exception):
 
 
     __str__ = get_message
     __str__ = get_message
 
 
+    @property
+    def exit_code(self):
+        # legacy: borg used to always use rc 2 (EXIT_ERROR) for all errors.
+        # modern: users can opt in to more specific return codes, using BORG_EXIT_CODES:
+        return self.exit_mcode if modern_ec else EXIT_ERROR
+
+
+class Error(ErrorBase):
+    """Error: {}"""
+
 
 
 class ErrorWithTraceback(Error):
 class ErrorWithTraceback(Error):
     """Error: {}"""
     """Error: {}"""
@@ -32,7 +48,121 @@ class ErrorWithTraceback(Error):
 
 
 class IntegrityError(ErrorWithTraceback, borg.crypto.low_level.IntegrityError):
 class IntegrityError(ErrorWithTraceback, borg.crypto.low_level.IntegrityError):
     """Data integrity error: {}"""
     """Data integrity error: {}"""
+    exit_mcode = 90
 
 
 
 
 class DecompressionError(IntegrityError):
 class DecompressionError(IntegrityError):
     """Decompression error: {}"""
     """Decompression error: {}"""
+    exit_mcode = 92
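
The exit_code property above is what makes the new scheme opt-in: the same exception reports its specific exit_mcode only when BORG_EXIT_CODES=modern was set before borg.helpers.errors was imported, because modern_ec is evaluated at import time. A small sketch, assuming a checkout of this branch:

    import os
    os.environ["BORG_EXIT_CODES"] = "modern"   # must happen before the import below

    from borg.helpers.errors import IntegrityError

    err = IntegrityError("chunk id mismatch")  # hypothetical message
    print(err.exit_mcode)                      # 90, the modern rc
    print(err.exit_code)                       # 90 here; 2 (EXIT_ERROR) in legacy mode
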
+
+
+class CancelledByUser(Error):
+    """Cancelled by user."""
+    exit_mcode = 3
+
+
+class RTError(Error):
+    """Runtime Error: {}"""
+
+
+class CommandError(Error):
+    """Command Error: {}"""
+    exit_mcode = 4
+
+
+class BorgWarning:
+    """Warning: {}"""
+    # Warning base class
+
+    # please note that this class and its subclasses are NOT exceptions, we do not raise them.
+    # so this is just to have inheritance, inspectability and the exit_code property.
+    exit_mcode = EXIT_WARNING  # modern, more specific exit code (defaults to EXIT_WARNING)
+
+    def __init__(self, *args):
+        self.args = args
+
+    def get_message(self):
+        return type(self).__doc__.format(*self.args)
+
+    __str__ = get_message
+
+    @property
+    def exit_code(self):
+        # legacy: borg used to always use rc 1 (EXIT_WARNING) for all warnings.
+        # modern: users can opt in to more specific return codes, using BORG_EXIT_CODES:
+        return self.exit_mcode if modern_ec else EXIT_WARNING
+
+
+class FileChangedWarning(BorgWarning):
+    """{}: file changed while we backed it up"""
+    exit_mcode = 100
+
+
+class IncludePatternNeverMatchedWarning(BorgWarning):
+    """Include pattern '{}' never matched."""
+    exit_mcode = 101
+
+
+class BackupWarning(BorgWarning):
+    """{}: {}"""
+    # this is to wrap a caught BackupError exception, so it can be given to print_warning_instance
+
+    @property
+    def exit_code(self):
+        if not modern_ec:
+            return EXIT_WARNING
+        exc = self.args[1]
+        assert isinstance(exc, BackupError)
+        return exc.exit_mcode
+
+
+class BackupError(ErrorBase):
+    """{}: backup error"""
+    # Exception raised for non-OSError-based exceptions while accessing backup files.
+    exit_mcode = 102
+
+
+class BackupRaceConditionError(BackupError):
+    """{}: file type or inode changed while we backed it up (race condition, skipped file)"""
+    # Exception raised when encountering a critical race condition while trying to back up a file.
+    exit_mcode = 103
+
+
+class BackupOSError(BackupError):
+    """{}: {}"""
+    # Wrapper for OSError raised while accessing backup files.
+    #
+    # Borg does different kinds of IO, and IO failures have different consequences.
+    # This wrapper represents failures of input file or extraction IO.
+    # These are non-critical and are only reported (warnings).
+    #
+    # Any unwrapped IO error is critical and aborts execution (for example repository IO failure).
+    exit_mcode = 104
+
+    def __init__(self, op, os_error):
+        self.op = op
+        self.os_error = os_error
+        self.errno = os_error.errno
+        self.strerror = os_error.strerror
+        self.filename = os_error.filename
+
+    def __str__(self):
+        if self.op:
+            return f'{self.op}: {self.os_error}'
+        else:
+            return str(self.os_error)
+
+
+class BackupPermissionError(BackupOSError):
+    """{}: {}"""
+    exit_mcode = 105
+
+
+class BackupIOError(BackupOSError):
+    """{}: {}"""
+    exit_mcode = 106
+
+
+class BackupFileNotFoundError(BackupOSError):
+    """{}: {}"""
+    exit_mcode = 107
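
The Backup* hierarchy above is what turns per-file problems into specific warning rcs: a caught BackupError/BackupOSError gets wrapped in a BackupWarning (see the comment on that class), and the warning's exit_code then comes from the wrapped exception. A sketch of that wrapping (assuming a checkout of this branch; the path and OSError values are made up):

    from borg.helpers.errors import BackupPermissionError, BackupWarning

    os_err = PermissionError(13, "Permission denied", "/etc/shadow")   # hypothetical
    exc = BackupPermissionError("open", os_err)
    warning = BackupWarning("/etc/shadow", exc)

    print(warning.exit_code)   # 105 with BORG_EXIT_CODES=modern, 1 (EXIT_WARNING) otherwise
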

+ 5 - 2
src/borg/helpers/fs.py

@@ -332,11 +332,14 @@ def os_stat(*, path=None, parent_fd=None, name=None, follow_symlinks=False):
 
 
 
 
 def umount(mountpoint):
 def umount(mountpoint):
+    from . import set_ec
+
     env = prepare_subprocess_env(system=True)
     env = prepare_subprocess_env(system=True)
     try:
     try:
-        return subprocess.call(['fusermount', '-u', mountpoint], env=env)
+        rc = subprocess.call(['fusermount', '-u', mountpoint], env=env)
     except FileNotFoundError:
     except FileNotFoundError:
-        return subprocess.call(['umount', mountpoint], env=env)
+        rc = subprocess.call(['umount', mountpoint], env=env)
+    set_ec(rc)
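
Note the behavioural change in umount(): it used to return the subprocess return code to the caller; now the code is recorded via set_ec() and surfaces in the final process rc instead. A sketch of the new calling convention (assuming a checkout of this branch; the mountpoint is hypothetical):

    from borg.helpers import get_reset_ec
    from borg.helpers.fs import umount

    umount("/mnt/borg-backup")       # hypothetical mountpoint
    rc = get_reset_ec()              # picks up whatever rc set_ec() recorded
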
 
 
 
 
 # below is a slightly modified tempfile.mkstemp that has an additional mode parameter.
 # below is a slightly modified tempfile.mkstemp that has an additional mode parameter.

+ 6 - 4
src/borg/helpers/manifest.py

@@ -18,12 +18,14 @@ from .. import shellpattern
 from ..constants import *  # NOQA
 from ..constants import *  # NOQA
 
 
 
 
-class NoManifestError(Error):
-    """Repository has no manifest."""
-
-
 class MandatoryFeatureUnsupported(Error):
 class MandatoryFeatureUnsupported(Error):
     """Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""
     """Unsupported repository feature(s) {}. A newer version of borg is required to access this repository."""
+    exit_mcode = 25
+
+
+class NoManifestError(Error):
+    """Repository has no manifest."""
+    exit_mcode = 26
 
 
 
 
 ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')
 ArchiveInfo = namedtuple('ArchiveInfo', 'name id ts')

+ 2 - 0
src/borg/helpers/parseformat.py

@@ -176,10 +176,12 @@ class DatetimeWrapper:
 
 
 class PlaceholderError(Error):
 class PlaceholderError(Error):
     """Formatting Error: "{}".format({}): {}({})"""
     """Formatting Error: "{}".format({}): {}({})"""
+    exit_mcode = 5
 
 
 
 
 class InvalidPlaceholder(PlaceholderError):
 class InvalidPlaceholder(PlaceholderError):
     """Invalid placeholder "{}" in string: {}"""
     """Invalid placeholder "{}" in string: {}"""
+    exit_mcode = 6
 
 
 
 
 def format_line(format, data):
 def format_line(format, data):

+ 10 - 4
src/borg/locking.py

@@ -69,26 +69,32 @@ class TimeoutTimer:
 
 
 class LockError(Error):
 class LockError(Error):
     """Failed to acquire the lock {}."""
     """Failed to acquire the lock {}."""
+    exit_mcode = 70
 
 
 
 
 class LockErrorT(ErrorWithTraceback):
 class LockErrorT(ErrorWithTraceback):
     """Failed to acquire the lock {}."""
     """Failed to acquire the lock {}."""
-
-
-class LockTimeout(LockError):
-    """Failed to create/acquire the lock {} (timeout)."""
+    exit_mcode = 71
 
 
 
 
 class LockFailed(LockErrorT):
 class LockFailed(LockErrorT):
     """Failed to create/acquire the lock {} ({})."""
     """Failed to create/acquire the lock {} ({})."""
+    exit_mcode = 72
+
+
+class LockTimeout(LockError):
+    """Failed to create/acquire the lock {} (timeout)."""
+    exit_mcode = 73
 
 
 
 
 class NotLocked(LockErrorT):
 class NotLocked(LockErrorT):
     """Failed to release the lock {} (was not locked)."""
     """Failed to release the lock {} (was not locked)."""
+    exit_mcode = 74
 
 
 
 
 class NotMyLock(LockErrorT):
 class NotMyLock(LockErrorT):
     """Failed to release the lock {} (was/is locked, but not by me)."""
     """Failed to release the lock {} (was/is locked, but not by me)."""
+    exit_mcode = 75
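
The lock errors now carry their own modern rcs (70-75) while still collapsing to rc 2 in legacy mode. A small sketch (assuming a checkout of this branch; the lock path is made up):

    from borg.locking import LockTimeout

    err = LockTimeout("/path/to/repo/lock.exclusive")   # hypothetical lock path
    print(err.exit_mcode)   # 73
    print(err.exit_code)    # 73 with BORG_EXIT_CODES=modern, 2 (EXIT_ERROR) otherwise
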
 
 
 
 
 class ExclusiveLock:
 class ExclusiveLock:

+ 28 - 0
src/borg/remote.py

@@ -27,6 +27,7 @@ from .helpers import sysinfo
 from .helpers import format_file_size
 from .helpers import format_file_size
 from .helpers import safe_unlink
 from .helpers import safe_unlink
 from .helpers import prepare_subprocess_env, ignore_sigint
 from .helpers import prepare_subprocess_env, ignore_sigint
+from .locking import LockTimeout, NotLocked, NotMyLock, LockFailed
 from .logger import create_logger, setup_logging
 from .logger import create_logger, setup_logging
 from .helpers import msgpack
 from .helpers import msgpack
 from .repository import Repository
 from .repository import Repository
@@ -66,26 +67,32 @@ def os_write(fd, data):
 
 
 class ConnectionClosed(Error):
 class ConnectionClosed(Error):
     """Connection closed by remote host"""
     """Connection closed by remote host"""
+    exit_mcode = 80
 
 
 
 
 class ConnectionClosedWithHint(ConnectionClosed):
 class ConnectionClosedWithHint(ConnectionClosed):
     """Connection closed by remote host. {}"""
     """Connection closed by remote host. {}"""
+    exit_mcode = 81
 
 
 
 
 class PathNotAllowed(Error):
 class PathNotAllowed(Error):
     """Repository path not allowed: {}"""
     """Repository path not allowed: {}"""
+    exit_mcode = 83
 
 
 
 
 class InvalidRPCMethod(Error):
 class InvalidRPCMethod(Error):
     """RPC method {} is not valid"""
     """RPC method {} is not valid"""
+    exit_mcode = 82
 
 
 
 
 class UnexpectedRPCDataFormatFromClient(Error):
 class UnexpectedRPCDataFormatFromClient(Error):
     """Borg {}: Got unexpected RPC data format from client."""
     """Borg {}: Got unexpected RPC data format from client."""
+    exit_mcode = 85
 
 
 
 
 class UnexpectedRPCDataFormatFromServer(Error):
 class UnexpectedRPCDataFormatFromServer(Error):
     """Got unexpected RPC data format from server:\n{}"""
     """Got unexpected RPC data format from server:\n{}"""
+    exit_mcode = 86
 
 
     def __init__(self, data):
     def __init__(self, data):
         try:
         try:
@@ -517,6 +524,7 @@ class RemoteRepository:
 
 
     class RPCServerOutdated(Error):
     class RPCServerOutdated(Error):
         """Borg server is too old for {}. Required version {}"""
         """Borg server is too old for {}. Required version {}"""
+        exit_mcode = 84
 
 
         @property
         @property
         def method(self):
         def method(self):
@@ -772,6 +780,26 @@ This problem will go away as soon as the server has been upgraded to 1.0.7+.
                     raise InvalidRPCMethod('(not available)')
                     raise InvalidRPCMethod('(not available)')
                 else:
                 else:
                     raise InvalidRPCMethod(args[0].decode())
                     raise InvalidRPCMethod(args[0].decode())
+            elif error == 'LockTimeout':
+                if old_server:
+                    raise LockTimeout('(not available)')
+                else:
+                    raise LockTimeout(args[0].decode())
+            elif error == 'LockFailed':
+                if old_server:
+                    raise LockFailed('(not available)', '')
+                else:
+                    raise LockFailed(args[0].decode(), args[1].decode())
+            elif error == 'NotLocked':
+                if old_server:
+                    raise NotLocked('(not available)')
+                else:
+                    raise NotLocked(args[0].decode())
+            elif error == 'NotMyLock':
+                if old_server:
+                    raise NotMyLock('(not available)')
+                else:
+                    raise NotMyLock(args[0].decode())
             else:
             else:
                 raise self.RPCError(unpacked)
                 raise self.RPCError(unpacked)
 
 

+ 26 - 15
src/borg/repository.py

@@ -120,43 +120,54 @@ class Repository:
     will still get rid of them.
     will still get rid of them.
     """
     """
 
 
-    class DoesNotExist(Error):
-        """Repository {} does not exist."""
-
     class AlreadyExists(Error):
     class AlreadyExists(Error):
         """A repository already exists at {}."""
         """A repository already exists at {}."""
+        exit_mcode = 10
 
 
-    class PathAlreadyExists(Error):
-        """There is already something at {}."""
+    class AtticRepository(Error):
+        """Attic repository detected. Please run "borg upgrade {}"."""
+        exit_mcode = 11
 
 
-    class ParentPathDoesNotExist(Error):
-        """The parent path of the repo directory [{}] does not exist."""
+    class CheckNeeded(ErrorWithTraceback):
+        """Inconsistency detected. Please run "borg check {}"."""
+        exit_mcode = 12
+
+    class DoesNotExist(Error):
+        """Repository {} does not exist."""
+        exit_mcode = 13
+
+    class InsufficientFreeSpaceError(Error):
+        """Insufficient free space to complete transaction (required: {}, available: {})."""
+        exit_mcode = 14
 
 
     class InvalidRepository(Error):
     class InvalidRepository(Error):
         """{} is not a valid repository. Check repo config."""
         """{} is not a valid repository. Check repo config."""
+        exit_mcode = 15
 
 
     class InvalidRepositoryConfig(Error):
     class InvalidRepositoryConfig(Error):
         """{} does not have a valid configuration. Check repo config [{}]."""
         """{} does not have a valid configuration. Check repo config [{}]."""
-
-    class AtticRepository(Error):
-        """Attic repository detected. Please run "borg upgrade {}"."""
-
-    class CheckNeeded(ErrorWithTraceback):
-        """Inconsistency detected. Please run "borg check {}"."""
+        exit_mcode = 16
 
 
     class ObjectNotFound(ErrorWithTraceback):
     class ObjectNotFound(ErrorWithTraceback):
         """Object with key {} not found in repository {}."""
         """Object with key {} not found in repository {}."""
+        exit_mcode = 17
 
 
         def __init__(self, id, repo):
         def __init__(self, id, repo):
             if isinstance(id, bytes):
             if isinstance(id, bytes):
                 id = bin_to_hex(id)
                 id = bin_to_hex(id)
             super().__init__(id, repo)
             super().__init__(id, repo)
 
 
-    class InsufficientFreeSpaceError(Error):
-        """Insufficient free space to complete transaction (required: {}, available: {})."""
+    class ParentPathDoesNotExist(Error):
+        """The parent path of the repo directory [{}] does not exist."""
+        exit_mcode = 18
+
+    class PathAlreadyExists(Error):
+        """There is already something at {}."""
+        exit_mcode = 19
 
 
     class StorageQuotaExceeded(Error):
     class StorageQuotaExceeded(Error):
         """The storage quota ({}) has been exceeded ({}). Try deleting some archives."""
         """The storage quota ({}) has been exceeded ({}). Try deleting some archives."""
+        exit_mcode = 20
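
The reshuffling above is cosmetic (the nested classes are now listed alphabetically); the substantive change is that each Repository error carries one of the rcs 10-20 documented in docs/internals/frontends.rst. A sketch (assuming a checkout of this branch; the path is made up):

    from borg.repository import Repository

    err = Repository.DoesNotExist("/path/to/repo")   # hypothetical path
    print(err)              # Repository /path/to/repo does not exist.
    print(err.exit_mcode)   # 13
    print(err.exit_code)    # 13 with BORG_EXIT_CODES=modern, 2 otherwise
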
 
 
     def __init__(self, path, create=False, exclusive=False, lock_wait=None, lock=True,
     def __init__(self, path, create=False, exclusive=False, lock_wait=None, lock=True,
                  append_only=False, storage_quota=None, check_segment_magic=True,
                  append_only=False, storage_quota=None, check_segment_magic=True,

+ 78 - 23
src/borg/testsuite/archiver.py

@@ -40,7 +40,8 @@ from ..crypto.keymanager import RepoIdMismatch, NotABorgKeyFile
 from ..crypto.file_integrity import FileIntegrityError
 from ..crypto.file_integrity import FileIntegrityError
 from ..helpers import Location, get_security_dir
 from ..helpers import Location, get_security_dir
 from ..helpers import Manifest, MandatoryFeatureUnsupported, ArchiveInfo
 from ..helpers import Manifest, MandatoryFeatureUnsupported, ArchiveInfo
-from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
+from ..helpers import init_ec_warnings
+from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, Error, CancelledByUser, RTError, CommandError
 from ..helpers import bin_to_hex
 from ..helpers import bin_to_hex
 from ..helpers import MAX_S
 from ..helpers import MAX_S
 from ..helpers import msgpack
 from ..helpers import msgpack
@@ -96,8 +97,7 @@ def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b'', binary_outpu
             if archiver is None:
             if archiver is None:
                 archiver = Archiver()
                 archiver = Archiver()
             archiver.prerun_checks = lambda *args: None
             archiver.prerun_checks = lambda *args: None
-            archiver.exit_code = EXIT_SUCCESS
-            helpers.exit_code = EXIT_SUCCESS
+            init_ec_warnings()
             try:
             try:
                 args = archiver.parse_args(list(args))
                 args = archiver.parse_args(list(args))
                 # argparse parsing may raise SystemExit when the command line is bad or
                 # argparse parsing may raise SystemExit when the command line is bad or
@@ -1171,9 +1171,14 @@ class ArchiverTestCase(ArchiverTestCaseBase):
 
 
     def test_create_content_from_command_with_failed_command(self):
     def test_create_content_from_command_with_failed_command(self):
         self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('init', '--encryption=repokey', self.repository_location)
-        output = self.cmd('create', '--content-from-command', self.repository_location + '::test',
-                          '--', 'sh', '-c', 'exit 73;', exit_code=2)
-        assert output.endswith("Command 'sh' exited with status 73\n")
+        if self.FORK_DEFAULT:
+            output = self.cmd('create', '--content-from-command', self.repository_location + '::test',
+                              '--', 'sh', '-c', 'exit 73;', exit_code=2)
+            assert output.endswith("Command 'sh' exited with status 73\n")
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('create', '--content-from-command', self.repository_location + '::test',
+                         '--', 'sh', '-c', 'exit 73;')
         archive_list = json.loads(self.cmd('list', '--json', self.repository_location))
         archive_list = json.loads(self.cmd('list', '--json', self.repository_location))
         assert archive_list['archives'] == []
         assert archive_list['archives'] == []
 
 
@@ -1212,9 +1217,14 @@ class ArchiverTestCase(ArchiverTestCaseBase):
 
 
     def test_create_paths_from_command_with_failed_command(self):
     def test_create_paths_from_command_with_failed_command(self):
         self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('init', '--encryption=repokey', self.repository_location)
-        output = self.cmd('create', '--paths-from-command', self.repository_location + '::test',
-                          '--', 'sh', '-c', 'exit 73;', exit_code=2)
-        assert output.endswith("Command 'sh' exited with status 73\n")
+        if self.FORK_DEFAULT:
+            output = self.cmd('create', '--paths-from-command', self.repository_location + '::test',
+                              '--', 'sh', '-c', 'exit 73;', exit_code=2)
+            assert output.endswith("Command 'sh' exited with status 73\n")
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('create', '--paths-from-command', self.repository_location + '::test',
+                         '--', 'sh', '-c', 'exit 73;')
         archive_list = json.loads(self.cmd('list', '--json', self.repository_location))
         archive_list = json.loads(self.cmd('list', '--json', self.repository_location))
         assert archive_list['archives'] == []
         assert archive_list['archives'] == []
 
 
@@ -1699,7 +1709,11 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.cmd('create', self.repository_location + '::test', 'input')
         self.cmd('create', self.repository_location + '::test', 'input')
         self.cmd('create', self.repository_location + '::test.2', 'input')
         self.cmd('create', self.repository_location + '::test.2', 'input')
         os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'no'
         os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'no'
-        self.cmd('delete', self.repository_location, exit_code=2)
+        if self.FORK_DEFAULT:
+            self.cmd('delete', self.repository_location, exit_code=2)
+        else:
+            with pytest.raises(CancelledByUser):
+                self.cmd('delete', self.repository_location)
         assert os.path.exists(self.repository_path)
         assert os.path.exists(self.repository_path)
         os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'YES'
         os.environ['BORG_DELETE_I_KNOW_WHAT_I_AM_DOING'] = 'YES'
         self.cmd('delete', self.repository_location)
         self.cmd('delete', self.repository_location)
@@ -2470,8 +2484,16 @@ class ArchiverTestCase(ArchiverTestCaseBase):
 
 
     def test_list_json_args(self):
     def test_list_json_args(self):
         self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('init', '--encryption=repokey', self.repository_location)
-        self.cmd('list', '--json-lines', self.repository_location, exit_code=2)
-        self.cmd('list', '--json', self.repository_location + '::archive', exit_code=2)
+        if self.FORK_DEFAULT:
+            self.cmd('list', '--json-lines', self.repository_location, exit_code=2)
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('list', '--json-lines', self.repository_location)
+        if self.FORK_DEFAULT:
+            self.cmd('list', '--json', self.repository_location + '::archive', exit_code=2)
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('list', '--json', self.repository_location + '::archive')
 
 
     def test_log_json(self):
     def test_log_json(self):
         self.create_test_files()
         self.create_test_files()
@@ -2956,7 +2978,11 @@ class ArchiverTestCase(ArchiverTestCaseBase):
             raise EOFError
             raise EOFError
 
 
         with patch.object(KeyfileKeyBase, 'create', raise_eof):
         with patch.object(KeyfileKeyBase, 'create', raise_eof):
-            self.cmd('init', '--encryption=repokey', self.repository_location, exit_code=1)
+            if self.FORK_DEFAULT:
+                self.cmd('init', '--encryption=repokey', self.repository_location, exit_code=2)
+            else:
+                with pytest.raises(CancelledByUser):
+                    self.cmd('init', '--encryption=repokey', self.repository_location)
         assert not os.path.exists(self.repository_location)
         assert not os.path.exists(self.repository_location)
 
 
     def test_init_requires_encryption_option(self):
     def test_init_requires_encryption_option(self):
@@ -3025,8 +3051,12 @@ class ArchiverTestCase(ArchiverTestCaseBase):
 
 
     def test_recreate_target_rc(self):
     def test_recreate_target_rc(self):
         self.cmd('init', '--encryption=repokey', self.repository_location)
         self.cmd('init', '--encryption=repokey', self.repository_location)
-        output = self.cmd('recreate', self.repository_location, '--target=asdf', exit_code=2)
-        assert 'Need to specify single archive' in output
+        if self.FORK_DEFAULT:
+            output = self.cmd('recreate', self.repository_location, '--target=asdf', exit_code=2)
+            assert 'Need to specify single archive' in output
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('recreate', self.repository_location, '--target=asdf')
 
 
     def test_recreate_target(self):
     def test_recreate_target(self):
         self.create_test_files()
         self.create_test_files()
@@ -3317,13 +3347,21 @@ class ArchiverTestCase(ArchiverTestCaseBase):
 
 
         self.cmd('init', self.repository_location, '--encryption', 'repokey')
         self.cmd('init', self.repository_location, '--encryption', 'repokey')
 
 
-        self.cmd('key', 'export', self.repository_location, export_directory, exit_code=EXIT_ERROR)
+        if self.FORK_DEFAULT:
+            self.cmd('key', 'export', self.repository_location, export_directory, exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('key', 'export', self.repository_location, export_directory)
 
 
     def test_key_import_errors(self):
     def test_key_import_errors(self):
         export_file = self.output_path + '/exported'
         export_file = self.output_path + '/exported'
         self.cmd('init', self.repository_location, '--encryption', 'keyfile')
         self.cmd('init', self.repository_location, '--encryption', 'keyfile')
 
 
-        self.cmd('key', 'import', self.repository_location, export_file, exit_code=EXIT_ERROR)
+        if self.FORK_DEFAULT:
+            self.cmd('key', 'import', self.repository_location, export_file, exit_code=EXIT_ERROR)
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('key', 'import', self.repository_location, export_file)
 
 
         with open(export_file, 'w') as fd:
         with open(export_file, 'w') as fd:
             fd.write('something not a key\n')
             fd.write('something not a key\n')
@@ -3481,8 +3519,13 @@ id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
         self.assert_in('id', output)
         self.assert_in('id', output)
         self.assert_not_in('last_segment_checked', output)
         self.assert_not_in('last_segment_checked', output)
 
 
-        output = self.cmd('config', self.repository_location, 'last_segment_checked', exit_code=1)
-        self.assert_in('No option ', output)
+        if self.FORK_DEFAULT:
+            output = self.cmd('config', self.repository_location, 'last_segment_checked', exit_code=2)
+            self.assert_in('No option ', output)
+        else:
+            with pytest.raises(Error):
+                self.cmd('config', self.repository_location, 'last_segment_checked')
+
         self.cmd('config', self.repository_location, 'last_segment_checked', '123')
         self.cmd('config', self.repository_location, 'last_segment_checked', '123')
         output = self.cmd('config', self.repository_location, 'last_segment_checked')
         output = self.cmd('config', self.repository_location, 'last_segment_checked')
         assert output == '123' + '\n'
         assert output == '123' + '\n'
@@ -3500,11 +3543,23 @@ id: 2 / e29442 3506da 4e1ea7 / 25f62a 5a3d41 - 02
             output = self.cmd('config', self.repository_location, cfg_key)
             output = self.cmd('config', self.repository_location, cfg_key)
             assert output == cfg_value + '\n'
             assert output == cfg_value + '\n'
             self.cmd('config', '--delete', self.repository_location, cfg_key)
             self.cmd('config', '--delete', self.repository_location, cfg_key)
-            self.cmd('config', self.repository_location, cfg_key, exit_code=1)
+            if self.FORK_DEFAULT:
+                self.cmd('config', self.repository_location, cfg_key, exit_code=2)
+            else:
+                with pytest.raises(Error):
+                    self.cmd('config', self.repository_location, cfg_key)
 
 
         self.cmd('config', '--list', '--delete', self.repository_location, exit_code=2)
         self.cmd('config', '--list', '--delete', self.repository_location, exit_code=2)
-        self.cmd('config', self.repository_location, exit_code=2)
-        self.cmd('config', self.repository_location, 'invalid-option', exit_code=1)
+        if self.FORK_DEFAULT:
+            self.cmd('config', self.repository_location, exit_code=2)
+        else:
+            with pytest.raises(CommandError):
+                self.cmd('config', self.repository_location)
+        if self.FORK_DEFAULT:
+            self.cmd('config', self.repository_location, 'invalid-option', exit_code=2)
+        else:
+            with pytest.raises(Error):
+                self.cmd('config', self.repository_location, 'invalid-option')
 
 
     requires_gnutar = pytest.mark.skipif(not have_gnutar(), reason='GNU tar must be installed for this test.')
     requires_gnutar = pytest.mark.skipif(not have_gnutar(), reason='GNU tar must be installed for this test.')
     requires_gzip = pytest.mark.skipif(not shutil.which('gzip'), reason='gzip must be installed for this test.')
     requires_gzip = pytest.mark.skipif(not shutil.which('gzip'), reason='gzip must be installed for this test.')
@@ -4549,7 +4604,7 @@ class DiffArchiverTestCase(ArchiverTestCaseBase):
 
 
         output = self.cmd("diff", self.repository_location + "::test0", "test1a")
         output = self.cmd("diff", self.repository_location + "::test0", "test1a")
         do_asserts(output, True)
         do_asserts(output, True)
-        output = self.cmd("diff", self.repository_location + "::test0", "test1b", "--content-only", exit_code=1)
+        output = self.cmd("diff", self.repository_location + "::test0", "test1b", "--content-only")
         do_asserts(output, False, content_only=True)
         do_asserts(output, False, content_only=True)
 
 
         output = self.cmd("diff", self.repository_location + "::test0", "test1a", "--json-lines")
         output = self.cmd("diff", self.repository_location + "::test0", "test1a", "--json-lines")

+ 56 - 1
src/borg/testsuite/helpers.py

@@ -10,7 +10,7 @@ from time import sleep
 import pytest
 import pytest
 
 
 from .. import platform
 from .. import platform
-from ..constants import MAX_DATA_SIZE
+from ..constants import *  # NOQA
 from ..helpers import Location
 from ..helpers import Location
 from ..helpers import Buffer
 from ..helpers import Buffer
 from ..helpers import partial_format, format_file_size, parse_file_size, format_timedelta, format_line, PlaceholderError, replace_placeholders
 from ..helpers import partial_format, format_file_size, parse_file_size, format_timedelta, format_line, PlaceholderError, replace_placeholders
@@ -30,6 +30,7 @@ from ..helpers import popen_with_error_handling
 from ..helpers import dash_open
 from ..helpers import dash_open
 from ..helpers import iter_separated
 from ..helpers import iter_separated
 from ..helpers import eval_escapes
 from ..helpers import eval_escapes
+from ..helpers import classify_ec, max_ec
 from ..platform import is_win32, swidth
 from ..platform import is_win32, swidth
 
 
 from . import BaseTestCase, FakeInputs
 from . import BaseTestCase, FakeInputs
@@ -1172,3 +1173,57 @@ def test_iter_separated():
 def test_eval_escapes():
 def test_eval_escapes():
     assert eval_escapes('\\n\\0\\x23') == '\n\0#'
     assert eval_escapes('\\n\\0\\x23') == '\n\0#'
     assert eval_escapes('äç\\n') == 'äç\n'
     assert eval_escapes('äç\\n') == 'äç\n'
+
+
+@pytest.mark.parametrize('ec_range,ec_class', (
+    # inclusive range start, exclusive range end
+    ((0, 1), "success"),
+    ((1, 2), "warning"),
+    ((2, 3), "error"),
+    ((EXIT_ERROR_BASE, EXIT_WARNING_BASE), "error"),
+    ((EXIT_WARNING_BASE, EXIT_SIGNAL_BASE), "warning"),
+    ((EXIT_SIGNAL_BASE, 256), "signal"),
+))
+def test_classify_ec(ec_range, ec_class):
+    for ec in range(*ec_range):
+        assert classify_ec(ec) == ec_class
+
+
+def test_ec_invalid():
+    with pytest.raises(ValueError):
+        classify_ec(666)
+    with pytest.raises(ValueError):
+        classify_ec(-1)
+    with pytest.raises(TypeError):
+        classify_ec(None)
+
+
+@pytest.mark.parametrize('ec1,ec2,ec_max', (
+    # same for modern / legacy
+    (EXIT_SUCCESS, EXIT_SUCCESS, EXIT_SUCCESS),
+    (EXIT_SUCCESS, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
+    # legacy exit codes
+    (EXIT_SUCCESS, EXIT_WARNING, EXIT_WARNING),
+    (EXIT_SUCCESS, EXIT_ERROR, EXIT_ERROR),
+    (EXIT_WARNING, EXIT_SUCCESS, EXIT_WARNING),
+    (EXIT_WARNING, EXIT_WARNING, EXIT_WARNING),
+    (EXIT_WARNING, EXIT_ERROR, EXIT_ERROR),
+    (EXIT_WARNING, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
+    (EXIT_ERROR, EXIT_SUCCESS, EXIT_ERROR),
+    (EXIT_ERROR, EXIT_WARNING, EXIT_ERROR),
+    (EXIT_ERROR, EXIT_ERROR, EXIT_ERROR),
+    (EXIT_ERROR, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
+    # some modern codes
+    (EXIT_SUCCESS, EXIT_WARNING_BASE, EXIT_WARNING_BASE),
+    (EXIT_SUCCESS, EXIT_ERROR_BASE, EXIT_ERROR_BASE),
+    (EXIT_WARNING_BASE, EXIT_SUCCESS, EXIT_WARNING_BASE),
+    (EXIT_WARNING_BASE+1, EXIT_WARNING_BASE+2, EXIT_WARNING_BASE+1),
+    (EXIT_WARNING_BASE, EXIT_ERROR_BASE, EXIT_ERROR_BASE),
+    (EXIT_WARNING_BASE, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
+    (EXIT_ERROR_BASE, EXIT_SUCCESS, EXIT_ERROR_BASE),
+    (EXIT_ERROR_BASE, EXIT_WARNING_BASE, EXIT_ERROR_BASE),
+    (EXIT_ERROR_BASE+1, EXIT_ERROR_BASE+2, EXIT_ERROR_BASE+1),
+    (EXIT_ERROR_BASE, EXIT_SIGNAL_BASE, EXIT_SIGNAL_BASE),
+))
+def test_max_ec(ec1, ec2, ec_max):
+    assert max_ec(ec1, ec2) == ec_max
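
For readers of the parametrization above, the rc ranges come from borg.constants: 0 is success, the legacy codes 1/2 are warning/error, EXIT_ERROR_BASE up to EXIT_WARNING_BASE-1 are modern error rcs, EXIT_WARNING_BASE up to EXIT_SIGNAL_BASE-1 are modern warning rcs, and EXIT_SIGNAL_BASE..255 is the signal range. A short sketch restating that with classify_ec (assuming a checkout of this branch):

    from borg.constants import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
    from borg.constants import EXIT_ERROR_BASE, EXIT_WARNING_BASE, EXIT_SIGNAL_BASE
    from borg.helpers import classify_ec

    assert classify_ec(EXIT_SUCCESS) == "success"
    assert classify_ec(EXIT_WARNING) == "warning"       # legacy rc 1
    assert classify_ec(EXIT_ERROR) == "error"           # legacy rc 2
    assert classify_ec(EXIT_ERROR_BASE) == "error"      # first modern error rc
    assert classify_ec(EXIT_WARNING_BASE) == "warning"  # first modern warning rc
    assert classify_ec(EXIT_SIGNAL_BASE) == "signal"    # start of the signal range
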